formatting: blacken the codebase
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
changeset 43076 2372284d9457
parent 43075 57875cf423c9
child 43077 687b865b95ad
formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so
we don't un-wrap collection literals.

Done with:

  hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only

# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
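For illustration, a minimal sketch of the transformation applied throughout: an argument list that overflows the line limit is exploded one element per line with a trailing comma, while -S (--skip-string-normalization) leaves the existing single-quoted strings untouched. The before/after mirrors the first hunk below; the concrete values of pip and REQUIREMENTS_TXT are hypothetical stand-ins so the snippet runs on its own:

  import pathlib

  # Hypothetical inputs, not part of this changeset.
  pip = pathlib.Path('venv/bin/pip')
  REQUIREMENTS_TXT = pathlib.Path('requirements.txt')

  # Before: one over-long line.
  args = [str(pip), 'install', '-r', str(REQUIREMENTS_TXT), '--disable-pip-version-check']

  # After `black -S`: exploded with a trailing comma; quotes preserved
  # because -S skips string normalization.
  args = [
      str(pip),
      'install',
      '-r',
      str(REQUIREMENTS_TXT),
      '--disable-pip-version-check',
  ]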
contrib/automation/automation.py
contrib/automation/hgautomation/__init__.py
contrib/automation/hgautomation/aws.py
contrib/automation/hgautomation/cli.py
contrib/automation/hgautomation/linux.py
contrib/automation/hgautomation/pypi.py
contrib/automation/hgautomation/ssh.py
contrib/automation/hgautomation/windows.py
contrib/automation/hgautomation/winrm.py
contrib/bdiff-torture.py
contrib/benchmarks/__init__.py
contrib/benchmarks/perf.py
contrib/benchmarks/revset.py
contrib/byteify-strings.py
contrib/casesmash.py
contrib/catapipe.py
contrib/check-code.py
contrib/check-config.py
contrib/check-py3-compat.py
contrib/debugcmdserver.py
contrib/debugshell.py
contrib/dirstatenonnormalcheck.py
contrib/fuzz/dirstate_corpus.py
contrib/fuzz/fm1readmarkers_corpus.py
contrib/fuzz/manifest_corpus.py
contrib/fuzz/mpatch_corpus.py
contrib/fuzz/revlog_corpus.py
contrib/genosxversion.py
contrib/hgclient.py
contrib/memory.py
contrib/packaging/hgpackaging/downloads.py
contrib/packaging/hgpackaging/inno.py
contrib/packaging/hgpackaging/py2exe.py
contrib/packaging/hgpackaging/util.py
contrib/packaging/hgpackaging/wix.py
contrib/packaging/inno/build.py
contrib/packaging/wix/build.py
contrib/perf-utils/perf-revlog-write-plot.py
contrib/perf.py
contrib/python-hook-examples.py
contrib/python3-ratchet.py
contrib/revsetbenchmarks.py
contrib/showstack.py
contrib/synthrepo.py
contrib/testparseutil.py
contrib/win32/hgwebdir_wsgi.py
doc/check-seclevel.py
doc/gendoc.py
doc/hgmanpage.py
hgdemandimport/__init__.py
hgdemandimport/demandimportpy2.py
hgdemandimport/demandimportpy3.py
hgdemandimport/tracing.py
hgext/__init__.py
hgext/absorb.py
hgext/acl.py
hgext/amend.py
hgext/automv.py
hgext/beautifygraph.py
hgext/blackbox.py
hgext/bookflow.py
hgext/bugzilla.py
hgext/censor.py
hgext/children.py
hgext/churn.py
hgext/clonebundles.py
hgext/closehead.py
hgext/commitextras.py
hgext/convert/__init__.py
hgext/convert/bzr.py
hgext/convert/common.py
hgext/convert/convcmd.py
hgext/convert/cvs.py
hgext/convert/cvsps.py
hgext/convert/darcs.py
hgext/convert/filemap.py
hgext/convert/git.py
hgext/convert/gnuarch.py
hgext/convert/hg.py
hgext/convert/monotone.py
hgext/convert/p4.py
hgext/convert/subversion.py
hgext/convert/transport.py
hgext/eol.py
hgext/extdiff.py
hgext/factotum.py
hgext/fastannotate/__init__.py
hgext/fastannotate/commands.py
hgext/fastannotate/context.py
hgext/fastannotate/error.py
hgext/fastannotate/formatter.py
hgext/fastannotate/protocol.py
hgext/fastannotate/revmap.py
hgext/fastannotate/support.py
hgext/fetch.py
hgext/fix.py
hgext/fsmonitor/__init__.py
hgext/fsmonitor/state.py
hgext/fsmonitor/watchmanclient.py
hgext/githelp.py
hgext/gpg.py
hgext/graphlog.py
hgext/hgk.py
hgext/highlight/__init__.py
hgext/highlight/highlight.py
hgext/histedit.py
hgext/infinitepush/__init__.py
hgext/infinitepush/bundleparts.py
hgext/infinitepush/common.py
hgext/infinitepush/fileindexapi.py
hgext/infinitepush/indexapi.py
hgext/infinitepush/sqlindexapi.py
hgext/infinitepush/store.py
hgext/journal.py
hgext/keyword.py
hgext/largefiles/__init__.py
hgext/largefiles/basestore.py
hgext/largefiles/lfcommands.py
hgext/largefiles/lfutil.py
hgext/largefiles/localstore.py
hgext/largefiles/overrides.py
hgext/largefiles/proto.py
hgext/largefiles/remotestore.py
hgext/largefiles/reposetup.py
hgext/largefiles/storefactory.py
hgext/largefiles/wirestore.py
hgext/lfs/__init__.py
hgext/lfs/blobstore.py
hgext/lfs/pointer.py
hgext/lfs/wireprotolfsserver.py
hgext/lfs/wrapper.py
hgext/logtoprocess.py
hgext/mq.py
hgext/narrow/__init__.py
hgext/narrow/narrowbundle2.py
hgext/narrow/narrowcommands.py
hgext/narrow/narrowdirstate.py
hgext/narrow/narrowrepo.py
hgext/narrow/narrowtemplates.py
hgext/narrow/narrowwirepeer.py
hgext/notify.py
hgext/pager.py
hgext/patchbomb.py
hgext/phabricator.py
hgext/purge.py
hgext/rebase.py
hgext/record.py
hgext/releasenotes.py
hgext/relink.py
hgext/remotefilelog/__init__.py
hgext/remotefilelog/basepack.py
hgext/remotefilelog/basestore.py
hgext/remotefilelog/connectionpool.py
hgext/remotefilelog/constants.py
hgext/remotefilelog/contentstore.py
hgext/remotefilelog/datapack.py
hgext/remotefilelog/debugcommands.py
hgext/remotefilelog/fileserverclient.py
hgext/remotefilelog/historypack.py
hgext/remotefilelog/metadatastore.py
hgext/remotefilelog/remotefilectx.py
hgext/remotefilelog/remotefilelog.py
hgext/remotefilelog/remotefilelogserver.py
hgext/remotefilelog/repack.py
hgext/remotefilelog/shallowbundle.py
hgext/remotefilelog/shallowrepo.py
hgext/remotefilelog/shallowstore.py
hgext/remotefilelog/shallowutil.py
hgext/remotefilelog/shallowverifier.py
hgext/remotenames.py
hgext/schemes.py
hgext/share.py
hgext/show.py
hgext/sparse.py
hgext/split.py
hgext/sqlitestore.py
hgext/strip.py
hgext/transplant.py
hgext/uncommit.py
hgext/win32mbcs.py
hgext/win32text.py
hgext/zeroconf/Zeroconf.py
hgext/zeroconf/__init__.py
hgext3rd/__init__.py
i18n/check-translation.py
i18n/polib.py
mercurial/__init__.py
mercurial/ancestor.py
mercurial/archival.py
mercurial/bookmarks.py
mercurial/branchmap.py
mercurial/bundle2.py
mercurial/bundlerepo.py
mercurial/cffi/bdiff.py
mercurial/cffi/bdiffbuild.py
mercurial/cffi/mpatch.py
mercurial/cffi/mpatchbuild.py
mercurial/cffi/osutil.py
mercurial/cffi/osutilbuild.py
mercurial/changegroup.py
mercurial/changelog.py
mercurial/chgserver.py
mercurial/cmdutil.py
mercurial/color.py
mercurial/commands.py
mercurial/commandserver.py
mercurial/config.py
mercurial/configitems.py
mercurial/context.py
mercurial/copies.py
mercurial/crecord.py
mercurial/dagop.py
mercurial/dagparser.py
mercurial/debugcommands.py
mercurial/destutil.py
mercurial/diffutil.py
mercurial/dirstate.py
mercurial/discovery.py
mercurial/dispatch.py
mercurial/encoding.py
mercurial/error.py
mercurial/exchange.py
mercurial/exchangev2.py
mercurial/extensions.py
mercurial/exthelper.py
mercurial/fancyopts.py
mercurial/filelog.py
mercurial/filemerge.py
mercurial/fileset.py
mercurial/filesetlang.py
mercurial/formatter.py
mercurial/graphmod.py
mercurial/hbisect.py
mercurial/help.py
mercurial/hg.py
mercurial/hgweb/__init__.py
mercurial/hgweb/common.py
mercurial/hgweb/hgweb_mod.py
mercurial/hgweb/hgwebdir_mod.py
mercurial/hgweb/request.py
mercurial/hgweb/server.py
mercurial/hgweb/webcommands.py
mercurial/hgweb/webutil.py
mercurial/hgweb/wsgicgi.py
mercurial/hgweb/wsgiheaders.py
mercurial/hook.py
mercurial/httppeer.py
mercurial/i18n.py
mercurial/interfaces/dirstate.py
mercurial/interfaces/repository.py
mercurial/interfaces/util.py
mercurial/keepalive.py
mercurial/linelog.py
mercurial/localrepo.py
mercurial/lock.py
mercurial/logcmdutil.py
mercurial/logexchange.py
mercurial/loggingutil.py
mercurial/lsprof.py
mercurial/lsprofcalltree.py
mercurial/mail.py
mercurial/manifest.py
mercurial/match.py
mercurial/mdiff.py
mercurial/merge.py
mercurial/mergeutil.py
mercurial/minirst.py
mercurial/namespaces.py
mercurial/narrowspec.py
mercurial/obsolete.py
mercurial/obsutil.py
mercurial/parser.py
mercurial/patch.py
mercurial/pathutil.py
mercurial/phases.py
mercurial/posix.py
mercurial/profiling.py
mercurial/progress.py
mercurial/pure/base85.py
mercurial/pure/bdiff.py
mercurial/pure/charencode.py
mercurial/pure/mpatch.py
mercurial/pure/osutil.py
mercurial/pure/parsers.py
mercurial/pvec.py
mercurial/pycompat.py
mercurial/registrar.py
mercurial/repair.py
mercurial/repocache.py
mercurial/repoview.py
mercurial/revlog.py
mercurial/revlogutils/constants.py
mercurial/revlogutils/deltas.py
mercurial/revlogutils/flagutil.py
mercurial/revlogutils/sidedata.py
mercurial/revset.py
mercurial/revsetlang.py
mercurial/scmutil.py
mercurial/server.py
mercurial/setdiscovery.py
mercurial/shelve.py
mercurial/similar.py
mercurial/simplemerge.py
mercurial/smartset.py
mercurial/sparse.py
mercurial/sshpeer.py
mercurial/sslutil.py
mercurial/state.py
mercurial/statichttprepo.py
mercurial/statprof.py
mercurial/store.py
mercurial/streamclone.py
mercurial/subrepo.py
mercurial/subrepoutil.py
mercurial/tagmerge.py
mercurial/tags.py
mercurial/templatefilters.py
mercurial/templatefuncs.py
mercurial/templatekw.py
mercurial/templater.py
mercurial/templateutil.py
mercurial/testing/storage.py
mercurial/transaction.py
mercurial/treediscovery.py
mercurial/txnutil.py
mercurial/ui.py
mercurial/unionrepo.py
mercurial/upgrade.py
mercurial/url.py
mercurial/urllibcompat.py
mercurial/util.py
mercurial/utils/cborutil.py
mercurial/utils/compression.py
mercurial/utils/dateutil.py
mercurial/utils/procutil.py
mercurial/utils/repoviewutil.py
mercurial/utils/storageutil.py
mercurial/utils/stringutil.py
mercurial/verify.py
mercurial/vfs.py
mercurial/win32.py
mercurial/windows.py
mercurial/wireprotoframing.py
mercurial/wireprotoserver.py
mercurial/wireprototypes.py
mercurial/wireprotov1peer.py
mercurial/wireprotov1server.py
mercurial/wireprotov2peer.py
mercurial/wireprotov2server.py
mercurial/worker.py
setup.py
tests/artifacts/scripts/generate-churning-bundle.py
tests/autodiff.py
tests/badserverext.py
tests/basic_test_result.py
tests/blackbox-readonly-dispatch.py
tests/bruterebase.py
tests/check-perf-code.py
tests/common-pattern.py
tests/crashgetbundler.py
tests/drawdag.py
tests/dumbhttp.py
tests/dummysmtpd.py
tests/failfilemerge.py
tests/fakedirstatewritetime.py
tests/fakemergerecord.py
tests/fakepatchtime.py
tests/filterpyflakes.py
tests/flagprocessorext.py
tests/fsmonitor-run-tests.py
tests/generate-working-copy-states.py
tests/get-with-headers.py
tests/heredoctest.py
tests/hghave.py
tests/hgweberror.py
tests/httpserverauth.py
tests/hypothesishelpers.py
tests/killdaemons.py
tests/list-tree.py
tests/lockdelay.py
tests/logexceptions.py
tests/ls-l.py
tests/md5sum.py
tests/mockblackbox.py
tests/mockmakedate.py
tests/mocktime.py
tests/printenv.py
tests/printrevset.py
tests/pullext.py
tests/remotefilelog-getflogheads.py
tests/revlog-formatv0.py
tests/revnamesext.py
tests/run-tests.py
tests/seq.py
tests/silenttestrunner.py
tests/simplestorerepo.py
tests/sitecustomize.py
tests/sshprotoext.py
tests/svnurlof.py
tests/svnxml.py
tests/test-absorb-filefixupstate.py
tests/test-ancestor.py
tests/test-annotate.py
tests/test-atomictempfile.py
tests/test-batching.py
tests/test-bdiff.py
tests/test-cappedreader.py
tests/test-cbor.py
tests/test-check-interfaces.py
tests/test-config-env.py
tests/test-context.py
tests/test-demandimport.py
tests/test-dispatch.py
tests/test-doctest.py
tests/test-duplicateoptions.py
tests/test-encoding-func.py
tests/test-extensions-wrapfunction.py
tests/test-fastannotate-revmap.py
tests/test-filecache.py
tests/test-filelog.py
tests/test-flagprocessor.t
tests/test-hg-parseurl.py
tests/test-hgweb-auth.py
tests/test-hgwebdir-paths.py
tests/test-hook.t
tests/test-hybridencode.py
tests/test-lfs-pointer.py
tests/test-lfs-serve-access.t
tests/test-linelog.py
tests/test-linerange.py
tests/test-lock.py
tests/test-lrucachedict.py
tests/test-manifest.py
tests/test-match.py
tests/test-mdiff.py
tests/test-minifileset.py
tests/test-minirst.py
tests/test-parseindex2.py
tests/test-pathencode.py
tests/test-propertycache.py
tests/test-remotefilelog-datapack.py
tests/test-remotefilelog-histpack.py
tests/test-revlog-ancestry.py
tests/test-revlog-raw.py
tests/test-run-tests.py
tests/test-rust-ancestor.py
tests/test-rust-discovery.py
tests/test-simplekeyvaluefile.py
tests/test-simplemerge.py
tests/test-sshserver.py
tests/test-status-inprocess.py
tests/test-storage.py
tests/test-symlink-os-yes-fs-no.py
tests/test-trusted.py
tests/test-ui-color.py
tests/test-ui-config.py
tests/test-ui-verbosity.py
tests/test-url.py
tests/test-util.py
tests/test-verify-repo-operations.py
tests/test-walkrepo.py
tests/test-wireproto-clientreactor.py
tests/test-wireproto-framing.py
tests/test-wireproto-serverreactor.py
tests/test-wireproto.py
tests/test-wsgirequest.py
tests/testlib/ext-phase-report.py
tests/testlib/ext-sidedata.py
tests/tinyproxy.py
tests/wireprotosimplecache.py
--- a/contrib/automation/automation.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/automation.py	Sun Oct 06 09:45:02 2019 -0400
@@ -36,8 +36,13 @@
         pip = venv_bin / 'pip'
         python = venv_bin / 'python'
 
-    args = [str(pip), 'install', '-r', str(REQUIREMENTS_TXT),
-            '--disable-pip-version-check']
+    args = [
+        str(pip),
+        'install',
+        '-r',
+        str(REQUIREMENTS_TXT),
+        '--disable-pip-version-check',
+    ]
 
     if not venv_created:
         args.append('-q')
@@ -45,8 +50,7 @@
     subprocess.run(args, check=True)
 
     os.environ['HGAUTOMATION_BOOTSTRAPPED'] = '1'
-    os.environ['PATH'] = '%s%s%s' % (
-        venv_bin, os.pathsep, os.environ['PATH'])
+    os.environ['PATH'] = '%s%s%s' % (venv_bin, os.pathsep, os.environ['PATH'])
 
     subprocess.run([str(python), __file__] + sys.argv[1:], check=True)
 
--- a/contrib/automation/hgautomation/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,9 +10,7 @@
 import pathlib
 import secrets
 
-from .aws import (
-    AWSConnection,
-)
+from .aws import AWSConnection
 
 
 class HGAutomation:
@@ -53,7 +51,7 @@
 
         return password
 
-    def aws_connection(self, region: str, ensure_ec2_state: bool=True):
+    def aws_connection(self, region: str, ensure_ec2_state: bool = True):
         """Obtain an AWSConnection instance bound to a specific region."""
 
         return AWSConnection(self, region, ensure_ec2_state=ensure_ec2_state)
--- a/contrib/automation/hgautomation/aws.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/aws.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,9 +19,7 @@
 import boto3
 import botocore.exceptions
 
-from .linux import (
-    BOOTSTRAP_DEBIAN,
-)
+from .linux import BOOTSTRAP_DEBIAN
 from .ssh import (
     exec_command as ssh_exec_command,
     wait_for_ssh,
@@ -32,10 +30,13 @@
 )
 
 
-SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
+SOURCE_ROOT = pathlib.Path(
+    os.path.abspath(__file__)
+).parent.parent.parent.parent
 
-INSTALL_WINDOWS_DEPENDENCIES = (SOURCE_ROOT / 'contrib' /
-                                'install-windows-dependencies.ps1')
+INSTALL_WINDOWS_DEPENDENCIES = (
+    SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1'
+)
 
 
 INSTANCE_TYPES_WITH_STORAGE = {
@@ -107,7 +108,6 @@
                         'Description': 'RDP from entire Internet',
                     },
                 ],
-
             },
             {
                 'FromPort': 5985,
@@ -119,7 +119,7 @@
                         'Description': 'PowerShell Remoting (Windows Remote Management)',
                     },
                 ],
-            }
+            },
         ],
     },
 }
@@ -152,11 +152,7 @@
 
 
 IAM_INSTANCE_PROFILES = {
-    'ephemeral-ec2-1': {
-        'roles': [
-            'ephemeral-ec2-role-1',
-        ],
-    }
+    'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
 }
 
 
@@ -226,7 +222,7 @@
 class AWSConnection:
     """Manages the state of a connection with AWS."""
 
-    def __init__(self, automation, region: str, ensure_ec2_state: bool=True):
+    def __init__(self, automation, region: str, ensure_ec2_state: bool = True):
         self.automation = automation
         self.local_state_path = automation.state_path
 
@@ -257,10 +253,19 @@
 
     # TODO use rsa package.
     res = subprocess.run(
-        ['openssl', 'pkcs8', '-in', str(p), '-nocrypt', '-topk8',
-         '-outform', 'DER'],
+        [
+            'openssl',
+            'pkcs8',
+            '-in',
+            str(p),
+            '-nocrypt',
+            '-topk8',
+            '-outform',
+            'DER',
+        ],
         capture_output=True,
-        check=True)
+        check=True,
+    )
 
     sha1 = hashlib.sha1(res.stdout).hexdigest()
     return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
@@ -271,7 +276,7 @@
 
     for kpi in ec2resource.key_pairs.all():
         if kpi.name.startswith(prefix):
-            remote_existing[kpi.name[len(prefix):]] = kpi.key_fingerprint
+            remote_existing[kpi.name[len(prefix) :]] = kpi.key_fingerprint
 
     # Validate that we have these keys locally.
     key_path = state_path / 'keys'
@@ -297,7 +302,7 @@
         if not f.startswith('keypair-') or not f.endswith('.pub'):
             continue
 
-        name = f[len('keypair-'):-len('.pub')]
+        name = f[len('keypair-') : -len('.pub')]
 
         pub_full = key_path / f
         priv_full = key_path / ('keypair-%s' % name)
@@ -306,8 +311,9 @@
             data = fh.read()
 
         if not data.startswith('ssh-rsa '):
-            print('unexpected format for key pair file: %s; removing' %
-                  pub_full)
+            print(
+                'unexpected format for key pair file: %s; removing' % pub_full
+            )
             pub_full.unlink()
             priv_full.unlink()
             continue
@@ -327,8 +333,10 @@
             del local_existing[name]
 
         elif remote_existing[name] != local_existing[name]:
-            print('key fingerprint mismatch for %s; '
-                  'removing from local and remote' % name)
+            print(
+                'key fingerprint mismatch for %s; '
+                'removing from local and remote' % name
+            )
             remove_local(name)
             remove_remote('%s%s' % (prefix, name))
             del local_existing[name]
@@ -356,15 +364,18 @@
             subprocess.run(
                 ['ssh-keygen', '-y', '-f', str(priv_full)],
                 stdout=fh,
-                check=True)
+                check=True,
+            )
 
         pub_full.chmod(0o0600)
 
 
 def delete_instance_profile(profile):
     for role in profile.roles:
-        print('removing role %s from instance profile %s' % (role.name,
-                                                             profile.name))
+        print(
+            'removing role %s from instance profile %s'
+            % (role.name, profile.name)
+        )
         profile.remove_role(RoleName=role.name)
 
     print('deleting instance profile %s' % profile.name)
@@ -378,7 +389,7 @@
 
     for profile in iamresource.instance_profiles.all():
         if profile.name.startswith(prefix):
-            remote_profiles[profile.name[len(prefix):]] = profile
+            remote_profiles[profile.name[len(prefix) :]] = profile
 
     for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
         delete_instance_profile(remote_profiles[name])
@@ -388,7 +399,7 @@
 
     for role in iamresource.roles.all():
         if role.name.startswith(prefix):
-            remote_roles[role.name[len(prefix):]] = role
+            remote_roles[role.name[len(prefix) :]] = role
 
     for name in sorted(set(remote_roles) - set(IAM_ROLES)):
         role = remote_roles[name]
@@ -404,7 +415,8 @@
         print('creating IAM instance profile %s' % actual)
 
         profile = iamresource.create_instance_profile(
-            InstanceProfileName=actual)
+            InstanceProfileName=actual
+        )
         remote_profiles[name] = profile
 
         waiter = iamclient.get_waiter('instance_profile_exists')
@@ -453,23 +465,12 @@
 
     images = ec2resource.images.filter(
         Filters=[
-            {
-                'Name': 'owner-id',
-                'Values': [owner_id],
-            },
-            {
-                'Name': 'state',
-                'Values': ['available'],
-            },
-            {
-                'Name': 'image-type',
-                'Values': ['machine'],
-            },
-            {
-                'Name': 'name',
-                'Values': [name],
-            },
-        ])
+            {'Name': 'owner-id', 'Values': [owner_id],},
+            {'Name': 'state', 'Values': ['available'],},
+            {'Name': 'image-type', 'Values': ['machine'],},
+            {'Name': 'name', 'Values': [name],},
+        ]
+    )
 
     for image in images:
         return image
@@ -487,7 +488,7 @@
 
     for group in ec2resource.security_groups.all():
         if group.group_name.startswith(prefix):
-            existing[group.group_name[len(prefix):]] = group
+            existing[group.group_name[len(prefix) :]] = group
 
     purge = set(existing) - set(SECURITY_GROUPS)
 
@@ -507,13 +508,10 @@
         print('adding security group %s' % actual)
 
         group_res = ec2resource.create_security_group(
-            Description=group['description'],
-            GroupName=actual,
+            Description=group['description'], GroupName=actual,
         )
 
-        group_res.authorize_ingress(
-            IpPermissions=group['ingress'],
-        )
+        group_res.authorize_ingress(IpPermissions=group['ingress'],)
 
         security_groups[name] = group_res
 
@@ -577,8 +575,10 @@
                 instance.reload()
                 continue
 
-            print('public IP address for %s: %s' % (
-                instance.id, instance.public_ip_address))
+            print(
+                'public IP address for %s: %s'
+                % (instance.id, instance.public_ip_address)
+            )
             break
 
 
@@ -603,10 +603,7 @@
     while True:
         res = ssmclient.describe_instance_information(
             Filters=[
-                {
-                    'Key': 'InstanceIds',
-                    'Values': [i.id for i in instances],
-                },
+                {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
             ],
         )
 
@@ -628,9 +625,7 @@
         InstanceIds=[i.id for i in instances],
         DocumentName=document_name,
         Parameters=parameters,
-        CloudWatchOutputConfig={
-            'CloudWatchOutputEnabled': True,
-        },
+        CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
     )
 
     command_id = res['Command']['CommandId']
@@ -639,8 +634,7 @@
         while True:
             try:
                 res = ssmclient.get_command_invocation(
-                    CommandId=command_id,
-                    InstanceId=instance.id,
+                    CommandId=command_id, InstanceId=instance.id,
                 )
             except botocore.exceptions.ClientError as e:
                 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -655,8 +649,9 @@
             elif res['Status'] in ('Pending', 'InProgress', 'Delayed'):
                 time.sleep(2)
             else:
-                raise Exception('command failed on %s: %s' % (
-                    instance.id, res['Status']))
+                raise Exception(
+                    'command failed on %s: %s' % (instance.id, res['Status'])
+                )
 
 
 @contextlib.contextmanager
@@ -711,10 +706,12 @@
     config['IamInstanceProfile'] = {
         'Name': 'hg-ephemeral-ec2-1',
     }
-    config.setdefault('TagSpecifications', []).append({
-        'ResourceType': 'instance',
-        'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
-    })
+    config.setdefault('TagSpecifications', []).append(
+        {
+            'ResourceType': 'instance',
+            'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}],
+        }
+    )
     config['UserData'] = WINDOWS_USER_DATA % password
 
     with temporary_ec2_instances(c.ec2resource, config) as instances:
@@ -723,7 +720,9 @@
         print('waiting for Windows Remote Management service...')
 
         for instance in instances:
-            client = wait_for_winrm(instance.public_ip_address, 'Administrator', password)
+            client = wait_for_winrm(
+                instance.public_ip_address, 'Administrator', password
+            )
             print('established WinRM connection to %s' % instance.id)
             instance.winrm_client = client
 
@@ -748,14 +747,17 @@
     # Store a reference to a good image so it can be returned one the
     # image state is reconciled.
     images = ec2resource.images.filter(
-        Filters=[{'Name': 'name', 'Values': [name]}])
+        Filters=[{'Name': 'name', 'Values': [name]}]
+    )
 
     existing_image = None
 
     for image in images:
         if image.tags is None:
-            print('image %s for %s lacks required tags; removing' % (
-                image.id, image.name))
+            print(
+                'image %s for %s lacks required tags; removing'
+                % (image.id, image.name)
+            )
             remove_ami(ec2resource, image)
         else:
             tags = {t['Key']: t['Value'] for t in image.tags}
@@ -763,15 +765,18 @@
             if tags.get('HGIMAGEFINGERPRINT') == fingerprint:
                 existing_image = image
             else:
-                print('image %s for %s has wrong fingerprint; removing' % (
-                      image.id, image.name))
+                print(
+                    'image %s for %s has wrong fingerprint; removing'
+                    % (image.id, image.name)
+                )
                 remove_ami(ec2resource, image)
 
     return existing_image
 
 
-def create_ami_from_instance(ec2client, instance, name, description,
-                             fingerprint):
+def create_ami_from_instance(
+    ec2client, instance, name, description, fingerprint
+):
     """Create an AMI from a running instance.
 
     Returns the ``ec2resource.Image`` representing the created AMI.
@@ -779,29 +784,19 @@
     instance.stop()
 
     ec2client.get_waiter('instance_stopped').wait(
-        InstanceIds=[instance.id],
-        WaiterConfig={
-            'Delay': 5,
-        })
+        InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+    )
     print('%s is stopped' % instance.id)
 
-    image = instance.create_image(
-        Name=name,
-        Description=description,
-    )
+    image = instance.create_image(Name=name, Description=description,)
 
-    image.create_tags(Tags=[
-        {
-            'Key': 'HGIMAGEFINGERPRINT',
-            'Value': fingerprint,
-        },
-    ])
+    image.create_tags(
+        Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
+    )
 
     print('waiting for image %s' % image.id)
 
-    ec2client.get_waiter('image_available').wait(
-        ImageIds=[image.id],
-    )
+    ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
 
     print('image %s available as %s' % (image.id, image.name))
 
@@ -827,9 +822,7 @@
         ssh_username = 'admin'
     elif distro == 'debian10':
         image = find_image(
-            ec2resource,
-            DEBIAN_ACCOUNT_ID_2,
-            'debian-10-amd64-20190909-10',
+            ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
         )
         ssh_username = 'admin'
     elif distro == 'ubuntu18.04':
@@ -871,10 +864,12 @@
         'SecurityGroupIds': [c.security_groups['linux-dev-1'].id],
     }
 
-    requirements2_path = (pathlib.Path(__file__).parent.parent /
-                          'linux-requirements-py2.txt')
-    requirements3_path = (pathlib.Path(__file__).parent.parent /
-                          'linux-requirements-py3.txt')
+    requirements2_path = (
+        pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt'
+    )
+    requirements3_path = (
+        pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt'
+    )
     with requirements2_path.open('r', encoding='utf-8') as fh:
         requirements2 = fh.read()
     with requirements3_path.open('r', encoding='utf-8') as fh:
@@ -882,12 +877,14 @@
 
     # Compute a deterministic fingerprint to determine whether image needs to
     # be regenerated.
-    fingerprint = resolve_fingerprint({
-        'instance_config': config,
-        'bootstrap_script': BOOTSTRAP_DEBIAN,
-        'requirements_py2': requirements2,
-        'requirements_py3': requirements3,
-    })
+    fingerprint = resolve_fingerprint(
+        {
+            'instance_config': config,
+            'bootstrap_script': BOOTSTRAP_DEBIAN,
+            'requirements_py2': requirements2,
+            'requirements_py3': requirements3,
+        }
+    )
 
     existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
 
@@ -902,9 +899,11 @@
         instance = instances[0]
 
         client = wait_for_ssh(
-            instance.public_ip_address, 22,
+            instance.public_ip_address,
+            22,
             username=ssh_username,
-            key_filename=str(c.key_pair_path_private('automation')))
+            key_filename=str(c.key_pair_path_private('automation')),
+        )
 
         home = '/home/%s' % ssh_username
 
@@ -926,8 +925,9 @@
                 fh.chmod(0o0700)
 
             print('executing bootstrap')
-            chan, stdin, stdout = ssh_exec_command(client,
-                                                   '%s/bootstrap' % home)
+            chan, stdin, stdout = ssh_exec_command(
+                client, '%s/bootstrap' % home
+            )
             stdin.close()
 
             for line in stdout:
@@ -937,17 +937,28 @@
             if res:
                 raise Exception('non-0 exit from bootstrap: %d' % res)
 
-            print('bootstrap completed; stopping %s to create %s' % (
-                  instance.id, name))
+            print(
+                'bootstrap completed; stopping %s to create %s'
+                % (instance.id, name)
+            )
 
-        return create_ami_from_instance(ec2client, instance, name,
-                                        'Mercurial Linux development environment',
-                                        fingerprint)
+        return create_ami_from_instance(
+            ec2client,
+            instance,
+            name,
+            'Mercurial Linux development environment',
+            fingerprint,
+        )
 
 
 @contextlib.contextmanager
-def temporary_linux_dev_instances(c: AWSConnection, image, instance_type,
-                                  prefix='hg-', ensure_extra_volume=False):
+def temporary_linux_dev_instances(
+    c: AWSConnection,
+    image,
+    instance_type,
+    prefix='hg-',
+    ensure_extra_volume=False,
+):
     """Create temporary Linux development EC2 instances.
 
     Context manager resolves to a list of ``ec2.Instance`` that were created
@@ -979,8 +990,9 @@
 
     # This is not an exhaustive list of instance types having instance storage.
     # But
-    if (ensure_extra_volume
-        and not instance_type.startswith(tuple(INSTANCE_TYPES_WITH_STORAGE))):
+    if ensure_extra_volume and not instance_type.startswith(
+        tuple(INSTANCE_TYPES_WITH_STORAGE)
+    ):
         main_device = block_device_mappings[0]['DeviceName']
 
         if main_device == 'xvda':
@@ -988,17 +1000,20 @@
         elif main_device == '/dev/sda1':
             second_device = '/dev/sdb'
         else:
-            raise ValueError('unhandled primary EBS device name: %s' %
-                             main_device)
+            raise ValueError(
+                'unhandled primary EBS device name: %s' % main_device
+            )
 
-        block_device_mappings.append({
-            'DeviceName': second_device,
-            'Ebs': {
-                'DeleteOnTermination': True,
-                'VolumeSize': 8,
-                'VolumeType': 'gp2',
+        block_device_mappings.append(
+            {
+                'DeviceName': second_device,
+                'Ebs': {
+                    'DeleteOnTermination': True,
+                    'VolumeSize': 8,
+                    'VolumeType': 'gp2',
+                },
             }
-        })
+        )
 
     config = {
         'BlockDeviceMappings': block_device_mappings,
@@ -1019,9 +1034,11 @@
 
         for instance in instances:
             client = wait_for_ssh(
-                instance.public_ip_address, 22,
+                instance.public_ip_address,
+                22,
                 username='hg',
-                key_filename=ssh_private_key_path)
+                key_filename=ssh_private_key_path,
+            )
 
             instance.ssh_client = client
             instance.ssh_private_key_path = ssh_private_key_path
@@ -1033,8 +1050,9 @@
                 instance.ssh_client.close()
 
 
-def ensure_windows_dev_ami(c: AWSConnection, prefix='hg-',
-                           base_image_name=WINDOWS_BASE_IMAGE_NAME):
+def ensure_windows_dev_ami(
+    c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME
+):
     """Ensure Windows Development AMI is available and up-to-date.
 
     If necessary, a modern AMI will be built by starting a temporary EC2
@@ -1100,13 +1118,15 @@
 
     # Compute a deterministic fingerprint to determine whether image needs
     # to be regenerated.
-    fingerprint = resolve_fingerprint({
-        'instance_config': config,
-        'user_data': WINDOWS_USER_DATA,
-        'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
-        'bootstrap_commands': commands,
-        'base_image_name': base_image_name,
-    })
+    fingerprint = resolve_fingerprint(
+        {
+            'instance_config': config,
+            'user_data': WINDOWS_USER_DATA,
+            'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL,
+            'bootstrap_commands': commands,
+            'base_image_name': base_image_name,
+        }
+    )
 
     existing_image = find_and_reconcile_image(ec2resource, name, fingerprint)
 
@@ -1131,9 +1151,7 @@
             ssmclient,
             [instance],
             'AWS-RunPowerShellScript',
-            {
-                'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
-            },
+            {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
         )
 
         # Reboot so all updates are fully applied.
@@ -1145,10 +1163,8 @@
         print('rebooting instance %s' % instance.id)
         instance.stop()
         ec2client.get_waiter('instance_stopped').wait(
-            InstanceIds=[instance.id],
-            WaiterConfig={
-                'Delay': 5,
-            })
+            InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+        )
 
         instance.start()
         wait_for_ip_addresses([instance])
@@ -1159,8 +1175,11 @@
         # TODO figure out a workaround.
 
         print('waiting for Windows Remote Management to come back...')
-        client = wait_for_winrm(instance.public_ip_address, 'Administrator',
-                                c.automation.default_password())
+        client = wait_for_winrm(
+            instance.public_ip_address,
+            'Administrator',
+            c.automation.default_password(),
+        )
         print('established WinRM connection to %s' % instance.id)
         instance.winrm_client = client
 
@@ -1168,14 +1187,23 @@
         run_powershell(instance.winrm_client, '\n'.join(commands))
 
         print('bootstrap completed; stopping %s to create image' % instance.id)
-        return create_ami_from_instance(ec2client, instance, name,
-                                        'Mercurial Windows development environment',
-                                        fingerprint)
+        return create_ami_from_instance(
+            ec2client,
+            instance,
+            name,
+            'Mercurial Windows development environment',
+            fingerprint,
+        )
 
 
 @contextlib.contextmanager
-def temporary_windows_dev_instances(c: AWSConnection, image, instance_type,
-                                    prefix='hg-', disable_antivirus=False):
+def temporary_windows_dev_instances(
+    c: AWSConnection,
+    image,
+    instance_type,
+    prefix='hg-',
+    disable_antivirus=False,
+):
     """Create a temporary Windows development EC2 instance.
 
     Context manager resolves to the list of ``EC2.Instance`` that were created.
@@ -1205,6 +1233,7 @@
             for instance in instances:
                 run_powershell(
                     instance.winrm_client,
-                    'Set-MpPreference -DisableRealtimeMonitoring $true')
+                    'Set-MpPreference -DisableRealtimeMonitoring $true',
+                )
 
         yield instances
--- a/contrib/automation/hgautomation/cli.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/cli.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,12 +22,15 @@
 )
 
 
-SOURCE_ROOT = pathlib.Path(os.path.abspath(__file__)).parent.parent.parent.parent
+SOURCE_ROOT = pathlib.Path(
+    os.path.abspath(__file__)
+).parent.parent.parent.parent
 DIST_PATH = SOURCE_ROOT / 'dist'
 
 
-def bootstrap_linux_dev(hga: HGAutomation, aws_region, distros=None,
-                        parallel=False):
+def bootstrap_linux_dev(
+    hga: HGAutomation, aws_region, distros=None, parallel=False
+):
     c = hga.aws_connection(aws_region)
 
     if distros:
@@ -59,8 +62,9 @@
     print('Windows development AMI available as %s' % image.id)
 
 
-def build_inno(hga: HGAutomation, aws_region, arch, revision, version,
-               base_image_name):
+def build_inno(
+    hga: HGAutomation, aws_region, arch, revision, version, base_image_name
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
     DIST_PATH.mkdir(exist_ok=True)
@@ -71,13 +75,14 @@
         windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
         for a in arch:
-            windows.build_inno_installer(instance.winrm_client, a,
-                                         DIST_PATH,
-                                         version=version)
+            windows.build_inno_installer(
+                instance.winrm_client, a, DIST_PATH, version=version
+            )
 
 
-def build_wix(hga: HGAutomation, aws_region, arch, revision, version,
-              base_image_name):
+def build_wix(
+    hga: HGAutomation, aws_region, arch, revision, version, base_image_name
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
     DIST_PATH.mkdir(exist_ok=True)
@@ -88,12 +93,14 @@
         windows.synchronize_hg(SOURCE_ROOT, revision, instance)
 
         for a in arch:
-            windows.build_wix_installer(instance.winrm_client, a,
-                                        DIST_PATH, version=version)
+            windows.build_wix_installer(
+                instance.winrm_client, a, DIST_PATH, version=version
+            )
 
 
-def build_windows_wheel(hga: HGAutomation, aws_region, arch, revision,
-                        base_image_name):
+def build_windows_wheel(
+    hga: HGAutomation, aws_region, arch, revision, base_image_name
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
     DIST_PATH.mkdir(exist_ok=True)
@@ -107,8 +114,9 @@
             windows.build_wheel(instance.winrm_client, a, DIST_PATH)
 
 
-def build_all_windows_packages(hga: HGAutomation, aws_region, revision,
-                               version, base_image_name):
+def build_all_windows_packages(
+    hga: HGAutomation, aws_region, revision, version, base_image_name
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
     DIST_PATH.mkdir(exist_ok=True)
@@ -124,11 +132,13 @@
             windows.purge_hg(winrm_client)
             windows.build_wheel(winrm_client, arch, DIST_PATH)
             windows.purge_hg(winrm_client)
-            windows.build_inno_installer(winrm_client, arch, DIST_PATH,
-                                         version=version)
+            windows.build_inno_installer(
+                winrm_client, arch, DIST_PATH, version=version
+            )
             windows.purge_hg(winrm_client)
-            windows.build_wix_installer(winrm_client, arch, DIST_PATH,
-                                        version=version)
+            windows.build_wix_installer(
+                winrm_client, arch, DIST_PATH, version=version
+            )
 
 
 def terminate_ec2_instances(hga: HGAutomation, aws_region):
@@ -141,8 +151,15 @@
     aws.remove_resources(c)
 
 
-def run_tests_linux(hga: HGAutomation, aws_region, instance_type,
-                    python_version, test_flags, distro, filesystem):
+def run_tests_linux(
+    hga: HGAutomation,
+    aws_region,
+    instance_type,
+    python_version,
+    test_flags,
+    distro,
+    filesystem,
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_linux_dev_ami(c, distro=distro)
 
@@ -151,17 +168,17 @@
     ensure_extra_volume = filesystem not in ('default', 'tmpfs')
 
     with aws.temporary_linux_dev_instances(
-        c, image, instance_type,
-        ensure_extra_volume=ensure_extra_volume) as insts:
+        c, image, instance_type, ensure_extra_volume=ensure_extra_volume
+    ) as insts:
 
         instance = insts[0]
 
-        linux.prepare_exec_environment(instance.ssh_client,
-                                       filesystem=filesystem)
+        linux.prepare_exec_environment(
+            instance.ssh_client, filesystem=filesystem
+        )
         linux.synchronize_hg(SOURCE_ROOT, instance, '.')
         t_prepared = time.time()
-        linux.run_tests(instance.ssh_client, python_version,
-                        test_flags)
+        linux.run_tests(instance.ssh_client, python_version, test_flags)
         t_done = time.time()
 
     t_setup = t_prepared - t_start
@@ -169,29 +186,48 @@
 
     print(
         'total time: %.1fs; setup: %.1fs; tests: %.1fs; setup overhead: %.1f%%'
-        % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0))
+        % (t_all, t_setup, t_done - t_prepared, t_setup / t_all * 100.0)
+    )
 
 
-def run_tests_windows(hga: HGAutomation, aws_region, instance_type,
-                      python_version, arch, test_flags, base_image_name):
+def run_tests_windows(
+    hga: HGAutomation,
+    aws_region,
+    instance_type,
+    python_version,
+    arch,
+    test_flags,
+    base_image_name,
+):
     c = hga.aws_connection(aws_region)
     image = aws.ensure_windows_dev_ami(c, base_image_name=base_image_name)
 
-    with aws.temporary_windows_dev_instances(c, image, instance_type,
-                                             disable_antivirus=True) as insts:
+    with aws.temporary_windows_dev_instances(
+        c, image, instance_type, disable_antivirus=True
+    ) as insts:
         instance = insts[0]
 
         windows.synchronize_hg(SOURCE_ROOT, '.', instance)
-        windows.run_tests(instance.winrm_client, python_version, arch,
-                          test_flags)
+        windows.run_tests(
+            instance.winrm_client, python_version, arch, test_flags
+        )
 
 
-def publish_windows_artifacts(hg: HGAutomation, aws_region, version: str,
-                              pypi: bool, mercurial_scm_org: bool,
-                              ssh_username: str):
-    windows.publish_artifacts(DIST_PATH, version,
-                              pypi=pypi, mercurial_scm_org=mercurial_scm_org,
-                              ssh_username=ssh_username)
+def publish_windows_artifacts(
+    hg: HGAutomation,
+    aws_region,
+    version: str,
+    pypi: bool,
+    mercurial_scm_org: bool,
+    ssh_username: str,
+):
+    windows.publish_artifacts(
+        DIST_PATH,
+        version,
+        pypi=pypi,
+        mercurial_scm_org=mercurial_scm_org,
+        ssh_username=ssh_username,
+    )
 
 
 def run_try(hga: HGAutomation, aws_region: str, rev: str):
@@ -208,25 +244,21 @@
         help='Path for local state files',
     )
     parser.add_argument(
-        '--aws-region',
-        help='AWS region to use',
-        default='us-west-2',
+        '--aws-region', help='AWS region to use', default='us-west-2',
     )
 
     subparsers = parser.add_subparsers()
 
     sp = subparsers.add_parser(
-        'bootstrap-linux-dev',
-        help='Bootstrap Linux development environments',
+        'bootstrap-linux-dev', help='Bootstrap Linux development environments',
     )
     sp.add_argument(
-        '--distros',
-        help='Comma delimited list of distros to bootstrap',
+        '--distros', help='Comma delimited list of distros to bootstrap',
     )
     sp.add_argument(
         '--parallel',
         action='store_true',
-        help='Generate AMIs in parallel (not CTRL-c safe)'
+        help='Generate AMIs in parallel (not CTRL-c safe)',
     )
     sp.set_defaults(func=bootstrap_linux_dev)
 
@@ -242,17 +274,13 @@
     sp.set_defaults(func=bootstrap_windows_dev)
 
     sp = subparsers.add_parser(
-        'build-all-windows-packages',
-        help='Build all Windows packages',
+        'build-all-windows-packages', help='Build all Windows packages',
     )
     sp.add_argument(
-        '--revision',
-        help='Mercurial revision to build',
-        default='.',
+        '--revision', help='Mercurial revision to build', default='.',
     )
     sp.add_argument(
-        '--version',
-        help='Mercurial version string to use',
+        '--version', help='Mercurial version string to use',
     )
     sp.add_argument(
         '--base-image-name',
@@ -262,8 +290,7 @@
     sp.set_defaults(func=build_all_windows_packages)
 
     sp = subparsers.add_parser(
-        'build-inno',
-        help='Build Inno Setup installer(s)',
+        'build-inno', help='Build Inno Setup installer(s)',
     )
     sp.add_argument(
         '--arch',
@@ -273,13 +300,10 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision',
-        help='Mercurial revision to build',
-        default='.',
+        '--revision', help='Mercurial revision to build', default='.',
     )
     sp.add_argument(
-        '--version',
-        help='Mercurial version string to use in installer',
+        '--version', help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -289,8 +313,7 @@
     sp.set_defaults(func=build_inno)
 
     sp = subparsers.add_parser(
-        'build-windows-wheel',
-        help='Build Windows wheel(s)',
+        'build-windows-wheel', help='Build Windows wheel(s)',
     )
     sp.add_argument(
         '--arch',
@@ -300,9 +323,7 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision',
-        help='Mercurial revision to build',
-        default='.',
+        '--revision', help='Mercurial revision to build', default='.',
     )
     sp.add_argument(
         '--base-image-name',
@@ -311,10 +332,7 @@
     )
     sp.set_defaults(func=build_windows_wheel)
 
-    sp = subparsers.add_parser(
-        'build-wix',
-        help='Build WiX installer(s)'
-    )
+    sp = subparsers.add_parser('build-wix', help='Build WiX installer(s)')
     sp.add_argument(
         '--arch',
         help='Architecture to build for',
@@ -323,13 +341,10 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision',
-        help='Mercurial revision to build',
-        default='.',
+        '--revision', help='Mercurial revision to build', default='.',
     )
     sp.add_argument(
-        '--version',
-        help='Mercurial version string to use in installer',
+        '--version', help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -345,15 +360,11 @@
     sp.set_defaults(func=terminate_ec2_instances)
 
     sp = subparsers.add_parser(
-        'purge-ec2-resources',
-        help='Purge all EC2 resources managed by us',
+        'purge-ec2-resources', help='Purge all EC2 resources managed by us',
     )
     sp.set_defaults(func=purge_ec2_resources)
 
-    sp = subparsers.add_parser(
-        'run-tests-linux',
-        help='Run tests on Linux',
-    )
+    sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
     sp.add_argument(
         '--distro',
         help='Linux distribution to run tests on',
@@ -374,8 +385,18 @@
     sp.add_argument(
         '--python-version',
         help='Python version to use',
-        choices={'system2', 'system3', '2.7', '3.5', '3.6', '3.7', '3.8',
-                 'pypy', 'pypy3.5', 'pypy3.6'},
+        choices={
+            'system2',
+            'system3',
+            '2.7',
+            '3.5',
+            '3.6',
+            '3.7',
+            '3.8',
+            'pypy',
+            'pypy3.5',
+            'pypy3.6',
+        },
         default='system2',
     )
     sp.add_argument(
@@ -386,13 +407,10 @@
     sp.set_defaults(func=run_tests_linux)
 
     sp = subparsers.add_parser(
-        'run-tests-windows',
-        help='Run tests on Windows',
+        'run-tests-windows', help='Run tests on Windows',
     )
     sp.add_argument(
-        '--instance-type',
-        help='EC2 instance type to use',
-        default='t3.medium',
+        '--instance-type', help='EC2 instance type to use', default='t3.medium',
     )
     sp.add_argument(
         '--python-version',
@@ -407,8 +425,7 @@
         default='x64',
     )
     sp.add_argument(
-        '--test-flags',
-        help='Extra command line flags to pass to run-tests.py',
+        '--test-flags', help='Extra command line flags to pass to run-tests.py',
     )
     sp.add_argument(
         '--base-image-name',
@@ -419,7 +436,7 @@
 
     sp = subparsers.add_parser(
         'publish-windows-artifacts',
-        help='Publish built Windows artifacts (wheels, installers, etc)'
+        help='Publish built Windows artifacts (wheels, installers, etc)',
     )
     sp.add_argument(
         '--no-pypi',
@@ -436,22 +453,17 @@
         help='Skip uploading to www.mercurial-scm.org',
     )
     sp.add_argument(
-        '--ssh-username',
-        help='SSH username for mercurial-scm.org',
+        '--ssh-username', help='SSH username for mercurial-scm.org',
     )
     sp.add_argument(
-        'version',
-        help='Mercurial version string to locate local packages',
+        'version', help='Mercurial version string to locate local packages',
     )
     sp.set_defaults(func=publish_windows_artifacts)
 
     sp = subparsers.add_parser(
-        'try',
-        help='Run CI automation against a custom changeset'
+        'try', help='Run CI automation against a custom changeset'
     )
-    sp.add_argument('-r', '--rev',
-                    default='.',
-                    help='Revision to run CI on')
+    sp.add_argument('-r', '--rev', default='.', help='Revision to run CI on')
     sp.set_defaults(func=run_try)
 
     return parser
--- a/contrib/automation/hgautomation/linux.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/linux.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,7 @@
 import subprocess
 import tempfile
 
-from .ssh import (
-    exec_command,
-)
+from .ssh import exec_command
 
 
 # Linux distributions that are supported.
@@ -62,7 +60,9 @@
 done
 
 pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system
-'''.lstrip().replace('\r\n', '\n')
+'''.lstrip().replace(
+    '\r\n', '\n'
+)
 
 
 INSTALL_RUST = r'''
@@ -87,10 +87,13 @@
 echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check -
 
 /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL}
-'''.lstrip().replace('\r\n', '\n')
+'''.lstrip().replace(
+    '\r\n', '\n'
+)
 
 
-BOOTSTRAP_DEBIAN = r'''
+BOOTSTRAP_DEBIAN = (
+    r'''
 #!/bin/bash
 
 set -ex
@@ -323,11 +326,14 @@
 EOF
 
 sudo chown -R hg:hg /hgdev
-'''.lstrip().format(
-    install_rust=INSTALL_RUST,
-    install_pythons=INSTALL_PYTHONS,
-    bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV
-).replace('\r\n', '\n')
+'''.lstrip()
+    .format(
+        install_rust=INSTALL_RUST,
+        install_pythons=INSTALL_PYTHONS,
+        bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV,
+    )
+    .replace('\r\n', '\n')
+)
 
 
 # Prepares /hgdev for operations.
@@ -409,7 +415,9 @@
 chown hg:hg /hgwork/tmp
 
 rsync -a /hgdev/src /hgwork/
-'''.lstrip().replace('\r\n', '\n')
+'''.lstrip().replace(
+    '\r\n', '\n'
+)
 
 
 HG_UPDATE_CLEAN = '''
@@ -421,7 +429,9 @@
 ${HG} --config extensions.purge= purge --all
 ${HG} update -C $1
 ${HG} log -r .
-'''.lstrip().replace('\r\n', '\n')
+'''.lstrip().replace(
+    '\r\n', '\n'
+)
 
 
 def prepare_exec_environment(ssh_client, filesystem='default'):
@@ -456,11 +466,12 @@
     res = chan.recv_exit_status()
 
     if res:
-        raise Exception('non-0 exit code updating working directory; %d'
-                        % res)
+        raise Exception('non-0 exit code updating working directory; %d' % res)
 
 
-def synchronize_hg(source_path: pathlib.Path, ec2_instance, revision: str=None):
+def synchronize_hg(
+    source_path: pathlib.Path, ec2_instance, revision: str = None
+):
     """Synchronize a local Mercurial source path to remote EC2 instance."""
 
     with tempfile.TemporaryDirectory() as temp_dir:
@@ -482,8 +493,10 @@
             fh.write('  IdentityFile %s\n' % ec2_instance.ssh_private_key_path)
 
         if not (source_path / '.hg').is_dir():
-            raise Exception('%s is not a Mercurial repository; synchronization '
-                            'not yet supported' % source_path)
+            raise Exception(
+                '%s is not a Mercurial repository; synchronization '
+                'not yet supported' % source_path
+            )
 
         env = dict(os.environ)
         env['HGPLAIN'] = '1'
@@ -493,17 +506,29 @@
 
         res = subprocess.run(
             ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
-            cwd=str(source_path), env=env, check=True, capture_output=True)
+            cwd=str(source_path),
+            env=env,
+            check=True,
+            capture_output=True,
+        )
 
         full_revision = res.stdout.decode('ascii')
 
         args = [
-            'python2.7', str(hg_bin),
-            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
-            '--config', 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
+            'python2.7',
+            str(hg_bin),
+            '--config',
+            'ui.ssh=ssh -F %s' % ssh_config,
+            '--config',
+            'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg',
             # Also ensure .hgtags changes are present so auto version
             # calculation works.
-            'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
+            'push',
+            '-f',
+            '-r',
+            full_revision,
+            '-r',
+            'file(.hgtags)',
             'ssh://%s//hgwork/src' % public_ip,
         ]
 
@@ -522,7 +547,8 @@
             fh.chmod(0o0700)
 
         chan, stdin, stdout = exec_command(
-            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision)
+            ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision
+        )
         stdin.close()
 
         for line in stdout:
@@ -531,8 +557,9 @@
         res = chan.recv_exit_status()
 
         if res:
-            raise Exception('non-0 exit code updating working directory; %d'
-                            % res)
+            raise Exception(
+                'non-0 exit code updating working directory; %d' % res
+            )
 
 
 def run_tests(ssh_client, python_version, test_flags=None):
@@ -554,8 +581,8 @@
 
     command = (
         '/bin/sh -c "export TMPDIR=/hgwork/tmp; '
-        'cd /hgwork/src/tests && %s run-tests.py %s"' % (
-            python, test_flags))
+        'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags)
+    )
 
     chan, stdin, stdout = exec_command(ssh_client, command)
 
--- a/contrib/automation/hgautomation/pypi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/pypi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,12 +7,8 @@
 
 # no-check-code because Python 3 native.
 
-from twine.commands.upload import (
-    upload as twine_upload,
-)
-from twine.settings import (
-    Settings,
-)
+from twine.commands.upload import upload as twine_upload
+from twine.settings import Settings
 
 
 def upload(paths):
--- a/contrib/automation/hgautomation/ssh.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/ssh.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,14 +11,13 @@
 import time
 import warnings
 
-from cryptography.utils import (
-    CryptographyDeprecationWarning,
-)
+from cryptography.utils import CryptographyDeprecationWarning
 import paramiko
 
 
 def wait_for_ssh(hostname, port, timeout=60, username=None, key_filename=None):
     """Wait for an SSH server to start on the specified host and port."""
+
     class IgnoreHostKeyPolicy(paramiko.MissingHostKeyPolicy):
         def missing_host_key(self, client, hostname, key):
             return
@@ -28,17 +27,23 @@
     # paramiko triggers a CryptographyDeprecationWarning in the cryptography
     # package. Let's suppress
     with warnings.catch_warnings():
-        warnings.filterwarnings('ignore',
-                                category=CryptographyDeprecationWarning)
+        warnings.filterwarnings(
+            'ignore', category=CryptographyDeprecationWarning
+        )
 
         while True:
             client = paramiko.SSHClient()
             client.set_missing_host_key_policy(IgnoreHostKeyPolicy())
             try:
-                client.connect(hostname, port=port, username=username,
-                               key_filename=key_filename,
-                               timeout=5.0, allow_agent=False,
-                               look_for_keys=False)
+                client.connect(
+                    hostname,
+                    port=port,
+                    username=username,
+                    key_filename=key_filename,
+                    timeout=5.0,
+                    allow_agent=False,
+                    look_for_keys=False,
+                )
 
                 return client
             except socket.error:
--- a/contrib/automation/hgautomation/windows.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/windows.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,12 +15,8 @@
 import subprocess
 import tempfile
 
-from .pypi import (
-    upload as pypi_upload,
-)
-from .winrm import (
-    run_powershell,
-)
+from .pypi import upload as pypi_upload
+from .winrm import run_powershell
 
 
 # PowerShell commands to activate a Visual Studio 2008 environment.
@@ -117,14 +113,21 @@
 X86_USER_AGENT_PATTERN = '.*Windows.*'
 X64_USER_AGENT_PATTERN = '.*Windows.*(WOW|x)64.*'
 
-X86_EXE_DESCRIPTION = ('Mercurial {version} Inno Setup installer - x86 Windows '
-                       '- does not require admin rights')
-X64_EXE_DESCRIPTION = ('Mercurial {version} Inno Setup installer - x64 Windows '
-                       '- does not require admin rights')
-X86_MSI_DESCRIPTION = ('Mercurial {version} MSI installer - x86 Windows '
-                       '- requires admin rights')
-X64_MSI_DESCRIPTION = ('Mercurial {version} MSI installer - x64 Windows '
-                       '- requires admin rights')
+X86_EXE_DESCRIPTION = (
+    'Mercurial {version} Inno Setup installer - x86 Windows '
+    '- does not require admin rights'
+)
+X64_EXE_DESCRIPTION = (
+    'Mercurial {version} Inno Setup installer - x64 Windows '
+    '- does not require admin rights'
+)
+X86_MSI_DESCRIPTION = (
+    'Mercurial {version} MSI installer - x86 Windows ' '- requires admin rights'
+)
+X64_MSI_DESCRIPTION = (
+    'Mercurial {version} MSI installer - x64 Windows ' '- requires admin rights'
+)
+
 
 def get_vc_prefix(arch):
     if arch == 'x86':
@@ -158,10 +161,21 @@
         ssh_dir.chmod(0o0700)
 
         # Generate SSH key to use for communication.
-        subprocess.run([
-            'ssh-keygen', '-t', 'rsa', '-b', '4096', '-N', '',
-            '-f', str(ssh_dir / 'id_rsa')],
-            check=True, capture_output=True)
+        subprocess.run(
+            [
+                'ssh-keygen',
+                '-t',
+                'rsa',
+                '-b',
+                '4096',
+                '-N',
+                '',
+                '-f',
+                str(ssh_dir / 'id_rsa'),
+            ],
+            check=True,
+            capture_output=True,
+        )
 
         # Add it to ~/.ssh/authorized_keys on remote.
         # This assumes the file doesn't already exist.
@@ -182,8 +196,10 @@
             fh.write('  IdentityFile %s\n' % (ssh_dir / 'id_rsa'))
 
         if not (hg_repo / '.hg').is_dir():
-            raise Exception('%s is not a Mercurial repository; '
-                            'synchronization not yet supported' % hg_repo)
+            raise Exception(
+                '%s is not a Mercurial repository; '
+                'synchronization not yet supported' % hg_repo
+            )
 
         env = dict(os.environ)
         env['HGPLAIN'] = '1'
@@ -193,17 +209,29 @@
 
         res = subprocess.run(
             ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'],
-            cwd=str(hg_repo), env=env, check=True, capture_output=True)
+            cwd=str(hg_repo),
+            env=env,
+            check=True,
+            capture_output=True,
+        )
 
         full_revision = res.stdout.decode('ascii')
 
         args = [
-            'python2.7', hg_bin,
-            '--config', 'ui.ssh=ssh -F %s' % ssh_config,
-            '--config', 'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
+            'python2.7',
+            hg_bin,
+            '--config',
+            'ui.ssh=ssh -F %s' % ssh_config,
+            '--config',
+            'ui.remotecmd=c:/hgdev/venv-bootstrap/Scripts/hg.exe',
             # Also ensure .hgtags changes are present so auto version
             # calculation works.
-            'push', '-f', '-r', full_revision, '-r', 'file(.hgtags)',
+            'push',
+            '-f',
+            '-r',
+            full_revision,
+            '-r',
+            'file(.hgtags)',
             'ssh://%s/c:/hgdev/src' % public_ip,
         ]
 
@@ -213,8 +241,9 @@
         if res.returncode not in (0, 1):
             res.check_returncode()
 
-        run_powershell(winrm_client,
-                       HG_UPDATE_CLEAN.format(revision=full_revision))
+        run_powershell(
+            winrm_client, HG_UPDATE_CLEAN.format(revision=full_revision)
+        )
 
         # TODO detect dirty local working directory and synchronize accordingly.
 
@@ -250,8 +279,9 @@
     winrm_client.fetch(source, str(dest))
 
 
-def build_inno_installer(winrm_client, arch: str, dest_path: pathlib.Path,
-                         version=None):
+def build_inno_installer(
+    winrm_client, arch: str, dest_path: pathlib.Path, version=None
+):
     """Build the Inno Setup installer on a remote machine.
 
     Using a WinRM client, remote commands are executed to build
@@ -263,8 +293,9 @@
     if version:
         extra_args.extend(['--version', version])
 
-    ps = get_vc_prefix(arch) + BUILD_INNO.format(arch=arch,
-                                                 extra_args=' '.join(extra_args))
+    ps = get_vc_prefix(arch) + BUILD_INNO.format(
+        arch=arch, extra_args=' '.join(extra_args)
+    )
     run_powershell(winrm_client, ps)
     copy_latest_dist(winrm_client, '*.exe', dest_path)
 
@@ -281,8 +312,9 @@
     copy_latest_dist(winrm_client, '*.whl', dest_path)
 
 
-def build_wix_installer(winrm_client, arch: str, dest_path: pathlib.Path,
-                        version=None):
+def build_wix_installer(
+    winrm_client, arch: str, dest_path: pathlib.Path, version=None
+):
     """Build the WiX installer on a remote machine.
 
     Using a WinRM client, remote commands are executed to build a WiX installer.
@@ -292,8 +324,9 @@
     if version:
         extra_args.extend(['--version', version])
 
-    ps = get_vc_prefix(arch) + BUILD_WIX.format(arch=arch,
-                                                extra_args=' '.join(extra_args))
+    ps = get_vc_prefix(arch) + BUILD_WIX.format(
+        arch=arch, extra_args=' '.join(extra_args)
+    )
     run_powershell(winrm_client, ps)
     copy_latest_dist(winrm_client, '*.msi', dest_path)
 
@@ -307,18 +340,16 @@
     ``run-tests.py``.
     """
     if not re.match(r'\d\.\d', python_version):
-        raise ValueError(r'python_version must be \d.\d; got %s' %
-                         python_version)
+        raise ValueError(
+            r'python_version must be \d.\d; got %s' % python_version
+        )
 
     if arch not in ('x86', 'x64'):
         raise ValueError('arch must be x86 or x64; got %s' % arch)
 
     python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
 
-    ps = RUN_TESTS.format(
-        python_path=python_path,
-        test_flags=test_flags or '',
-    )
+    ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
 
     run_powershell(winrm_client, ps)
 
@@ -374,8 +405,8 @@
             version,
             X64_USER_AGENT_PATTERN,
             '%s/%s' % (MERCURIAL_SCM_BASE_URL, x64_msi_filename),
-            X64_MSI_DESCRIPTION.format(version=version)
-        )
+            X64_MSI_DESCRIPTION.format(version=version),
+        ),
     )
 
     lines = ['\t'.join(e) for e in entries]
@@ -396,8 +427,9 @@
     pypi_upload(wheel_paths)
 
 
-def publish_artifacts_mercurial_scm_org(dist_path: pathlib.Path, version: str,
-                                        ssh_username=None):
+def publish_artifacts_mercurial_scm_org(
+    dist_path: pathlib.Path, version: str, ssh_username=None
+):
     """Publish Windows release artifacts to mercurial-scm.org."""
     all_paths = resolve_all_artifacts(dist_path, version)
 
@@ -436,7 +468,8 @@
 
     now = datetime.datetime.utcnow()
     backup_path = dist_path / (
-        'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S'))
+        'latest-windows-%s.dat' % now.strftime('%Y%m%dT%H%M%S')
+    )
     print('backing up %s to %s' % (latest_dat_path, backup_path))
 
     with sftp.open(latest_dat_path, 'rb') as fh:
@@ -453,9 +486,13 @@
         fh.write(latest_dat_content.encode('ascii'))
 
 
-def publish_artifacts(dist_path: pathlib.Path, version: str,
-                      pypi=True, mercurial_scm_org=True,
-                      ssh_username=None):
+def publish_artifacts(
+    dist_path: pathlib.Path,
+    version: str,
+    pypi=True,
+    mercurial_scm_org=True,
+    ssh_username=None,
+):
     """Publish Windows release artifacts.
 
     Files are found in `dist_path`. We will look for files with version string
@@ -468,5 +505,6 @@
         publish_artifacts_pypi(dist_path, version)
 
     if mercurial_scm_org:
-        publish_artifacts_mercurial_scm_org(dist_path, version,
-                                            ssh_username=ssh_username)
+        publish_artifacts_mercurial_scm_org(
+            dist_path, version, ssh_username=ssh_username
+        )
--- a/contrib/automation/hgautomation/winrm.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/automation/hgautomation/winrm.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,9 +11,7 @@
 import pprint
 import time
 
-from pypsrp.client import (
-    Client,
-)
+from pypsrp.client import Client
 from pypsrp.powershell import (
     PowerShell,
     PSInvocationState,
@@ -35,8 +33,13 @@
 
     while True:
         try:
-            client = Client(host, username=username, password=password,
-                            ssl=ssl, connection_timeout=5)
+            client = Client(
+                host,
+                username=username,
+                password=password,
+                ssl=ssl,
+                connection_timeout=5,
+            )
             client.execute_ps("Write-Host 'Hello, World!'")
             return client
         except requests.exceptions.ConnectionError:
@@ -78,5 +81,7 @@
             print(format_object(o))
 
         if ps.state == PSInvocationState.FAILED:
-            raise Exception('PowerShell execution failed: %s' %
-                            ' '.join(map(format_object, ps.streams.error)))
+            raise Exception(
+                'PowerShell execution failed: %s'
+                % ' '.join(map(format_object, ps.streams.error))
+            )
--- a/contrib/bdiff-torture.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/bdiff-torture.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,15 +9,20 @@
     pycompat,
 )
 
+
 def reducetest(a, b):
     tries = 0
     reductions = 0
     print("reducing...")
     while tries < 1000:
-        a2 = "\n".join(l for l in a.splitlines()
-                       if random.randint(0, 100) > 0) + "\n"
-        b2 = "\n".join(l for l in b.splitlines()
-                       if random.randint(0, 100) > 0) + "\n"
+        a2 = (
+            "\n".join(l for l in a.splitlines() if random.randint(0, 100) > 0)
+            + "\n"
+        )
+        b2 = (
+            "\n".join(l for l in b.splitlines() if random.randint(0, 100) > 0)
+            + "\n"
+        )
         if a2 == a and b2 == b:
             continue
         if a2 == b2:
@@ -32,8 +37,7 @@
             a = a2
             b = b2
 
-    print("reduced:", reductions, len(a) + len(b),
-          repr(a), repr(b))
+    print("reduced:", reductions, len(a) + len(b), repr(a), repr(b))
     try:
         test1(a, b)
     except Exception as inst:
@@ -41,6 +45,7 @@
 
     sys.exit(0)
 
+
 def test1(a, b):
     d = mdiff.textdiff(a, b)
     if not d:
@@ -49,6 +54,7 @@
     if c != b:
         raise ValueError("bad")
 
+
 def testwrap(a, b):
     try:
         test1(a, b)
@@ -57,10 +63,12 @@
         print("exception:", inst)
     reducetest(a, b)
 
+
 def test(a, b):
     testwrap(a, b)
     testwrap(b, a)
 
+
 def rndtest(size, noise):
     a = []
     src = "                aaaaaaaabbbbccd"
@@ -82,6 +90,7 @@
 
     test(a, b)
 
+
 maxvol = 10000
 startsize = 2
 while True:
--- a/contrib/benchmarks/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/benchmarks/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -44,15 +44,24 @@
     util,
 )
 
-basedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
-                          os.path.pardir, os.path.pardir))
+basedir = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
+)
 reposdir = os.environ['REPOS_DIR']
-reposnames = [name for name in os.listdir(reposdir)
-              if os.path.isdir(os.path.join(reposdir, name, ".hg"))]
+reposnames = [
+    name
+    for name in os.listdir(reposdir)
+    if os.path.isdir(os.path.join(reposdir, name, ".hg"))
+]
 if not reposnames:
     raise ValueError("No repositories found in $REPO_DIR")
-outputre = re.compile((r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
-                       r'\d+.\d+ \(best of \d+\)'))
+outputre = re.compile(
+    (
+        r'! wall (\d+.\d+) comb \d+.\d+ user \d+.\d+ sys '
+        r'\d+.\d+ \(best of \d+\)'
+    )
+)
+
 
 def runperfcommand(reponame, command, *args, **kwargs):
     os.environ["HGRCPATH"] = os.environ.get("ASVHGRCPATH", "")
@@ -63,8 +72,9 @@
     else:
         ui = uimod.ui()
     repo = hg.repository(ui, os.path.join(reposdir, reponame))
-    perfext = extensions.load(ui, 'perfext',
-                              os.path.join(basedir, 'contrib', 'perf.py'))
+    perfext = extensions.load(
+        ui, 'perfext', os.path.join(basedir, 'contrib', 'perf.py')
+    )
     cmd = getattr(perfext, command)
     ui.pushbuffer()
     cmd(ui, repo, *args, **kwargs)
@@ -74,6 +84,7 @@
         raise ValueError("Invalid output {0}".format(output))
     return float(match.group(1))
 
+
 def perfbench(repos=reposnames, name=None, params=None):
     """decorator to declare ASV benchmark based on contrib/perf.py extension
 
@@ -104,10 +115,12 @@
         def wrapped(repo, *args):
             def perf(command, *a, **kw):
                 return runperfcommand(repo, command, *a, **kw)
+
             return func(perf, *args)
 
         wrapped.params = [p[1] for p in params]
         wrapped.param_names = [p[0] for p in params]
         wrapped.pretty_name = name
         return wrapped
+
     return decorator
--- a/contrib/benchmarks/perf.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/benchmarks/perf.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,18 +9,22 @@
 
 from . import perfbench
 
+
 @perfbench()
 def track_tags(perf):
     return perf("perftags")
 
+
 @perfbench()
 def track_status(perf):
     return perf("perfstatus", unknown=False)
 
+
 @perfbench(params=[('rev', ['1000', '10000', 'tip'])])
 def track_manifest(perf, rev):
     return perf("perfmanifest", rev)
 
+
 @perfbench()
 def track_heads(perf):
     return perf("perfheads")
--- a/contrib/benchmarks/revset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/benchmarks/revset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,15 +18,16 @@
 
 from . import basedir, perfbench
 
+
 def createrevsetbenchmark(baseset, variants=None):
     if variants is None:
         # Default variants
-        variants = ["plain", "first", "last", "sort", "sort+first",
-                    "sort+last"]
-    fname = "track_" + "_".join("".join([
-        c if c in string.digits + string.letters else " "
-        for c in baseset
-    ]).split())
+        variants = ["plain", "first", "last", "sort", "sort+first", "sort+last"]
+    fname = "track_" + "_".join(
+        "".join(
+            [c if c in string.digits + string.letters else " " for c in baseset]
+        ).split()
+    )
 
     def wrap(fname, baseset):
         @perfbench(name=baseset, params=[("variant", variants)])
@@ -36,18 +37,21 @@
                 for var in variant.split("+"):
                     revset = "%s(%s)" % (var, revset)
             return perf("perfrevset", revset)
+
         f.__name__ = fname
         return f
+
     return wrap(fname, baseset)
 
+
 def initializerevsetbenchmarks():
     mod = sys.modules[__name__]
-    with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'),
-              'rb') as fh:
+    with open(os.path.join(basedir, 'contrib', 'base-revsets.txt'), 'rb') as fh:
         for line in fh:
             baseset = line.strip()
             if baseset and not baseset.startswith('#'):
                 func = createrevsetbenchmark(baseset)
                 setattr(mod, func.__name__, func)
 
+
 initializerevsetbenchmarks()
--- a/contrib/byteify-strings.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/byteify-strings.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,10 +18,13 @@
 import token
 import tokenize
 
+
 def adjusttokenpos(t, ofs):
     """Adjust start/end column of the given token"""
-    return t._replace(start=(t.start[0], t.start[1] + ofs),
-                      end=(t.end[0], t.end[1] + ofs))
+    return t._replace(
+        start=(t.start[0], t.start[1] + ofs), end=(t.end[0], t.end[1] + ofs)
+    )
+
 
 def replacetokens(tokens, opts):
     """Transform a stream of tokens from raw to Python 3.
@@ -82,9 +85,8 @@
         currtoken = tokens[k]
         while currtoken.type in (token.STRING, token.NEWLINE, tokenize.NL):
             k += 1
-            if (
-                currtoken.type == token.STRING
-                and currtoken.string.startswith(("'", '"'))
+            if currtoken.type == token.STRING and currtoken.string.startswith(
+                ("'", '"')
             ):
                 sysstrtokens.add(currtoken)
             try:
@@ -126,7 +128,7 @@
     coloffset = -1  # column offset for the current line (-1: TBD)
     parens = [(0, 0, 0, -1)]  # stack of (line, end-column, column-offset, type)
     ignorenextline = False  # don't transform the next line
-    insideignoreblock = False # don't transform until turned off
+    insideignoreblock = False  # don't transform until turned off
     for i, t in enumerate(tokens):
         # Compute the column offset for the current line, such that
         # the current line will be aligned to the last opening paren
@@ -135,9 +137,9 @@
             lastparen = parens[-1]
             if t.start[1] == lastparen[1]:
                 coloffset = lastparen[2]
-            elif (
-                t.start[1] + 1 == lastparen[1]
-                and lastparen[3] not in (token.NEWLINE, tokenize.NL)
+            elif t.start[1] + 1 == lastparen[1] and lastparen[3] not in (
+                token.NEWLINE,
+                tokenize.NL,
             ):
                 # fix misaligned indent of s/util.Abort/error.Abort/
                 coloffset = lastparen[2] + (lastparen[1] - t.start[1])
@@ -202,8 +204,7 @@
                 continue
 
             # String literal. Prefix to make a b'' string.
-            yield adjusttokenpos(t._replace(string='b%s' % t.string),
-                                 coloffset)
+            yield adjusttokenpos(t._replace(string='b%s' % t.string), coloffset)
             coldelta += 1
             continue
 
@@ -213,8 +214,13 @@
 
             # *attr() builtins don't accept byte strings to 2nd argument.
             if fn in (
-                'getattr', 'setattr', 'hasattr', 'safehasattr', 'wrapfunction',
-                'wrapclass', 'addattr'
+                'getattr',
+                'setattr',
+                'hasattr',
+                'safehasattr',
+                'wrapfunction',
+                'wrapclass',
+                'addattr',
             ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
@@ -241,18 +247,23 @@
                 _ensuresysstr(i + 4)
 
         # Looks like "if __name__ == '__main__'".
-        if (t.type == token.NAME and t.string == '__name__'
-            and _isop(i + 1, '==')):
+        if (
+            t.type == token.NAME
+            and t.string == '__name__'
+            and _isop(i + 1, '==')
+        ):
             _ensuresysstr(i + 2)
 
         # Emit unmodified token.
         yield adjusttokenpos(t, coloffset)
 
+
 def process(fin, fout, opts):
     tokens = tokenize.tokenize(fin.readline)
     tokens = replacetokens(list(tokens), opts)
     fout.write(tokenize.untokenize(tokens))
 
+
 def tryunlink(fname):
     try:
         os.unlink(fname)
@@ -260,12 +271,14 @@
         if err.errno != errno.ENOENT:
             raise
 
+
 @contextlib.contextmanager
 def editinplace(fname):
     n = os.path.basename(fname)
     d = os.path.dirname(fname)
-    fp = tempfile.NamedTemporaryFile(prefix='.%s-' % n, suffix='~', dir=d,
-                                     delete=False)
+    fp = tempfile.NamedTemporaryFile(
+        prefix='.%s-' % n, suffix='~', dir=d, delete=False
+    )
     try:
         yield fp
         fp.close()
@@ -276,19 +289,37 @@
         fp.close()
         tryunlink(fp.name)
 
+
 def main():
     ap = argparse.ArgumentParser()
-    ap.add_argument('--version', action='version',
-                    version='Byteify strings 1.0')
-    ap.add_argument('-i', '--inplace', action='store_true', default=False,
-                    help='edit files in place')
-    ap.add_argument('--dictiter', action='store_true', default=False,
-                    help='rewrite iteritems() and itervalues()'),
-    ap.add_argument('--allow-attr-methods', action='store_true',
-                    default=False,
-                    help='also handle attr*() when they are methods'),
-    ap.add_argument('--treat-as-kwargs', nargs="+", default=[],
-                    help="ignore kwargs-like objects"),
+    ap.add_argument(
+        '--version', action='version', version='Byteify strings 1.0'
+    )
+    ap.add_argument(
+        '-i',
+        '--inplace',
+        action='store_true',
+        default=False,
+        help='edit files in place',
+    )
+    ap.add_argument(
+        '--dictiter',
+        action='store_true',
+        default=False,
+        help='rewrite iteritems() and itervalues()',
+    ),
+    ap.add_argument(
+        '--allow-attr-methods',
+        action='store_true',
+        default=False,
+        help='also handle attr*() when they are methods',
+    ),
+    ap.add_argument(
+        '--treat-as-kwargs',
+        nargs="+",
+        default=[],
+        help="ignore kwargs-like objects",
+    ),
     ap.add_argument('files', metavar='FILE', nargs='+', help='source file')
     args = ap.parse_args()
     opts = {
@@ -306,6 +337,7 @@
                 fout = sys.stdout.buffer
                 process(fin, fout, opts)
 
+
 if __name__ == '__main__':
     if sys.version_info.major < 3:
         print('This script must be run under Python 3.')
--- a/contrib/casesmash.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/casesmash.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,12 +1,12 @@
 from __future__ import absolute_import
 import __builtin__
 import os
-from mercurial import (
-    util,
-)
+from mercurial import util
+
 
 def lowerwrap(scope, funcname):
     f = getattr(scope, funcname)
+
     def wrap(fname, *args, **kwargs):
         d, base = os.path.split(fname)
         try:
@@ -19,11 +19,14 @@
             if fn.lower() == base.lower():
                 return f(os.path.join(d, fn), *args, **kwargs)
         return f(fname, *args, **kwargs)
+
     scope.__dict__[funcname] = wrap
 
+
 def normcase(path):
     return path.lower()
 
+
 os.path.normcase = normcase
 
 for f in 'file open'.split():
--- a/contrib/catapipe.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/catapipe.py	Sun Oct 06 09:45:02 2019 -0400
@@ -53,15 +53,28 @@
 # Python version and OS
 timer = timeit.default_timer
 
+
 def main():
     parser = argparse.ArgumentParser()
-    parser.add_argument('pipe', type=str, nargs=1,
-                        help='Path of named pipe to create and listen on.')
-    parser.add_argument('output', default='trace.json', type=str, nargs='?',
-                        help='Path of json file to create where the traces '
-                             'will be stored.')
-    parser.add_argument('--debug', default=False, action='store_true',
-                        help='Print useful debug messages')
+    parser.add_argument(
+        'pipe',
+        type=str,
+        nargs=1,
+        help='Path of named pipe to create and listen on.',
+    )
+    parser.add_argument(
+        'output',
+        default='trace.json',
+        type=str,
+        nargs='?',
+        help='Path of json file to create where the traces ' 'will be stored.',
+    )
+    parser.add_argument(
+        '--debug',
+        default=False,
+        action='store_true',
+        help='Print useful debug messages',
+    )
     args = parser.parse_args()
     fn = args.pipe[0]
     os.mkfifo(fn)
@@ -86,19 +99,23 @@
                     payload_args = {}
                 pid = _threadmap[session]
                 ts_micros = (now - start) * 1000000
-                out.write(json.dumps(
-                    {
-                        "name": label,
-                        "cat": "misc",
-                        "ph": _TYPEMAP[verb],
-                        "ts": ts_micros,
-                        "pid": pid,
-                        "tid": 1,
-                        "args": payload_args,
-                    }))
+                out.write(
+                    json.dumps(
+                        {
+                            "name": label,
+                            "cat": "misc",
+                            "ph": _TYPEMAP[verb],
+                            "ts": ts_micros,
+                            "pid": pid,
+                            "tid": 1,
+                            "args": payload_args,
+                        }
+                    )
+                )
                 out.write(',\n')
     finally:
         os.unlink(fn)
 
+
 if __name__ == '__main__':
     main()
--- a/contrib/check-code.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/check-code.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,11 +26,15 @@
 import os
 import re
 import sys
+
 if sys.version_info[0] < 3:
     opentext = open
 else:
+
     def opentext(f):
         return open(f, encoding='latin1')
+
+
 try:
     xrange
 except NameError:
@@ -42,6 +46,7 @@
 
 import testparseutil
 
+
 def compilere(pat, multiline=False):
     if multiline:
         pat = '(?m)' + pat
@@ -52,10 +57,22 @@
             pass
     return re.compile(pat)
 
+
 # check "rules depending on implementation of repquote()" in each
 # patterns (especially pypats), before changing around repquote()
-_repquotefixedmap = {' ': ' ', '\n': '\n', '.': 'p', ':': 'q',
-                     '%': '%', '\\': 'b', '*': 'A', '+': 'P', '-': 'M'}
+_repquotefixedmap = {
+    ' ': ' ',
+    '\n': '\n',
+    '.': 'p',
+    ':': 'q',
+    '%': '%',
+    '\\': 'b',
+    '*': 'A',
+    '+': 'P',
+    '-': 'M',
+}
+
+
 def _repquoteencodechr(i):
     if i > 255:
         return 'u'
@@ -67,13 +84,17 @@
     if c.isdigit():
         return 'n'
     return 'o'
+
+
 _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256))
 
+
 def repquote(m):
     t = m.group('text')
     t = t.translate(_repquotett)
     return m.group('quote') + t + m.group('quote')
 
+
 def reppython(m):
     comment = m.group('comment')
     if comment:
@@ -81,86 +102,103 @@
         return "#" * l + comment[l:]
     return repquote(m)
 
+
 def repcomment(m):
     return m.group(1) + "#" * len(m.group(2))
 
+
 def repccomment(m):
     t = re.sub(r"((?<=\n) )|\S", "x", m.group(2))
     return m.group(1) + t + "*/"
 
+
 def repcallspaces(m):
     t = re.sub(r"\n\s+", "\n", m.group(2))
     return m.group(1) + t
 
+
 def repinclude(m):
     return m.group(1) + "<foo>"
 
+
 def rephere(m):
     t = re.sub(r"\S", "x", m.group(2))
     return m.group(1) + t
 
 
 testpats = [
-  [
-    (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
-    (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
-    (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
-    (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
-    (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
-    (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
-    (r'echo -n', "don't use 'echo -n', use printf"),
-    (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
-    (r'head -c', "don't use 'head -c', use 'dd'"),
-    (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
-    (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
-    (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
-    (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
-    (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
-    (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
-    (r'\[[^\]]+==', '[ foo == bar ] is a bashism, use [ foo = bar ] instead'),
-    (r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
-     "use egrep for extended grep syntax"),
-    (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
-    (r'(?<!!)/bin/', "don't use explicit paths for tools"),
-    (r'#!.*/bash', "don't use bash in shebang, use sh"),
-    (r'[^\n]\Z', "no trailing newline"),
-    (r'export .*=', "don't export and assign at once"),
-    (r'^source\b', "don't use 'source', use '.'"),
-    (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
-    (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
-    (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
-    (r'^stop\(\)', "don't use 'stop' as a shell function name"),
-    (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
-    (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
-    (r'^alias\b.*=', "don't use alias, use a function"),
-    (r'if\s*!', "don't use '!' to negate exit status"),
-    (r'/dev/u?random', "don't use entropy, use /dev/zero"),
-    (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
-    (r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
-     "put a backslash-escaped newline after sed 'i' command"),
-    (r'^diff *-\w*[uU].*$\n(^  \$ |^$)', "prefix diff -u/-U with cmp"),
-    (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
-    (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
-    (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
-    (r'\butil\.Abort\b', "directly use error.Abort"),
-    (r'\|&', "don't use |&, use 2>&1"),
-    (r'\w =  +\w', "only one space after = allowed"),
-    (r'\bsed\b.*[^\\]\\n', "don't use 'sed ... \\n', use a \\ and a newline"),
-    (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
-    (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
-    (r'grep.* -[ABC]', "don't use grep's context flags"),
-    (r'find.*-printf',
-     "don't use 'find -printf', it doesn't exist on BSD find(1)"),
-    (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
-  ],
-  # warnings
-  [
-    (r'^function', "don't use 'function', use old style"),
-    (r'^diff.*-\w*N', "don't use 'diff -N'"),
-    (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
-    (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
-    (r'kill (`|\$\()', "don't use kill, use killdaemons.py")
-  ]
+    [
+        (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"),
+        (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"),
+        (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"),
+        (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"),
+        (r'sed.*-i', "don't use 'sed -i', use a temporary file"),
+        (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"),
+        (r'echo -n', "don't use 'echo -n', use printf"),
+        (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"),
+        (r'head -c', "don't use 'head -c', use 'dd'"),
+        (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"),
+        (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"),
+        (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"),
+        (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"),
+        (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"),
+        (r'rm -rf \*', "don't use naked rm -rf, target a directory"),
+        (
+            r'\[[^\]]+==',
+            '[ foo == bar ] is a bashism, use [ foo = bar ] instead',
+        ),
+        (
+            r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w',
+            "use egrep for extended grep syntax",
+        ),
+        (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"),
+        (r'(?<!!)/bin/', "don't use explicit paths for tools"),
+        (r'#!.*/bash', "don't use bash in shebang, use sh"),
+        (r'[^\n]\Z', "no trailing newline"),
+        (r'export .*=', "don't export and assign at once"),
+        (r'^source\b', "don't use 'source', use '.'"),
+        (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"),
+        (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"),
+        (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"),
+        (r'^stop\(\)', "don't use 'stop' as a shell function name"),
+        (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"),
+        (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"),
+        (r'^alias\b.*=', "don't use alias, use a function"),
+        (r'if\s*!', "don't use '!' to negate exit status"),
+        (r'/dev/u?random', "don't use entropy, use /dev/zero"),
+        (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"),
+        (
+            r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)',
+            "put a backslash-escaped newline after sed 'i' command",
+        ),
+        (r'^diff *-\w*[uU].*$\n(^  \$ |^$)', "prefix diff -u/-U with cmp"),
+        (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"),
+        (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"),
+        (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"),
+        (r'\butil\.Abort\b', "directly use error.Abort"),
+        (r'\|&', "don't use |&, use 2>&1"),
+        (r'\w =  +\w', "only one space after = allowed"),
+        (
+            r'\bsed\b.*[^\\]\\n',
+            "don't use 'sed ... \\n', use a \\ and a newline",
+        ),
+        (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"),
+        (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"),
+        (r'grep.* -[ABC]', "don't use grep's context flags"),
+        (
+            r'find.*-printf',
+            "don't use 'find -printf', it doesn't exist on BSD find(1)",
+        ),
+        (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"),
+    ],
+    # warnings
+    [
+        (r'^function', "don't use 'function', use old style"),
+        (r'^diff.*-\w*N', "don't use 'diff -N'"),
+        (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"),
+        (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"),
+        (r'kill (`|\$\()', "don't use kill, use killdaemons.py"),
+    ],
 ]
 
 testfilters = [
@@ -170,45 +208,72 @@
 
 uprefix = r"^  \$ "
 utestpats = [
-  [
-    (r'^(\S.*||  [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
-    (uprefix + r'.*\|\s*sed[^|>\n]*\n',
-     "use regex test output patterns instead of sed"),
-    (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
-    (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
-    (uprefix + r'.*\|\| echo.*(fail|error)',
-     "explicit exit code checks unnecessary"),
-    (uprefix + r'set -e', "don't use set -e"),
-    (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
-    (uprefix + r'.*:\.\S*/', "x:.y in a path does not work on msys, rewrite "
-     "as x://.y, or see `hg log -k msys` for alternatives", r'-\S+:\.|' #-Rxxx
-     '# no-msys'), # in test-pull.t which is skipped on windows
-    (r'^  [^$>].*27\.0\.0\.1',
-     'use $LOCALIP not an explicit loopback address'),
-    (r'^  (?![>$] ).*\$LOCALIP.*[^)]$',
-     'mark $LOCALIP output lines with (glob) to help tests in BSD jails'),
-    (r'^  (cat|find): .*: \$ENOENT\$',
-     'use test -f to test for file existence'),
-    (r'^  diff -[^ -]*p',
-     "don't use (external) diff with -p for portability"),
-    (r' readlink ', 'use readlink.py instead of readlink'),
-    (r'^  [-+][-+][-+] .* [-+]0000 \(glob\)',
-     "glob timezone field in diff output for portability"),
-    (r'^  @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
-     "use '@@ -N* +N,n @@ (glob)' style chunk header for portability"),
-    (r'^  @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
-     "use '@@ -N,n +N* @@ (glob)' style chunk header for portability"),
-    (r'^  @@ -[0-9]+ [+][0-9]+ @@',
-     "use '@@ -N* +N* @@ (glob)' style chunk header for portability"),
-    (uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
-     r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
-     "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)"),
-  ],
-  # warnings
-  [
-    (r'^  (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
-     "glob match with no glob string (?, *, /, and $LOCALIP)"),
-  ]
+    [
+        (r'^(\S.*||  [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"),
+        (
+            uprefix + r'.*\|\s*sed[^|>\n]*\n',
+            "use regex test output patterns instead of sed",
+        ),
+        (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"),
+        (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"),
+        (
+            uprefix + r'.*\|\| echo.*(fail|error)',
+            "explicit exit code checks unnecessary",
+        ),
+        (uprefix + r'set -e', "don't use set -e"),
+        (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"),
+        (
+            uprefix + r'.*:\.\S*/',
+            "x:.y in a path does not work on msys, rewrite "
+            "as x://.y, or see `hg log -k msys` for alternatives",
+            r'-\S+:\.|' '# no-msys',  # -Rxxx
+        ),  # in test-pull.t which is skipped on windows
+        (
+            r'^  [^$>].*27\.0\.0\.1',
+            'use $LOCALIP not an explicit loopback address',
+        ),
+        (
+            r'^  (?![>$] ).*\$LOCALIP.*[^)]$',
+            'mark $LOCALIP output lines with (glob) to help tests in BSD jails',
+        ),
+        (
+            r'^  (cat|find): .*: \$ENOENT\$',
+            'use test -f to test for file existence',
+        ),
+        (
+            r'^  diff -[^ -]*p',
+            "don't use (external) diff with -p for portability",
+        ),
+        (r' readlink ', 'use readlink.py instead of readlink'),
+        (
+            r'^  [-+][-+][-+] .* [-+]0000 \(glob\)',
+            "glob timezone field in diff output for portability",
+        ),
+        (
+            r'^  @@ -[0-9]+ [+][0-9]+,[0-9]+ @@',
+            "use '@@ -N* +N,n @@ (glob)' style chunk header for portability",
+        ),
+        (
+            r'^  @@ -[0-9]+,[0-9]+ [+][0-9]+ @@',
+            "use '@@ -N,n +N* @@ (glob)' style chunk header for portability",
+        ),
+        (
+            r'^  @@ -[0-9]+ [+][0-9]+ @@',
+            "use '@@ -N* +N* @@ (glob)' style chunk header for portability",
+        ),
+        (
+            uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff'
+            r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$',
+            "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)",
+        ),
+    ],
+    # warnings
+    [
+        (
+            r'^  (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$',
+            "glob match with no glob string (?, *, /, and $LOCALIP)",
+        ),
+    ],
 ]
 
 # transform plain test rules to unified test's
@@ -234,148 +299,212 @@
 
 # common patterns to check *.py
 commonpypats = [
-  [
-    (r'\\$', 'Use () to wrap long lines in Python, not \\'),
-    (r'^\s*def\s*\w+\s*\(.*,\s*\(',
-     "tuple parameter unpacking not available in Python 3+"),
-    (r'lambda\s*\(.*,.*\)',
-     "tuple parameter unpacking not available in Python 3+"),
-    (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
-    (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
-    (r'\bdict\(.*=', 'dict() is different in Py2 and 3 and is slower than {}',
-     'dict-from-generator'),
-    (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
-    (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
-    (r'^\s*\t', "don't use tabs"),
-    (r'\S;\s*\n', "semicolon"),
-    (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
-    (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
-    (r'(\w|\)),\w', "missing whitespace after ,"),
-    (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
-    (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
-    ((
-        # a line ending with a colon, potentially with trailing comments
-        r':([ \t]*#[^\n]*)?\n'
-        # one that is not a pass and not only a comment
-        r'(?P<indent>[ \t]+)[^#][^\n]+\n'
-        # more lines at the same indent level
-        r'((?P=indent)[^\n]+\n)*'
-        # a pass at the same indent level, which is bogus
-        r'(?P=indent)pass[ \t\n#]'
-      ), 'omit superfluous pass'),
-    (r'[^\n]\Z', "no trailing newline"),
-    (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
-#    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
-#     "don't use underbars in identifiers"),
-    (r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
-     "don't use camelcase in identifiers", r'#.*camelcase-required'),
-    (r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
-     "linebreak after :"),
-    (r'class\s[^( \n]+:', "old-style class, use class foo(object)",
-     r'#.*old-style'),
-    (r'class\s[^( \n]+\(\):',
-     "class foo() creates old style object, use class foo(object)",
-     r'#.*old-style'),
-    (r'\b(%s)\(' % '|'.join(k for k in keyword.kwlist
-                            if k not in ('print', 'exec')),
-     "Python keyword is not a function"),
-#    (r'class\s[A-Z][^\(]*\((?!Exception)',
-#     "don't capitalize non-exception classes"),
-#    (r'in range\(', "use xrange"),
-#    (r'^\s*print\s+', "avoid using print in core and extensions"),
-    (r'[\x80-\xff]', "non-ASCII character literal"),
-    (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
-    (r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', "gratuitous whitespace in () or []"),
-#    (r'\s\s=', "gratuitous whitespace before ="),
-    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
-     "missing whitespace around operator"),
-    (r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
-     "missing whitespace around operator"),
-    (r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
-     "missing whitespace around operator"),
-    (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]',
-     "wrong whitespace around ="),
-    (r'\([^()]*( =[^=]|[^<>!=]= )',
-     "no whitespace around = for named parameters"),
-    (r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
-     "don't use old-style two-argument raise, use Exception(message)"),
-    (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
-    (r' [=!]=\s+(True|False|None)',
-     "comparison with singleton, use 'is' or 'is not' instead"),
-    (r'^\s*(while|if) [01]:',
-     "use True/False for constant Boolean expression"),
-    (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
-    (r'(?:(?<!def)\s+|\()hasattr\(',
-     'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
-     'instead', r'#.*hasattr-py3-only'),
-    (r'opener\([^)]*\).read\(',
-     "use opener.read() instead"),
-    (r'opener\([^)]*\).write\(',
-     "use opener.write() instead"),
-    (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
-    (r'\.debug\(\_', "don't mark debug messages for translation"),
-    (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
-    (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
-    (r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
-     'legacy exception syntax; use "as" instead of ","'),
-    (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
-    (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
-    (r'os\.path\.join\(.*, *(""|\'\')\)',
-     "use pathutil.normasprefix(path) instead of os.path.join(path, '')"),
-    (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
-    # XXX only catch mutable arguments on the first line of the definition
-    (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
-    (r'\butil\.Abort\b', "directly use error.Abort"),
-    (r'^@(\w*\.)?cachefunc', "module-level @cachefunc is risky, please avoid"),
-    (r'^import Queue', "don't use Queue, use pycompat.queue.Queue + "
-                       "pycompat.queue.Empty"),
-    (r'^import cStringIO', "don't use cStringIO.StringIO, use util.stringio"),
-    (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
-    (r'^import SocketServer', "don't use SockerServer, use util.socketserver"),
-    (r'^import urlparse', "don't use urlparse, use util.urlreq"),
-    (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
-    (r'^import cPickle', "don't use cPickle, use util.pickle"),
-    (r'^import pickle', "don't use pickle, use util.pickle"),
-    (r'^import httplib', "don't use httplib, use util.httplib"),
-    (r'^import BaseHTTPServer', "use util.httpserver instead"),
-    (r'^(from|import) mercurial\.(cext|pure|cffi)',
-     "use mercurial.policy.importmod instead"),
-    (r'\.next\(\)', "don't use .next(), use next(...)"),
-    (r'([a-z]*).revision\(\1\.node\(',
-     "don't convert rev to node before passing to revision(nodeorrev)"),
-    (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
-
-  ],
-  # warnings
-  [
-  ]
+    [
+        (r'\\$', 'Use () to wrap long lines in Python, not \\'),
+        (
+            r'^\s*def\s*\w+\s*\(.*,\s*\(',
+            "tuple parameter unpacking not available in Python 3+",
+        ),
+        (
+            r'lambda\s*\(.*,.*\)',
+            "tuple parameter unpacking not available in Python 3+",
+        ),
+        (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"),
+        (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"),
+        (
+            r'\bdict\(.*=',
+            'dict() is different in Py2 and 3 and is slower than {}',
+            'dict-from-generator',
+        ),
+        (r'\.has_key\b', "dict.has_key is not available in Python 3+"),
+        (r'\s<>\s', '<> operator is not available in Python 3+, use !='),
+        (r'^\s*\t', "don't use tabs"),
+        (r'\S;\s*\n', "semicolon"),
+        (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"),
+        (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"),
+        (r'(\w|\)),\w', "missing whitespace after ,"),
+        (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"),
+        (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
+        (
+            (
+                # a line ending with a colon, potentially with trailing comments
+                r':([ \t]*#[^\n]*)?\n'
+                # one that is not a pass and not only a comment
+                r'(?P<indent>[ \t]+)[^#][^\n]+\n'
+                # more lines at the same indent level
+                r'((?P=indent)[^\n]+\n)*'
+                # a pass at the same indent level, which is bogus
+                r'(?P=indent)pass[ \t\n#]'
+            ),
+            'omit superfluous pass',
+        ),
+        (r'[^\n]\Z', "no trailing newline"),
+        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
+        #    (r'^\s+[^_ \n][^_. \n]+_[^_\n]+\s*=',
+        #     "don't use underbars in identifiers"),
+        (
+            r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ',
+            "don't use camelcase in identifiers",
+            r'#.*camelcase-required',
+        ),
+        (
+            r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+',
+            "linebreak after :",
+        ),
+        (
+            r'class\s[^( \n]+:',
+            "old-style class, use class foo(object)",
+            r'#.*old-style',
+        ),
+        (
+            r'class\s[^( \n]+\(\):',
+            "class foo() creates old style object, use class foo(object)",
+            r'#.*old-style',
+        ),
+        (
+            r'\b(%s)\('
+            % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')),
+            "Python keyword is not a function",
+        ),
+        #    (r'class\s[A-Z][^\(]*\((?!Exception)',
+        #     "don't capitalize non-exception classes"),
+        #    (r'in range\(', "use xrange"),
+        #    (r'^\s*print\s+', "avoid using print in core and extensions"),
+        (r'[\x80-\xff]', "non-ASCII character literal"),
+        (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"),
+        (
+            r'([\(\[][ \t]\S)|(\S[ \t][\)\]])',
+            "gratuitous whitespace in () or []",
+        ),
+        #    (r'\s\s=', "gratuitous whitespace before ="),
+        (
+            r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
+            "missing whitespace around operator",
+        ),
+        (
+            r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s',
+            "missing whitespace around operator",
+        ),
+        (
+            r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S',
+            "missing whitespace around operator",
+        ),
+        (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="),
+        (
+            r'\([^()]*( =[^=]|[^<>!=]= )',
+            "no whitespace around = for named parameters",
+        ),
+        (
+            r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$',
+            "don't use old-style two-argument raise, use Exception(message)",
+        ),
+        (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"),
+        (
+            r' [=!]=\s+(True|False|None)',
+            "comparison with singleton, use 'is' or 'is not' instead",
+        ),
+        (
+            r'^\s*(while|if) [01]:',
+            "use True/False for constant Boolean expression",
+        ),
+        (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'),
+        (
+            r'(?:(?<!def)\s+|\()hasattr\(',
+            'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) '
+            'instead',
+            r'#.*hasattr-py3-only',
+        ),
+        (r'opener\([^)]*\).read\(', "use opener.read() instead"),
+        (r'opener\([^)]*\).write\(', "use opener.write() instead"),
+        (r'(?i)descend[e]nt', "the proper spelling is descendAnt"),
+        (r'\.debug\(\_', "don't mark debug messages for translation"),
+        (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"),
+        (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'),
+        (
+            r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,',
+            'legacy exception syntax; use "as" instead of ","',
+        ),
+        (r'release\(.*wlock, .*lock\)', "wrong lock release order"),
+        (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"),
+        (
+            r'os\.path\.join\(.*, *(""|\'\')\)',
+            "use pathutil.normasprefix(path) instead of os.path.join(path, '')",
+        ),
+        (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'),
+        # XXX only catch mutable arguments on the first line of the definition
+        (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"),
+        (r'\butil\.Abort\b', "directly use error.Abort"),
+        (
+            r'^@(\w*\.)?cachefunc',
+            "module-level @cachefunc is risky, please avoid",
+        ),
+        (
+            r'^import Queue',
+            "don't use Queue, use pycompat.queue.Queue + "
+            "pycompat.queue.Empty",
+        ),
+        (
+            r'^import cStringIO',
+            "don't use cStringIO.StringIO, use util.stringio",
+        ),
+        (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"),
+        (
+            r'^import SocketServer',
+            "don't use SockerServer, use util.socketserver",
+        ),
+        (r'^import urlparse', "don't use urlparse, use util.urlreq"),
+        (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"),
+        (r'^import cPickle', "don't use cPickle, use util.pickle"),
+        (r'^import pickle', "don't use pickle, use util.pickle"),
+        (r'^import httplib', "don't use httplib, use util.httplib"),
+        (r'^import BaseHTTPServer', "use util.httpserver instead"),
+        (
+            r'^(from|import) mercurial\.(cext|pure|cffi)',
+            "use mercurial.policy.importmod instead",
+        ),
+        (r'\.next\(\)', "don't use .next(), use next(...)"),
+        (
+            r'([a-z]*).revision\(\1\.node\(',
+            "don't convert rev to node before passing to revision(nodeorrev)",
+        ),
+        (r'platform\.system\(\)', "don't use platform.system(), use pycompat"),
+    ],
+    # warnings
+    [],
 ]
 
 # patterns to check normal *.py files
 pypats = [
-  [
-    # Ideally, these should be placed in "commonpypats" for
-    # consistency of coding rules in Mercurial source tree.
-    # But on the other hand, these are not so seriously required for
-    # python code fragments embedded in test scripts. Fixing test
-    # scripts for these patterns requires many changes, and has less
-    # profit than effort.
-    (r'raise Exception', "don't raise generic exceptions"),
-    (r'[\s\(](open|file)\([^)]*\)\.read\(',
-     "use util.readfile() instead"),
-    (r'[\s\(](open|file)\([^)]*\)\.write\(',
-     "use util.writefile() instead"),
-    (r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
-     "always assign an opened file to a variable, and close it afterwards"),
-    (r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
-     "always assign an opened file to a variable, and close it afterwards"),
-    (r':\n(    )*( ){1,3}[^ ]', "must indent 4 spaces"),
-    (r'^import atexit', "don't use atexit, use ui.atexit"),
-
-    # rules depending on implementation of repquote()
-    (r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
-     'string join across lines with no space'),
-    (r'''(?x)ui\.(status|progress|write|note|warn)\(
+    [
+        # Ideally, these should be placed in "commonpypats" for
+        # consistency of coding rules in Mercurial source tree.
+        # But on the other hand, these are not so seriously required for
+        # python code fragments embedded in test scripts. Fixing test
+        # scripts for these patterns requires many changes, and has less
+        # profit than effort.
+        (r'raise Exception', "don't raise generic exceptions"),
+        (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"),
+        (
+            r'[\s\(](open|file)\([^)]*\)\.write\(',
+            "use util.writefile() instead",
+        ),
+        (
+            r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))',
+            "always assign an opened file to a variable, and close it afterwards",
+        ),
+        (
+            r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))',
+            "always assign an opened file to a variable, and close it afterwards",
+        ),
+        (r':\n(    )*( ){1,3}[^ ]', "must indent 4 spaces"),
+        (r'^import atexit', "don't use atexit, use ui.atexit"),
+        # rules depending on implementation of repquote()
+        (
+            r' x+[xpqo%APM][\'"]\n\s+[\'"]x',
+            'string join across lines with no space',
+        ),
+        (
+            r'''(?x)ui\.(status|progress|write|note|warn)\(
          [ \t\n#]*
          (?# any strings/comments might precede a string, which
            # contains translatable message)
@@ -389,51 +518,55 @@
          (?# this regexp can't use [^...] style,
            # because _preparepats forcibly adds "\n" into [^...],
            # even though this regexp wants match it against "\n")''',
-     "missing _() in ui message (use () to hide false-positives)"),
-  ] + commonpypats[0],
-  # warnings
-  [
-    # rules depending on implementation of repquote()
-    (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
-  ] + commonpypats[1]
+            "missing _() in ui message (use () to hide false-positives)",
+        ),
+    ]
+    + commonpypats[0],
+    # warnings
+    [
+        # rules depending on implementation of repquote()
+        (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"),
+    ]
+    + commonpypats[1],
 ]
 
 # patterns to check *.py for embedded ones in test script
 embeddedpypats = [
-  [
-  ] + commonpypats[0],
-  # warnings
-  [
-  ] + commonpypats[1]
+    [] + commonpypats[0],
+    # warnings
+    [] + commonpypats[1],
 ]
 
 # common filters to convert *.py
 commonpyfilters = [
-    (r"""(?msx)(?P<comment>\#.*?$)|
+    (
+        r"""(?msx)(?P<comment>\#.*?$)|
          ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!")))
           (?P<text>(([^\\]|\\.)*?))
-          (?P=quote))""", reppython),
+          (?P=quote))""",
+        reppython,
+    ),
 ]
 
 # filters to convert normal *.py files
-pyfilters = [
-] + commonpyfilters
+pyfilters = [] + commonpyfilters
 
 # non-filter patterns
 pynfpats = [
     [
-    (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
-    (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
-    (r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
-     "use pycompat.isdarwin"),
+        (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"),
+        (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"),
+        (
+            r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]',
+            "use pycompat.isdarwin",
+        ),
     ],
     # warnings
     [],
 ]
 
 # filters to convert *.py for embedded ones in test script
-embeddedpyfilters = [
-] + commonpyfilters
+embeddedpyfilters = [] + commonpyfilters
 
 # extension non-filter patterns
 pyextnfpats = [
@@ -445,41 +578,40 @@
 txtfilters = []
 
 txtpats = [
-  [
-    (r'\s$', 'trailing whitespace'),
-    ('.. note::[ \n][^\n]', 'add two newlines after note::')
-  ],
-  []
+    [
+        (r'\s$', 'trailing whitespace'),
+        ('.. note::[ \n][^\n]', 'add two newlines after note::'),
+    ],
+    [],
 ]
 
 cpats = [
-  [
-    (r'//', "don't use //-style comments"),
-    (r'\S\t', "don't use tabs except for indent"),
-    (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
-    (r'(while|if|do|for)\(', "use space after while/if/do/for"),
-    (r'return\(', "return is not a function"),
-    (r' ;', "no space before ;"),
-    (r'[^;] \)', "no space before )"),
-    (r'[)][{]', "space between ) and {"),
-    (r'\w+\* \w+', "use int *foo, not int* foo"),
-    (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
-    (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
-    (r'\w,\w', "missing whitespace after ,"),
-    (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
-    (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
-    (r'^#\s+\w', "use #foo, not # foo"),
-    (r'[^\n]\Z', "no trailing newline"),
-    (r'^\s*#import\b', "use only #include in standard C code"),
-    (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
-    (r'strcat\(', "don't use strcat"),
-
-    # rules depending on implementation of repquote()
-  ],
-  # warnings
-  [
-    # rules depending on implementation of repquote()
-  ]
+    [
+        (r'//', "don't use //-style comments"),
+        (r'\S\t', "don't use tabs except for indent"),
+        (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"),
+        (r'(while|if|do|for)\(', "use space after while/if/do/for"),
+        (r'return\(', "return is not a function"),
+        (r' ;', "no space before ;"),
+        (r'[^;] \)', "no space before )"),
+        (r'[)][{]', "space between ) and {"),
+        (r'\w+\* \w+', "use int *foo, not int* foo"),
+        (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"),
+        (r'\w+ (\+\+|--)', "use foo++, not foo ++"),
+        (r'\w,\w', "missing whitespace after ,"),
+        (r'^[^#]\w[+/*]\w', "missing whitespace in expression"),
+        (r'\w\s=\s\s+\w', "gratuitous whitespace after ="),
+        (r'^#\s+\w', "use #foo, not # foo"),
+        (r'[^\n]\Z', "no trailing newline"),
+        (r'^\s*#import\b', "use only #include in standard C code"),
+        (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"),
+        (r'strcat\(', "don't use strcat"),
+        # rules depending on implementation of repquote()
+    ],
+    # warnings
+    [
+        # rules depending on implementation of repquote()
+    ],
 ]
 
 cfilters = [
@@ -490,82 +622,109 @@
 ]
 
 inutilpats = [
-  [
-    (r'\bui\.', "don't use ui in util"),
-  ],
-  # warnings
-  []
+    [(r'\bui\.', "don't use ui in util"),],
+    # warnings
+    [],
 ]
 
 inrevlogpats = [
-  [
-    (r'\brepo\.', "don't use repo in revlog"),
-  ],
-  # warnings
-  []
+    [(r'\brepo\.', "don't use repo in revlog"),],
+    # warnings
+    [],
 ]
 
 webtemplatefilters = []
 
 webtemplatepats = [
-  [],
-  [
-    (r'{desc(\|(?!websub|firstline)[^\|]*)+}',
-     'follow desc keyword with either firstline or websub'),
-  ]
+    [],
+    [
+        (
+            r'{desc(\|(?!websub|firstline)[^\|]*)+}',
+            'follow desc keyword with either firstline or websub',
+        ),
+    ],
 ]
 
 allfilesfilters = []
 
 allfilespats = [
-  [
-    (r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
-     'use mercurial-scm.org domain URL'),
-    (r'mercurial@selenic\.com',
-     'use mercurial-scm.org domain for mercurial ML address'),
-    (r'mercurial-devel@selenic\.com',
-     'use mercurial-scm.org domain for mercurial-devel ML address'),
-  ],
-  # warnings
-  [],
+    [
+        (
+            r'(http|https)://[a-zA-Z0-9./]*selenic.com/',
+            'use mercurial-scm.org domain URL',
+        ),
+        (
+            r'mercurial@selenic\.com',
+            'use mercurial-scm.org domain for mercurial ML address',
+        ),
+        (
+            r'mercurial-devel@selenic\.com',
+            'use mercurial-scm.org domain for mercurial-devel ML address',
+        ),
+    ],
+    # warnings
+    [],
 ]
 
 py3pats = [
-  [
-    (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports'),
-    (r'os\.name', "use pycompat.osname instead (py3)"),
-    (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
-    (r'os\.sep', "use pycompat.ossep instead (py3)"),
-    (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
-    (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
-    (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
-    (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
-    (r'os\.getenv', "use encoding.environ.get instead"),
-    (r'os\.setenv', "modifying the environ dict is not preferred"),
-    (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
-  ],
-  # warnings
-  [],
+    [
+        (
+            r'os\.environ',
+            "use encoding.environ instead (py3)",
+            r'#.*re-exports',
+        ),
+        (r'os\.name', "use pycompat.osname instead (py3)"),
+        (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'),
+        (r'os\.sep', "use pycompat.ossep instead (py3)"),
+        (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"),
+        (r'os\.altsep', "use pycompat.osaltsep instead (py3)"),
+        (r'sys\.platform', "use pycompat.sysplatform instead (py3)"),
+        (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"),
+        (r'os\.getenv', "use encoding.environ.get instead"),
+        (r'os\.setenv', "modifying the environ dict is not preferred"),
+        (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"),
+    ],
+    # warnings
+    [],
 ]
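
An entry in these pattern lists is a (regex, message) pair, optionally followed by a third ignore regex, as in the two re-exports entries above. A minimal sketch of how such an entry is evaluated (line and names illustrative; the real logic lives in _checkfiledata below):

    import re

    pat = (r'os\.environ', "use encoding.environ instead (py3)", r'#.*re-exports')
    regex, msg, ignore = pat
    line = 'environ = os.environ  # re-exports'
    if re.search(regex, line) and not (ignore and re.search(ignore, line)):
        print(msg)  # not reached: the re-exports comment suppresses the hit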
 
 checks = [
     ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats),
     ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats),
     ('python', r'.*hgext.*\.py$', '', [], pyextnfpats),
-    ('python 3', r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
-     '', pyfilters, py3pats),
+    (
+        'python 3',
+        r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py',
+        '',
+        pyfilters,
+        py3pats,
+    ),
     ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats),
     ('c', r'.*\.[ch]$', '', cfilters, cpats),
     ('unified test', r'.*\.t$', '', utestfilters, utestpats),
-    ('layering violation repo in revlog', r'mercurial/revlog\.py', '',
-     pyfilters, inrevlogpats),
-    ('layering violation ui in util', r'mercurial/util\.py', '', pyfilters,
-     inutilpats),
+    (
+        'layering violation repo in revlog',
+        r'mercurial/revlog\.py',
+        '',
+        pyfilters,
+        inrevlogpats,
+    ),
+    (
+        'layering violation ui in util',
+        r'mercurial/util\.py',
+        '',
+        pyfilters,
+        inutilpats,
+    ),
     ('txt', r'.*\.txt$', '', txtfilters, txtpats),
-    ('web template', r'mercurial/templates/.*\.tmpl', '',
-     webtemplatefilters, webtemplatepats),
-    ('all except for .po', r'.*(?<!\.po)$', '',
-     allfilesfilters, allfilespats),
+    (
+        'web template',
+        r'mercurial/templates/.*\.tmpl',
+        '',
+        webtemplatefilters,
+        webtemplatepats,
+    ),
+    ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats),
 ]
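
Each checks entry is (description, filename regex, first-line magic regex, filters, patterns). A toy restatement of how checkfile() below consumes one entry, under that assumption:

    import re

    check = ('python', r'.*\.(py|cgi)$', r'^#!.*python', [],
             [[(r'\braise Exception', "don't raise generic exceptions")], []])

    def toycheck(filename, data):
        name, match, magic, filters, pats = check
        firstline = data.split('\n', 1)[0]
        if not (re.match(match, filename) or re.search(magic, firstline)):
            return  # this checker does not cover the file
        for p, r in filters:  # filters rewrite the text before matching
            data = re.sub(p, r, data)
        for n, line in enumerate(data.splitlines(), 1):
            for p in pats[0]:  # pats[0] = errors, pats[1] = warnings
                if re.search(p[0], line):
                    print('%s:%d: %s' % (filename, n, p[1]))

    toycheck('example.py', 'raise Exception("boom")\n')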
 
 # (desc,
@@ -573,10 +732,15 @@
 #  list of patterns to convert target files
 #  list of patterns to detect errors/warnings)
 embeddedchecks = [
-    ('embedded python',
-     testparseutil.pyembedded, embeddedpyfilters, embeddedpypats)
+    (
+        'embedded python',
+        testparseutil.pyembedded,
+        embeddedpyfilters,
+        embeddedpypats,
+    )
 ]
 
+
 def _preparepats():
     def preparefailandwarn(failandwarn):
         for pats in failandwarn:
@@ -605,6 +769,7 @@
             filters = c[-2]
             preparefilters(filters)
 
+
 class norepeatlogger(object):
     def __init__(self):
         self._lastseen = None
@@ -630,8 +795,10 @@
             self._lastseen = msgid
         print(" " + msg)
 
+
 _defaultlogger = norepeatlogger()
 
+
 def getblame(f):
     lines = []
     for l in os.popen('hg annotate -un %s' % f):
@@ -640,8 +807,16 @@
         lines.append((line[1:-1], user, rev))
     return lines
 
-def checkfile(f, logfunc=_defaultlogger.log, maxerr=None, warnings=False,
-              blame=False, debug=False, lineno=True):
+
+def checkfile(
+    f,
+    logfunc=_defaultlogger.log,
+    maxerr=None,
+    warnings=False,
+    blame=False,
+    debug=False,
+    lineno=True,
+):
     """checks style and portability of a given file
 
     :f: filepath
@@ -673,8 +848,9 @@
             print(name, f)
         if not (re.match(match, f) or (magic and re.search(magic, pre))):
             if debug:
-                print("Skipping %s for %s it doesn't match %s" % (
-                       name, match, f))
+                print(
+                    "Skipping %s for %s it doesn't match %s" % (name, match, f)
+                )
             continue
         if "no-" "check-code" in pre:
             # If you're looking at this line, it's because a file has:
@@ -684,16 +860,28 @@
             # spelling, we write it with the expected spelling from
             # tests/test-check-code.t
             print("Skipping %s it has no-che?k-code (glob)" % f)
-            return "Skip" # skip checking this file
+            return "Skip"  # skip checking this file
 
-        fc = _checkfiledata(name, f, pre, filters, pats, context,
-                            logfunc, maxerr, warnings, blame, debug, lineno)
+        fc = _checkfiledata(
+            name,
+            f,
+            pre,
+            filters,
+            pats,
+            context,
+            logfunc,
+            maxerr,
+            warnings,
+            blame,
+            debug,
+            lineno,
+        )
         if fc:
             result = False
 
     if f.endswith('.t') and "no-" "check-code" not in pre:
         if debug:
-            print("Checking embedded code in %s" % (f))
+            print("Checking embedded code in %s" % f)
 
         prelines = pre.splitlines()
         embeddederros = []
@@ -705,9 +893,21 @@
 
             for found in embedded(f, prelines, embeddederros):
                 filename, starts, ends, code = found
-                fc = _checkfiledata(name, f, code, filters, pats, context,
-                                    logfunc, curmaxerr, warnings, blame, debug,
-                                    lineno, offset=starts - 1)
+                fc = _checkfiledata(
+                    name,
+                    f,
+                    code,
+                    filters,
+                    pats,
+                    context,
+                    logfunc,
+                    curmaxerr,
+                    warnings,
+                    blame,
+                    debug,
+                    lineno,
+                    offset=starts - 1,
+                )
                 if fc:
                     result = False
                     if curmaxerr:
@@ -717,9 +917,22 @@
 
     return result
 
-def _checkfiledata(name, f, filedata, filters, pats, context,
-                   logfunc, maxerr, warnings, blame, debug, lineno,
-                   offset=None):
+
+def _checkfiledata(
+    name,
+    f,
+    filedata,
+    filters,
+    pats,
+    context,
+    logfunc,
+    maxerr,
+    warnings,
+    blame,
+    debug,
+    lineno,
+    offset=None,
+):
     """Execute actual error check for file data
 
     :name: of the checking category
@@ -752,10 +965,10 @@
     fc = 0
     pre = post = filedata
 
-    if True: # TODO: get rid of this redundant 'if' block
+    if True:  # TODO: get rid of this redundant 'if' block
         for p, r in filters:
             post = re.sub(p, r, post)
-        nerrs = len(pats[0]) # nerr elements are errors
+        nerrs = len(pats[0])  # nerr elements are errors
         if warnings:
             pats = pats[0] + pats[1]
         else:
@@ -794,8 +1007,10 @@
 
                 if ignore and re.search(ignore, l, re.MULTILINE):
                     if debug:
-                        print("Skipping %s for %s:%s (ignore pattern)" % (
-                            name, f, (n + lineoffset)))
+                        print(
+                            "Skipping %s for %s:%s (ignore pattern)"
+                            % (name, f, (n + lineoffset))
+                        )
                     continue
                 bd = ""
                 if blame:
@@ -830,21 +1045,38 @@
 
     return fc
 
+
 def main():
     parser = optparse.OptionParser("%prog [options] [files | -]")
-    parser.add_option("-w", "--warnings", action="store_true",
-                      help="include warning-level checks")
-    parser.add_option("-p", "--per-file", type="int",
-                      help="max warnings per file")
-    parser.add_option("-b", "--blame", action="store_true",
-                      help="use annotate to generate blame info")
-    parser.add_option("", "--debug", action="store_true",
-                      help="show debug information")
-    parser.add_option("", "--nolineno", action="store_false",
-                      dest='lineno', help="don't show line numbers")
+    parser.add_option(
+        "-w",
+        "--warnings",
+        action="store_true",
+        help="include warning-level checks",
+    )
+    parser.add_option(
+        "-p", "--per-file", type="int", help="max warnings per file"
+    )
+    parser.add_option(
+        "-b",
+        "--blame",
+        action="store_true",
+        help="use annotate to generate blame info",
+    )
+    parser.add_option(
+        "", "--debug", action="store_true", help="show debug information"
+    )
+    parser.add_option(
+        "",
+        "--nolineno",
+        action="store_false",
+        dest='lineno',
+        help="don't show line numbers",
+    )
 
-    parser.set_defaults(per_file=15, warnings=False, blame=False, debug=False,
-                        lineno=True)
+    parser.set_defaults(
+        per_file=15, warnings=False, blame=False, debug=False, lineno=True
+    )
     (options, args) = parser.parse_args()
 
     if len(args) == 0:
@@ -859,11 +1091,17 @@
 
     ret = 0
     for f in check:
-        if not checkfile(f, maxerr=options.per_file, warnings=options.warnings,
-                         blame=options.blame, debug=options.debug,
-                         lineno=options.lineno):
+        if not checkfile(
+            f,
+            maxerr=options.per_file,
+            warnings=options.warnings,
+            blame=options.blame,
+            debug=options.debug,
+            lineno=options.lineno,
+        ):
             ret = 1
     return ret
 
+
 if __name__ == "__main__":
     sys.exit(main())
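
The reformatted main() keeps the options unchanged; a quick smoke test from Python (the target path is illustrative):

    import subprocess
    import sys

    ret = subprocess.call(
        [sys.executable, 'contrib/check-code.py', '--warnings', 'mercurial/util.py']
    )
    print('exit status:', ret)  # 0 = clean, 1 = at least one problem reported
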
--- a/contrib/check-config.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/check-config.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,7 +15,8 @@
 documented = {}
 allowinconsistent = set()
 
-configre = re.compile(br'''
+configre = re.compile(
+    br'''
     # Function call
     ui\.config(?P<ctype>|int|bool|list)\(
         # First argument.
@@ -23,9 +24,12 @@
         # Second argument
         ['"](?P<option>\S+)['"](,\s+
         (?:default=)?(?P<default>\S+?))?
-    \)''', re.VERBOSE | re.MULTILINE)
+    \)''',
+    re.VERBOSE | re.MULTILINE,
+)
 
-configwithre = re.compile(br'''
+configwithre = re.compile(
+    br'''
     ui\.config(?P<ctype>with)\(
         # First argument is callback function. This doesn't parse robustly
         # if it is e.g. a function call.
@@ -33,23 +37,32 @@
         ['"](?P<section>\S+)['"],\s*
         ['"](?P<option>\S+)['"](,\s+
         (?:default=)?(?P<default>\S+?))?
-    \)''', re.VERBOSE | re.MULTILINE)
+    \)''',
+    re.VERBOSE | re.MULTILINE,
+)
 
-configpartialre = (br"""ui\.config""")
+configpartialre = br"""ui\.config"""
 
-ignorere = re.compile(br'''
+ignorere = re.compile(
+    br'''
     \#\s(?P<reason>internal|experimental|deprecated|developer|inconsistent)\s
     config:\s(?P<config>\S+\.\S+)$
-    ''', re.VERBOSE | re.MULTILINE)
+    ''',
+    re.VERBOSE | re.MULTILINE,
+)
 
 if sys.version_info[0] > 2:
+
     def mkstr(b):
         if isinstance(b, str):
             return b
         return b.decode('utf8')
+
+
 else:
     mkstr = lambda x: x
 
+
 def main(args):
     for f in args:
         sect = b''
@@ -115,18 +128,32 @@
                 name = m.group('section') + b"." + m.group('option')
                 default = m.group('default')
                 if default in (
-                        None, b'False', b'None', b'0', b'[]', b'""', b"''"):
+                    None,
+                    b'False',
+                    b'None',
+                    b'0',
+                    b'[]',
+                    b'""',
+                    b"''",
+                ):
                     default = b''
                 if re.match(b'[a-z.]+$', default):
                     default = b'<variable>'
-                if (name in foundopts and (ctype, default) != foundopts[name]
-                    and name not in allowinconsistent):
+                if (
+                    name in foundopts
+                    and (ctype, default) != foundopts[name]
+                    and name not in allowinconsistent
+                ):
                     print(mkstr(l.rstrip()))
                     fctype, fdefault = foundopts[name]
-                    print("conflict on %s: %r != %r" % (
-                        mkstr(name),
-                        (mkstr(ctype), mkstr(default)),
-                        (mkstr(fctype), mkstr(fdefault))))
+                    print(
+                        "conflict on %s: %r != %r"
+                        % (
+                            mkstr(name),
+                            (mkstr(ctype), mkstr(default)),
+                            (mkstr(fctype), mkstr(fdefault)),
+                        )
+                    )
                     print("at %s:%d:" % (mkstr(f), linenum))
                 foundopts[name] = (ctype, default)
                 carryover = b''
@@ -139,9 +166,11 @@
 
     for name in sorted(foundopts):
         if name not in documented:
-            if not (name.startswith(b"devel.") or
-                    name.startswith(b"experimental.") or
-                    name.startswith(b"debug.")):
+            if not (
+                name.startswith(b"devel.")
+                or name.startswith(b"experimental.")
+                or name.startswith(b"debug.")
+            ):
                 ctype, default = foundopts[name]
                 if default:
                     if isinstance(default, bytes):
@@ -149,8 +178,11 @@
                     default = ' [%s]' % default
                 elif isinstance(default, bytes):
                     default = mkstr(default)
-                print("undocumented: %s (%s)%s" % (
-                    mkstr(name), mkstr(ctype), default))
+                print(
+                    "undocumented: %s (%s)%s"
+                    % (mkstr(name), mkstr(ctype), default)
+                )
+
 
 if __name__ == "__main__":
     if len(sys.argv) > 1:
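
The recompiled patterns extract section, option and (optionally) default from ui.config*() calls. A simplified restatement of configre, only to show what a match yields; the real pattern above also captures defaults and spans multiple lines:

    import re

    simple = re.compile(
        br'''ui\.config(?P<ctype>|int|bool|list)\(
            \s*['"](?P<section>\S+)['"],\s*
            ['"](?P<option>\S+)['"]''',
        re.VERBOSE,
    )
    m = simple.search(b"x = ui.configbool('ui', 'debugger')")
    print(m.group('ctype'), m.group('section'), m.group('option'))
    # b'bool' b'ui' b'debugger'
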
--- a/contrib/check-py3-compat.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/check-py3-compat.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,6 +16,7 @@
 import traceback
 import warnings
 
+
 def check_compat_py2(f):
     """Check Python 3 compatibility for a file with Python 2"""
     with open(f, 'rb') as fh:
@@ -40,6 +41,7 @@
     if haveprint and 'print_function' not in futures:
         print('%s requires print_function' % f)
 
+
 def check_compat_py3(f):
     """Check Python 3 compatibility of a file with Python 3."""
     with open(f, 'rb') as fh:
@@ -54,8 +56,9 @@
     # Try to import the module.
     # For now we only support modules in packages because figuring out module
     # paths for things not in a package can be confusing.
-    if (f.startswith(('hgdemandimport/', 'hgext/', 'mercurial/'))
-        and not f.endswith('__init__.py')):
+    if f.startswith(
+        ('hgdemandimport/', 'hgext/', 'mercurial/')
+    ) and not f.endswith('__init__.py'):
         assert f.endswith('.py')
         name = f.replace('/', '.')[:-3]
         try:
@@ -79,11 +82,16 @@
 
             if frame.filename:
                 filename = os.path.basename(frame.filename)
-                print('%s: error importing: <%s> %s (error at %s:%d)' % (
-                      f, type(e).__name__, e, filename, frame.lineno))
+                print(
+                    '%s: error importing: <%s> %s (error at %s:%d)'
+                    % (f, type(e).__name__, e, filename, frame.lineno)
+                )
             else:
-                print('%s: error importing module: <%s> %s (line %d)' % (
-                      f, type(e).__name__, e, frame.lineno))
+                print(
+                    '%s: error importing module: <%s> %s (line %d)'
+                    % (f, type(e).__name__, e, frame.lineno)
+                )
+
 
 if __name__ == '__main__':
     if sys.version_info[0] == 2:
@@ -96,7 +104,10 @@
             fn(f)
 
         for w in warns:
-            print(warnings.formatwarning(w.message, w.category,
-                                         w.filename, w.lineno).rstrip())
+            print(
+                warnings.formatwarning(
+                    w.message, w.category, w.filename, w.lineno
+                ).rstrip()
+            )
 
     sys.exit(0)
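
check_compat_py2() (earlier in this file) amounts to scanning the AST for the required __future__ imports. A standalone approximation; the helper name is assumed, not taken from the source:

    import ast

    def futures_of(source):
        # collect names imported from __future__ (approximation only)
        found = set()
        for node in ast.walk(ast.parse(source)):
            if isinstance(node, ast.ImportFrom) and node.module == '__future__':
                found.update(a.name for a in node.names)
        return found

    print(futures_of('from __future__ import absolute_import\n'))
    # {'absolute_import'}
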
--- a/contrib/debugcmdserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/debugcmdserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,6 +23,7 @@
 else:
     log = open(sys.argv[1], 'a')
 
+
 def read(size):
     data = sys.stdin.read(size)
     if not data:
@@ -31,6 +32,7 @@
     sys.stdout.flush()
     return data
 
+
 try:
     while True:
         header = read(outputfmtsize)
--- a/contrib/debugshell.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/debugshell.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,6 +14,7 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 def pdb(ui, repo, msg, **opts):
     objects = {
         'mercurial': mercurial,
@@ -24,25 +25,25 @@
 
     code.interact(msg, local=objects)
 
+
 def ipdb(ui, repo, msg, **opts):
     import IPython
 
     cl = repo.changelog
     mf = repo.manifestlog
-    cl, mf # use variables to appease pyflakes
+    cl, mf  # use variables to appease pyflakes
 
     IPython.embed()
 
+
 @command(b'debugshell|dbsh', [])
 def debugshell(ui, repo, **opts):
-    bannermsg = ("loaded repo : %s\n"
-                 "using source: %s" % (pycompat.sysstr(repo.root),
-                                       mercurial.__path__[0]))
+    bannermsg = "loaded repo : %s\n" "using source: %s" % (
+        pycompat.sysstr(repo.root),
+        mercurial.__path__[0],
+    )
 
-    pdbmap = {
-        'pdb'  : 'code',
-        'ipdb' : 'IPython'
-    }
+    pdbmap = {'pdb': 'code', 'ipdb': 'IPython'}
 
     debugger = ui.config(b"ui", b"debugger")
     if not debugger:
@@ -55,8 +56,10 @@
         with demandimport.deactivated():
             __import__(pdbmap[debugger])
     except ImportError:
-        ui.warn((b"%s debugger specified but %s module was not found\n")
-                % (debugger, pdbmap[debugger]))
+        ui.warn(
+            b"%s debugger specified but %s module was not found\n"
+            % (debugger, pdbmap[debugger])
+        )
         debugger = b'pdb'
 
     getattr(sys.modules[__name__], debugger)(ui, repo, bannermsg, **opts)
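
The final getattr() line dispatches on the configured debugger name; the same pattern in isolation:

    import sys

    def pdb(msg):
        print('pdb:', msg)

    def ipdb(msg):
        print('ipdb:', msg)

    debugger = 'pdb'  # in debugshell this comes from ui.config(b"ui", b"debugger")
    getattr(sys.modules[__name__], debugger)('banner text')
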
--- a/contrib/dirstatenonnormalcheck.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/dirstatenonnormalcheck.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,7 @@
     extensions,
 )
 
+
 def nonnormalentries(dmap):
     """Compute nonnormal entries from dirstate's dmap"""
     res = set()
@@ -21,6 +22,7 @@
             res.add(f)
     return res
 
+
 def checkconsistency(ui, orig, dmap, _nonnormalset, label):
     """Compute nonnormalset from dmap, check that it matches _nonnormalset"""
     nonnormalcomputedmap = nonnormalentries(dmap)
@@ -30,15 +32,19 @@
         ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate')
         ui.develwarn(b"[map] %s\n" % nonnormalcomputedmap, config=b'dirstate')
 
+
 def _checkdirstate(orig, self, arg):
     """Check nonnormal set consistency before and after the call to orig"""
-    checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
-                     b"before")
+    checkconsistency(
+        self._ui, orig, self._map, self._map.nonnormalset, b"before"
+    )
     r = orig(self, arg)
-    checkconsistency(self._ui, orig, self._map, self._map.nonnormalset,
-                     b"after")
+    checkconsistency(
+        self._ui, orig, self._map, self._map.nonnormalset, b"after"
+    )
     return r
 
+
 def extsetup(ui):
     """Wrap functions modifying dirstate to check nonnormalset consistency"""
     dirstatecl = dirstate.dirstate
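
_checkdirstate() above is the usual wrap-and-check pattern. Distilled for illustration, without the mercurial.extensions machinery:

    def wrap(orig, check):
        def wrapper(*args, **kwargs):
            check('before')
            r = orig(*args, **kwargs)
            check('after')
            return r
        return wrapper

    double = wrap(lambda x: x * 2, lambda label: print('checked', label))
    assert double(21) == 42  # prints "checked before" / "checked after"
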
--- a/contrib/fuzz/dirstate_corpus.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/fuzz/dirstate_corpus.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,8 +8,7 @@
 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
 args = ap.parse_args()
 
-reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
-                                         '..', '..'))
+reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
 dirstate = os.path.join(reporoot, '.hg', 'dirstate')
 
 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
--- a/contrib/fuzz/fm1readmarkers_corpus.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/fuzz/fm1readmarkers_corpus.py	Sun Oct 06 09:45:02 2019 -0400
@@ -33,4 +33,6 @@
             'nhistedituserAugie Fackler <raf@durin42.com>\x00\x00\x00yA\xd7\x02'
             'MtA\xd4\xe1\x01,\x00\x00\x01\x03\x03"\xa5\xcb\x86\xb6\xf4\xbaO\xa0'
             'sH\xe7?\xcb\x9b\xc2n\xcfI\x9e\x14\xf0D\xf0!\x18DN\xcd\x97\x016\xa5'
-            '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'))
+            '\xef\xa06\xcb\x884\x8a\x03\x01\t\x08\x04\x1fef14operationhisted'
+        ),
+    )
--- a/contrib/fuzz/manifest_corpus.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/fuzz/manifest_corpus.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,8 +8,9 @@
 args = ap.parse_args()
 
 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
-    zf.writestr("manifest_zero",
-'''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
+    zf.writestr(
+        "manifest_zero",
+        '''PKG-INFO\09b3ed8f2b81095a13064402e930565f083346e9a
 README\080b6e76643dcb44d4bc729e932fc464b3e36dbe3
 hg\0b6444347c629cc058d478023905cfb83b7f5bb9d
 mercurial/__init__.py\0b80de5d138758541c5f05265ad144ab9fa86d1db
@@ -22,9 +23,11 @@
 notes.txt\0703afcec5edb749cf5cec67831f554d6da13f2fb
 setup.py\0ccf3f6daf0f13101ca73631f7a1769e328b472c9
 tkmerge\03c922edb43a9c143682f7bc7b00f98b3c756ebe7
-''')
-    zf.writestr("badmanifest_shorthashes",
-                "narf\0aa\nnarf2\0aaa\n")
-    zf.writestr("badmanifest_nonull",
-                "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
-                "narf2aaaaaaaaaaaaaaaaaaaa\n")
+''',
+    )
+    zf.writestr("badmanifest_shorthashes", "narf\0aa\nnarf2\0aaa\n")
+    zf.writestr(
+        "badmanifest_nonull",
+        "narf\0cccccccccccccccccccccccccccccccccccccccc\n"
+        "narf2aaaaaaaaaaaaaaaaaaaa\n",
+    )
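
Each manifest line written above is 'path\0hexnode'; the two bad seeds break exactly that shape (too-short hashes, a missing NUL). The invariant, spelled out:

    entry = b'hg\x00b6444347c629cc058d478023905cfb83b7f5bb9d\n'
    path, node = entry.rstrip(b'\n').split(b'\x00')
    assert len(node) == 40  # hex sha1; "badmanifest_shorthashes" violates this
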
--- a/contrib/fuzz/mpatch_corpus.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/fuzz/mpatch_corpus.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,7 @@
 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
 args = ap.parse_args()
 
+
 class deltafrag(object):
     def __init__(self, start, end, data):
         self.start = start
@@ -20,8 +21,11 @@
         self.data = data
 
     def __str__(self):
-        return struct.pack(
-            ">lll", self.start, self.end, len(self.data)) + self.data
+        return (
+            struct.pack(">lll", self.start, self.end, len(self.data))
+            + self.data
+        )
+
 
 class delta(object):
     def __init__(self, frags):
@@ -30,8 +34,8 @@
     def __str__(self):
         return ''.join(str(f) for f in self.frags)
 
+
 class corpus(object):
-
     def __init__(self, base, deltas):
         self.base = base
         self.deltas = deltas
@@ -49,19 +53,19 @@
         )
         return "".join(parts)
 
+
 with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf:
     # Manually constructed entries
     zf.writestr(
-        "one_delta_applies",
-        str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
+        "one_delta_applies", str(corpus('a', [delta([deltafrag(0, 1, 'b')])]))
     )
     zf.writestr(
         "one_delta_starts_late",
-        str(corpus('a', [delta([deltafrag(3, 1, 'b')])]))
+        str(corpus('a', [delta([deltafrag(3, 1, 'b')])])),
     )
     zf.writestr(
         "one_delta_ends_late",
-        str(corpus('a', [delta([deltafrag(0, 20, 'b')])]))
+        str(corpus('a', [delta([deltafrag(0, 20, 'b')])])),
     )
 
     try:
@@ -70,9 +74,8 @@
         fl = r.file('mercurial/manifest.py')
         rl = getattr(fl, '_revlog', fl)
         bins = rl._chunks(rl._deltachain(10)[0])
-        zf.writestr('manifest_py_rev_10',
-                    str(corpus(bins[0], bins[1:])))
-    except: # skip this, so no re-raises
+        zf.writestr('manifest_py_rev_10', str(corpus(bins[0], bins[1:])))
+    except:  # skip this, so no re-raises
         print('skipping seed file from repo data')
     # Automatically discovered by running the fuzzer
     zf.writestr(
@@ -81,7 +84,8 @@
     # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876
     zf.writestr(
         "mpatch_ossfuzz_getbe32_ubsan",
-        "\x02\x00\x00\x00\x0c    \xff\xff\xff\xff    ")
+        "\x02\x00\x00\x00\x0c    \xff\xff\xff\xff    ",
+    )
     zf.writestr(
         "mpatch_apply_over_memcpy",
         '\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00'
@@ -342,4 +346,5 @@
         '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
         '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00'
         '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-        '\x00\x00\x00\x00')
+        '\x00\x00\x00\x00',
+    )
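
deltafrag.__str__ above packs (start, end, len(data)) as three big-endian longs followed by the data; building one fragment by hand:

    import struct

    frag = struct.pack('>lll', 0, 1, 1) + b'b'
    # applied to base 'a', this replaces base[0:1] with 'b'
    assert frag == b'\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01b'
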
--- a/contrib/fuzz/revlog_corpus.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/fuzz/revlog_corpus.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,13 +8,13 @@
 ap.add_argument("out", metavar="some.zip", type=str, nargs=1)
 args = ap.parse_args()
 
-reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__),
-                                         '..', '..'))
+reporoot = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
 # typically a standalone index
 changelog = os.path.join(reporoot, '.hg', 'store', '00changelog.i')
 # an inline revlog with only a few revisions
 contributing = os.path.join(
-    reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i')
+    reporoot, '.hg', 'store', 'data', 'contrib', 'fuzz', 'mpatch.cc.i'
+)
 
 print(changelog, os.path.exists(changelog))
 print(contributing, os.path.exists(contributing))
--- a/contrib/genosxversion.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/genosxversion.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,21 +7,25 @@
 import sys
 
 # Always load hg libraries from the hg we can find on $PATH.
-hglib = subprocess.check_output(
-    ['hg', 'debuginstall', '-T', '{hgmodules}'])
+hglib = subprocess.check_output(['hg', 'debuginstall', '-T', '{hgmodules}'])
 sys.path.insert(0, os.path.dirname(hglib))
 
 from mercurial import util
 
 ap = argparse.ArgumentParser()
-ap.add_argument('--paranoid',
-                action='store_true',
-                help=("Be paranoid about how version numbers compare and "
-                      "produce something that's more likely to sort "
-                      "reasonably."))
+ap.add_argument(
+    '--paranoid',
+    action='store_true',
+    help=(
+        "Be paranoid about how version numbers compare and "
+        "produce something that's more likely to sort "
+        "reasonably."
+    ),
+)
 ap.add_argument('--selftest', action='store_true', help='Run self-tests.')
 ap.add_argument('versionfile', help='Path to a valid mercurial __version__.py')
 
+
 def paranoidver(ver):
     """Given an hg version produce something that distutils can sort.
 
@@ -108,22 +112,25 @@
         extra = ''
     return '%d.%d.%d%s' % (major, minor, micro, extra)
 
+
 def main(argv):
     opts = ap.parse_args(argv[1:])
     if opts.selftest:
         import doctest
+
         doctest.testmod()
         return
     with open(opts.versionfile) as f:
         for l in f:
             if l.startswith('version = b'):
                 # version number is entire line minus the quotes
-                ver = l[len('version = b') + 1:-2]
+                ver = l[len('version = b') + 1 : -2]
                 break
     if opts.paranoid:
         print(paranoidver(ver))
     else:
         print(ver)
 
+
 if __name__ == '__main__':
     main(sys.argv)
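
The new spacing inside the slice is black's style for slices with non-trivial bounds; the logic is unchanged. Worked through on a sample line:

    l = 'version = b"5.1.2"\n'
    ver = l[len('version = b') + 1 : -2]  # skip prefix and quote, drop quote+newline
    assert ver == '5.1.2'
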
--- a/contrib/hgclient.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/hgclient.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,17 +16,22 @@
     stdout = sys.stdout.buffer
     stderr = sys.stderr.buffer
     stringio = io.BytesIO
+
     def bprint(*args):
         # remove b'' as well for ease of test migration
         pargs = [re.sub(br'''\bb(['"])''', br'\1', b'%s' % a) for a in args]
         stdout.write(b' '.join(pargs) + b'\n')
+
+
 else:
     import cStringIO
+
     stdout = sys.stdout
     stderr = sys.stderr
     stringio = cStringIO.StringIO
     bprint = print
 
+
 def connectpipe(path=None, extraargs=()):
     cmdline = [b'hg', b'serve', b'--cmdserver', b'pipe']
     if path:
@@ -38,11 +43,13 @@
             return cmdline
         return [arg.decode("utf-8") for arg in cmdline]
 
-    server = subprocess.Popen(tonative(cmdline), stdin=subprocess.PIPE,
-                              stdout=subprocess.PIPE)
+    server = subprocess.Popen(
+        tonative(cmdline), stdin=subprocess.PIPE, stdout=subprocess.PIPE
+    )
 
     return server
 
+
 class unixconnection(object):
     def __init__(self, sockpath):
         self.sock = sock = socket.socket(socket.AF_UNIX)
@@ -55,6 +62,7 @@
         self.stdout.close()
         self.sock.close()
 
+
 class unixserver(object):
     def __init__(self, sockpath, logpath=None, repopath=None):
         self.sockpath = sockpath
@@ -80,11 +88,13 @@
         os.kill(self.server.pid, signal.SIGTERM)
         self.server.wait()
 
+
 def writeblock(server, data):
     server.stdin.write(struct.pack(b'>I', len(data)))
     server.stdin.write(data)
     server.stdin.flush()
 
+
 def readchannel(server):
     data = server.stdout.read(5)
     if not data:
@@ -95,11 +105,14 @@
     else:
         return channel, server.stdout.read(length)
 
+
 def sep(text):
     return text.replace(b'\\', b'/')
 
-def runcommand(server, args, output=stdout, error=stderr, input=None,
-               outfilter=lambda x: x):
+
+def runcommand(
+    server, args, output=stdout, error=stderr, input=None, outfilter=lambda x: x
+):
     bprint(b'*** runcommand', b' '.join(args))
     stdout.flush()
     server.stdin.write(b'runcommand\n')
@@ -123,7 +136,7 @@
         elif ch == b'm':
             bprint(b"message: %r" % data)
         elif ch == b'r':
-            ret, = struct.unpack('>i', data)
+            (ret,) = struct.unpack('>i', data)
             if ret != 0:
                 bprint(b' [%d]' % ret)
             return ret
@@ -132,6 +145,7 @@
             if ch.isupper():
                 return
 
+
 def check(func, connect=connectpipe):
     stdout.flush()
     server = connect()
@@ -141,7 +155,9 @@
         server.stdin.close()
         server.wait()
 
+
 def checkwith(connect=connectpipe, **kwargs):
     def wrap(func):
         return check(func, lambda: connect(**kwargs))
+
     return wrap
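
writeblock() and readchannel() above implement the command-server framing. A sketch of the byte layout, assuming the usual one-byte channel plus four-byte big-endian length header:

    import struct

    header = struct.pack('>cI', b'o', 5)  # channel 'o', 5 payload bytes
    channel, length = struct.unpack('>cI', header)
    assert (channel, length) == (b'o', 5)

    block = struct.pack('>I', len(b'hello')) + b'hello'  # writeblock() layout
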
--- a/contrib/memory.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/memory.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,7 @@
 
 from __future__ import absolute_import
 
+
 def memusage(ui):
     """Report memory usage of the current process."""
     result = {'peak': 0, 'rss': 0}
@@ -24,8 +25,13 @@
             key = parts[0][2:-1].lower()
             if key in result:
                 result[key] = int(parts[1])
-    ui.write_err(", ".join(["%s: %.1f MiB" % (k, v / 1024.0)
-                            for k, v in result.iteritems()]) + "\n")
+    ui.write_err(
+        ", ".join(
+            ["%s: %.1f MiB" % (k, v / 1024.0) for k, v in result.iteritems()]
+        )
+        + "\n"
+    )
+
 
 def extsetup(ui):
     ui.atexit(memusage, ui)
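
memusage() parses /proc/self/status lines such as 'VmRSS:\t 1234 kB'; the key derivation in isolation:

    line = 'VmRSS:\t    1234 kB\n'
    parts = line.split()
    key = parts[0][2:-1].lower()  # strip the 'Vm' prefix and trailing ':'
    assert (key, int(parts[1])) == ('rss', 1234)
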
--- a/contrib/packaging/hgpackaging/downloads.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/hgpackaging/downloads.py	Sun Oct 06 09:45:02 2019 -0400
@@ -98,7 +98,10 @@
     length = 0
 
     with urllib.request.urlopen(url) as fh:
-        if not url.endswith('.gz') and fh.info().get('Content-Encoding') == 'gzip':
+        if (
+            not url.endswith('.gz')
+            and fh.info().get('Content-Encoding') == 'gzip'
+        ):
             fh = gzip.GzipFile(fileobj=fh)
 
         while True:
@@ -114,12 +117,14 @@
     digest = h.hexdigest()
 
     if length != size:
-        raise IntegrityError('size mismatch on %s: wanted %d; got %d' % (
-            url, size, length))
+        raise IntegrityError(
+            'size mismatch on %s: wanted %d; got %d' % (url, size, length)
+        )
 
     if digest != sha256:
-        raise IntegrityError('sha256 mismatch on %s: wanted %s; got %s' % (
-            url, sha256, digest))
+        raise IntegrityError(
+            'sha256 mismatch on %s: wanted %s; got %s' % (url, sha256, digest)
+        )
 
 
 def download_to_path(url: str, path: pathlib.Path, size: int, sha256: str):
@@ -162,12 +167,14 @@
     print('successfully downloaded %s' % url)
 
 
-def download_entry(name: dict, dest_path: pathlib.Path, local_name=None) -> pathlib.Path:
+def download_entry(
+    name: dict, dest_path: pathlib.Path, local_name=None
+) -> pathlib.Path:
     entry = DOWNLOADS[name]
 
     url = entry['url']
 
-    local_name = local_name or url[url.rindex('/') + 1:]
+    local_name = local_name or url[url.rindex('/') + 1 :]
 
     local_path = dest_path / local_name
     download_to_path(url, local_path, entry['size'], entry['sha256'])
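
download_to_path() streams the download and then applies both checks above. The core verification reduced to one function; the name and the all-at-once buffering are assumptions (the real code hashes the stream in chunks):

    import hashlib

    def verify(data, size, sha256):
        # sketch only: takes the whole payload as bytes
        if len(data) != size:
            raise ValueError('size mismatch: wanted %d; got %d' % (size, len(data)))
        digest = hashlib.sha256(data).hexdigest()
        if digest != sha256:
            raise ValueError('sha256 mismatch: wanted %s; got %s' % (sha256, digest))

    verify(b'abc', 3, hashlib.sha256(b'abc').hexdigest())
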
--- a/contrib/packaging/hgpackaging/inno.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/hgpackaging/inno.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,12 +12,8 @@
 import shutil
 import subprocess
 
-from .py2exe import (
-    build_py2exe,
-)
-from .util import (
-    find_vc_runtime_files,
-)
+from .py2exe import build_py2exe
+from .util import find_vc_runtime_files
 
 
 EXTRA_PACKAGES = {
@@ -28,9 +24,13 @@
 }
 
 
-def build(source_dir: pathlib.Path, build_dir: pathlib.Path,
-          python_exe: pathlib.Path, iscc_exe: pathlib.Path,
-          version=None):
+def build(
+    source_dir: pathlib.Path,
+    build_dir: pathlib.Path,
+    python_exe: pathlib.Path,
+    iscc_exe: pathlib.Path,
+    version=None,
+):
     """Build the Inno installer.
 
     Build files will be placed in ``build_dir``.
@@ -44,11 +44,18 @@
 
     vc_x64 = r'\x64' in os.environ.get('LIB', '')
 
-    requirements_txt = (source_dir / 'contrib' / 'packaging' /
-                        'inno' / 'requirements.txt')
+    requirements_txt = (
+        source_dir / 'contrib' / 'packaging' / 'inno' / 'requirements.txt'
+    )
 
-    build_py2exe(source_dir, build_dir, python_exe, 'inno',
-                 requirements_txt, extra_packages=EXTRA_PACKAGES)
+    build_py2exe(
+        source_dir,
+        build_dir,
+        python_exe,
+        'inno',
+        requirements_txt,
+        extra_packages=EXTRA_PACKAGES,
+    )
 
     # hg.exe depends on VC9 runtime DLLs. Copy those into place.
     for f in find_vc_runtime_files(vc_x64):
--- a/contrib/packaging/hgpackaging/py2exe.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/hgpackaging/py2exe.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,9 +11,7 @@
 import pathlib
 import subprocess
 
-from .downloads import (
-    download_entry,
-)
+from .downloads import download_entry
 from .util import (
     extract_tar_to_directory,
     extract_zip_to_directory,
@@ -21,12 +19,17 @@
 )
 
 
-def build_py2exe(source_dir: pathlib.Path, build_dir: pathlib.Path,
-                 python_exe: pathlib.Path, build_name: str,
-                 venv_requirements_txt: pathlib.Path,
-                 extra_packages=None, extra_excludes=None,
-                 extra_dll_excludes=None,
-                 extra_packages_script=None):
+def build_py2exe(
+    source_dir: pathlib.Path,
+    build_dir: pathlib.Path,
+    python_exe: pathlib.Path,
+    build_name: str,
+    venv_requirements_txt: pathlib.Path,
+    extra_packages=None,
+    extra_excludes=None,
+    extra_dll_excludes=None,
+    extra_packages_script=None,
+):
     """Build Mercurial with py2exe.
 
     Build files will be placed in ``build_dir``.
@@ -36,9 +39,11 @@
     to already be configured with an active toolchain.
     """
     if 'VCINSTALLDIR' not in os.environ:
-        raise Exception('not running from a Visual C++ build environment; '
-                        'execute the "Visual C++ <version> Command Prompt" '
-                        'application shortcut or a vcsvarsall.bat file')
+        raise Exception(
+            'not running from a Visual C++ build environment; '
+            'execute the "Visual C++ <version> Command Prompt" '
+            'application shortcut or a vcvarsall.bat file'
+        )
 
     # Identify x86/x64 and validate the environment matches the Python
     # architecture.
@@ -48,12 +53,16 @@
 
     if vc_x64:
         if py_info['arch'] != '64bit':
-            raise Exception('architecture mismatch: Visual C++ environment '
-                            'is configured for 64-bit but Python is 32-bit')
+            raise Exception(
+                'architecture mismatch: Visual C++ environment '
+                'is configured for 64-bit but Python is 32-bit'
+            )
     else:
         if py_info['arch'] != '32bit':
-            raise Exception('architecture mismatch: Visual C++ environment '
-                            'is configured for 32-bit but Python is 64-bit')
+            raise Exception(
+                'architecture mismatch: Visual C++ environment '
+                'is configured for 32-bit but Python is 64-bit'
+            )
 
     if py_info['py3']:
         raise Exception('Only Python 2 is currently supported')
@@ -65,11 +74,11 @@
     virtualenv_pkg, virtualenv_entry = download_entry('virtualenv', build_dir)
     py2exe_pkg, py2exe_entry = download_entry('py2exe', build_dir)
 
-    venv_path = build_dir / ('venv-%s-%s' % (build_name,
-                                             'x64' if vc_x64 else 'x86'))
+    venv_path = build_dir / (
+        'venv-%s-%s' % (build_name, 'x64' if vc_x64 else 'x86')
+    )
 
-    gettext_root = build_dir / (
-        'gettext-win-%s' % gettext_entry['version'])
+    gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version'])
 
     if not gettext_root.exists():
         extract_zip_to_directory(gettext_pkg, gettext_root)
@@ -77,7 +86,8 @@
 
     # This assumes Python 2. We don't need virtualenv on Python 3.
     virtualenv_src_path = build_dir / (
-        'virtualenv-%s' % virtualenv_entry['version'])
+        'virtualenv-%s' % virtualenv_entry['version']
+    )
     virtualenv_py = virtualenv_src_path / 'virtualenv.py'
 
     if not virtualenv_src_path.exists():
@@ -91,14 +101,15 @@
     if not venv_path.exists():
         print('creating virtualenv with dependencies')
         subprocess.run(
-            [str(python_exe), str(virtualenv_py), str(venv_path)],
-            check=True)
+            [str(python_exe), str(virtualenv_py), str(venv_path)], check=True
+        )
 
     venv_python = venv_path / 'Scripts' / 'python.exe'
     venv_pip = venv_path / 'Scripts' / 'pip.exe'
 
-    subprocess.run([str(venv_pip), 'install', '-r', str(venv_requirements_txt)],
-                   check=True)
+    subprocess.run(
+        [str(venv_pip), 'install', '-r', str(venv_requirements_txt)], check=True
+    )
 
     # Force distutils to use VC++ settings from environment, which was
     # validated above.
@@ -107,9 +118,13 @@
     env['MSSdk'] = '1'
 
     if extra_packages_script:
-        more_packages = set(subprocess.check_output(
-            extra_packages_script,
-            cwd=build_dir).split(b'\0')[-1].strip().decode('utf-8').splitlines())
+        more_packages = set(
+            subprocess.check_output(extra_packages_script, cwd=build_dir)
+            .split(b'\0')[-1]
+            .strip()
+            .decode('utf-8')
+            .splitlines()
+        )
         if more_packages:
             if not extra_packages:
                 extra_packages = more_packages
@@ -119,32 +134,38 @@
     if extra_packages:
         env['HG_PY2EXE_EXTRA_PACKAGES'] = ' '.join(sorted(extra_packages))
         hgext3rd_extras = sorted(
-            e for e in extra_packages if e.startswith('hgext3rd.'))
+            e for e in extra_packages if e.startswith('hgext3rd.')
+        )
         if hgext3rd_extras:
             env['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'] = ' '.join(hgext3rd_extras)
     if extra_excludes:
         env['HG_PY2EXE_EXTRA_EXCLUDES'] = ' '.join(sorted(extra_excludes))
     if extra_dll_excludes:
         env['HG_PY2EXE_EXTRA_DLL_EXCLUDES'] = ' '.join(
-            sorted(extra_dll_excludes))
+            sorted(extra_dll_excludes)
+        )
 
     py2exe_py_path = venv_path / 'Lib' / 'site-packages' / 'py2exe'
     if not py2exe_py_path.exists():
         print('building py2exe')
-        subprocess.run([str(venv_python), 'setup.py', 'install'],
-                       cwd=py2exe_source_path,
-                       env=env,
-                       check=True)
+        subprocess.run(
+            [str(venv_python), 'setup.py', 'install'],
+            cwd=py2exe_source_path,
+            env=env,
+            check=True,
+        )
 
     # Register location of msgfmt and other binaries.
     env['PATH'] = '%s%s%s' % (
-        env['PATH'], os.pathsep, str(gettext_root / 'bin'))
+        env['PATH'],
+        os.pathsep,
+        str(gettext_root / 'bin'),
+    )
 
     print('building Mercurial')
     subprocess.run(
-        [str(venv_python), 'setup.py',
-         'py2exe',
-         'build_doc', '--html'],
+        [str(venv_python), 'setup.py', 'py2exe', 'build_doc', '--html'],
         cwd=str(source_dir),
         env=env,
-        check=True)
+        check=True,
+    )
--- a/contrib/packaging/hgpackaging/util.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/hgpackaging/util.py	Sun Oct 06 09:45:02 2019 -0400
@@ -32,8 +32,11 @@
 
     prefix = 'amd64' if x64 else 'x86'
 
-    candidates = sorted(p for p in os.listdir(winsxs)
-                  if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix))
+    candidates = sorted(
+        p
+        for p in os.listdir(winsxs)
+        if p.lower().startswith('%s_microsoft.vc90.crt_' % prefix)
+    )
 
     for p in candidates:
         print('found candidate VC runtime: %s' % p)
@@ -72,7 +75,7 @@
         'version': version,
         'bin_root': bin_version,
         'bin_x86': bin_version / 'x86',
-        'bin_x64': bin_version / 'x64'
+        'bin_x64': bin_version / 'x64',
     }
 
 
@@ -89,9 +92,14 @@
     raise Exception('could not find signtool.exe in Windows 10 SDK')
 
 
-def sign_with_signtool(file_path, description, subject_name=None,
-                       cert_path=None, cert_password=None,
-                       timestamp_url=None):
+def sign_with_signtool(
+    file_path,
+    description,
+    subject_name=None,
+    cert_path=None,
+    cert_password=None,
+    timestamp_url=None,
+):
     """Digitally sign a file with signtool.exe.
 
     ``file_path`` is file to sign.
@@ -114,10 +122,13 @@
         cert_password = getpass.getpass('password for %s: ' % cert_path)
 
     args = [
-        str(find_signtool()), 'sign',
+        str(find_signtool()),
+        'sign',
         '/v',
-        '/fd', 'sha256',
-        '/d', description,
+        '/fd',
+        'sha256',
+        '/d',
+        description,
     ]
 
     if cert_path:
--- a/contrib/packaging/hgpackaging/wix.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/hgpackaging/wix.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,12 +15,8 @@
 import typing
 import xml.dom.minidom
 
-from .downloads import (
-    download_entry,
-)
-from .py2exe import (
-    build_py2exe,
-)
+from .downloads import download_entry
+from .py2exe import build_py2exe
 from .util import (
     extract_zip_to_directory,
     sign_with_signtool,
@@ -84,17 +80,29 @@
 
 def ensure_vc90_merge_modules(build_dir):
     x86 = (
-        download_entry('vc9-crt-x86-msm', build_dir,
-                       local_name='microsoft.vcxx.crt.x86_msm.msm')[0],
-        download_entry('vc9-crt-x86-msm-policy', build_dir,
-                       local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm')[0]
+        download_entry(
+            'vc9-crt-x86-msm',
+            build_dir,
+            local_name='microsoft.vcxx.crt.x86_msm.msm',
+        )[0],
+        download_entry(
+            'vc9-crt-x86-msm-policy',
+            build_dir,
+            local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm',
+        )[0],
     )
 
     x64 = (
-        download_entry('vc9-crt-x64-msm', build_dir,
-                       local_name='microsoft.vcxx.crt.x64_msm.msm')[0],
-        download_entry('vc9-crt-x64-msm-policy', build_dir,
-                       local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm')[0]
+        download_entry(
+            'vc9-crt-x64-msm',
+            build_dir,
+            local_name='microsoft.vcxx.crt.x64_msm.msm',
+        )[0],
+        download_entry(
+            'vc9-crt-x64-msm-policy',
+            build_dir,
+            local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm',
+        )[0],
     )
     return {
         'x86': x86,
@@ -116,17 +124,26 @@
     subprocess.run(args, cwd=str(cwd), check=True)
 
 
-def make_post_build_signing_fn(name, subject_name=None, cert_path=None,
-                               cert_password=None, timestamp_url=None):
+def make_post_build_signing_fn(
+    name,
+    subject_name=None,
+    cert_path=None,
+    cert_password=None,
+    timestamp_url=None,
+):
     """Create a callable that will use signtool to sign hg.exe."""
 
     def post_build_sign(source_dir, build_dir, dist_dir, version):
         description = '%s %s' % (name, version)
 
-        sign_with_signtool(dist_dir / 'hg.exe', description,
-                           subject_name=subject_name, cert_path=cert_path,
-                           cert_password=cert_password,
-                           timestamp_url=timestamp_url)
+        sign_with_signtool(
+            dist_dir / 'hg.exe',
+            description,
+            subject_name=subject_name,
+            cert_path=cert_path,
+            cert_password=cert_password,
+            timestamp_url=timestamp_url,
+        )
 
     return post_build_sign
 
@@ -155,7 +172,8 @@
     # We can't use ElementTree because it doesn't handle the
     # <?include ?> directives.
     doc = xml.dom.minidom.parseString(
-        LIBRARIES_XML.format(wix_dir=str(wix_dir)))
+        LIBRARIES_XML.format(wix_dir=str(wix_dir))
+    )
 
     component = doc.getElementsByTagName('Component')[0]
 
@@ -177,11 +195,16 @@
     return doc.toprettyxml()
 
 
-def build_installer(source_dir: pathlib.Path, python_exe: pathlib.Path,
-                    msi_name='mercurial', version=None, post_build_fn=None,
-                    extra_packages_script=None,
-                    extra_wxs:typing.Optional[typing.Dict[str,str]]=None,
-                    extra_features:typing.Optional[typing.List[str]]=None):
+def build_installer(
+    source_dir: pathlib.Path,
+    python_exe: pathlib.Path,
+    msi_name='mercurial',
+    version=None,
+    post_build_fn=None,
+    extra_packages_script=None,
+    extra_wxs: typing.Optional[typing.Dict[str, str]] = None,
+    extra_features: typing.Optional[typing.List[str]] = None,
+):
     """Build a WiX MSI installer.
 
     ``source_dir`` is the path to the Mercurial source tree to use.
@@ -209,10 +232,15 @@
 
     requirements_txt = wix_dir / 'requirements.txt'
 
-    build_py2exe(source_dir, hg_build_dir,
-                 python_exe, 'wix', requirements_txt,
-                 extra_packages=EXTRA_PACKAGES,
-                 extra_packages_script=extra_packages_script)
+    build_py2exe(
+        source_dir,
+        hg_build_dir,
+        python_exe,
+        'wix',
+        requirements_txt,
+        extra_packages=EXTRA_PACKAGES,
+        extra_packages_script=extra_packages_script,
+    )
 
     version = version or normalize_version(find_version(source_dir))
     print('using version string: %s' % version)
@@ -265,16 +293,19 @@
 
     run_candle(wix_path, build_dir, source, source_build_rel, defines=defines)
 
-    msi_path = source_dir / 'dist' / (
-        '%s-%s-%s.msi' % (msi_name, version, arch))
+    msi_path = (
+        source_dir / 'dist' / ('%s-%s-%s.msi' % (msi_name, version, arch))
+    )
 
     args = [
         str(wix_path / 'light.exe'),
         '-nologo',
-        '-ext', 'WixUIExtension',
+        '-ext',
+        'WixUIExtension',
         '-sw1076',
         '-spdb',
-        '-o', str(msi_path),
+        '-o',
+        str(msi_path),
     ]
 
     for source, rel_path in SUPPORT_WXS:
@@ -286,10 +317,12 @@
         source = os.path.basename(source)
         args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
 
-    args.extend([
-        str(build_dir / 'library.wixobj'),
-        str(build_dir / 'mercurial.wixobj'),
-    ])
+    args.extend(
+        [
+            str(build_dir / 'library.wixobj'),
+            str(build_dir / 'mercurial.wixobj'),
+        ]
+    )
 
     subprocess.run(args, cwd=str(source_dir), check=True)
 
@@ -300,11 +333,19 @@
     }
 
 
-def build_signed_installer(source_dir: pathlib.Path, python_exe: pathlib.Path,
-                           name: str, version=None, subject_name=None,
-                           cert_path=None, cert_password=None,
-                           timestamp_url=None, extra_packages_script=None,
-                           extra_wxs=None, extra_features=None):
+def build_signed_installer(
+    source_dir: pathlib.Path,
+    python_exe: pathlib.Path,
+    name: str,
+    version=None,
+    subject_name=None,
+    cert_path=None,
+    cert_password=None,
+    timestamp_url=None,
+    extra_packages_script=None,
+    extra_wxs=None,
+    extra_features=None,
+):
     """Build an installer with signed executables."""
 
     post_build_fn = make_post_build_signing_fn(
@@ -312,16 +353,27 @@
         subject_name=subject_name,
         cert_path=cert_path,
         cert_password=cert_password,
-        timestamp_url=timestamp_url)
+        timestamp_url=timestamp_url,
+    )
 
-    info = build_installer(source_dir, python_exe=python_exe,
-                           msi_name=name.lower(), version=version,
-                           post_build_fn=post_build_fn,
-                           extra_packages_script=extra_packages_script,
-                           extra_wxs=extra_wxs, extra_features=extra_features)
+    info = build_installer(
+        source_dir,
+        python_exe=python_exe,
+        msi_name=name.lower(),
+        version=version,
+        post_build_fn=post_build_fn,
+        extra_packages_script=extra_packages_script,
+        extra_wxs=extra_wxs,
+        extra_features=extra_features,
+    )
 
     description = '%s %s' % (name, version)
 
-    sign_with_signtool(info['msi_path'], description,
-                       subject_name=subject_name, cert_path=cert_path,
-                       cert_password=cert_password, timestamp_url=timestamp_url)
+    sign_with_signtool(
+        info['msi_path'],
+        description,
+        subject_name=subject_name,
+        cert_path=cert_path,
+        cert_password=cert_password,
+        timestamp_url=timestamp_url,
+    )
--- a/contrib/packaging/inno/build.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/inno/build.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,14 +19,15 @@
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
 
-    parser.add_argument('--python',
-                        required=True,
-                        help='path to python.exe to use')
-    parser.add_argument('--iscc',
-                        help='path to iscc.exe to use')
-    parser.add_argument('--version',
-                        help='Mercurial version string to use '
-                             '(detected from __version__.py if not defined')
+    parser.add_argument(
+        '--python', required=True, help='path to python.exe to use'
+    )
+    parser.add_argument('--iscc', help='path to iscc.exe to use')
+    parser.add_argument(
+        '--version',
+        help='Mercurial version string to use '
+        '(detected from __version__.py if not defined)',
+    )
 
     args = parser.parse_args()
 
@@ -36,8 +37,11 @@
     if args.iscc:
         iscc = pathlib.Path(args.iscc)
     else:
-        iscc = (pathlib.Path(os.environ['ProgramFiles(x86)']) / 'Inno Setup 5' /
-            'ISCC.exe')
+        iscc = (
+            pathlib.Path(os.environ['ProgramFiles(x86)'])
+            / 'Inno Setup 5'
+            / 'ISCC.exe'
+        )
 
     here = pathlib.Path(os.path.abspath(os.path.dirname(__file__)))
     source_dir = here.parent.parent.parent
@@ -47,5 +51,10 @@
 
     from hgpackaging.inno import build
 
-    build(source_dir, build_dir, pathlib.Path(args.python), iscc,
-          version=args.version)
+    build(
+        source_dir,
+        build_dir,
+        pathlib.Path(args.python),
+        iscc,
+        version=args.version,
+    )
--- a/contrib/packaging/wix/build.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/packaging/wix/build.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,31 +17,42 @@
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
 
-    parser.add_argument('--name',
-                        help='Application name',
-                        default='Mercurial')
-    parser.add_argument('--python',
-                        help='Path to Python executable to use',
-                        required=True)
-    parser.add_argument('--sign-sn',
-                        help='Subject name (or fragment thereof) of certificate '
-                             'to use for signing')
-    parser.add_argument('--sign-cert',
-                        help='Path to certificate to use for signing')
-    parser.add_argument('--sign-password',
-                        help='Password for signing certificate')
-    parser.add_argument('--sign-timestamp-url',
-                        help='URL of timestamp server to use for signing')
-    parser.add_argument('--version',
-                        help='Version string to use')
-    parser.add_argument('--extra-packages-script',
-                        help=('Script to execute to include extra packages in '
-                              'py2exe binary.'))
-    parser.add_argument('--extra-wxs',
-                        help='CSV of path_to_wxs_file=working_dir_for_wxs_file')
-    parser.add_argument('--extra-features',
-                        help=('CSV of extra feature names to include '
-                              'in the installer from the extra wxs files'))
+    parser.add_argument('--name', help='Application name', default='Mercurial')
+    parser.add_argument(
+        '--python', help='Path to Python executable to use', required=True
+    )
+    parser.add_argument(
+        '--sign-sn',
+        help='Subject name (or fragment thereof) of certificate '
+        'to use for signing',
+    )
+    parser.add_argument(
+        '--sign-cert', help='Path to certificate to use for signing'
+    )
+    parser.add_argument(
+        '--sign-password', help='Password for signing certificate'
+    )
+    parser.add_argument(
+        '--sign-timestamp-url',
+        help='URL of timestamp server to use for signing',
+    )
+    parser.add_argument('--version', help='Version string to use')
+    parser.add_argument(
+        '--extra-packages-script',
+        help=(
+            'Script to execute to include extra packages in py2exe binary.'
+        ),
+    )
+    parser.add_argument(
+        '--extra-wxs', help='CSV of path_to_wxs_file=working_dir_for_wxs_file'
+    )
+    parser.add_argument(
+        '--extra-features',
+        help=(
+            'CSV of extra feature names to include '
+            'in the installer from the extra wxs files'
+        ),
+    )
 
     args = parser.parse_args()
 
@@ -69,7 +80,8 @@
         kwargs['extra_packages_script'] = args.extra_packages_script
     if args.extra_wxs:
         kwargs['extra_wxs'] = dict(
-            thing.split("=") for thing in args.extra_wxs.split(','))
+            thing.split("=") for thing in args.extra_wxs.split(',')
+        )
     if args.extra_features:
         kwargs['extra_features'] = args.extra_features.split(',')
 
--- a/contrib/perf-utils/perf-revlog-write-plot.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/perf-utils/perf-revlog-write-plot.py	Sun Oct 06 09:45:02 2019 -0400
@@ -44,18 +44,12 @@
     comb_plt = fig.add_subplot(211)
     other_plt = fig.add_subplot(212)
 
-    comb_plt.plot(ary[0],
-                  np.cumsum(ary[1]),
-                  color='red',
-                  linewidth=1,
-                  label='comb')
+    comb_plt.plot(
+        ary[0], np.cumsum(ary[1]), color='red', linewidth=1, label='comb'
+    )
 
     plots = []
-    p = other_plt.plot(ary[0],
-                       ary[1],
-                       color='red',
-                       linewidth=1,
-                       label='wall')
+    p = other_plt.plot(ary[0], ary[1], color='red', linewidth=1, label='wall')
     plots.append(p)
 
     colors = {
@@ -64,20 +58,24 @@
         1000: ('purple', 'xkcd:dark pink'),
     }
     for n, color in colors.items():
-        avg_n = np.convolve(ary[1], np.full(n, 1. / n), 'valid')
-        p = other_plt.plot(ary[0][n - 1:],
-                           avg_n,
-                           color=color[0],
-                           linewidth=1,
-                           label='avg time last %d' % n)
+        avg_n = np.convolve(ary[1], np.full(n, 1.0 / n), 'valid')
+        p = other_plt.plot(
+            ary[0][n - 1 :],
+            avg_n,
+            color=color[0],
+            linewidth=1,
+            label='avg time last %d' % n,
+        )
         plots.append(p)
 
         med_n = scipy.signal.medfilt(ary[1], n + 1)
-        p = other_plt.plot(ary[0],
-                           med_n,
-                           color=color[1],
-                           linewidth=1,
-                           label='median time last %d' % n)
+        p = other_plt.plot(
+            ary[0],
+            med_n,
+            color=color[1],
+            linewidth=1,
+            label='median time last %d' % n,
+        )
         plots.append(p)
 
     formatter = mticker.ScalarFormatter()
@@ -108,6 +106,7 @@
         else:
             legline.set_alpha(0.2)
         fig.canvas.draw()
+
     if title is not None:
         fig.canvas.set_window_title(title)
     fig.canvas.mpl_connect('pick_event', onpick)
--- a/contrib/perf.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/perf.py	Sun Oct 06 09:45:02 2019 -0400
@@ -84,32 +84,33 @@
 # try to import modules separately (in dict order), and ignore
 # failure, because these aren't available with early Mercurial
 try:
-    from mercurial import branchmap # since 2.5 (or bcee63733aad)
+    from mercurial import branchmap  # since 2.5 (or bcee63733aad)
 except ImportError:
     pass
 try:
-    from mercurial import obsolete # since 2.3 (or ad0d6c2b3279)
+    from mercurial import obsolete  # since 2.3 (or ad0d6c2b3279)
 except ImportError:
     pass
 try:
-    from mercurial import registrar # since 3.7 (or 37d50250b696)
-    dir(registrar) # forcibly load it
+    from mercurial import registrar  # since 3.7 (or 37d50250b696)
+
+    dir(registrar)  # forcibly load it
 except ImportError:
     registrar = None
 try:
-    from mercurial import repoview # since 2.5 (or 3a6ddacb7198)
+    from mercurial import repoview  # since 2.5 (or 3a6ddacb7198)
 except ImportError:
     pass
 try:
-    from mercurial.utils import repoviewutil # since 5.0
+    from mercurial.utils import repoviewutil  # since 5.0
 except ImportError:
     repoviewutil = None
 try:
-    from mercurial import scmutil # since 1.9 (or 8b252e826c68)
+    from mercurial import scmutil  # since 1.9 (or 8b252e826c68)
 except ImportError:
     pass
 try:
-    from mercurial import setdiscovery # since 1.9 (or cb98fed52495)
+    from mercurial import setdiscovery  # since 1.9 (or cb98fed52495)
 except ImportError:
     pass
 
@@ -118,29 +119,33 @@
 except ImportError:
     profiling = None
 
+
 def identity(a):
     return a
 
+
 try:
     from mercurial import pycompat
+
     getargspec = pycompat.getargspec  # added to module after 4.5
     _byteskwargs = pycompat.byteskwargs  # since 4.1 (or fbc3f73dc802)
-    _sysstr = pycompat.sysstr         # since 4.0 (or 2219f4f82ede)
-    _bytestr = pycompat.bytestr       # since 4.2 (or b70407bd84d5)
-    _xrange = pycompat.xrange         # since 4.8 (or 7eba8f83129b)
-    fsencode = pycompat.fsencode      # since 3.9 (or f4a5e0e86a7e)
+    _sysstr = pycompat.sysstr  # since 4.0 (or 2219f4f82ede)
+    _bytestr = pycompat.bytestr  # since 4.2 (or b70407bd84d5)
+    _xrange = pycompat.xrange  # since 4.8 (or 7eba8f83129b)
+    fsencode = pycompat.fsencode  # since 3.9 (or f4a5e0e86a7e)
     if pycompat.ispy3:
         _maxint = sys.maxsize  # per py3 docs for replacing maxint
     else:
         _maxint = sys.maxint
 except (NameError, ImportError, AttributeError):
     import inspect
+
     getargspec = inspect.getargspec
     _byteskwargs = identity
     _bytestr = str
-    fsencode = identity               # no py3 support
-    _maxint = sys.maxint              # no py3 support
-    _sysstr = lambda x: x             # no py3 support
+    fsencode = identity  # no py3 support
+    _maxint = sys.maxint  # no py3 support
+    _sysstr = lambda x: x  # no py3 support
     _xrange = xrange
 
 try:
@@ -155,6 +160,7 @@
 
 try:
     from mercurial import logcmdutil
+
     makelogtemplater = logcmdutil.maketemplater
 except (AttributeError, ImportError):
     try:
@@ -166,8 +172,12 @@
 # define util.safehasattr forcibly, because util.safehasattr has been
 # available since 1.9.3 (or 94b200a11cf7)
 _undefined = object()
+
+
 def safehasattr(thing, attr):
     return getattr(thing, _sysstr(attr), _undefined) is not _undefined
+
+
 setattr(util, 'safehasattr', safehasattr)
 
 # for "historical portability":
@@ -185,20 +195,28 @@
 # available, because commands.formatteropts has been available since
 # 3.2 (or 7a7eed5176a4), even though formatting itself has been
 # available since 2.2 (or ae5f92e154d3)
-formatteropts = getattr(cmdutil, "formatteropts",
-                        getattr(commands, "formatteropts", []))
+formatteropts = getattr(
+    cmdutil, "formatteropts", getattr(commands, "formatteropts", [])
+)
 
 # for "historical portability":
 # use locally defined option list, if debugrevlogopts isn't available,
 # because commands.debugrevlogopts has been available since 3.7 (or
 # 5606f7d0d063), even though cmdutil.openrevlog() has been available
 # since 1.9 (or a79fea6b3e77).
-revlogopts = getattr(cmdutil, "debugrevlogopts",
-                     getattr(commands, "debugrevlogopts", [
-        (b'c', b'changelog', False, (b'open changelog')),
-        (b'm', b'manifest', False, (b'open manifest')),
-        (b'', b'dir', False, (b'open directory manifest')),
-        ]))
+revlogopts = getattr(
+    cmdutil,
+    "debugrevlogopts",
+    getattr(
+        commands,
+        "debugrevlogopts",
+        [
+            (b'c', b'changelog', False, b'open changelog'),
+            (b'm', b'manifest', False, b'open manifest'),
+            (b'', b'dir', False, b'open directory manifest'),
+        ],
+    ),
+)
 
 cmdtable = {}
 
@@ -208,6 +226,7 @@
 def parsealiases(cmd):
     return cmd.split(b"|")
 
+
 if safehasattr(registrar, 'command'):
     command = registrar.command(cmdtable)
 elif safehasattr(cmdutil, 'command'):
@@ -217,10 +236,13 @@
         # wrap original cmdutil.command, because "norepo" option has
         # been available since 3.1 (or 75a96326cecb)
         _command = command
+
         def command(name, options=(), synopsis=None, norepo=False):
             if norepo:
                 commands.norepo += b' %s' % b' '.join(parsealiases(name))
             return _command(name, list(options), synopsis)
+
+
 else:
     # for "historical portability":
     # define "@command" annotation locally, because cmdutil.command
@@ -234,36 +256,51 @@
             if norepo:
                 commands.norepo += b' %s' % b' '.join(parsealiases(name))
             return func
+
         return decorator
 
+
 try:
     import mercurial.registrar
     import mercurial.configitems
+
     configtable = {}
     configitem = mercurial.registrar.configitem(configtable)
-    configitem(b'perf', b'presleep',
+    configitem(
+        b'perf',
+        b'presleep',
         default=mercurial.configitems.dynamicdefault,
         experimental=True,
     )
-    configitem(b'perf', b'stub',
+    configitem(
+        b'perf',
+        b'stub',
         default=mercurial.configitems.dynamicdefault,
         experimental=True,
     )
-    configitem(b'perf', b'parentscount',
+    configitem(
+        b'perf',
+        b'parentscount',
         default=mercurial.configitems.dynamicdefault,
         experimental=True,
     )
-    configitem(b'perf', b'all-timing',
+    configitem(
+        b'perf',
+        b'all-timing',
         default=mercurial.configitems.dynamicdefault,
         experimental=True,
     )
-    configitem(b'perf', b'pre-run',
+    configitem(
+        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+    )
+    configitem(
+        b'perf',
+        b'profile-benchmark',
         default=mercurial.configitems.dynamicdefault,
     )
-    configitem(b'perf', b'profile-benchmark',
-        default=mercurial.configitems.dynamicdefault,
-    )
-    configitem(b'perf', b'run-limits',
+    configitem(
+        b'perf',
+        b'run-limits',
         default=mercurial.configitems.dynamicdefault,
         experimental=True,
     )
@@ -272,42 +309,50 @@
 except TypeError:
     # compatibility fix for a11fd395e83f
     # hg version: 5.2
-    configitem(b'perf', b'presleep',
-        default=mercurial.configitems.dynamicdefault,
+    configitem(
+        b'perf', b'presleep', default=mercurial.configitems.dynamicdefault,
+    )
+    configitem(
+        b'perf', b'stub', default=mercurial.configitems.dynamicdefault,
+    )
+    configitem(
+        b'perf', b'parentscount', default=mercurial.configitems.dynamicdefault,
     )
-    configitem(b'perf', b'stub',
-        default=mercurial.configitems.dynamicdefault,
+    configitem(
+        b'perf', b'all-timing', default=mercurial.configitems.dynamicdefault,
     )
-    configitem(b'perf', b'parentscount',
+    configitem(
+        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+    )
+    configitem(
+        b'perf',
+        b'profile-benchmark',
         default=mercurial.configitems.dynamicdefault,
     )
-    configitem(b'perf', b'all-timing',
-        default=mercurial.configitems.dynamicdefault,
-    )
-    configitem(b'perf', b'pre-run',
-        default=mercurial.configitems.dynamicdefault,
+    configitem(
+        b'perf', b'run-limits', default=mercurial.configitems.dynamicdefault,
     )
-    configitem(b'perf', b'profile-benchmark',
-        default=mercurial.configitems.dynamicdefault,
-    )
-    configitem(b'perf', b'run-limits',
-        default=mercurial.configitems.dynamicdefault,
-    )
+
 
 def getlen(ui):
     if ui.configbool(b"perf", b"stub", False):
         return lambda x: 1
     return len
 
+
 class noop(object):
     """dummy context manager"""
+
     def __enter__(self):
         pass
+
     def __exit__(self, *args):
         pass
 
+
 NOOPCTX = noop()
 
+
 def gettimer(ui, opts=None):
     """return a timer function and formatter: (timer, formatter)
 
@@ -338,31 +383,42 @@
         # define formatter locally, because ui.formatter has been
         # available since 2.2 (or ae5f92e154d3)
         from mercurial import node
+
         class defaultformatter(object):
             """Minimized composition of baseformatter and plainformatter
             """
+
             def __init__(self, ui, topic, opts):
                 self._ui = ui
                 if ui.debugflag:
                     self.hexfunc = node.hex
                 else:
                     self.hexfunc = node.short
+
             def __nonzero__(self):
                 return False
+
             __bool__ = __nonzero__
+
             def startitem(self):
                 pass
+
             def data(self, **data):
                 pass
+
             def write(self, fields, deftext, *fielddata, **opts):
                 self._ui.write(deftext % fielddata, **opts)
+
             def condwrite(self, cond, fields, deftext, *fielddata, **opts):
                 if cond:
                     self._ui.write(deftext % fielddata, **opts)
+
             def plain(self, text, **opts):
                 self._ui.write(text, **opts)
+
             def end(self):
                 pass
+
         fm = defaultformatter(ui, b'perf', opts)
 
     # stub function, runs code only once instead of in a loop
@@ -379,20 +435,27 @@
     for item in limitspec:
         parts = item.split(b'-', 1)
         if len(parts) < 2:
-            ui.warn((b'malformatted run limit entry, missing "-": %s\n'
-                     % item))
+            ui.warn((b'malformatted run limit entry, missing "-": %s\n' % item))
             continue
         try:
             time_limit = float(_sysstr(parts[0]))
         except ValueError as e:
-            ui.warn((b'malformatted run limit entry, %s: %s\n'
-                     % (_bytestr(e), item)))
+            ui.warn(
+                (
+                    b'malformatted run limit entry, %s: %s\n'
+                    % (_bytestr(e), item)
+                )
+            )
             continue
         try:
             run_limit = int(_sysstr(parts[1]))
         except ValueError as e:
-            ui.warn((b'malformatted run limit entry, %s: %s\n'
-                     % (_bytestr(e), item)))
+            ui.warn(
+                (
+                    b'malformatted run limit entry, %s: %s\n'
+                    % (_bytestr(e), item)
+                )
+            )
             continue
         limits.append((time_limit, run_limit))
     if not limits:
@@ -404,15 +467,23 @@
             profiler = profiling.profile(ui)
 
     prerun = getint(ui, b"perf", b"pre-run", 0)
-    t = functools.partial(_timer, fm, displayall=displayall, limits=limits,
-                          prerun=prerun, profiler=profiler)
+    t = functools.partial(
+        _timer,
+        fm,
+        displayall=displayall,
+        limits=limits,
+        prerun=prerun,
+        profiler=profiler,
+    )
     return t, fm
 
+
 def stub_timer(fm, func, setup=None, title=None):
     if setup is not None:
         setup()
     func()
 
+
 @contextlib.contextmanager
 def timeone():
     r = []
@@ -422,7 +493,7 @@
     cstop = util.timer()
     ostop = os.times()
     a, b = ostart, ostop
-    r.append((cstop - cstart, b[0] - a[0], b[1]-a[1]))
+    r.append((cstop - cstart, b[0] - a[0], b[1] - a[1]))
 
 
 # list of stop condition (elapsed time, minimal run count)
@@ -431,8 +502,17 @@
     (10.0, 3),
 )
 
-def _timer(fm, func, setup=None, title=None, displayall=False,
-           limits=DEFAULTLIMITS, prerun=0, profiler=None):
+
+def _timer(
+    fm,
+    func,
+    setup=None,
+    title=None,
+    displayall=False,
+    limits=DEFAULTLIMITS,
+    prerun=0,
+    profiler=None,
+):
     gc.collect()
     results = []
     begin = util.timer()
@@ -461,8 +541,8 @@
                 keepgoing = False
                 break
 
-    formatone(fm, results, title=title, result=r,
-              displayall=displayall)
+    formatone(fm, results, title=title, result=r, displayall=displayall)
+
 
 def formatone(fm, timings, title=None, result=None, displayall=False):
 
@@ -474,6 +554,7 @@
         fm.write(b'title', b'! %s\n', title)
     if result:
         fm.write(b'result', b'! result: %s\n', result)
+
     def display(role, entry):
         prefix = b''
         if role != b'best':
@@ -482,9 +563,10 @@
         fm.write(prefix + b'wall', b' wall %f', entry[0])
         fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2])
         fm.write(prefix + b'user', b' user %f', entry[1])
-        fm.write(prefix + b'sys',  b' sys %f', entry[2])
-        fm.write(prefix + b'count',  b' (%s of %%d)' % role, count)
+        fm.write(prefix + b'sys', b' sys %f', entry[2])
+        fm.write(prefix + b'count', b' (%s of %%d)' % role, count)
         fm.plain(b'\n')
+
     timings.sort()
     min_val = timings[0]
     display(b'best', min_val)
@@ -496,8 +578,10 @@
         median = timings[len(timings) // 2]
         display(b'median', median)
 
+
 # utilities for historical portability
 
+
 def getint(ui, section, name, default):
     # for "historical portability":
     # ui.configint has been available since 1.9 (or fa2b596db182)
@@ -507,8 +591,10 @@
     try:
         return int(v)
     except ValueError:
-        raise error.ConfigError((b"%s.%s is not an integer ('%s')")
-                                % (section, name, v))
+        raise error.ConfigError(
+            b"%s.%s is not an integer ('%s')" % (section, name, v)
+        )
+
 
 def safeattrsetter(obj, name, ignoremissing=False):
     """Ensure that 'obj' has 'name' attribute before subsequent setattr
@@ -528,20 +614,29 @@
     if not util.safehasattr(obj, name):
         if ignoremissing:
             return None
-        raise error.Abort((b"missing attribute %s of %s might break assumption"
-                           b" of performance measurement") % (name, obj))
+        raise error.Abort(
+            (
+                b"missing attribute %s of %s might break assumption"
+                b" of performance measurement"
+            )
+            % (name, obj)
+        )
 
     origvalue = getattr(obj, _sysstr(name))
+
     class attrutil(object):
         def set(self, newvalue):
             setattr(obj, _sysstr(name), newvalue)
+
         def restore(self):
             setattr(obj, _sysstr(name), origvalue)
 
     return attrutil()
 
+
 # utilities to examine each internal API changes
 
+
 def getbranchmapsubsettable():
     # for "historical portability":
     # subsettable is defined in:
@@ -556,8 +651,11 @@
     # bisecting in bcee63733aad::59a9f18d4587 can reach here (both
     # branchmap and repoview modules exist, but subsettable attribute
     # doesn't)
-    raise error.Abort((b"perfbranchmap not available with this Mercurial"),
-                      hint=b"use 2.5 or later")
+    raise error.Abort(
+        b"perfbranchmap not available with this Mercurial",
+        hint=b"use 2.5 or later",
+    )
+
 
 def getsvfs(repo):
     """Return appropriate object to access files under .hg/store
@@ -570,6 +668,7 @@
     else:
         return getattr(repo, 'sopener')
 
+
 def getvfs(repo):
     """Return appropriate object to access files under .hg
     """
@@ -581,10 +680,11 @@
     else:
         return getattr(repo, 'opener')
 
+
 def repocleartagscachefunc(repo):
     """Return the function to clear tags cache according to repo internal API
     """
-    if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525)
+    if util.safehasattr(repo, b'_tagscache'):  # since 2.0 (or 9dca7653b525)
         # in this case, setattr(repo, '_tagscache', None) or so isn't
         # correct way to clear tags cache, because existing code paths
         # expect _tagscache to be a structured object.
@@ -593,25 +693,28 @@
             # 98c867ac1330), and delattr() can't work in such case
             if b'_tagscache' in vars(repo):
                 del repo.__dict__[b'_tagscache']
+
         return clearcache
 
     repotags = safeattrsetter(repo, b'_tags', ignoremissing=True)
-    if repotags: # since 1.4 (or 5614a628d173)
-        return lambda : repotags.set(None)
+    if repotags:  # since 1.4 (or 5614a628d173)
+        return lambda: repotags.set(None)
 
     repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True)
-    if repotagscache: # since 0.6 (or d7df759d0e97)
-        return lambda : repotagscache.set(None)
+    if repotagscache:  # since 0.6 (or d7df759d0e97)
+        return lambda: repotagscache.set(None)
 
     # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches
     # this point, but it isn't so problematic, because:
     # - repo.tags of such Mercurial isn't "callable", and repo.tags()
     #   in perftags() causes failure soon
     # - perf.py itself has been available since 1.1 (or eb240755386d)
-    raise error.Abort((b"tags API of this hg command is unknown"))
+    raise error.Abort(b"tags API of this hg command is unknown")
+
 
 # utilities to clear cache
 
+
 def clearfilecache(obj, attrname):
     unfiltered = getattr(obj, 'unfiltered', None)
     if unfiltered is not None:
@@ -620,23 +723,32 @@
         delattr(obj, attrname)
     obj._filecache.pop(attrname, None)
 
+
 def clearchangelog(repo):
     if repo is not repo.unfiltered():
         object.__setattr__(repo, r'_clcachekey', None)
         object.__setattr__(repo, r'_clcache', None)
     clearfilecache(repo.unfiltered(), 'changelog')
 
+
 # perf commands
 
+
 @command(b'perfwalk', formatteropts)
 def perfwalk(ui, repo, *pats, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     m = scmutil.match(repo[None], pats, {})
-    timer(lambda: len(list(repo.dirstate.walk(m, subrepos=[], unknown=True,
-                                              ignored=False))))
+    timer(
+        lambda: len(
+            list(
+                repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False)
+            )
+        )
+    )
     fm.end()
 
+
 @command(b'perfannotate', formatteropts)
 def perfannotate(ui, repo, f, **opts):
     opts = _byteskwargs(opts)
@@ -645,18 +757,22 @@
     timer(lambda: len(fc.annotate(True)))
     fm.end()
 
-@command(b'perfstatus',
-         [(b'u', b'unknown', False,
-           b'ask status to look for unknown files')] + formatteropts)
+
+@command(
+    b'perfstatus',
+    [(b'u', b'unknown', False, b'ask status to look for unknown files')]
+    + formatteropts,
+)
 def perfstatus(ui, repo, **opts):
     opts = _byteskwargs(opts)
-    #m = match.always(repo.root, repo.getcwd())
-    #timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
+    # m = match.always(repo.root, repo.getcwd())
+    # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False,
     #                                                False))))
     timer, fm = gettimer(ui, opts)
     timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown']))))
     fm.end()
 
+
 @command(b'perfaddremove', formatteropts)
 def perfaddremove(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -675,71 +791,89 @@
         repo.ui.quiet = oldquiet
         fm.end()
 
+
 def clearcaches(cl):
     # behave somewhat consistently across internal API changes
     if util.safehasattr(cl, b'clearcaches'):
         cl.clearcaches()
     elif util.safehasattr(cl, b'_nodecache'):
         from mercurial.node import nullid, nullrev
+
         cl._nodecache = {nullid: nullrev}
         cl._nodepos = None
 
+
 @command(b'perfheads', formatteropts)
 def perfheads(ui, repo, **opts):
     """benchmark the computation of a changelog heads"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     cl = repo.changelog
+
     def s():
         clearcaches(cl)
+
     def d():
         len(cl.headrevs())
+
     timer(d, setup=s)
     fm.end()
 
-@command(b'perftags', formatteropts+
-        [
-            (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
-        ])
+
+@command(
+    b'perftags',
+    formatteropts
+    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+)
 def perftags(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     repocleartagscache = repocleartagscachefunc(repo)
     clearrevlogs = opts[b'clear_revlogs']
+
     def s():
         if clearrevlogs:
             clearchangelog(repo)
             clearfilecache(repo.unfiltered(), 'manifest')
         repocleartagscache()
+
     def t():
         return len(repo.tags())
+
     timer(t, setup=s)
     fm.end()
 
+
 @command(b'perfancestors', formatteropts)
 def perfancestors(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     heads = repo.changelog.headrevs()
+
     def d():
         for a in repo.changelog.ancestors(heads):
             pass
+
     timer(d)
     fm.end()
 
+
 @command(b'perfancestorset', formatteropts)
 def perfancestorset(ui, repo, revset, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     revs = repo.revs(revset)
     heads = repo.changelog.headrevs()
+
     def d():
         s = repo.changelog.ancestors(heads)
         for rev in revs:
             rev in s
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
     """benchmark discovery between local repo and the peer at given path
@@ -750,30 +884,38 @@
 
     def s():
         repos[1] = hg.peer(ui, opts, path)
+
     def d():
         setdiscovery.findcommonheads(ui, *repos)
+
     timer(d, setup=s)
     fm.end()
 
-@command(b'perfbookmarks', formatteropts +
-        [
-            (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
-        ])
+
+@command(
+    b'perfbookmarks',
+    formatteropts
+    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+)
 def perfbookmarks(ui, repo, **opts):
     """benchmark parsing bookmarks from disk to memory"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
 
     clearrevlogs = opts[b'clear_revlogs']
+
     def s():
         if clearrevlogs:
             clearchangelog(repo)
         clearfilecache(repo, b'_bookmarks')
+
     def d():
         repo._bookmarks
+
     timer(d, setup=s)
     fm.end()
 
+
 @command(b'perfbundleread', formatteropts, b'BUNDLE')
 def perfbundleread(ui, repo, bundlepath, **opts):
     """Benchmark reading of bundle files.
@@ -863,25 +1005,32 @@
         bundle = exchange.readbundle(ui, fh, bundlepath)
 
         if isinstance(bundle, changegroup.cg1unpacker):
-            benches.extend([
-                (makebench(deltaiter), b'cg1 deltaiter()'),
-                (makebench(iterchunks), b'cg1 getchunks()'),
-                (makereadnbytes(8192), b'cg1 read(8k)'),
-                (makereadnbytes(16384), b'cg1 read(16k)'),
-                (makereadnbytes(32768), b'cg1 read(32k)'),
-                (makereadnbytes(131072), b'cg1 read(128k)'),
-            ])
+            benches.extend(
+                [
+                    (makebench(deltaiter), b'cg1 deltaiter()'),
+                    (makebench(iterchunks), b'cg1 getchunks()'),
+                    (makereadnbytes(8192), b'cg1 read(8k)'),
+                    (makereadnbytes(16384), b'cg1 read(16k)'),
+                    (makereadnbytes(32768), b'cg1 read(32k)'),
+                    (makereadnbytes(131072), b'cg1 read(128k)'),
+                ]
+            )
         elif isinstance(bundle, bundle2.unbundle20):
-            benches.extend([
-                (makebench(forwardchunks), b'bundle2 forwardchunks()'),
-                (makebench(iterparts), b'bundle2 iterparts()'),
-                (makebench(iterpartsseekable), b'bundle2 iterparts() seekable'),
-                (makebench(seek), b'bundle2 part seek()'),
-                (makepartreadnbytes(8192), b'bundle2 part read(8k)'),
-                (makepartreadnbytes(16384), b'bundle2 part read(16k)'),
-                (makepartreadnbytes(32768), b'bundle2 part read(32k)'),
-                (makepartreadnbytes(131072), b'bundle2 part read(128k)'),
-            ])
+            benches.extend(
+                [
+                    (makebench(forwardchunks), b'bundle2 forwardchunks()'),
+                    (makebench(iterparts), b'bundle2 iterparts()'),
+                    (
+                        makebench(iterpartsseekable),
+                        b'bundle2 iterparts() seekable',
+                    ),
+                    (makebench(seek), b'bundle2 part seek()'),
+                    (makepartreadnbytes(8192), b'bundle2 part read(8k)'),
+                    (makepartreadnbytes(16384), b'bundle2 part read(16k)'),
+                    (makepartreadnbytes(32768), b'bundle2 part read(32k)'),
+                    (makepartreadnbytes(131072), b'bundle2 part read(128k)'),
+                ]
+            )
         elif isinstance(bundle, streamclone.streamcloneapplier):
             raise error.Abort(b'stream clone bundles not supported')
         else:
@@ -892,9 +1041,15 @@
         timer(fn, title=title)
         fm.end()
 
-@command(b'perfchangegroupchangelog', formatteropts +
-         [(b'', b'cgversion', b'02', b'changegroup version'),
-          (b'r', b'rev', b'', b'revisions to add to changegroup')])
+
+@command(
+    b'perfchangegroupchangelog',
+    formatteropts
+    + [
+        (b'', b'cgversion', b'02', b'changegroup version'),
+        (b'r', b'rev', b'', b'revisions to add to changegroup'),
+    ],
+)
 def perfchangegroupchangelog(ui, repo, cgversion=b'02', rev=None, **opts):
     """Benchmark producing a changelog group for a changegroup.
 
@@ -923,77 +1078,96 @@
 
     fm.end()
 
+
 @command(b'perfdirs', formatteropts)
 def perfdirs(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     dirstate = repo.dirstate
     b'a' in dirstate
+
     def d():
         dirstate.hasdir(b'a')
         del dirstate._map._dirs
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdirstate', formatteropts)
 def perfdirstate(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     b"a" in repo.dirstate
+
     def d():
         repo.dirstate.invalidate()
         b"a" in repo.dirstate
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdirstatedirs', formatteropts)
 def perfdirstatedirs(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     b"a" in repo.dirstate
+
     def d():
         repo.dirstate.hasdir(b"a")
         del repo.dirstate._map._dirs
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdirstatefoldmap', formatteropts)
 def perfdirstatefoldmap(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     dirstate = repo.dirstate
     b'a' in dirstate
+
     def d():
         dirstate._map.filefoldmap.get(b'a')
         del dirstate._map.filefoldmap
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdirfoldmap', formatteropts)
 def perfdirfoldmap(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     dirstate = repo.dirstate
     b'a' in dirstate
+
     def d():
         dirstate._map.dirfoldmap.get(b'a')
         del dirstate._map.dirfoldmap
         del dirstate._map._dirs
+
     timer(d)
     fm.end()
 
+
 @command(b'perfdirstatewrite', formatteropts)
 def perfdirstatewrite(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     ds = repo.dirstate
     b"a" in ds
+
     def d():
         ds._dirty = True
         ds.write(repo.currenttransaction())
+
     timer(d)
     fm.end()
 
+
 def _getmergerevs(repo, opts):
     """parse command argument to return rev involved in merge
 
@@ -1016,44 +1190,64 @@
         ancestor = wctx.ancestor(rctx)
     return (wctx, rctx, ancestor)
 
-@command(b'perfmergecalculate',
-         [
-             (b'r', b'rev', b'.', b'rev to merge against'),
-             (b'', b'from', b'', b'rev to merge from'),
-             (b'', b'base', b'', b'the revision to use as base'),
-         ] + formatteropts)
+
+@command(
+    b'perfmergecalculate',
+    [
+        (b'r', b'rev', b'.', b'rev to merge against'),
+        (b'', b'from', b'', b'rev to merge from'),
+        (b'', b'base', b'', b'the revision to use as base'),
+    ]
+    + formatteropts,
+)
 def perfmergecalculate(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
 
     wctx, rctx, ancestor = _getmergerevs(repo, opts)
+
     def d():
         # acceptremote is True because we don't want prompts in the middle of
         # our benchmark
-        merge.calculateupdates(repo, wctx, rctx, [ancestor], branchmerge=False,
-                               force=False, acceptremote=True,
-                               followcopies=True)
+        merge.calculateupdates(
+            repo,
+            wctx,
+            rctx,
+            [ancestor],
+            branchmerge=False,
+            force=False,
+            acceptremote=True,
+            followcopies=True,
+        )
+
     timer(d)
     fm.end()
 
-@command(b'perfmergecopies',
-         [
-             (b'r', b'rev', b'.', b'rev to merge against'),
-             (b'', b'from', b'', b'rev to merge from'),
-             (b'', b'base', b'', b'the revision to use as base'),
-         ] + formatteropts)
+
+@command(
+    b'perfmergecopies',
+    [
+        (b'r', b'rev', b'.', b'rev to merge against'),
+        (b'', b'from', b'', b'rev to merge from'),
+        (b'', b'base', b'', b'the revision to use as base'),
+    ]
+    + formatteropts,
+)
 def perfmergecopies(ui, repo, **opts):
     """measure runtime of `copies.mergecopies`"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     wctx, rctx, ancestor = _getmergerevs(repo, opts)
+
     def d():
         # acceptremote is True because we don't want prompts in the middle of
         # our benchmark
         copies.mergecopies(repo, wctx, rctx, ancestor)
+
     timer(d)
     fm.end()
 
+
 @command(b'perfpathcopies', [], b"REV REV")
 def perfpathcopies(ui, repo, rev1, rev2, **opts):
     """benchmark the copy tracing logic"""
@@ -1061,20 +1255,26 @@
     timer, fm = gettimer(ui, opts)
     ctx1 = scmutil.revsingle(repo, rev1, rev1)
     ctx2 = scmutil.revsingle(repo, rev2, rev2)
+
     def d():
         copies.pathcopies(ctx1, ctx2)
+
     timer(d)
     fm.end()
 
-@command(b'perfphases',
-         [(b'', b'full', False, b'include file reading time too'),
-          ], b"")
+
+@command(
+    b'perfphases',
+    [(b'', b'full', False, b'include file reading time too'),],
+    b"",
+)
 def perfphases(ui, repo, **opts):
     """benchmark phasesets computation"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     _phases = repo._phasecache
     full = opts.get(b'full')
+
     def d():
         phases = _phases
         if full:
@@ -1082,30 +1282,32 @@
             phases = repo._phasecache
         phases.invalidate()
         phases.loadphaserevs(repo)
+
     timer(d)
     fm.end()
 
-@command(b'perfphasesremote',
-         [], b"[DEST]")
+
+@command(b'perfphasesremote', [], b"[DEST]")
 def perfphasesremote(ui, repo, dest=None, **opts):
     """benchmark time needed to analyse phases of the remote server"""
-    from mercurial.node import (
-        bin,
-    )
+    from mercurial.node import bin
     from mercurial import (
         exchange,
         hg,
         phases,
     )
+
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
 
     path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
-        raise error.Abort((b'default repository not configured!'),
-                          hint=(b"see 'hg help config.paths'"))
+        raise error.Abort(
+            b'default repository not configured!',
+            hint=b"see 'hg help config.paths'",
+        )
     dest = path.pushloc or path.loc
-    ui.status((b'analysing phase of %s\n') % util.hidepassword(dest))
+    ui.status(b'analysing phase of %s\n' % util.hidepassword(dest))
     other = hg.peer(repo, opts, dest)
 
     # easier to perform discovery through the operation
@@ -1115,36 +1317,43 @@
     remotesubset = op.fallbackheads
 
     with other.commandexecutor() as e:
-        remotephases = e.callcommand(b'listkeys',
-                       {b'namespace': b'phases'}).result()
+        remotephases = e.callcommand(
+            b'listkeys', {b'namespace': b'phases'}
+        ).result()
     del other
     publishing = remotephases.get(b'publishing', False)
     if publishing:
-        ui.status((b'publishing: yes\n'))
+        ui.status(b'publishing: yes\n')
     else:
-        ui.status((b'publishing: no\n'))
+        ui.status(b'publishing: no\n')
 
     nodemap = repo.changelog.nodemap
     nonpublishroots = 0
     for nhex, phase in remotephases.iteritems():
-        if nhex == b'publishing': # ignore data related to publish option
+        if nhex == b'publishing':  # ignore data related to publish option
             continue
         node = bin(nhex)
         if node in nodemap and int(phase):
             nonpublishroots += 1
-    ui.status((b'number of roots: %d\n') % len(remotephases))
-    ui.status((b'number of known non public roots: %d\n') % nonpublishroots)
+    ui.status(b'number of roots: %d\n' % len(remotephases))
+    ui.status(b'number of known non public roots: %d\n' % nonpublishroots)
+
     def d():
-        phases.remotephasessummary(repo,
-                                   remotesubset,
-                                   remotephases)
+        phases.remotephasessummary(repo, remotesubset, remotephases)
+
     timer(d)
     fm.end()
 
-@command(b'perfmanifest',[
-            (b'm', b'manifest-rev', False, b'Look up a manifest node revision'),
-            (b'', b'clear-disk', False, b'clear on-disk caches too'),
-         ] + formatteropts, b'REV|NODE')
+
+@command(
+    b'perfmanifest',
+    [
+        (b'm', b'manifest-rev', False, b'Look up a manifest node revision'),
+        (b'', b'clear-disk', False, b'clear on-disk caches too'),
+    ]
+    + formatteropts,
+    b'REV|NODE',
+)
 def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts):
     """benchmark the time to read a manifest from disk and return a usable
     dict-like object
@@ -1169,25 +1378,32 @@
                 else:
                     t = repo.manifestlog._revlog.lookup(rev)
             except ValueError:
-                raise error.Abort(b'manifest revision must be integer or full '
-                                  b'node')
+                raise error.Abort(
+                    b'manifest revision must be integer or full node'
+                )
+
     def d():
         repo.manifestlog.clearcaches(clear_persisted_data=clear_disk)
         repo.manifestlog[t].read()
+
     timer(d)
     fm.end()
 
+
 @command(b'perfchangeset', formatteropts)
 def perfchangeset(ui, repo, rev, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     n = scmutil.revsingle(repo, rev).node()
+
     def d():
         repo.changelog.read(n)
-        #repo.changelog._cache = None
+        # repo.changelog._cache = None
+
     timer(d)
     fm.end()
 
+
 @command(b'perfignore', formatteropts)
 def perfignore(ui, repo, **opts):
     """benchmark operation related to computing ignore"""
@@ -1205,10 +1421,15 @@
     timer(runone, setup=setupone, title=b"load")
     fm.end()
 
-@command(b'perfindex', [
-            (b'', b'rev', [], b'revision to be looked up (default tip)'),
-            (b'', b'no-lookup', None, b'do not revision lookup post creation'),
-         ] + formatteropts)
+
+@command(
+    b'perfindex',
+    [
+        (b'', b'rev', [], b'revision to be looked up (default tip)'),
+        (b'', b'no-lookup', None, b'do not revision lookup post creation'),
+    ]
+    + formatteropts,
+)
 def perfindex(ui, repo, **opts):
     """benchmark index creation time followed by a lookup
 
@@ -1231,9 +1452,10 @@
     It is not currently possible to check for lookup of a missing node. For
     deeper lookup benchmarking, checkout the `perfnodemap` command."""
     import mercurial.revlog
+
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
     if opts[b'no_lookup']:
         if opts['rev']:
             raise error.Abort('--no-lookup and --rev are mutually exclusive')
@@ -1249,20 +1471,28 @@
     # find the filecache func directly
     # This avoid polluting the benchmark with the filecache logic
     makecl = unfi.__class__.changelog.func
+
     def setup():
         # probably not necessary, but for good measure
         clearchangelog(unfi)
+
     def d():
         cl = makecl(unfi)
         for n in nodes:
             cl.rev(n)
+
     timer(d, setup=setup)
     fm.end()
 
-@command(b'perfnodemap', [
-          (b'', b'rev', [], b'revision to be looked up (default tip)'),
-          (b'', b'clear-caches', True, b'clear revlog cache between calls'),
-    ] + formatteropts)
+
+@command(
+    b'perfnodemap',
+    [
+        (b'', b'rev', [], b'revision to be looked up (default tip)'),
+        (b'', b'clear-caches', True, b'clear revlog cache between calls'),
+    ]
+    + formatteropts,
+)
 def perfnodemap(ui, repo, **opts):
     """benchmark the time necessary to look up revision from a cold nodemap
 
@@ -1281,9 +1511,10 @@
     hexlookup, prefix lookup and missing lookup would also be valuable.
     """
     import mercurial.revlog
+
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
-    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
 
     unfi = repo.unfiltered()
     clearcaches = opts['clear_caches']
@@ -1298,6 +1529,7 @@
 
     # use a list to pass reference to a nodemap from one closure to the next
     nodeget = [None]
+
     def setnodeget():
         # probably not necessary, but for good measure
         clearchangelog(unfi)
@@ -1310,28 +1542,35 @@
 
     setup = None
     if clearcaches:
+
         def setup():
             setnodeget()
+
     else:
         setnodeget()
-        d() # prewarm the data structure
+        d()  # prewarm the data structure
     timer(d, setup=setup)
     fm.end()
 
+
 @command(b'perfstartup', formatteropts)
 def perfstartup(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
+
     def d():
         if os.name != r'nt':
-            os.system(b"HGRCPATH= %s version -q > /dev/null" %
-                      fsencode(sys.argv[0]))
+            os.system(
+                b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0])
+            )
         else:
             os.environ[r'HGRCPATH'] = r' '
             os.system(r"%s version -q > NUL" % sys.argv[0])
+
     timer(d)
     fm.end()
 
+
 @command(b'perfparents', formatteropts)
 def perfparents(ui, repo, **opts):
     """benchmark the time necessary to fetch one changeset's parents.
@@ -1350,33 +1589,42 @@
         raise error.Abort(b"repo needs %d commits for this test" % count)
     repo = repo.unfiltered()
     nl = [repo.changelog.node(i) for i in _xrange(count)]
+
     def d():
         for n in nl:
             repo.changelog.parents(n)
+
     timer(d)
     fm.end()
 
+
 @command(b'perfctxfiles', formatteropts)
 def perfctxfiles(ui, repo, x, **opts):
     opts = _byteskwargs(opts)
     x = int(x)
     timer, fm = gettimer(ui, opts)
+
     def d():
         len(repo[x].files())
+
     timer(d)
     fm.end()
 
+
 @command(b'perfrawfiles', formatteropts)
 def perfrawfiles(ui, repo, x, **opts):
     opts = _byteskwargs(opts)
     x = int(x)
     timer, fm = gettimer(ui, opts)
     cl = repo.changelog
+
     def d():
         len(cl.read(x)[3])
+
     timer(d)
     fm.end()
 
+
 @command(b'perflookup', formatteropts)
 def perflookup(ui, repo, rev, **opts):
     opts = _byteskwargs(opts)
@@ -1384,10 +1632,15 @@
     timer(lambda: len(repo.lookup(rev)))
     fm.end()
 
-@command(b'perflinelogedits',
-         [(b'n', b'edits', 10000, b'number of edits'),
-          (b'', b'max-hunk-lines', 10, b'max lines in a hunk'),
-          ], norepo=True)
+
+@command(
+    b'perflinelogedits',
+    [
+        (b'n', b'edits', 10000, b'number of edits'),
+        (b'', b'max-hunk-lines', 10, b'max lines in a hunk'),
+    ],
+    norepo=True,
+)
 def perflinelogedits(ui, **opts):
     from mercurial import linelog
 
@@ -1418,6 +1671,7 @@
     timer(d)
     fm.end()
 
+
 @command(b'perfrevrange', formatteropts)
 def perfrevrange(ui, repo, *specs, **opts):
     opts = _byteskwargs(opts)
@@ -1426,34 +1680,44 @@
     timer(lambda: len(revrange(repo, specs)))
     fm.end()
 
+
 @command(b'perfnodelookup', formatteropts)
 def perfnodelookup(ui, repo, rev, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     import mercurial.revlog
-    mercurial.revlog._prereadsize = 2**24 # disable lazy parser in old hg
+
+    mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
     n = scmutil.revsingle(repo, rev).node()
     cl = mercurial.revlog.revlog(getsvfs(repo), b"00changelog.i")
+
     def d():
         cl.rev(n)
         clearcaches(cl)
+
     timer(d)
     fm.end()
 
-@command(b'perflog',
-         [(b'', b'rename', False, b'ask log to follow renames')
-         ] + formatteropts)
+
+@command(
+    b'perflog',
+    [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts,
+)
 def perflog(ui, repo, rev=None, **opts):
     opts = _byteskwargs(opts)
     if rev is None:
-        rev=[]
+        rev = []
     timer, fm = gettimer(ui, opts)
     ui.pushbuffer()
-    timer(lambda: commands.log(ui, repo, rev=rev, date=b'', user=b'',
-                               copies=opts.get(b'rename')))
+    timer(
+        lambda: commands.log(
+            ui, repo, rev=rev, date=b'', user=b'', copies=opts.get(b'rename')
+        )
+    )
     ui.popbuffer()
     fm.end()
 
+
 @command(b'perfmoonwalk', formatteropts)
 def perfmoonwalk(ui, repo, **opts):
     """benchmark walking the changelog backwards
@@ -1462,21 +1726,27 @@
     """
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
+
     def moonwalk():
         for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1):
             ctx = repo[i]
-            ctx.branch() # read changelog data (in addition to the index)
+            ctx.branch()  # read changelog data (in addition to the index)
+
     timer(moonwalk)
     fm.end()
 
-@command(b'perftemplating',
-         [(b'r', b'rev', [], b'revisions to run the template on'),
-          ] + formatteropts)
+
+@command(
+    b'perftemplating',
+    [(b'r', b'rev', [], b'revisions to run the template on'),] + formatteropts,
+)
 def perftemplating(ui, repo, testedtemplate=None, **opts):
     """test the rendering time of a given template"""
     if makelogtemplater is None:
-        raise error.Abort((b"perftemplating not available with this Mercurial"),
-                          hint=b"use 4.3 or later")
+        raise error.Abort(
+            b"perftemplating not available with this Mercurial",
+            hint=b"use 4.3 or later",
+        )
 
     opts = _byteskwargs(opts)
 
@@ -1488,11 +1758,14 @@
         revs = [b'all()']
     revs = list(scmutil.revrange(repo, revs))
 
-    defaulttemplate = (b'{date|shortdate} [{rev}:{node|short}]'
-                       b' {author|person}: {desc|firstline}\n')
+    defaulttemplate = (
+        b'{date|shortdate} [{rev}:{node|short}]'
+        b' {author|person}: {desc|firstline}\n'
+    )
     if testedtemplate is None:
         testedtemplate = defaulttemplate
     displayer = makelogtemplater(nullui, repo, testedtemplate)
+
     def format():
         for r in revs:
             ctx = repo[r]
@@ -1503,6 +1776,7 @@
     timer(format)
     fm.end()
 
+
 def _displaystats(ui, opts, entries, data):
     pass
     # use a second formatter because the data are quite different, not sure
@@ -1549,12 +1823,16 @@
             fm.plain('%s: %s\n' % (l, stats[l]))
     fm.end()
 
-@command(b'perfhelper-mergecopies', formatteropts +
-         [
-          (b'r', b'revs', [], b'restrict search to these revisions'),
-          (b'', b'timing', False, b'provides extra data (costly)'),
-          (b'', b'stats', False, b'provides statistic about the measured data'),
-         ])
+
+@command(
+    b'perfhelper-mergecopies',
+    formatteropts
+    + [
+        (b'r', b'revs', [], b'restrict search to these revisions'),
+        (b'', b'timing', False, b'provides extra data (costly)'),
+        (b'', b'stats', False, b'provides statistics about the measured data'),
+    ],
+)
 def perfhelpermergecopies(ui, repo, revs=[], **opts):
     """find statistics about potential parameters for `perfmergecopies`
 
@@ -1589,10 +1867,13 @@
         ("p2.time", "%(p2.time)12.3f"),
         ("renames", "%(nbrenamedfiles)12d"),
         ("total.time", "%(time)12.3f"),
-        ]
+    ]
     if not dotiming:
-        output_template = [i for i in output_template
-                           if not ('time' in i[0] or 'renames' in i[0])]
+        output_template = [
+            i
+            for i in output_template
+            if not ('time' in i[0] or 'renames' in i[0])
+        ]
     header_names = [h for (h, v) in output_template]
     output = ' '.join([v for (h, v) in output_template]) + '\n'
     header = ' '.join(['%12s'] * len(header_names)) + '\n'
@@ -1634,27 +1915,19 @@
             }
             if dostats:
                 if p1missing:
-                    alldata['nbrevs'].append((
-                        data['p1.nbrevs'],
-                        b.hex(),
-                        p1.hex()
-                    ))
-                    alldata['nbmissingfiles'].append((
-                        data['p1.nbmissingfiles'],
-                        b.hex(),
-                        p1.hex()
-                    ))
+                    alldata['nbrevs'].append(
+                        (data['p1.nbrevs'], b.hex(), p1.hex())
+                    )
+                    alldata['nbmissingfiles'].append(
+                        (data['p1.nbmissingfiles'], b.hex(), p1.hex())
+                    )
                 if p2missing:
-                    alldata['nbrevs'].append((
-                        data['p2.nbrevs'],
-                        b.hex(),
-                        p2.hex()
-                    ))
-                    alldata['nbmissingfiles'].append((
-                        data['p2.nbmissingfiles'],
-                        b.hex(),
-                        p2.hex()
-                    ))
+                    alldata['nbrevs'].append(
+                        (data['p2.nbrevs'], b.hex(), p2.hex())
+                    )
+                    alldata['nbmissingfiles'].append(
+                        (data['p2.nbmissingfiles'], b.hex(), p2.hex())
+                    )
             if dotiming:
                 begin = util.timer()
                 mergedata = copies.mergecopies(repo, p1, p2, b)
@@ -1682,40 +1955,31 @@
 
                 if dostats:
                     if p1missing:
-                        alldata['parentnbrenames'].append((
-                            data['p1.renamedfiles'],
-                            b.hex(),
-                            p1.hex()
-                        ))
-                        alldata['parenttime'].append((
-                            data['p1.time'],
-                            b.hex(),
-                            p1.hex()
-                        ))
+                        alldata['parentnbrenames'].append(
+                            (data['p1.renamedfiles'], b.hex(), p1.hex())
+                        )
+                        alldata['parenttime'].append(
+                            (data['p1.time'], b.hex(), p1.hex())
+                        )
                     if p2missing:
-                        alldata['parentnbrenames'].append((
-                            data['p2.renamedfiles'],
-                            b.hex(),
-                            p2.hex()
-                        ))
-                        alldata['parenttime'].append((
-                            data['p2.time'],
-                            b.hex(),
-                            p2.hex()
-                        ))
+                        alldata['parentnbrenames'].append(
+                            (data['p2.renamedfiles'], b.hex(), p2.hex())
+                        )
+                        alldata['parenttime'].append(
+                            (data['p2.time'], b.hex(), p2.hex())
+                        )
                     if p1missing or p2missing:
-                        alldata['totalnbrenames'].append((
-                            data['nbrenamedfiles'],
-                            b.hex(),
-                            p1.hex(),
-                            p2.hex()
-                        ))
-                        alldata['totaltime'].append((
-                            data['time'],
-                            b.hex(),
-                            p1.hex(),
-                            p2.hex()
-                        ))
+                        alldata['totalnbrenames'].append(
+                            (
+                                data['nbrenamedfiles'],
+                                b.hex(),
+                                p1.hex(),
+                                p2.hex(),
+                            )
+                        )
+                        alldata['totaltime'].append(
+                            (data['time'], b.hex(), p1.hex(), p2.hex())
+                        )
             fm.startitem()
             fm.data(**data)
             # make node pretty for the human output
@@ -1734,20 +1998,24 @@
             ('nbmissingfiles', 'number of missing files at head'),
         ]
         if dotiming:
-            entries.append(('parentnbrenames',
-                            'rename from one parent to base'))
+            entries.append(
+                ('parentnbrenames', 'rename from one parent to base')
+            )
             entries.append(('totalnbrenames', 'total number of renames'))
             entries.append(('parenttime', 'time for one parent'))
             entries.append(('totaltime', 'time for both parents'))
         _displaystats(ui, opts, entries, alldata)
 
 
-@command(b'perfhelper-pathcopies', formatteropts +
-         [
-          (b'r', b'revs', [], b'restrict search to these revisions'),
-          (b'', b'timing', False, b'provides extra data (costly)'),
-          (b'', b'stats', False, b'provides statistic about the measured data'),
-         ])
+@command(
+    b'perfhelper-pathcopies',
+    formatteropts
+    + [
+        (b'r', b'revs', [], b'restrict search to these revisions'),
+        (b'', b'timing', False, b'provides extra data (costly)'),
+        (b'', b'stats', False, b'provides statistic about the measured data'),
+    ],
+)
 def perfhelperpathcopies(ui, repo, revs=[], **opts):
     """find statistic about potential parameters for the `perftracecopies`
 
@@ -1769,23 +2037,32 @@
 
     if dotiming:
         header = '%12s %12s %12s %12s %12s %12s\n'
-        output = ("%(source)12s %(destination)12s "
-                  "%(nbrevs)12d %(nbmissingfiles)12d "
-                  "%(nbrenamedfiles)12d %(time)18.5f\n")
-        header_names = ("source", "destination", "nb-revs", "nb-files",
-                        "nb-renames", "time")
+        output = (
+            "%(source)12s %(destination)12s "
+            "%(nbrevs)12d %(nbmissingfiles)12d "
+            "%(nbrenamedfiles)12d %(time)18.5f\n"
+        )
+        header_names = (
+            "source",
+            "destination",
+            "nb-revs",
+            "nb-files",
+            "nb-renames",
+            "time",
+        )
         fm.plain(header % header_names)
     else:
         header = '%12s %12s %12s %12s\n'
-        output = ("%(source)12s %(destination)12s "
-                  "%(nbrevs)12d %(nbmissingfiles)12d\n")
+        output = (
+            "%(source)12s %(destination)12s "
+            "%(nbrevs)12d %(nbmissingfiles)12d\n"
+        )
         fm.plain(header % ("source", "destination", "nb-revs", "nb-files"))
 
     if not revs:
         revs = ['all()']
     revs = scmutil.revrange(repo, revs)
 
-
     if dostats:
         alldata = {
             'nbrevs': [],
@@ -1815,16 +2092,12 @@
                     b'nbmissingfiles': len(missing),
                 }
                 if dostats:
-                    alldata['nbrevs'].append((
-                        data['nbrevs'],
-                        base.hex(),
-                        parent.hex(),
-                    ))
-                    alldata['nbmissingfiles'].append((
-                        data['nbmissingfiles'],
-                        base.hex(),
-                        parent.hex(),
-                    ))
+                    alldata['nbrevs'].append(
+                        (data['nbrevs'], base.hex(), parent.hex(),)
+                    )
+                    alldata['nbmissingfiles'].append(
+                        (data['nbmissingfiles'], base.hex(), parent.hex(),)
+                    )
                 if dotiming:
                     begin = util.timer()
                     renames = copies.pathcopies(base, parent)
@@ -1833,16 +2106,12 @@
                     data['time'] = end - begin
                     data['nbrenamedfiles'] = len(renames)
                     if dostats:
-                        alldata['time'].append((
-                            data['time'],
-                            base.hex(),
-                            parent.hex(),
-                        ))
-                        alldata['nbrenames'].append((
-                            data['nbrenamedfiles'],
-                            base.hex(),
-                            parent.hex(),
-                        ))
+                        alldata['time'].append(
+                            (data['time'], base.hex(), parent.hex(),)
+                        )
+                        alldata['nbrenames'].append(
+                            (data['nbrenamedfiles'], base.hex(), parent.hex(),)
+                        )
                 fm.startitem()
                 fm.data(**data)
                 out = data.copy()
@@ -1860,11 +2129,11 @@
             ('nbmissingfiles', 'number of missing files at head'),
         ]
         if dotiming:
-            entries.append(('nbrenames',
-                            'renamed files'))
+            entries.append(('nbrenames', 'renamed files'))
             entries.append(('time', 'time'))
         _displaystats(ui, opts, entries, alldata)
 
+
 @command(b'perfcca', formatteropts)
 def perfcca(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -1872,16 +2141,20 @@
     timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate))
     fm.end()
 
+
 @command(b'perffncacheload', formatteropts)
 def perffncacheload(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     s = repo.store
+
     def d():
         s.fncache._load()
+
     timer(d)
     fm.end()
 
+
 @command(b'perffncachewrite', formatteropts)
 def perffncachewrite(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -1891,26 +2164,32 @@
     s.fncache._load()
     tr = repo.transaction(b'perffncachewrite')
     tr.addbackup(b'fncache')
+
     def d():
         s.fncache._dirty = True
         s.fncache.write(tr)
+
     timer(d)
     tr.close()
     lock.release()
     fm.end()
 
+
 @command(b'perffncacheencode', formatteropts)
 def perffncacheencode(ui, repo, **opts):
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     s = repo.store
     s.fncache._load()
+
     def d():
         for p in s.fncache.entries:
             s.encode(p)
+
     timer(d)
     fm.end()
 
+
 def _bdiffworker(q, blocks, xdiff, ready, done):
     while not done.is_set():
         pair = q.get()
@@ -1923,10 +2202,11 @@
                 mdiff.textdiff(*pair)
             q.task_done()
             pair = q.get()
-        q.task_done() # for the None one
+        q.task_done()  # for the None one
         with ready:
             ready.wait()
 
+
 def _manifestrevision(repo, mnode):
     ml = repo.manifestlog
 
@@ -1937,15 +2217,25 @@
 
     return store.revision(mnode)
 
-@command(b'perfbdiff', revlogopts + formatteropts + [
-    (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
-    (b'', b'alldata', False, b'test bdiffs for all associated revisions'),
-    (b'', b'threads', 0, b'number of thread to use (disable with 0)'),
-    (b'', b'blocks', False, b'test computing diffs into blocks'),
-    (b'', b'xdiff', False, b'use xdiff algorithm'),
+
+@command(
+    b'perfbdiff',
+    revlogopts
+    + formatteropts
+    + [
+        (
+            b'',
+            b'count',
+            1,
+            b'number of revisions to test (when using --startrev)',
+        ),
+        (b'', b'alldata', False, b'test bdiffs for all associated revisions'),
+        (b'', b'threads', 0, b'number of threads to use (disable with 0)'),
+        (b'', b'blocks', False, b'test computing diffs into blocks'),
+        (b'', b'xdiff', False, b'use xdiff algorithm'),
     ],
-
-    b'-c|-m|FILE REV')
+    b'-c|-m|FILE REV',
+)
 def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts):
     """benchmark a bdiff between revisions
 
@@ -2001,6 +2291,7 @@
 
     withthreads = threads > 0
     if not withthreads:
+
         def d():
             for pair in textpairs:
                 if xdiff:
@@ -2009,6 +2300,7 @@
                     mdiff.bdiff.blocks(*pair)
                 else:
                     mdiff.textdiff(*pair)
+
     else:
         q = queue()
         for i in _xrange(threads):
@@ -2016,9 +2308,11 @@
         ready = threading.Condition()
         done = threading.Event()
         for i in _xrange(threads):
-            threading.Thread(target=_bdiffworker,
-                             args=(q, blocks, xdiff, ready, done)).start()
+            threading.Thread(
+                target=_bdiffworker, args=(q, blocks, xdiff, ready, done)
+            ).start()
         q.join()
+
         def d():
             for pair in textpairs:
                 q.put(pair)
@@ -2027,6 +2321,7 @@
             with ready:
                 ready.notify_all()
             q.join()
+
     timer, fm = gettimer(ui, opts)
     timer(d)
     fm.end()
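
The threaded variant above keeps a pool of `_bdiffworker` threads alive
across timed runs: each run feeds pairs through a queue, one None sentinel
per thread closes the batch, and a condition variable parks the workers
until the next run. A self-contained stdlib sketch of the same pattern
(names are illustrative; trivial arithmetic stands in for mdiff.textdiff):

    import threading
    from queue import Queue

    NTHREADS = 4

    def worker(q, ready, done):
        while not done.is_set():
            item = q.get()
            while item is not None:  # None closes the current batch
                item * item          # stand-in for the real diff work
                q.task_done()
                item = q.get()
            q.task_done()            # account for the sentinel itself
            with ready:
                ready.wait()         # park until the next timed batch

    q, ready, done = Queue(), threading.Condition(), threading.Event()
    for _ in range(NTHREADS):
        threading.Thread(
            target=worker, args=(q, ready, done), daemon=True
        ).start()                    # daemon: never block interpreter exit

    for item in range(1000):         # one timed batch
        q.put(item)
    for _ in range(NTHREADS):
        q.put(None)
    q.join()                         # every put() has been task_done()'d

    done.set()                       # then release the pool, much like the
    with ready:                      # teardown just below
        ready.notify_all()
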
@@ -2038,10 +2333,22 @@
         with ready:
             ready.notify_all()
 
-@command(b'perfunidiff', revlogopts + formatteropts + [
-    (b'', b'count', 1, b'number of revisions to test (when using --startrev)'),
-    (b'', b'alldata', False, b'test unidiffs for all associated revisions'),
-    ], b'-c|-m|FILE REV')
+
+@command(
+    b'perfunidiff',
+    revlogopts
+    + formatteropts
+    + [
+        (
+            b'',
+            b'count',
+            1,
+            b'number of revisions to test (when using --startrev)',
+        ),
+        (b'', b'alldata', False, b'test unidiffs for all associated revisions'),
+    ],
+    b'-c|-m|FILE REV',
+)
 def perfunidiff(ui, repo, file_, rev=None, count=None, **opts):
     """benchmark a unified diff between revisions
 
@@ -2096,14 +2403,17 @@
         for left, right in textpairs:
             # The date strings don't matter, so we pass empty strings.
             headerlines, hunks = mdiff.unidiff(
-                left, b'', right, b'', b'left', b'right', binary=False)
+                left, b'', right, b'', b'left', b'right', binary=False
+            )
             # consume iterators in roughly the way patch.py does
             b'\n'.join(headerlines)
             b''.join(sum((list(hlines) for hrange, hlines in hunks), []))
+
     timer, fm = gettimer(ui, opts)
     timer(d)
     fm.end()
 
+
 @command(b'perfdiffwd', formatteropts)
 def perfdiffwd(ui, repo, **opts):
     """Profile diff of working directory changes"""
@@ -2113,21 +2423,23 @@
         'w': 'ignore_all_space',
         'b': 'ignore_space_change',
         'B': 'ignore_blank_lines',
-        }
+    }
 
     for diffopt in ('', 'w', 'b', 'B', 'wB'):
         opts = dict((options[c], b'1') for c in diffopt)
+
         def d():
             ui.pushbuffer()
             commands.diff(ui, repo, **opts)
             ui.popbuffer()
+
         diffopt = diffopt.encode('ascii')
         title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none')
         timer(d, title=title)
     fm.end()
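
Each pass of the loop above expands one diffopt string into keyword
arguments for commands.diff; for instance, for diffopt = 'wB', under
Python 3:

    >>> options = {'w': 'ignore_all_space', 'b': 'ignore_space_change',
    ...            'B': 'ignore_blank_lines'}
    >>> dict((options[c], b'1') for c in 'wB')
    {'ignore_all_space': b'1', 'ignore_blank_lines': b'1'}
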
 
-@command(b'perfrevlogindex', revlogopts + formatteropts,
-         b'-c|-m|FILE')
+
+@command(b'perfrevlogindex', revlogopts + formatteropts, b'-c|-m|FILE')
 def perfrevlogindex(ui, repo, file_=None, **opts):
     """Benchmark operations against a revlog index.
 
@@ -2150,7 +2462,7 @@
         revlogio = revlog.revlogio()
         inline = header & (1 << 16)
     else:
-        raise error.Abort((b'unsupported revlog version: %d') % version)
+        raise error.Abort(b'unsupported revlog version: %d' % version)
 
     rllen = len(rl)
 
@@ -2221,22 +2533,26 @@
         (lambda: resolvenode(node75), b'look up node at 3/4 len'),
         (lambda: resolvenode(node100), b'look up node at tip'),
         # 2x variation is to measure caching impact.
-        (lambda: resolvenodes(allnodes),
-         b'look up all nodes (forward)'),
-        (lambda: resolvenodes(allnodes, 2),
-         b'look up all nodes 2x (forward)'),
-        (lambda: resolvenodes(allnodesrev),
-         b'look up all nodes (reverse)'),
-        (lambda: resolvenodes(allnodesrev, 2),
-         b'look up all nodes 2x (reverse)'),
-        (lambda: getentries(allrevs),
-         b'retrieve all index entries (forward)'),
-        (lambda: getentries(allrevs, 2),
-         b'retrieve all index entries 2x (forward)'),
-        (lambda: getentries(allrevsrev),
-         b'retrieve all index entries (reverse)'),
-        (lambda: getentries(allrevsrev, 2),
-         b'retrieve all index entries 2x (reverse)'),
+        (lambda: resolvenodes(allnodes), b'look up all nodes (forward)'),
+        (lambda: resolvenodes(allnodes, 2), b'look up all nodes 2x (forward)'),
+        (lambda: resolvenodes(allnodesrev), b'look up all nodes (reverse)'),
+        (
+            lambda: resolvenodes(allnodesrev, 2),
+            b'look up all nodes 2x (reverse)',
+        ),
+        (lambda: getentries(allrevs), b'retrieve all index entries (forward)'),
+        (
+            lambda: getentries(allrevs, 2),
+            b'retrieve all index entries 2x (forward)',
+        ),
+        (
+            lambda: getentries(allrevsrev),
+            b'retrieve all index entries (reverse)',
+        ),
+        (
+            lambda: getentries(allrevsrev, 2),
+            b'retrieve all index entries 2x (reverse)',
+        ),
     ]
 
     for fn, title in benches:
@@ -2244,13 +2560,21 @@
         timer(fn, title=title)
         fm.end()
 
-@command(b'perfrevlogrevisions', revlogopts + formatteropts +
-         [(b'd', b'dist', 100, b'distance between the revisions'),
-          (b's', b'startrev', 0, b'revision to start reading at'),
-          (b'', b'reverse', False, b'read in reverse')],
-         b'-c|-m|FILE')
-def perfrevlogrevisions(ui, repo, file_=None, startrev=0, reverse=False,
-                        **opts):
+
+@command(
+    b'perfrevlogrevisions',
+    revlogopts
+    + formatteropts
+    + [
+        (b'd', b'dist', 100, b'distance between the revisions'),
+        (b's', b'startrev', 0, b'revision to start reading at'),
+        (b'', b'reverse', False, b'read in reverse'),
+    ],
+    b'-c|-m|FILE',
+)
+def perfrevlogrevisions(
+    ui, repo, file_=None, startrev=0, reverse=False, **opts
+):
     """Benchmark reading a series of revisions from a revlog.
 
     By default, we read every ``-d/--dist`` revision from 0 to tip of
@@ -2286,16 +2610,22 @@
     timer(d)
     fm.end()
 
-@command(b'perfrevlogwrite', revlogopts + formatteropts +
-         [(b's', b'startrev', 1000, b'revision to start writing at'),
-          (b'', b'stoprev', -1, b'last revision to write'),
-          (b'', b'count', 3, b'number of passes to perform'),
-          (b'', b'details', False, b'print timing for every revisions tested'),
-          (b'', b'source', b'full', b'the kind of data feed in the revlog'),
-          (b'', b'lazydeltabase', True, b'try the provided delta first'),
-          (b'', b'clear-caches', True, b'clear revlog cache between calls'),
-         ],
-         b'-c|-m|FILE')
+
+@command(
+    b'perfrevlogwrite',
+    revlogopts
+    + formatteropts
+    + [
+        (b's', b'startrev', 1000, b'revision to start writing at'),
+        (b'', b'stoprev', -1, b'last revision to write'),
+        (b'', b'count', 3, b'number of passes to perform'),
+        (b'', b'details', False, b'print timing for every revision tested'),
+        (b'', b'source', b'full', b'the kind of data to feed in the revlog'),
+        (b'', b'lazydeltabase', True, b'try the provided delta first'),
+        (b'', b'clear-caches', True, b'clear revlog cache between calls'),
+    ],
+    b'-c|-m|FILE',
+)
 def perfrevlogwrite(ui, repo, file_=None, startrev=1000, stoprev=-1, **opts):
     """Benchmark writing a series of revisions to a revlog.
 
@@ -2329,8 +2659,13 @@
     lazydeltabase = opts['lazydeltabase']
     source = opts['source']
     clearcaches = opts['clear_caches']
-    validsource = (b'full', b'parent-1', b'parent-2', b'parent-smallest',
-                   b'storage')
+    validsource = (
+        b'full',
+        b'parent-1',
+        b'parent-2',
+        b'parent-smallest',
+        b'storage',
+    )
     if source not in validsource:
         raise error.Abort('invalid source type: %s' % source)
 
@@ -2340,9 +2675,16 @@
         raise error.Abort('invalid run count: %d' % count)
     allresults = []
     for c in range(count):
-        timing = _timeonewrite(ui, rl, source, startrev, stoprev, c + 1,
-                               lazydeltabase=lazydeltabase,
-                               clearcaches=clearcaches)
+        timing = _timeonewrite(
+            ui,
+            rl,
+            source,
+            startrev,
+            stoprev,
+            c + 1,
+            lazydeltabase=lazydeltabase,
+            clearcaches=clearcaches,
+        )
         allresults.append(timing)
 
     ### consolidate the results in a single list
@@ -2396,20 +2738,37 @@
     # for now
     totaltime = []
     for item in allresults:
-        totaltime.append((sum(x[1][0] for x in item),
-                          sum(x[1][1] for x in item),
-                          sum(x[1][2] for x in item),)
+        totaltime.append(
+            (
+                sum(x[1][0] for x in item),
+                sum(x[1][1] for x in item),
+                sum(x[1][2] for x in item),
+            )
         )
-    formatone(fm, totaltime, title="total time (%d revs)" % resultcount,
-              displayall=displayall)
+    formatone(
+        fm,
+        totaltime,
+        title="total time (%d revs)" % resultcount,
+        displayall=displayall,
+    )
     fm.end()
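
Each element of allresults holds one pass's entries, where each entry's
second element is a timing triple (as produced by _timeonewrite); the loop
above collapses a pass into a single summed triple. In miniature, with
hypothetical numbers:

    >>> item = [(0, (0.5, 0.25, 0.0)), (1, (0.25, 0.25, 0.125))]
    >>> (sum(x[1][0] for x in item),
    ...  sum(x[1][1] for x in item),
    ...  sum(x[1][2] for x in item))
    (0.75, 0.5, 0.125)
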
 
+
 class _faketr(object):
     def add(s, x, y, z=None):
         return None
 
-def _timeonewrite(ui, orig, source, startrev, stoprev, runidx=None,
-                  lazydeltabase=True, clearcaches=True):
+
+def _timeonewrite(
+    ui,
+    orig,
+    source,
+    startrev,
+    stoprev,
+    runidx=None,
+    lazydeltabase=True,
+    clearcaches=True,
+):
     timings = []
     tr = _faketr()
     with _temprevlog(ui, orig, startrev) as dest:
@@ -2419,16 +2778,21 @@
         topic = 'adding'
         if runidx is not None:
             topic += ' (run #%d)' % runidx
-         # Support both old and new progress API
+        # Support both old and new progress API
         if util.safehasattr(ui, 'makeprogress'):
             progress = ui.makeprogress(topic, unit='revs', total=total)
+
             def updateprogress(pos):
                 progress.update(pos)
+
             def completeprogress():
                 progress.complete()
+
         else:
+
             def updateprogress(pos):
                 ui.progress(topic, pos, unit='revs', total=total)
+
             def completeprogress():
                 ui.progress(topic, None, unit='revs', total=total)
 
@@ -2445,6 +2809,7 @@
         completeprogress()
     return timings
 
+
 def _getrevisionseed(orig, rev, tr, source):
     from mercurial.node import nullid
 
@@ -2481,8 +2846,11 @@
         baserev = orig.deltaparent(rev)
         cachedelta = (baserev, orig.revdiff(orig.node(baserev), rev))
 
-    return ((text, tr, linkrev, p1, p2),
-            {'node': node, 'flags': flags, 'cachedelta': cachedelta})
+    return (
+        (text, tr, linkrev, p1, p2),
+        {'node': node, 'flags': flags, 'cachedelta': cachedelta},
+    )
+
 
 @contextlib.contextmanager
 def _temprevlog(ui, orig, truncaterev):
@@ -2523,9 +2891,9 @@
         vfs = vfsmod.vfs(tmpdir)
         vfs.options = getattr(orig.opener, 'options', None)
 
-        dest = revlog.revlog(vfs,
-                             indexfile=indexname,
-                             datafile=dataname, **revlogkwargs)
+        dest = revlog.revlog(
+            vfs, indexfile=indexname, datafile=dataname, **revlogkwargs
+        )
         if dest._inline:
             raise error.Abort('not supporting inline revlog (yet)')
         # make sure internals are initialized
@@ -2535,10 +2903,17 @@
     finally:
         shutil.rmtree(tmpdir, True)
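
_temprevlog above is a context manager that copies the revlog into a
scratch directory and always cleans up, even when the benchmark raises.
Its stdlib skeleton, as a sketch (not the mercurial API itself):

    import contextlib
    import shutil
    import tempfile

    @contextlib.contextmanager
    def scratchdir(prefix='tmprevlog'):
        path = tempfile.mkdtemp(prefix=prefix)
        try:
            yield path                 # the `with` body runs here
        finally:
            shutil.rmtree(path, True)  # True ignores cleanup errors, as above
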
 
-@command(b'perfrevlogchunks', revlogopts + formatteropts +
-         [(b'e', b'engines', b'', b'compression engines to use'),
-          (b's', b'startrev', 0, b'revision to start at')],
-         b'-c|-m|FILE')
+
+@command(
+    b'perfrevlogchunks',
+    revlogopts
+    + formatteropts
+    + [
+        (b'e', b'engines', b'', b'compression engines to use'),
+        (b's', b'startrev', 0, b'revision to start at'),
+    ],
+    b'-c|-m|FILE',
+)
 def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts):
     """Benchmark operations on revlog chunks.
 
@@ -2645,17 +3020,26 @@
 
     for engine in sorted(engines):
         compressor = util.compengines[engine].revlogcompressor()
-        benches.append((functools.partial(docompress, compressor),
-                        b'compress w/ %s' % engine))
+        benches.append(
+            (
+                functools.partial(docompress, compressor),
+                b'compress w/ %s' % engine,
+            )
+        )
 
     for fn, title in benches:
         timer, fm = gettimer(ui, opts)
         timer(fn, title=title)
         fm.end()
 
-@command(b'perfrevlogrevision', revlogopts + formatteropts +
-         [(b'', b'cache', False, b'use caches instead of clearing')],
-         b'-c|-m|FILE REV')
+
+@command(
+    b'perfrevlogrevision',
+    revlogopts
+    + formatteropts
+    + [(b'', b'cache', False, b'use caches instead of clearing')],
+    b'-c|-m|FILE REV',
+)
 def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts):
     """Benchmark obtaining a revlog revision.
 
@@ -2777,22 +3161,30 @@
         slicing = (lambda: doslice(r, chain, size), b'slice-sparse-chain')
         benches.append(slicing)
 
-    benches.extend([
-        (lambda: dorawchunks(data, slicedchain), b'rawchunks'),
-        (lambda: dodecompress(rawchunks), b'decompress'),
-        (lambda: dopatch(text, bins), b'patch'),
-        (lambda: dohash(text), b'hash'),
-    ])
+    benches.extend(
+        [
+            (lambda: dorawchunks(data, slicedchain), b'rawchunks'),
+            (lambda: dodecompress(rawchunks), b'decompress'),
+            (lambda: dopatch(text, bins), b'patch'),
+            (lambda: dohash(text), b'hash'),
+        ]
+    )
 
     timer, fm = gettimer(ui, opts)
     for fn, title in benches:
         timer(fn, title=title)
     fm.end()
 
-@command(b'perfrevset',
-         [(b'C', b'clear', False, b'clear volatile cache between each call.'),
-          (b'', b'contexts', False, b'obtain changectx for each revision')]
-         + formatteropts, b"REVSET")
+
+@command(
+    b'perfrevset',
+    [
+        (b'C', b'clear', False, b'clear volatile cache between each call.'),
+        (b'', b'contexts', False, b'obtain changectx for each revision'),
+    ]
+    + formatteropts,
+    b"REVSET",
+)
 def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts):
     """benchmark the execution time of a revset
 
@@ -2802,19 +3194,26 @@
     opts = _byteskwargs(opts)
 
     timer, fm = gettimer(ui, opts)
+
     def d():
         if clear:
             repo.invalidatevolatilesets()
         if contexts:
-            for ctx in repo.set(expr): pass
+            for ctx in repo.set(expr):
+                pass
         else:
-            for r in repo.revs(expr): pass
+            for r in repo.revs(expr):
+                pass
+
     timer(d)
     fm.end()
 
-@command(b'perfvolatilesets',
-         [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
-          ] + formatteropts)
+
+@command(
+    b'perfvolatilesets',
+    [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),]
+    + formatteropts,
+)
 def perfvolatilesets(ui, repo, *names, **opts):
     """benchmark the computation of various volatile set
 
@@ -2829,6 +3228,7 @@
             if opts[b'clear_obsstore']:
                 clearfilecache(repo, b'obsstore')
             obsolete.getrevs(repo, name)
+
         return d
 
     allobs = sorted(obsolete.cachefuncs)
@@ -2844,6 +3244,7 @@
             if opts[b'clear_obsstore']:
                 clearfilecache(repo, b'obsstore')
             repoview.filterrevs(repo, name)
+
         return d
 
     allfilter = sorted(repoview.filtertable)
@@ -2854,12 +3255,20 @@
         timer(getfiltered(name), title=name)
     fm.end()
 
-@command(b'perfbranchmap',
-         [(b'f', b'full', False,
-           b'Includes build time of subset'),
-          (b'', b'clear-revbranch', False,
-           b'purge the revbranch cache between computation'),
-          ] + formatteropts)
+
+@command(
+    b'perfbranchmap',
+    [
+        (b'f', b'full', False, b'Includes build time of subset'),
+        (
+            b'',
+            b'clear-revbranch',
+            False,
+            b'purge the revbranch cache between computation',
+        ),
+    ]
+    + formatteropts,
+)
 def perfbranchmap(ui, repo, *filternames, **opts):
     """benchmark the update of a branchmap
 
@@ -2869,6 +3278,7 @@
     full = opts.get(b"full", False)
     clear_revbranch = opts.get(b"clear_revbranch", False)
     timer, fm = gettimer(ui, opts)
+
     def getbranchmap(filtername):
         """generate a benchmark function for the filtername"""
         if filtername is None:
@@ -2880,6 +3290,7 @@
         else:
             # older versions
             filtered = view._branchcaches
+
         def d():
             if clear_revbranch:
                 repo.revbranchcache()._clear()
@@ -2888,7 +3299,9 @@
             else:
                 filtered.pop(filtername, None)
             view.branchmap()
+
         return d
+
     # add filter in smaller subset to bigger subset
     possiblefilters = set(repoview.filtertable)
     if filternames:
@@ -2933,11 +3346,16 @@
         branchcachewrite.restore()
     fm.end()
 
-@command(b'perfbranchmapupdate', [
-     (b'', b'base', [], b'subset of revision to start from'),
-     (b'', b'target', [], b'subset of revision to end with'),
-     (b'', b'clear-caches', False, b'clear cache between each runs')
-    ] + formatteropts)
+
+@command(
+    b'perfbranchmapupdate',
+    [
+        (b'', b'base', [], b'subset of revision to start from'),
+        (b'', b'target', [], b'subset of revision to end with'),
+        (b'', b'clear-caches', False, b'clear cache between each run'),
+    ]
+    + formatteropts,
+)
 def perfbranchmapupdate(ui, repo, base=(), target=(), **opts):
     """benchmark branchmap update from for <base> revs to <target> revs
 
@@ -2956,11 +3374,12 @@
     """
     from mercurial import branchmap
     from mercurial import repoview
+
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     clearcaches = opts[b'clear_caches']
     unfi = repo.unfiltered()
-    x = [None] # used to pass data between closure
+    x = [None]  # used to pass data between closures
 
     # we use a `list` here to avoid possible side effect from smartset
     baserevs = list(scmutil.revrange(repo, base))
@@ -3037,12 +3456,16 @@
         repoview.filtertable.pop(b'__perf_branchmap_update_base', None)
         repoview.filtertable.pop(b'__perf_branchmap_update_target', None)
 
-@command(b'perfbranchmapload', [
-     (b'f', b'filter', b'', b'Specify repoview filter'),
-     (b'', b'list', False, b'List brachmap filter caches'),
-     (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
-
-    ] + formatteropts)
+
+@command(
+    b'perfbranchmapload',
+    [
+        (b'f', b'filter', b'', b'Specify repoview filter'),
+        (b'', b'list', False, b'List branchmap filter caches'),
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ]
+    + formatteropts,
+)
 def perfbranchmapload(ui, repo, filter=b'', list=False, **opts):
     """benchmark reading the branchmap"""
     opts = _byteskwargs(opts)
@@ -3052,8 +3475,9 @@
         for name, kind, st in repo.cachevfs.readdir(stat=True):
             if name.startswith(b'branch2'):
                 filtername = name.partition(b'-')[2] or b'unfiltered'
-                ui.status(b'%s - %s\n'
-                          % (filtername, util.bytecount(st.st_size)))
+                ui.status(
+                    b'%s - %s\n' % (filtername, util.bytecount(st.st_size))
+                )
         return
     if not filter:
         filter = None
@@ -3063,7 +3487,7 @@
     else:
         repo = repoview.repoview(repo, filter)
 
-    repo.branchmap() # make sure we have a relevant, up to date branchmap
+    repo.branchmap()  # make sure we have a relevant, up to date branchmap
 
     try:
         fromfile = branchmap.branchcache.fromfile
@@ -3076,18 +3500,23 @@
     while fromfile(repo) is None:
         currentfilter = subsettable.get(currentfilter)
         if currentfilter is None:
-            raise error.Abort(b'No branchmap cached for %s repo'
-                              % (filter or b'unfiltered'))
+            raise error.Abort(
+                b'No branchmap cached for %s repo' % (filter or b'unfiltered')
+            )
         repo = repo.filtered(currentfilter)
     timer, fm = gettimer(ui, opts)
+
     def setup():
         if clearrevlogs:
             clearchangelog(repo)
+
     def bench():
         fromfile(repo)
+
     timer(bench, setup=setup)
     fm.end()
 
+
 @command(b'perfloadmarkers')
 def perfloadmarkers(ui, repo):
     """benchmark the time to parse the on-disk markers for a repo
@@ -3098,18 +3527,39 @@
     timer(lambda: len(obsolete.obsstore(svfs)))
     fm.end()
 
-@command(b'perflrucachedict', formatteropts +
-    [(b'', b'costlimit', 0, b'maximum total cost of items in cache'),
-     (b'', b'mincost', 0, b'smallest cost of items in cache'),
-     (b'', b'maxcost', 100, b'maximum cost of items in cache'),
-     (b'', b'size', 4, b'size of cache'),
-     (b'', b'gets', 10000, b'number of key lookups'),
-     (b'', b'sets', 10000, b'number of key sets'),
-     (b'', b'mixed', 10000, b'number of mixed mode operations'),
-     (b'', b'mixedgetfreq', 50, b'frequency of get vs set ops in mixed mode')],
-    norepo=True)
-def perflrucache(ui, mincost=0, maxcost=100, costlimit=0, size=4,
-                 gets=10000, sets=10000, mixed=10000, mixedgetfreq=50, **opts):
+
+@command(
+    b'perflrucachedict',
+    formatteropts
+    + [
+        (b'', b'costlimit', 0, b'maximum total cost of items in cache'),
+        (b'', b'mincost', 0, b'smallest cost of items in cache'),
+        (b'', b'maxcost', 100, b'maximum cost of items in cache'),
+        (b'', b'size', 4, b'size of cache'),
+        (b'', b'gets', 10000, b'number of key lookups'),
+        (b'', b'sets', 10000, b'number of key sets'),
+        (b'', b'mixed', 10000, b'number of mixed mode operations'),
+        (
+            b'',
+            b'mixedgetfreq',
+            50,
+            b'frequency of get vs set ops in mixed mode',
+        ),
+    ],
+    norepo=True,
+)
+def perflrucache(
+    ui,
+    mincost=0,
+    maxcost=100,
+    costlimit=0,
+    size=4,
+    gets=10000,
+    sets=10000,
+    mixed=10000,
+    mixedgetfreq=50,
+    **opts
+):
     opts = _byteskwargs(opts)
 
     def doinit():
@@ -3134,7 +3584,7 @@
             d[v] = v
         for key in getseq:
             value = d[key]
-            value # silence pyflakes warning
+            value  # silence pyflakes warning
 
     def dogetscost():
         d = util.lrucachedict(size, maxcost=costlimit)
@@ -3143,7 +3593,7 @@
         for key in getseq:
             try:
                 value = d[key]
-                value # silence pyflakes warning
+                value  # silence pyflakes warning
             except KeyError:
                 pass
 
@@ -3178,9 +3628,9 @@
         else:
             op = 1
 
-        mixedops.append((op,
-                         random.randint(0, size * 2),
-                         random.choice(costrange)))
+        mixedops.append(
+            (op, random.randint(0, size * 2), random.choice(costrange))
+        )
 
     def domixed():
         d = util.lrucachedict(size)
@@ -3211,24 +3661,29 @@
     ]
 
     if costlimit:
-        benches.extend([
-            (dogetscost, b'gets w/ cost limit'),
-            (doinsertscost, b'inserts w/ cost limit'),
-            (domixedcost, b'mixed w/ cost limit'),
-        ])
+        benches.extend(
+            [
+                (dogetscost, b'gets w/ cost limit'),
+                (doinsertscost, b'inserts w/ cost limit'),
+                (domixedcost, b'mixed w/ cost limit'),
+            ]
+        )
     else:
-        benches.extend([
-            (dogets, b'gets'),
-            (doinserts, b'inserts'),
-            (dosets, b'sets'),
-            (domixed, b'mixed')
-        ])
+        benches.extend(
+            [
+                (dogets, b'gets'),
+                (doinserts, b'inserts'),
+                (dosets, b'sets'),
+                (domixed, b'mixed'),
+            ]
+        )
 
     for fn, title in benches:
         timer, fm = gettimer(ui, opts)
         timer(fn, title=title)
         fm.end()
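
util.lrucachedict is mercurial's own structure, but the generated operation
mix can be replayed against a stdlib stand-in for intuition; a sketch using
collections.OrderedDict (size and mixedgetfreq mirror the defaults above):

    import random
    from collections import OrderedDict

    size, mixed, mixedgetfreq = 4, 10000, 50
    mixedops = []
    for _ in range(mixed):
        op = 0 if random.randint(0, 100) < mixedgetfreq else 1  # 0=get, 1=set
        mixedops.append((op, random.randint(0, size * 2)))

    d = OrderedDict()
    for op, key in mixedops:
        if op == 0:
            if key in d:
                d.move_to_end(key)        # a hit refreshes recency
        else:
            d[key] = key
            d.move_to_end(key)
            if len(d) > size:
                d.popitem(last=False)     # evict the least recently used
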
 
+
 @command(b'perfwrite', formatteropts)
 def perfwrite(ui, repo, **opts):
     """microbenchmark ui.write
@@ -3236,15 +3691,19 @@
     opts = _byteskwargs(opts)
 
     timer, fm = gettimer(ui, opts)
+
     def write():
         for i in range(100000):
-            ui.write((b'Testing write performance\n'))
+            ui.write(b'Testing write performance\n')
+
     timer(write)
     fm.end()
 
+
 def uisetup(ui):
-    if (util.safehasattr(cmdutil, b'openrevlog') and
-        not util.safehasattr(commands, b'debugrevlogopts')):
+    if util.safehasattr(cmdutil, b'openrevlog') and not util.safehasattr(
+        commands, b'debugrevlogopts'
+    ):
         # for "historical portability":
         # In this case, Mercurial should be 1.9 (or a79fea6b3e77) -
         # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for
@@ -3252,15 +3711,24 @@
         # available since 3.5 (or 49c583ca48c4).
         def openrevlog(orig, repo, cmd, file_, opts):
             if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'):
-                raise error.Abort(b"This version doesn't support --dir option",
-                                  hint=b"use 3.5 or later")
+                raise error.Abort(
+                    b"This version doesn't support --dir option",
+                    hint=b"use 3.5 or later",
+                )
             return orig(repo, cmd, file_, opts)
+
         extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog)
 
-@command(b'perfprogress', formatteropts + [
-    (b'', b'topic', b'topic', b'topic for progress messages'),
-    (b'c', b'total', 1000000, b'total value we are progressing to'),
-], norepo=True)
+
+@command(
+    b'perfprogress',
+    formatteropts
+    + [
+        (b'', b'topic', b'topic', b'topic for progress messages'),
+        (b'c', b'total', 1000000, b'total value we are progressing to'),
+    ],
+    norepo=True,
+)
 def perfprogress(ui, topic=None, total=None, **opts):
     """printing of progress bars"""
     opts = _byteskwargs(opts)
--- a/contrib/python-hook-examples.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/python-hook-examples.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,6 +7,7 @@
     util,
 )
 
+
 def diffstat(ui, repo, **kwargs):
     '''Example usage:
 
--- a/contrib/python3-ratchet.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/python3-ratchet.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,65 +25,103 @@
 import sys
 
 _hgenv = dict(os.environ)
-_hgenv.update({
-    'HGPLAIN': '1',
-    })
+_hgenv.update(
+    {'HGPLAIN': '1',}
+)
 
 _HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
 
+
 def _runhg(*args):
     return subprocess.check_output(args, env=_hgenv)
 
+
 def _is_hg_repo(path):
-    return _runhg('hg', 'log', '-R', path,
-                  '-r0', '--template={node}').strip() == _HG_FIRST_CHANGE
+    return (
+        _runhg('hg', 'log', '-R', path, '-r0', '--template={node}').strip()
+        == _HG_FIRST_CHANGE
+    )
+
 
 def _py3default():
     if sys.version_info[0] >= 3:
         return sys.executable
     return 'python3'
 
+
 def main(argv=()):
     p = argparse.ArgumentParser()
-    p.add_argument('--working-tests',
-                   help='List of tests that already work in Python 3.')
-    p.add_argument('--commit-to-repo',
-                   help='If set, commit newly fixed tests to the given repo')
-    p.add_argument('-j', default=os.sysconf(r'SC_NPROCESSORS_ONLN'), type=int,
-                   help='Number of parallel tests to run.')
-    p.add_argument('--python3', default=_py3default(),
-                   help='python3 interpreter to use for test run')
-    p.add_argument('--commit-user',
-                   default='python3-ratchet@mercurial-scm.org',
-                   help='Username to specify when committing to a repo.')
+    p.add_argument(
+        '--working-tests', help='List of tests that already work in Python 3.'
+    )
+    p.add_argument(
+        '--commit-to-repo',
+        help='If set, commit newly fixed tests to the given repo',
+    )
+    p.add_argument(
+        '-j',
+        default=os.sysconf(r'SC_NPROCESSORS_ONLN'),
+        type=int,
+        help='Number of parallel tests to run.',
+    )
+    p.add_argument(
+        '--python3',
+        default=_py3default(),
+        help='python3 interpreter to use for test run',
+    )
+    p.add_argument(
+        '--commit-user',
+        default='python3-ratchet@mercurial-scm.org',
+        help='Username to specify when committing to a repo.',
+    )
     opts = p.parse_args(argv)
     if opts.commit_to_repo:
         if not _is_hg_repo(opts.commit_to_repo):
             print('abort: specified repository is not the hg repository')
             sys.exit(1)
     if not opts.working_tests or not os.path.isfile(opts.working_tests):
-        print('abort: --working-tests must exist and be a file (got %r)' %
-              opts.working_tests)
+        print(
+            'abort: --working-tests must exist and be a file (got %r)'
+            % opts.working_tests
+        )
         sys.exit(1)
     elif opts.commit_to_repo:
         root = _runhg('hg', 'root').strip()
         if not opts.working_tests.startswith(root):
-            print('abort: if --commit-to-repo is given, '
-                  '--working-tests must be from that repo')
+            print(
+                'abort: if --commit-to-repo is given, '
+                '--working-tests must be from that repo'
+            )
             sys.exit(1)
     try:
-        subprocess.check_call([opts.python3, '-c',
-                               'import sys ; '
-                               'assert ((3, 5) <= sys.version_info < (3, 6) '
-                               'or sys.version_info >= (3, 6, 2))'])
+        subprocess.check_call(
+            [
+                opts.python3,
+                '-c',
+                'import sys ; '
+                'assert ((3, 5) <= sys.version_info < (3, 6) '
+                'or sys.version_info >= (3, 6, 2))',
+            ]
+        )
     except subprocess.CalledProcessError:
-        print('warning: Python 3.6.0 and 3.6.1 have '
-              'a bug which breaks Mercurial')
+        print(
+            'warning: Python 3.6.0 and 3.6.1 have '
+            'a bug which breaks Mercurial'
+        )
         print('(see https://bugs.python.org/issue29714 for details)')
         sys.exit(1)
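
The inline assertion relies on tuple comparison to reject exactly 3.6.0 and
3.6.1 while still accepting 3.5.x and 3.6.2+:

    >>> v = (3, 6, 1)
    >>> (3, 5) <= v < (3, 6) or v >= (3, 6, 2)
    False
    >>> v = (3, 5, 2)
    >>> (3, 5) <= v < (3, 6) or v >= (3, 6, 2)
    True
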
 
-    rt = subprocess.Popen([opts.python3, 'run-tests.py', '-j', str(opts.j),
-                           '--blacklist', opts.working_tests, '--json'])
+    rt = subprocess.Popen(
+        [
+            opts.python3,
+            'run-tests.py',
+            '-j',
+            str(opts.j),
+            '--blacklist',
+            opts.working_tests,
+            '--json',
+        ]
+    )
     rt.wait()
     with open('report.json') as f:
         data = f.read()
@@ -104,12 +142,20 @@
             with open(opts.working_tests, 'w') as f:
                 for p in sorted(oldpass | newpass):
                     f.write('%s\n' % p)
-            _runhg('hg', 'commit', '-R', opts.commit_to_repo,
-                   '--user', opts.commit_user,
-                   '--message', 'python3: expand list of passing tests')
+            _runhg(
+                'hg',
+                'commit',
+                '-R',
+                opts.commit_to_repo,
+                '--user',
+                opts.commit_user,
+                '--message',
+                'python3: expand list of passing tests',
+            )
         else:
             print('Newly passing tests:', '\n'.join(sorted(newpass)))
             sys.exit(2)
 
+
 if __name__ == '__main__':
     main(sys.argv[1:])
--- a/contrib/revsetbenchmarks.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/revsetbenchmarks.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,9 +16,20 @@
 import subprocess
 import sys
 
-DEFAULTVARIANTS = ['plain', 'min', 'max', 'first', 'last',
-                   'reverse', 'reverse+first', 'reverse+last',
-                   'sort', 'sort+first', 'sort+last']
+DEFAULTVARIANTS = [
+    'plain',
+    'min',
+    'max',
+    'first',
+    'last',
+    'reverse',
+    'reverse+first',
+    'reverse+last',
+    'sort',
+    'sort+first',
+    'sort+last',
+]
+
 
 def check_output(*args, **kwargs):
     kwargs.setdefault('stderr', subprocess.PIPE)
@@ -29,14 +40,16 @@
         raise subprocess.CalledProcessError(proc.returncode, ' '.join(args[0]))
     return output
 
+
 def update(rev):
     """update the repo to a revision"""
     try:
         subprocess.check_call(['hg', 'update', '--quiet', '--check', str(rev)])
-        check_output(['make', 'local'],
-                     stderr=None)  # suppress output except for error/warning
+        check_output(
+            ['make', 'local'], stderr=None
+        )  # suppress output except for error/warning
     except subprocess.CalledProcessError as exc:
-        print('update to revision %s failed, aborting'%rev, file=sys.stderr)
+        print('update to revision %s failed, aborting' % rev, file=sys.stderr)
         sys.exit(exc.returncode)
 
 
@@ -48,11 +61,14 @@
     fullcmd = ['./hg']
     if repo is not None:
         fullcmd += ['-R', repo]
-    fullcmd += ['--config',
-                'extensions.perf=' + os.path.join(contribdir, 'perf.py')]
+    fullcmd += [
+        '--config',
+        'extensions.perf=' + os.path.join(contribdir, 'perf.py'),
+    ]
     fullcmd += cmd
     return check_output(fullcmd, stderr=subprocess.STDOUT)
 
+
 def perf(revset, target=None, contexts=False):
     """run benchmark for this very revset"""
     try:
@@ -64,15 +80,21 @@
         output = hg(args, repo=target)
         return parseoutput(output)
     except subprocess.CalledProcessError as exc:
-        print('abort: cannot run revset benchmark: %s'%exc.cmd, file=sys.stderr)
-        if getattr(exc, 'output', None) is None: # no output before 2.7
+        print(
+            'abort: cannot run revset benchmark: %s' % exc.cmd, file=sys.stderr
+        )
+        if getattr(exc, 'output', None) is None:  # no output before 2.7
             print('(no output)', file=sys.stderr)
         else:
             print(exc.output, file=sys.stderr)
         return None
 
-outputre = re.compile(br'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) '
-                      br'sys (\d+.\d+) \(best of (\d+)\)')
+
+outputre = re.compile(
+    br'! wall (\d+.\d+) comb (\d+.\d+) user (\d+.\d+) '
+    br'sys (\d+.\d+) \(best of (\d+)\)'
+)
+
 
 def parseoutput(output):
     """parse a textual output into a dict
@@ -85,20 +107,30 @@
         print('abort: invalid output:', file=sys.stderr)
         print(output, file=sys.stderr)
         sys.exit(1)
-    return {'comb': float(match.group(2)),
-            'count': int(match.group(5)),
-            'sys': float(match.group(3)),
-            'user': float(match.group(4)),
-            'wall': float(match.group(1)),
-            }
+    return {
+        'comb': float(match.group(2)),
+        'count': int(match.group(5)),
+        'sys': float(match.group(3)),
+        'user': float(match.group(4)),
+        'wall': float(match.group(1)),
+    }
+
 
 def printrevision(rev):
     """print data about a revision"""
     sys.stdout.write("Revision ")
     sys.stdout.flush()
-    subprocess.check_call(['hg', 'log', '--rev', str(rev), '--template',
-                           '{if(tags, " ({tags})")} '
-                           '{rev}:{node|short}: {desc|firstline}\n'])
+    subprocess.check_call(
+        [
+            'hg',
+            'log',
+            '--rev',
+            str(rev),
+            '--template',
+            '{if(tags, " ({tags})")} ' '{rev}:{node|short}: {desc|firstline}\n',
+        ]
+    )
+
 
 def idxwidth(nbidx):
     """return the max width of number used for index
@@ -107,7 +139,7 @@
     because we start with zero and we'd rather not deal with all the
     extra rounding business that log10 would imply.
     """
-    nbidx -= 1 # starts at 0
+    nbidx -= 1  # starts at 0
     idxwidth = 0
     while nbidx:
         idxwidth += 1
@@ -116,6 +148,7 @@
         idxwidth = 1
     return idxwidth
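
So a run over 100 revisions gets two-digit indexes (0 through 99), and even
a single revision still gets one column:

    >>> idxwidth(100)
    2
    >>> idxwidth(1)
    1
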
 
+
 def getfactor(main, other, field, sensitivity=0.05):
     """return the relative factor between values for 'field' in main and other
 
@@ -125,10 +158,11 @@
     if main is not None:
         factor = other[field] / main[field]
     low, high = 1 - sensitivity, 1 + sensitivity
-    if (low < factor < high):
+    if low < factor < high:
         return None
     return factor
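
With the default 5% sensitivity, anything inside the noise band yields no
factor at all; e.g. with hypothetical timings:

    >>> getfactor({'wall': 1.0}, {'wall': 1.03}, 'wall')  # inside the band
    >>> getfactor({'wall': 1.0}, {'wall': 1.25}, 'wall')  # 25% slower
    1.25
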
 
+
 def formatfactor(factor):
     """format a factor into a 4 char string
 
@@ -155,15 +189,19 @@
             factor //= 10
         return 'x%ix%i' % (factor, order)
 
+
 def formattiming(value):
     """format a value to strictly 8 char, dropping some precision if needed"""
-    if value < 10**7:
+    if value < 10 ** 7:
         return ('%.6f' % value)[:8]
     else:
         # value is HUGE, very unlikely to happen (4+ month run)
         return '%i' % value
 
+
 _marker = object()
+
+
 def printresult(variants, idx, data, maxidx, verbose=False, reference=_marker):
     """print a line of result to stdout"""
     mask = '%%0%ii) %%s' % idxwidth(maxidx)
@@ -184,9 +222,10 @@
             out.append(formattiming(data[var]['comb']))
             out.append(formattiming(data[var]['user']))
             out.append(formattiming(data[var]['sys']))
-            out.append('%6d'    % data[var]['count'])
+            out.append('%6d' % data[var]['count'])
     print(mask % (idx, ' '.join(out)))
 
+
 def printheader(variants, maxidx, verbose=False, relative=False):
     header = [' ' * (idxwidth(maxidx) + 1)]
     for var in variants:
@@ -204,12 +243,13 @@
             header.append('%6s' % 'count')
     print(' '.join(header))
 
+
 def getrevs(spec):
     """get the list of rev matched by a revset"""
     try:
         out = check_output(['hg', 'log', '--template={rev}\n', '--rev', spec])
     except subprocess.CalledProcessError as exc:
-        print("abort, can't get revision from %s"%spec, file=sys.stderr)
+        print("abort, can't get revision from %s" % spec, file=sys.stderr)
         sys.exit(exc.returncode)
     return [r for r in out.split() if r]
 
@@ -221,31 +261,44 @@
         revset = '%s(%s)' % (var, revset)
     return revset
 
-helptext="""This script will run multiple variants of provided revsets using
+
+helptext = """This script will run multiple variants of provided revsets using
 different revisions in your mercurial repository. After the benchmarks are run,
 summary output is provided. Use it to demonstrate speed improvements or
 pinpoint regressions. Revsets to run are specified in a file (or from stdin),
 one revset per line. Lines starting with '#' will be ignored, allowing
 insertion of comments."""
-parser = optparse.OptionParser(usage="usage: %prog [options] <revs>",
-                               description=helptext)
-parser.add_option("-f", "--file",
-                  help="read revset from FILE (stdin if omitted)",
-                  metavar="FILE")
-parser.add_option("-R", "--repo",
-                  help="run benchmark on REPO", metavar="REPO")
+parser = optparse.OptionParser(
+    usage="usage: %prog [options] <revs>", description=helptext
+)
+parser.add_option(
+    "-f",
+    "--file",
+    help="read revset from FILE (stdin if omitted)",
+    metavar="FILE",
+)
+parser.add_option("-R", "--repo", help="run benchmark on REPO", metavar="REPO")
 
-parser.add_option("-v", "--verbose",
-                  action='store_true',
-                  help="display all timing data (not just best total time)")
+parser.add_option(
+    "-v",
+    "--verbose",
+    action='store_true',
+    help="display all timing data (not just best total time)",
+)
 
-parser.add_option("", "--variants",
-                  default=','.join(DEFAULTVARIANTS),
-                  help="comma separated list of variant to test "
-                       "(eg: plain,min,sorted) (plain = no modification)")
-parser.add_option('', '--contexts',
-                  action='store_true',
-                  help='obtain changectx from results instead of integer revs')
+parser.add_option(
+    "",
+    "--variants",
+    default=','.join(DEFAULTVARIANTS),
+    help="comma separated list of variant to test "
+    "(eg: plain,min,sorted) (plain = no modification)",
+)
+parser.add_option(
+    '',
+    '--contexts',
+    action='store_true',
+    help='obtain changectx from results instead of integer revs',
+)
 
 (options, args) = parser.parse_args()
 
@@ -294,17 +347,20 @@
             data = perf(varrset, target=options.repo, contexts=options.contexts)
             varres[var] = data
         res.append(varres)
-        printresult(variants, idx, varres, len(revsets),
-                    verbose=options.verbose)
+        printresult(
+            variants, idx, varres, len(revsets), verbose=options.verbose
+        )
         sys.stdout.flush()
     print("----------------------------")
 
 
-print("""
+print(
+    """
 
 Result by revset
 ================
-""")
+"""
+)
 
 print('Revision:')
 for idx, rev in enumerate(revs):
@@ -321,7 +377,13 @@
     printheader(variants, len(results), verbose=options.verbose, relative=True)
     ref = None
     for idx, data in enumerate(results):
-        printresult(variants, idx, data[ridx], len(results),
-                    verbose=options.verbose, reference=ref)
+        printresult(
+            variants,
+            idx,
+            data[ridx],
+            len(results),
+            verbose=options.verbose,
+            reference=ref,
+        )
         ref = data[ridx]
     print()
--- a/contrib/showstack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/showstack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,16 +9,19 @@
 import sys
 import traceback
 
+
 def sigshow(*args):
     sys.stderr.write("\n")
     traceback.print_stack(args[1], limit=10, file=sys.stderr)
     sys.stderr.write("----\n")
 
+
 def sigexit(*args):
     sigshow(*args)
     print('alarm!')
     sys.exit(1)
 
+
 def extsetup(ui):
     signal.signal(signal.SIGQUIT, sigshow)
     signal.signal(signal.SIGALRM, sigexit)
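
Once the extension is enabled, a stack dump can be requested from outside;
a usage sketch (the pid is a stand-in for whatever hg process is of
interest):

    import os
    import signal

    hg_pid = 12345                   # hypothetical pid of a running hg process
    os.kill(hg_pid, signal.SIGQUIT)  # hg prints its stack, then '----'
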
--- a/contrib/synthrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/synthrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -62,9 +62,7 @@
     registrar,
     scmutil,
 )
-from mercurial.utils import (
-    dateutil,
-)
+from mercurial.utils import dateutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -77,14 +75,17 @@
 
 newfile = {'new fi', 'rename', 'copy f', 'copy t'}
 
+
 def zerodict():
     return collections.defaultdict(lambda: 0)
 
+
 def roundto(x, k):
     if x > k * 2:
         return int(round(x / float(k)) * k)
     return int(round(x))
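
roundto only buckets values once they are comfortably larger than the
bucket size, so small measurements keep full precision:

    >>> roundto(123, 5)   # x > 2*k: snap to the nearest multiple of 5
    125
    >>> roundto(7, 5)     # small values are merely rounded
    7
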
 
+
 def parsegitdiff(lines):
     filename, mar, lineadd, lineremove = None, None, zerodict(), 0
     binary = False
@@ -110,10 +111,16 @@
     if filename:
         yield filename, mar, lineadd, lineremove, binary
 
-@command('analyze',
-         [('o', 'output', '', _('write output to given file'), _('FILE')),
-          ('r', 'rev', [], _('analyze specified revisions'), _('REV'))],
-         _('hg analyze'), optionalrepo=True)
+
+@command(
+    'analyze',
+    [
+        ('o', 'output', '', _('write output to given file'), _('FILE')),
+        ('r', 'rev', [], _('analyze specified revisions'), _('REV')),
+    ],
+    _('hg analyze'),
+    optionalrepo=True,
+)
 def analyze(ui, repo, *revs, **opts):
     '''create a simple model of a repository to use for later synthesis
 
@@ -176,8 +183,9 @@
         revs = scmutil.revrange(repo, revs)
         revs.sort()
 
-        progress = ui.makeprogress(_('analyzing'), unit=_('changesets'),
-                                   total=len(revs))
+        progress = ui.makeprogress(
+            _('analyzing'), unit=_('changesets'), total=len(revs)
+        )
         for i, rev in enumerate(revs):
             progress.update(i)
             ctx = repo[rev]
@@ -198,8 +206,9 @@
                 timedelta = ctx.date()[0] - lastctx.date()[0]
                 interarrival[roundto(timedelta, 300)] += 1
             diffopts = diffutil.diffallopts(ui, {'git': True})
-            diff = sum((d.splitlines()
-                       for d in ctx.diff(pctx, opts=diffopts)), [])
+            diff = sum(
+                (d.splitlines() for d in ctx.diff(pctx, opts=diffopts)), []
+            )
             fileadds, diradds, fileremoves, filechanges = 0, 0, 0, 0
             for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
                 if isbin:
@@ -207,8 +216,9 @@
                 added = sum(lineadd.itervalues(), 0)
                 if mar == 'm':
                     if added and lineremove:
-                        lineschanged[roundto(added, 5),
-                                     roundto(lineremove, 5)] += 1
+                        lineschanged[
+                            roundto(added, 5), roundto(lineremove, 5)
+                        ] += 1
                         filechanges += 1
                 elif mar == 'a':
                     fileadds += 1
@@ -238,30 +248,38 @@
     def pronk(d):
         return sorted(d.iteritems(), key=lambda x: x[1], reverse=True)
 
-    json.dump({'revs': len(revs),
-               'initdirs': pronk(dirs),
-               'lineschanged': pronk(lineschanged),
-               'children': pronk(invchildren),
-               'fileschanged': pronk(fileschanged),
-               'filesadded': pronk(filesadded),
-               'linesinfilesadded': pronk(linesinfilesadded),
-               'dirsadded': pronk(dirsadded),
-               'filesremoved': pronk(filesremoved),
-               'linelengths': pronk(linelengths),
-               'parents': pronk(parents),
-               'p1distance': pronk(p1distance),
-               'p2distance': pronk(p2distance),
-               'interarrival': pronk(interarrival),
-               'tzoffset': pronk(tzoffset),
-               },
-              fp)
+    json.dump(
+        {
+            'revs': len(revs),
+            'initdirs': pronk(dirs),
+            'lineschanged': pronk(lineschanged),
+            'children': pronk(invchildren),
+            'fileschanged': pronk(fileschanged),
+            'filesadded': pronk(filesadded),
+            'linesinfilesadded': pronk(linesinfilesadded),
+            'dirsadded': pronk(dirsadded),
+            'filesremoved': pronk(filesremoved),
+            'linelengths': pronk(linelengths),
+            'parents': pronk(parents),
+            'p1distance': pronk(p1distance),
+            'p2distance': pronk(p2distance),
+            'interarrival': pronk(interarrival),
+            'tzoffset': pronk(tzoffset),
+        },
+        fp,
+    )
     fp.close()
 
-@command('synthesize',
-         [('c', 'count', 0, _('create given number of commits'), _('COUNT')),
-          ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
-          ('', 'initfiles', 0, _('initial file count to create'), _('COUNT'))],
-         _('hg synthesize [OPTION].. DESCFILE'))
+
+@command(
+    'synthesize',
+    [
+        ('c', 'count', 0, _('create given number of commits'), _('COUNT')),
+        ('', 'dict', '', _('path to a dictionary of words'), _('FILE')),
+        ('', 'initfiles', 0, _('initial file count to create'), _('COUNT')),
+    ],
+    _('hg synthesize [OPTION].. DESCFILE'),
+)
 def synthesize(ui, repo, descpath, **opts):
     '''synthesize commits based on a model of an existing repository
 
@@ -384,16 +402,23 @@
 
         progress.complete()
         message = 'synthesized wide repo with %d files' % (len(files),)
-        mc = context.memctx(repo, [pctx.node(), nullid], message,
-                            files, filectxfn, ui.username(),
-                            '%d %d' % dateutil.makedate())
+        mc = context.memctx(
+            repo,
+            [pctx.node(), nullid],
+            message,
+            files,
+            filectxfn,
+            ui.username(),
+            '%d %d' % dateutil.makedate(),
+        )
         initnode = mc.commit()
         if ui.debugflag:
             hexfn = hex
         else:
             hexfn = short
-        ui.status(_('added commit %s with %d files\n')
-                  % (hexfn(initnode), len(files)))
+        ui.status(
+            _('added commit %s with %d files\n') % (hexfn(initnode), len(files))
+        )
 
     # Synthesize incremental revisions to the repository, adding repo depth.
     count = int(opts['count'])
@@ -437,8 +462,11 @@
                 for __ in pycompat.xrange(10):
                     fctx = pctx.filectx(random.choice(mfk))
                     path = fctx.path()
-                    if not (path in nevertouch or fctx.isbinary() or
-                            'l' in fctx.flags()):
+                    if not (
+                        path in nevertouch
+                        or fctx.isbinary()
+                        or 'l' in fctx.flags()
+                    ):
                         break
                 lines = fctx.data().splitlines()
                 add, remove = pick(lineschanged)
@@ -466,14 +494,20 @@
                     path.append(random.choice(words))
                 path.append(random.choice(words))
                 pathstr = '/'.join(filter(None, path))
-            data = '\n'.join(
-                makeline()
-                for __ in pycompat.xrange(pick(linesinfilesadded))) + '\n'
+            data = (
+                '\n'.join(
+                    makeline()
+                    for __ in pycompat.xrange(pick(linesinfilesadded))
+                )
+                + '\n'
+            )
             changes[pathstr] = data
+
         def filectxfn(repo, memctx, path):
             if path not in changes:
                 return None
             return context.memfilectx(repo, memctx, path, changes[path])
+
         if not changes:
             continue
         if revs:
@@ -481,11 +515,17 @@
         else:
             date = time.time() - (86400 * count)
         # dates in mercurial must be positive, fit in 32-bit signed integers.
-        date = min(0x7fffffff, max(0, date))
+        date = min(0x7FFFFFFF, max(0, date))
         user = random.choice(words) + '@' + random.choice(words)
-        mc = context.memctx(repo, pl, makeline(minimum=2),
-                            sorted(changes),
-                            filectxfn, user, '%d %d' % (date, pick(tzoffset)))
+        mc = context.memctx(
+            repo,
+            pl,
+            makeline(minimum=2),
+            sorted(changes),
+            filectxfn,
+            user,
+            '%d %d' % (date, pick(tzoffset)),
+        )
         newnode = mc.commit()
         heads.add(repo.changelog.rev(newnode))
         heads.discard(r1)
@@ -495,10 +535,12 @@
     lock.release()
     wlock.release()
 
+
 def renamedirs(dirs, words):
     '''Randomly rename the directory names in the per-dir file count dict.'''
     wordgen = itertools.cycle(words)
     replacements = {'': ''}
+
     def rename(dirpath):
         '''Recursively rename the directory and all path prefixes.
 
@@ -516,6 +558,7 @@
         renamed = os.path.join(head, next(wordgen))
         replacements[dirpath] = renamed
         return renamed
+
     result = []
     for dirpath, count in dirs.iteritems():
         result.append([rename(dirpath.lstrip(os.sep)), count])
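
The synthrepo.py hunks show two recurring normalizations: a call that exceeds the line limit is exploded one argument per line with a trailing comma and a dedented closing parenthesis, and hex literals such as 0x7fffffff are upper-cased to 0x7FFFFFFF. An illustrative before/after pair; compute and its arguments are hypothetical stand-ins, not code from the patch:

# Before (one over-long line, lower-case hex):
#   result = compute('alpha', 'beta', 'gamma', 'delta', 'epsilon', limit=0x7fffffff)

def compute(*args, **kwargs):  # hypothetical stand-in so the sketch runs
    return args, kwargs

# After: exploded one argument per line, trailing comma, dedented closing
# parenthesis, and upper-cased hex digits.
result = compute(
    'alpha',
    'beta',
    'gamma',
    'delta',
    'epsilon',
    limit=0x7FFFFFFF,
)
print(result)
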
--- a/contrib/testparseutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/testparseutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,11 +14,13 @@
 ####################
 # for Python3 compatibility (almost comes from mercurial/pycompat.py)
 
-ispy3 = (sys.version_info[0] >= 3)
+ispy3 = sys.version_info[0] >= 3
+
 
 def identity(a):
     return a
 
+
 def _rapply(f, xs):
     if xs is None:
         # assume None means non-value of optional data
@@ -29,12 +31,14 @@
         return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
     return f(xs)
 
+
 def rapply(f, xs):
     if f is identity:
         # fast path mainly for py2
         return xs
     return _rapply(f, xs)
 
+
 if ispy3:
     import builtins
 
@@ -49,29 +53,37 @@
 
     def opentext(f):
         return open(f, 'r')
+
+
 else:
     bytestr = str
     sysstr = identity
 
     opentext = open
 
+
 def b2s(x):
     # convert BYTES elements in "x" to SYSSTR recursively
     return rapply(sysstr, x)
 
+
 def writeout(data):
     # write "data" in BYTES into stdout
     sys.stdout.write(data)
 
+
 def writeerr(data):
     # write "data" in BYTES into stderr
     sys.stderr.write(data)
 
+
 ####################
 
+
 class embeddedmatcher(object):
     """Base class to detect embedded code fragments in *.t test script
     """
+
     __metaclass__ = abc.ABCMeta
 
     def __init__(self, desc):
@@ -126,6 +138,7 @@
     def codeinside(self, ctx, line):
         """Return actual code at line inside embedded code"""
 
+
 def embedded(basefile, lines, errors, matchers):
     """pick embedded code fragments up from given lines
 
@@ -168,12 +181,12 @@
 
     """
     matcher = None
-    ctx = filename = code = startline = None # for pyflakes
+    ctx = filename = code = startline = None  # for pyflakes
 
     for lineno, line in enumerate(lines, 1):
         if not line.endswith('\n'):
-            line += '\n' # to normalize EOF line
-        if matcher: # now, inside embedded code
+            line += '\n'  # to normalize EOF line
+        if matcher:  # now, inside embedded code
             if matcher.endsat(ctx, line):
                 codeatend = matcher.codeatend(ctx, line)
                 if codeatend is not None:
@@ -185,8 +198,10 @@
             elif not matcher.isinside(ctx, line):
                 # this is an error of basefile
                 # (if matchers are implemented correctly)
-                errors.append('%s:%d: unexpected line for "%s"'
-                              % (basefile, lineno, matcher.desc))
+                errors.append(
+                    '%s:%d: unexpected line for "%s"'
+                    % (basefile, lineno, matcher.desc)
+                )
                 # stop extracting embedded code by current 'matcher',
                 # because appearance of unexpected line might mean
                 # that expected end-of-embedded-code line might never
@@ -208,10 +223,14 @@
         if matched:
             if len(matched) > 1:
                 # this is an error of matchers, maybe
-                errors.append('%s:%d: ambiguous line for %s' %
-                              (basefile, lineno,
-                               ', '.join(['"%s"' % m.desc
-                                           for m, c in matched])))
+                errors.append(
+                    '%s:%d: ambiguous line for %s'
+                    % (
+                        basefile,
+                        lineno,
+                        ', '.join(['"%s"' % m.desc for m, c in matched]),
+                    )
+                )
                 # omit extracting embedded code, because choosing
                 # arbitrary matcher from matched ones might fail to
                 # detect the end of embedded code as expected.
@@ -238,8 +257,11 @@
         else:
             # this is an error of basefile
             # (if matchers are implemented correctly)
-            errors.append('%s:%d: unexpected end of file for "%s"'
-                          % (basefile, lineno, matcher.desc))
+            errors.append(
+                '%s:%d: unexpected end of file for "%s"'
+                % (basefile, lineno, matcher.desc)
+            )
+
 
 # heredoc limit mark to ignore embedded code at check-code.py or so
 heredocignorelimit = 'NO_CHECK_EOF'
@@ -252,6 +274,7 @@
 # - << 'LIMITMARK'
 heredoclimitpat = r'\s*<<\s*(?P<lquote>["\']?)(?P<limit>\w+)(?P=lquote)'
 
+
 class fileheredocmatcher(embeddedmatcher):
     """Detect "cat > FILE << LIMIT" style embedded code
 
@@ -290,6 +313,7 @@
     >>> matcher.ignores(ctx)
     True
     """
+
     _prefix = '  > '
 
     def __init__(self, desc, namepat):
@@ -302,8 +326,9 @@
         # - > NAMEPAT
         # - > "NAMEPAT"
         # - > 'NAMEPAT'
-        namepat = (r'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)'
-                   % namepat)
+        namepat = (
+            r'\s*>>?\s*(?P<nquote>["\']?)(?P<name>%s)(?P=nquote)' % namepat
+        )
         self._fileres = [
             # "cat > NAME << LIMIT" case
             re.compile(r'  \$ \s*cat' + namepat + heredoclimitpat),
@@ -316,8 +341,10 @@
         for filere in self._fileres:
             matched = filere.match(line)
             if matched:
-                return (matched.group('name'),
-                        '  > %s\n' % matched.group('limit'))
+                return (
+                    matched.group('name'),
+                    '  > %s\n' % matched.group('limit'),
+                )
 
     def endsat(self, ctx, line):
         return ctx[1] == line
@@ -332,17 +359,19 @@
         return ctx[0]
 
     def codeatstart(self, ctx, line):
-        return None # no embedded code at start line
+        return None  # no embedded code at start line
 
     def codeatend(self, ctx, line):
-        return None # no embedded code at end line
+        return None  # no embedded code at end line
 
     def codeinside(self, ctx, line):
-        return line[len(self._prefix):] # strip prefix
+        return line[len(self._prefix) :]  # strip prefix
+
 
 ####
 # for embedded python script
 
+
 class pydoctestmatcher(embeddedmatcher):
     """Detect ">>> code" style embedded python code
 
@@ -395,6 +424,7 @@
     True
     >>> matcher.codeatend(ctx, end)
     """
+
     _prefix = '  >>> '
     _prefixre = re.compile(r'  (>>>|\.\.\.) ')
 
@@ -419,24 +449,25 @@
         return not (self._prefixre.match(line) or self._outputre.match(line))
 
     def isinside(self, ctx, line):
-        return True # always true, if not yet ended
+        return True  # always true, if not yet ended
 
     def ignores(self, ctx):
-        return False # should be checked always
+        return False  # should be checked always
 
     def filename(self, ctx):
-        return None # no filename
+        return None  # no filename
 
     def codeatstart(self, ctx, line):
-        return line[len(self._prefix):] # strip prefix '  >>> '/'  ... '
+        return line[len(self._prefix) :]  # strip prefix '  >>> '/'  ... '
 
     def codeatend(self, ctx, line):
-        return None # no embedded code at end line
+        return None  # no embedded code at end line
 
     def codeinside(self, ctx, line):
         if self._prefixre.match(line):
-            return line[len(self._prefix):] # strip prefix '  >>> '/'  ... '
-        return '\n' # an expected output line is treated as an empty line
+            return line[len(self._prefix) :]  # strip prefix '  >>> '/'  ... '
+        return '\n'  # an expected output line is treated as an empty line
+
 
 class pyheredocmatcher(embeddedmatcher):
     """Detect "python << LIMIT" style embedded python code
@@ -474,10 +505,12 @@
     >>> matcher.ignores(ctx)
     True
     """
+
     _prefix = '  > '
 
-    _startre = re.compile(r'  \$ (\$PYTHON|"\$PYTHON"|python).*' +
-                          heredoclimitpat)
+    _startre = re.compile(
+        r'  \$ (\$PYTHON|"\$PYTHON"|python).*' + heredoclimitpat
+    )
 
     def __init__(self):
         super(pyheredocmatcher, self).__init__("heredoc python invocation")
@@ -498,16 +531,17 @@
         return '  > %s\n' % heredocignorelimit == ctx
 
     def filename(self, ctx):
-        return None # no filename
+        return None  # no filename
 
     def codeatstart(self, ctx, line):
-        return None # no embedded code at start line
+        return None  # no embedded code at start line
 
     def codeatend(self, ctx, line):
-        return None # no embedded code at end line
+        return None  # no embedded code at end line
 
     def codeinside(self, ctx, line):
-        return line[len(self._prefix):] # strip prefix
+        return line[len(self._prefix) :]  # strip prefix
+
 
 _pymatchers = [
     pydoctestmatcher(),
@@ -517,9 +551,11 @@
     fileheredocmatcher('heredoc .py file', r'[^<]+\.py'),
 ]
 
+
 def pyembedded(basefile, lines, errors):
     return embedded(basefile, lines, errors, _pymatchers)
 
+
 ####
 # for embedded shell script
 
@@ -529,22 +565,27 @@
     fileheredocmatcher('heredoc .sh file', r'[^<]+\.sh'),
 ]
 
+
 def shembedded(basefile, lines, errors):
     return embedded(basefile, lines, errors, _shmatchers)
 
+
 ####
 # for embedded hgrc configuration
 
 _hgrcmatchers = [
     # use '[^<]+' instead of '\S+', in order to match against
     # paths including whitespaces
-    fileheredocmatcher('heredoc hgrc file',
-                       r'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'),
+    fileheredocmatcher(
+        'heredoc hgrc file', r'(([^/<]+/)+hgrc|\$HGRCPATH|\${HGRCPATH})'
+    ),
 ]
 
+
 def hgrcembedded(basefile, lines, errors):
     return embedded(basefile, lines, errors, _hgrcmatchers)
 
+
 ####
 
 if __name__ == "__main__":
@@ -558,8 +599,7 @@
                 name = '<anonymous>'
             writeout("%s:%d: %s starts\n" % (basefile, starts, name))
             if opts.verbose and code:
-                writeout("  |%s\n" %
-                         "\n  |".join(l for l in code.splitlines()))
+                writeout("  |%s\n" % "\n  |".join(l for l in code.splitlines()))
             writeout("%s:%d: %s ends\n" % (basefile, ends, name))
         for e in errors:
             writeerr("%s\n" % e)
@@ -579,9 +619,11 @@
         return ret
 
     commands = {}
+
     def command(name, desc):
         def wrap(func):
             commands[name] = (desc, func)
+
         return wrap
 
     @command("pyembedded", "detect embedded python script")
@@ -596,21 +638,29 @@
     def hgrcembeddedcmd(args, opts):
         return applyembedded(args, hgrcembedded, opts)
 
-    availablecommands = "\n".join(["  - %s: %s" % (key, value[0])
-                                   for key, value in commands.items()])
+    availablecommands = "\n".join(
+        ["  - %s: %s" % (key, value[0]) for key, value in commands.items()]
+    )
 
-    parser = optparse.OptionParser("""%prog COMMAND [file ...]
+    parser = optparse.OptionParser(
+        """%prog COMMAND [file ...]
 
 Pick up embedded code fragments from given file(s) or stdin, and list
 the start/end lines of each in standard compiler format
 ("FILENAME:LINENO:").
 
 Available commands are:
-""" + availablecommands + """
-""")
-    parser.add_option("-v", "--verbose",
-                      help="enable additional output (e.g. actual code)",
-                      action="store_true")
+"""
+        + availablecommands
+        + """
+"""
+    )
+    parser.add_option(
+        "-v",
+        "--verbose",
+        help="enable additional output (e.g. actual code)",
+        action="store_true",
+    )
     (opts, args) = parser.parse_args()
 
     if not args or args[0] not in commands:
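
Two smaller conventions dominate the testparseutil.py hunks: inline comments gain two spaces before the #, and a slice whose bound is a non-trivial expression gets symmetric spaces around the colon, hence line[len(self._prefix) :]. A short runnable illustration with placeholder values:

prefix = '  > '
line = '  > print("hi")\n'

stripped = line[len(prefix) :]  # non-trivial bound: spaces around the colon
head = line[:4]  # simple bound: no extra spaces
print(repr(stripped), repr(head))
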
--- a/contrib/win32/hgwebdir_wsgi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/contrib/win32/hgwebdir_wsgi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -84,19 +84,20 @@
 hgweb_config = r'c:\your\directory\wsgi.config'
 
 # Global settings for IIS path translation
-path_strip = 0   # Strip this many path elements off (when using url rewrite)
+path_strip = 0  # Strip this many path elements off (when using url rewrite)
 path_prefix = 1  # This many path elements are prefixes (depends on the
-                 # virtual path of the IIS application).
+# virtual path of the IIS application).
 
 import sys
 
 # Adjust python path if this is not a system-wide install
-#sys.path.insert(0, r'C:\your\custom\hg\build\lib.win32-2.7')
+# sys.path.insert(0, r'C:\your\custom\hg\build\lib.win32-2.7')
 
 # Enable tracing. Run 'python -m win32traceutil' to debug
 if getattr(sys, 'isapidllhandle', None) is not None:
     import win32traceutil
-    win32traceutil.SetupForPrint # silence unused import warning
+
+    win32traceutil.SetupForPrint  # silence unused import warning
 
 import isapi_wsgi
 from mercurial.hgweb.hgwebdir_mod import hgwebdir
@@ -104,13 +105,15 @@
 # Example tweak: Replace isapi_wsgi's handler to provide better error message
 # Other stuff could also be done here, like logging errors etc.
 class WsgiHandler(isapi_wsgi.IsapiWsgiHandler):
-    error_status = '500 Internal Server Error' # less silly error message
+    error_status = '500 Internal Server Error'  # less silly error message
+
 
 isapi_wsgi.IsapiWsgiHandler = WsgiHandler
 
 # Only create the hgwebdir instance once
 application = hgwebdir(hgweb_config)
 
+
 def handler(environ, start_response):
 
     # Translate IIS's weird URLs
@@ -125,10 +128,13 @@
 
     return application(environ, start_response)
 
+
 def __ExtensionFactory__():
     return isapi_wsgi.ISAPISimpleHandler(handler)
 
-if __name__=='__main__':
+
+if __name__ == '__main__':
     from isapi.install import ISAPIParameters, HandleCommandLine
+
     params = ISAPIParameters()
     HandleCommandLine(params)
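
One caveat is visible in the hgwebdir_wsgi.py hunk above: black does not re-wrap comment text, so a continuation comment that was hand-aligned under a trailing comment is merely re-indented to the enclosing block, here column 0. A sketch of the two shapes; the variable mirrors the patch but the snippet itself is illustrative:

# Hand-aligned original shape:
#     path_prefix = 1  # This many path elements are prefixes (depends on the
#                      # virtual path of the IIS application).

# Shape after black, with the continuation comment at the block's indent:
path_prefix = 1  # This many path elements are prefixes (depends on the
# virtual path of the IIS application).
print(path_prefix)
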
--- a/doc/check-seclevel.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/doc/check-seclevel.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,7 +11,9 @@
 # import from the live mercurial repo
 os.environ['HGMODULEPOLICY'] = 'py'
 sys.path.insert(0, "..")
-from mercurial import demandimport; demandimport.enable()
+from mercurial import demandimport
+
+demandimport.enable()
 from mercurial import (
     commands,
     extensions,
@@ -36,13 +38,16 @@
 initlevel_ext = 1
 initlevel_ext_cmd = 3
 
+
 def showavailables(ui, initlevel):
-    avail = ('    available marks and order of them in this help: %s\n') % (
-        ', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1:]]))
+    avail = '    available marks and order of them in this help: %s\n' % (
+        ', '.join(['%r' % (m * 4) for m in level2mark[initlevel + 1 :]])
+    )
     ui.warn(avail.encode('utf-8'))
 
+
 def checkseclevel(ui, doc, name, initlevel):
-    ui.note(('checking "%s"\n') % name)
+    ui.note('checking "%s"\n' % name)
     if not isinstance(doc, bytes):
         doc = doc.encode('utf-8')
     blocks, pruned = minirst.parse(doc, 0, ['verbose'])
@@ -54,66 +59,77 @@
         mark = block[b'underline']
         title = block[b'lines'][0]
         if (mark not in mark2level) or (mark2level[mark] <= initlevel):
-            ui.warn((('invalid section mark %r for "%s" of %s\n') %
-                     (mark * 4, title, name)).encode('utf-8'))
+            ui.warn(
+                (
+                    'invalid section mark %r for "%s" of %s\n'
+                    % (mark * 4, title, name)
+                ).encode('utf-8')
+            )
             showavailables(ui, initlevel)
             errorcnt += 1
             continue
         nextlevel = mark2level[mark]
         if curlevel < nextlevel and curlevel + 1 != nextlevel:
-            ui.warn(('gap of section level at "%s" of %s\n') %
-                    (title, name))
+            ui.warn('gap of section level at "%s" of %s\n' % (title, name))
             showavailables(ui, initlevel)
             errorcnt += 1
             continue
-        ui.note(('appropriate section level for "%s %s"\n') %
-                (mark * (nextlevel * 2), title))
+        ui.note(
+            'appropriate section level for "%s %s"\n'
+            % (mark * (nextlevel * 2), title)
+        )
         curlevel = nextlevel
 
     return errorcnt
 
+
 def checkcmdtable(ui, cmdtable, namefmt, initlevel):
     errorcnt = 0
     for k, entry in cmdtable.items():
         name = k.split(b"|")[0].lstrip(b"^")
         if not entry[0].__doc__:
-            ui.note(('skip checking %s: no help document\n') %
-                    (namefmt % name))
+            ui.note('skip checking %s: no help document\n' % (namefmt % name))
             continue
-        errorcnt += checkseclevel(ui, entry[0].__doc__,
-                                  namefmt % name,
-                                  initlevel)
+        errorcnt += checkseclevel(
+            ui, entry[0].__doc__, namefmt % name, initlevel
+        )
     return errorcnt
 
+
 def checkhghelps(ui):
     errorcnt = 0
     for h in helptable:
         names, sec, doc = h[0:3]
         if callable(doc):
             doc = doc(ui)
-        errorcnt += checkseclevel(ui, doc,
-                                  '%s help topic' % names[0],
-                                  initlevel_topic)
+        errorcnt += checkseclevel(
+            ui, doc, '%s help topic' % names[0], initlevel_topic
+        )
 
     errorcnt += checkcmdtable(ui, table, '%s command', initlevel_cmd)
 
-    for name in sorted(list(extensions.enabled()) +
-                       list(extensions.disabled())):
+    for name in sorted(
+        list(extensions.enabled()) + list(extensions.disabled())
+    ):
         mod = extensions.load(ui, name, None)
         if not mod.__doc__:
-            ui.note(('skip checking %s extension: no help document\n') % name)
+            ui.note('skip checking %s extension: no help document\n' % name)
             continue
-        errorcnt += checkseclevel(ui, mod.__doc__,
-                                  '%s extension' % name,
-                                  initlevel_ext)
+        errorcnt += checkseclevel(
+            ui, mod.__doc__, '%s extension' % name, initlevel_ext
+        )
 
         cmdtable = getattr(mod, 'cmdtable', None)
         if cmdtable:
-            errorcnt += checkcmdtable(ui, cmdtable,
-                                      '%%s command of %s extension' % name,
-                                      initlevel_ext_cmd)
+            errorcnt += checkcmdtable(
+                ui,
+                cmdtable,
+                '%%s command of %s extension' % name,
+                initlevel_ext_cmd,
+            )
     return errorcnt
 
+
 def checkfile(ui, filename, initlevel):
     if filename == '-':
         filename = 'stdin'
@@ -122,43 +138,76 @@
         with open(filename) as fp:
             doc = fp.read()
 
-    ui.note(('checking input from %s with initlevel %d\n') %
-            (filename, initlevel))
+    ui.note(
+        'checking input from %s with initlevel %d\n' % (filename, initlevel)
+    )
     return checkseclevel(ui, doc, 'input from %s' % filename, initlevel)
 
+
 def main():
-    optparser = optparse.OptionParser("""%prog [options]
+    optparser = optparse.OptionParser(
+        """%prog [options]
 
 This checks all help documents of Mercurial (topics, commands,
 extensions and their commands) if no file is specified by the --file
 option.
-""")
-    optparser.add_option("-v", "--verbose",
-                         help="enable additional output",
-                         action="store_true")
-    optparser.add_option("-d", "--debug",
-                         help="debug mode",
-                         action="store_true")
-    optparser.add_option("-f", "--file",
-                         help="filename to read in (or '-' for stdin)",
-                         action="store", default="")
+"""
+    )
+    optparser.add_option(
+        "-v", "--verbose", help="enable additional output", action="store_true"
+    )
+    optparser.add_option(
+        "-d", "--debug", help="debug mode", action="store_true"
+    )
+    optparser.add_option(
+        "-f",
+        "--file",
+        help="filename to read in (or '-' for stdin)",
+        action="store",
+        default="",
+    )
 
-    optparser.add_option("-t", "--topic",
-                         help="parse file as help topic",
-                         action="store_const", dest="initlevel", const=0)
-    optparser.add_option("-c", "--command",
-                         help="parse file as help of core command",
-                         action="store_const", dest="initlevel", const=1)
-    optparser.add_option("-e", "--extension",
-                         help="parse file as help of extension",
-                         action="store_const", dest="initlevel", const=1)
-    optparser.add_option("-C", "--extension-command",
-                         help="parse file as help of extension command",
-                         action="store_const", dest="initlevel", const=3)
+    optparser.add_option(
+        "-t",
+        "--topic",
+        help="parse file as help topic",
+        action="store_const",
+        dest="initlevel",
+        const=0,
+    )
+    optparser.add_option(
+        "-c",
+        "--command",
+        help="parse file as help of core command",
+        action="store_const",
+        dest="initlevel",
+        const=1,
+    )
+    optparser.add_option(
+        "-e",
+        "--extension",
+        help="parse file as help of extension",
+        action="store_const",
+        dest="initlevel",
+        const=1,
+    )
+    optparser.add_option(
+        "-C",
+        "--extension-command",
+        help="parse file as help of extension command",
+        action="store_const",
+        dest="initlevel",
+        const=3,
+    )
 
-    optparser.add_option("-l", "--initlevel",
-                         help="set initial section level manually",
-                         action="store", type="int", default=0)
+    optparser.add_option(
+        "-l",
+        "--initlevel",
+        help="set initial section level manually",
+        action="store",
+        type="int",
+        default=0,
+    )
 
     (options, args) = optparser.parse_args()
 
@@ -173,5 +222,6 @@
         if checkhghelps(ui):
             sys.exit(1)
 
+
 if __name__ == "__main__":
     main()
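
The check-seclevel.py hunk splits a semicolon-joined import-and-call into separate statements, since black puts every simple statement on its own line. A self-contained illustration using a stdlib module rather than the mercurial import from the patch:

# Before: import os; os.getcwd()
import os

print(os.getcwd())  # each simple statement now sits on its own line
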
--- a/doc/gendoc.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/doc/gendoc.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,6 +12,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 except ImportError:
@@ -22,10 +23,13 @@
 os.environ[r'HGMODULEPOLICY'] = r'allow'
 # import from the live mercurial repo
 sys.path.insert(0, r"..")
-from mercurial import demandimport; demandimport.enable()
+from mercurial import demandimport
+
+demandimport.enable()
 # Load util so that the locale path is set by i18n.setdatapath() before
 # calling _().
 from mercurial import util
+
 util.datapath
 from mercurial import (
     commands,
@@ -46,6 +50,7 @@
 helptable = help.helptable
 loaddoc = help.loaddoc
 
+
 def get_desc(docstr):
     if not docstr:
         return b"", b""
@@ -56,7 +61,7 @@
 
     i = docstr.find(b"\n")
     if i != -1:
-        desc = docstr[i + 2:]
+        desc = docstr[i + 2 :]
     else:
         desc = shortdesc
 
@@ -64,6 +69,7 @@
 
     return (shortdesc, desc)
 
+
 def get_opts(opts):
     for opt in opts:
         if len(opt) == 5:
@@ -86,6 +92,7 @@
         desc += default and _(b" (default: %s)") % bytes(default) or b""
         yield (b", ".join(allopts), desc)
 
+
 def get_cmd(cmd, cmdtable):
     d = {}
     attr = cmdtable[cmd]
@@ -106,6 +113,7 @@
 
     return d
 
+
 def showdoc(ui):
     # print options
     ui.write(minirst.section(_(b"Options")))
@@ -127,14 +135,22 @@
     helpprinter(ui, helptable, minirst.section, exclude=[b'config'])
 
     ui.write(minirst.section(_(b"Extensions")))
-    ui.write(_(b"This section contains help for extensions that are "
-               b"distributed together with Mercurial. Help for other "
-               b"extensions is available in the help system."))
-    ui.write((b"\n\n"
-              b".. contents::\n"
-              b"   :class: htmlonly\n"
-              b"   :local:\n"
-              b"   :depth: 1\n\n"))
+    ui.write(
+        _(
+            b"This section contains help for extensions that are "
+            b"distributed together with Mercurial. Help for other "
+            b"extensions is available in the help system."
+        )
+    )
+    ui.write(
+        (
+            b"\n\n"
+            b".. contents::\n"
+            b"   :class: htmlonly\n"
+            b"   :local:\n"
+            b"   :depth: 1\n\n"
+        )
+    )
 
     for extensionname in sorted(allextensionnames()):
         mod = extensions.load(ui, extensionname, None)
@@ -143,24 +159,42 @@
         cmdtable = getattr(mod, 'cmdtable', None)
         if cmdtable:
             ui.write(minirst.subsubsection(_(b'Commands')))
-            commandprinter(ui, cmdtable, minirst.subsubsubsection,
-                    minirst.subsubsubsubsection)
+            commandprinter(
+                ui,
+                cmdtable,
+                minirst.subsubsubsection,
+                minirst.subsubsubsubsection,
+            )
+
 
 def showtopic(ui, topic):
     extrahelptable = [
         ([b"common"], b'', loaddoc(b'common'), help.TOPIC_CATEGORY_MISC),
         ([b"hg.1"], b'', loaddoc(b'hg.1'), help.TOPIC_CATEGORY_CONFIG),
         ([b"hg-ssh.8"], b'', loaddoc(b'hg-ssh.8'), help.TOPIC_CATEGORY_CONFIG),
-        ([b"hgignore.5"], b'', loaddoc(b'hgignore.5'),
-         help.TOPIC_CATEGORY_CONFIG),
+        (
+            [b"hgignore.5"],
+            b'',
+            loaddoc(b'hgignore.5'),
+            help.TOPIC_CATEGORY_CONFIG,
+        ),
         ([b"hgrc.5"], b'', loaddoc(b'hgrc.5'), help.TOPIC_CATEGORY_CONFIG),
-        ([b"hgignore.5.gendoc"], b'', loaddoc(b'hgignore'),
-         help.TOPIC_CATEGORY_CONFIG),
-        ([b"hgrc.5.gendoc"], b'', loaddoc(b'config'),
-         help.TOPIC_CATEGORY_CONFIG),
+        (
+            [b"hgignore.5.gendoc"],
+            b'',
+            loaddoc(b'hgignore'),
+            help.TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            [b"hgrc.5.gendoc"],
+            b'',
+            loaddoc(b'config'),
+            help.TOPIC_CATEGORY_CONFIG,
+        ),
     ]
     helpprinter(ui, helptable + extrahelptable, None, include=[topic])
 
+
 def helpprinter(ui, helptable, sectionfunc, include=[], exclude=[]):
     for h in helptable:
         names, sec, doc = h[0:3]
@@ -178,6 +212,7 @@
         ui.write(doc)
         ui.write(b"\n")
 
+
 def commandprinter(ui, cmdtable, sectionfunc, subsectionfunc):
     """Render restructuredtext describing a list of commands and their
     documentation, grouped by command category.
@@ -222,7 +257,8 @@
         if helpcategory(cmd) not in cmdsbycategory:
             raise AssertionError(
                 "The following command did not register its (category) in "
-                "help.CATEGORY_ORDER: %s (%s)" % (cmd, helpcategory(cmd)))
+                "help.CATEGORY_ORDER: %s (%s)" % (cmd, helpcategory(cmd))
+            )
         cmdsbycategory[helpcategory(cmd)].append(cmd)
 
     # Print the help for each command. We present the commands grouped by
@@ -270,16 +306,22 @@
                     if optstr.endswith(b"[+]>"):
                         multioccur = True
                 if multioccur:
-                    ui.write(_(b"\n[+] marked option can be specified"
-                               b" multiple times\n"))
+                    ui.write(
+                        _(
+                            b"\n[+] marked option can be specified"
+                            b" multiple times\n"
+                        )
+                    )
                 ui.write(b"\n")
             # aliases
             if d[b'aliases']:
                 ui.write(_(b"    aliases: %s\n\n") % b" ".join(d[b'aliases']))
 
+
 def allextensionnames():
     return set(extensions.enabled().keys()) | set(extensions.disabled().keys())
 
+
 if __name__ == "__main__":
     doc = b'hg.1.gendoc'
     if len(sys.argv) > 1:
--- a/doc/hgmanpage.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/doc/hgmanpage.py	Sun Oct 06 09:45:02 2019 -0400
@@ -53,6 +53,7 @@
     nodes,
     writers,
 )
+
 try:
     import roman
 except ImportError:
@@ -65,7 +66,7 @@
 
 # Define two macros so man/roff can calculate the
 # indent/unindent margins by itself
-MACRO_DEF = (r""".
+MACRO_DEF = r""".
 .nr rst2man-indent-level 0
 .
 .de1 rstReportMargin
@@ -92,11 +93,12 @@
 .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
 .in \\n[rst2man-indent\\n[rst2man-indent-level]]u
 ..
-""")
+"""
+
 
 class Writer(writers.Writer):
 
-    supported = ('manpage')
+    supported = 'manpage'
     """Formats this writer supports."""
 
     output = None
@@ -118,11 +120,14 @@
         self._options = ['center']
         self._tab_char = '\t'
         self._coldefs = []
+
     def new_row(self):
         self._rows.append([])
+
     def append_separator(self, separator):
         """Append the separator for table head."""
         self._rows.append([separator])
+
     def append_cell(self, cell_lines):
         """cell_lines is an array of lines"""
         start = 0
@@ -131,19 +136,21 @@
         self._rows[-1].append(cell_lines[start:])
         if len(self._coldefs) < len(self._rows[-1]):
             self._coldefs.append('l')
+
     def _minimize_cell(self, cell_lines):
         """Remove leading and trailing blank and ``.sp`` lines"""
-        while (cell_lines and cell_lines[0] in ('\n', '.sp\n')):
+        while cell_lines and cell_lines[0] in ('\n', '.sp\n'):
             del cell_lines[0]
-        while (cell_lines and cell_lines[-1] in ('\n', '.sp\n')):
+        while cell_lines and cell_lines[-1] in ('\n', '.sp\n'):
             del cell_lines[-1]
+
     def as_list(self):
         text = ['.TS\n']
         text.append(' '.join(self._options) + ';\n')
         text.append('|%s|.\n' % ('|'.join(self._coldefs)))
         for row in self._rows:
             # row = array of cells. cell = array of lines.
-            text.append('_\n')       # line above
+            text.append('_\n')  # line above
             text.append('T{\n')
             for i in range(len(row)):
                 cell = row[i]
@@ -152,13 +159,14 @@
                 if not text[-1].endswith('\n'):
                     text[-1] += '\n'
                 if i < len(row) - 1:
-                    text.append('T}'+self._tab_char+'T{\n')
+                    text.append('T}' + self._tab_char + 'T{\n')
                 else:
                     text.append('T}\n')
         text.append('_\n')
         text.append('.TE\n')
         return text
 
+
 class Translator(nodes.NodeVisitor):
     """"""
 
@@ -171,8 +179,9 @@
         lcode = settings.language_code
         arglen = len(inspect.getargspec(languages.get_language)[0])
         if arglen == 2:
-            self.language = languages.get_language(lcode,
-                                                   self.document.reporter)
+            self.language = languages.get_language(
+                lcode, self.document.reporter
+            )
         else:
             self.language = languages.get_language(lcode)
         self.head = []
@@ -189,16 +198,18 @@
         # writing the header .TH and .SH NAME is postponed after
         # docinfo.
         self._docinfo = {
-                "title" : "", "title_upper": "",
-                "subtitle" : "",
-                "manual_section" : "", "manual_group" : "",
-                "author" : [],
-                "date" : "",
-                "copyright" : "",
-                "version" : "",
-                    }
-        self._docinfo_keys = []     # a list to keep the sequence as in source.
-        self._docinfo_names = {}    # to get name from text not normalized.
+            "title": "",
+            "title_upper": "",
+            "subtitle": "",
+            "manual_section": "",
+            "manual_group": "",
+            "author": [],
+            "date": "",
+            "copyright": "",
+            "version": "",
+        }
+        self._docinfo_keys = []  # a list to keep the sequence as in source.
+        self._docinfo_names = {}  # to get name from text not normalized.
         self._in_docinfo = None
         self._active_table = None
         self._in_literal = False
@@ -217,25 +228,21 @@
         # ``B`` bold, ``I`` italic, ``R`` roman should be available.
         # Hopefully ``C`` courier too.
         self.defs = {
-                'indent' : ('.INDENT %.1f\n', '.UNINDENT\n'),
-                'definition_list_item' : ('.TP', ''),
-                'field_name' : ('.TP\n.B ', '\n'),
-                'literal' : ('\\fB', '\\fP'),
-                'literal_block' : ('.sp\n.nf\n.ft C\n', '\n.ft P\n.fi\n'),
-
-                'option_list_item' : ('.TP\n', ''),
-
-                'reference' : (r'\%', r'\:'),
-                'emphasis': ('\\fI', '\\fP'),
-                'strong' : ('\\fB', '\\fP'),
-                'term' : ('\n.B ', '\n'),
-                'title_reference' : ('\\fI', '\\fP'),
-
-                'topic-title' : ('.SS ',),
-                'sidebar-title' : ('.SS ',),
-
-                'problematic' : ('\n.nf\n', '\n.fi\n'),
-                    }
+            'indent': ('.INDENT %.1f\n', '.UNINDENT\n'),
+            'definition_list_item': ('.TP', ''),
+            'field_name': ('.TP\n.B ', '\n'),
+            'literal': ('\\fB', '\\fP'),
+            'literal_block': ('.sp\n.nf\n.ft C\n', '\n.ft P\n.fi\n'),
+            'option_list_item': ('.TP\n', ''),
+            'reference': (r'\%', r'\:'),
+            'emphasis': ('\\fI', '\\fP'),
+            'strong': ('\\fB', '\\fP'),
+            'term': ('\n.B ', '\n'),
+            'title_reference': ('\\fI', '\\fP'),
+            'topic-title': ('.SS ',),
+            'sidebar-title': ('.SS ',),
+            'problematic': ('\n.nf\n', '\n.fi\n'),
+        }
         # NOTE don't specify the newline before a dot-command, but ensure
         # it is there.
 
@@ -244,13 +251,13 @@
         line/comment."""
         prefix = '.\\" '
         out_text = ''.join(
-            [(prefix + in_line + '\n')
-            for in_line in text.split('\n')])
+            [(prefix + in_line + '\n') for in_line in text.split('\n')]
+        )
         return out_text
 
     def comment(self, text):
         """Return commented version of the passed text."""
-        return self.comment_begin(text)+'.\n'
+        return self.comment_begin(text) + '.\n'
 
     def ensure_eol(self):
         """Ensure the last line in body is terminated by new line."""
@@ -266,16 +273,21 @@
         for i in range(len(self.body) - 1, 0, -1):
             # remove superfluous vertical gaps.
             if self.body[i] == '.sp\n':
-                if self.body[i - 1][:4] in ('.BI ','.IP '):
+                if self.body[i - 1][:4] in ('.BI ', '.IP '):
                     self.body[i] = '.\n'
-                elif (self.body[i - 1][:3] == '.B ' and
-                    self.body[i - 2][:4] == '.TP\n'):
+                elif (
+                    self.body[i - 1][:3] == '.B '
+                    and self.body[i - 2][:4] == '.TP\n'
+                ):
                     self.body[i] = '.\n'
-                elif (self.body[i - 1] == '\n' and
-                    self.body[i - 2][0] != '.' and
-                    (self.body[i - 3][:7] == '.TP\n.B '
-                        or self.body[i - 3][:4] == '\n.B ')
-                     ):
+                elif (
+                    self.body[i - 1] == '\n'
+                    and self.body[i - 2][0] != '.'
+                    and (
+                        self.body[i - 3][:7] == '.TP\n.B '
+                        or self.body[i - 3][:4] == '\n.B '
+                    )
+                ):
                     self.body[i] = '.\n'
         return ''.join(self.head + self.body + self.foot)
 
@@ -286,13 +298,13 @@
 
     def visit_Text(self, node):
         text = node.astext()
-        text = text.replace('\\','\\e')
+        text = text.replace('\\', '\\e')
         replace_pairs = [
             (u'-', u'\\-'),
             (u"'", u'\\(aq'),
             (u'´', u"\\'"),
             (u'`', u'\\(ga'),
-            ]
+        ]
         for (in_char, out_markup) in replace_pairs:
             text = text.replace(in_char, out_markup)
         # unicode
@@ -310,9 +322,9 @@
     def list_start(self, node):
         class enum_char(object):
             enum_style = {
-                    'bullet'     : '\\(bu',
-                    'emdash'     : '\\(em',
-                     }
+                'bullet': '\\(bu',
+                'emdash': '\\(em',
+            }
 
             def __init__(self, style):
                 self._style = style
@@ -358,6 +370,7 @@
 
             def get_width(self):
                 return self._indent
+
             def __repr__(self):
                 return 'enum_style-%s' % list(self._style)
 
@@ -376,10 +389,12 @@
         self._list_char.pop()
 
     def header(self):
-        tmpl = (".TH %(title_upper)s %(manual_section)s"
-                " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n"
-                ".SH NAME\n"
-                "%(title)s \\- %(subtitle)s\n")
+        tmpl = (
+            ".TH %(title_upper)s %(manual_section)s"
+            " \"%(date)s\" \"%(version)s\" \"%(manual_group)s\"\n"
+            ".SH NAME\n"
+            "%(title)s \\- %(subtitle)s\n"
+        )
         return tmpl % self._docinfo
 
     def append_header(self):
@@ -400,8 +415,7 @@
 
     def visit_admonition(self, node, name=None):
         if name:
-            self.body.append('.IP %s\n' %
-                        self.language.labels.get(name, name))
+            self.body.append('.IP %s\n' % self.language.labels.get(name, name))
 
     def depart_admonition(self, node):
         self.body.append('.RE\n')
@@ -470,7 +484,7 @@
         pass
 
     def visit_citation_reference(self, node):
-        self.body.append('['+node.astext()+']')
+        self.body.append('[' + node.astext() + ']')
         raise nodes.SkipNode()
 
     def visit_classifier(self, node):
@@ -486,10 +500,9 @@
         pass
 
     def write_colspecs(self):
-        self.body.append("%s.\n" % ('L '*len(self.colspecs)))
+        self.body.append("%s.\n" % ('L ' * len(self.colspecs)))
 
-    def visit_comment(self, node,
-                      sub=re.compile('-(?=-)').sub):
+    def visit_comment(self, node, sub=re.compile('-(?=-)').sub):
         self.body.append(self.comment(node.astext()))
         raise nodes.SkipNode()
 
@@ -569,27 +582,39 @@
 
     def visit_document(self, node):
         # no blank line between comment and header.
-        self.body.append(self.comment(self.document_start).rstrip()+'\n')
+        self.body.append(self.comment(self.document_start).rstrip() + '\n')
         # writing the header is postponed
         self.header_written = 0
 
     def depart_document(self, node):
         if self._docinfo['author']:
-            self.body.append('.SH AUTHOR\n%s\n'
-                    % ', '.join(self._docinfo['author']))
-        skip = ('author', 'copyright', 'date',
-                'manual_group', 'manual_section',
-                'subtitle',
-                'title', 'title_upper', 'version')
+            self.body.append(
+                '.SH AUTHOR\n%s\n' % ', '.join(self._docinfo['author'])
+            )
+        skip = (
+            'author',
+            'copyright',
+            'date',
+            'manual_group',
+            'manual_section',
+            'subtitle',
+            'title',
+            'title_upper',
+            'version',
+        )
         for name in self._docinfo_keys:
             if name == 'address':
-                self.body.append("\n%s:\n%s%s.nf\n%s\n.fi\n%s%s" % (
-                                    self.language.labels.get(name, name),
-                                    self.defs['indent'][0] % 0,
-                                    self.defs['indent'][0] % BLOCKQOUTE_INDENT,
-                                    self._docinfo[name],
-                                    self.defs['indent'][1],
-                                    self.defs['indent'][1]))
+                self.body.append(
+                    "\n%s:\n%s%s.nf\n%s\n.fi\n%s%s"
+                    % (
+                        self.language.labels.get(name, name),
+                        self.defs['indent'][0] % 0,
+                        self.defs['indent'][0] % BLOCKQOUTE_INDENT,
+                        self._docinfo[name],
+                        self.defs['indent'][1],
+                        self.defs['indent'][1],
+                    )
+                )
             elif name not in skip:
                 if name in self._docinfo_names:
                     label = self._docinfo_names[name]
@@ -597,10 +622,10 @@
                     label = self.language.labels.get(name, name)
                 self.body.append("\n%s: %s\n" % (label, self._docinfo[name]))
         if self._docinfo['copyright']:
-            self.body.append('.SH COPYRIGHT\n%s\n'
-                    % self._docinfo['copyright'])
-        self.body.append(self.comment(
-                        'Generated by docutils manpage writer.\n'))
+            self.body.append('.SH COPYRIGHT\n%s\n' % self._docinfo['copyright'])
+        self.body.append(
+            self.comment('Generated by docutils manpage writer.\n')
+        )
 
     def visit_emphasis(self, node):
         self.body.append(self.defs['emphasis'][0])
@@ -611,11 +636,13 @@
     def visit_entry(self, node):
         # a cell in a table row
         if 'morerows' in node:
-            self.document.reporter.warning('"table row spanning" not supported',
-                    base_node=node)
+            self.document.reporter.warning(
+                '"table row spanning" not supported', base_node=node
+            )
         if 'morecols' in node:
             self.document.reporter.warning(
-                    '"table cell spanning" not supported', base_node=node)
+                '"table cell spanning" not supported', base_node=node
+            )
         self.context.append(len(self.body))
 
     def depart_entry(self, node):
@@ -642,7 +669,7 @@
 
     def visit_field_body(self, node):
         if self._in_docinfo:
-            name_normalized = self._field_name.lower().replace(" ","_")
+            name_normalized = self._field_name.lower().replace(" ", "_")
             self._docinfo_names[name_normalized] = self._field_name
             self.visit_docinfo_item(node, name_normalized)
             raise nodes.SkipNode()
@@ -675,8 +702,7 @@
         self.dedent()
 
     def visit_footer(self, node):
-        self.document.reporter.warning('"footer" not supported',
-                base_node=node)
+        self.document.reporter.warning('"footer" not supported', base_node=node)
 
     def depart_footer(self, node):
         pass
@@ -690,11 +716,12 @@
         pass
 
     def footnote_backrefs(self, node):
-        self.document.reporter.warning('"footnote_backrefs" not supported',
-                base_node=node)
+        self.document.reporter.warning(
+            '"footnote_backrefs" not supported', base_node=node
+        )
 
     def visit_footnote_reference(self, node):
-        self.body.append('['+self.deunicode(node.astext())+']')
+        self.body.append('[' + self.deunicode(node.astext()) + ']')
         raise nodes.SkipNode()
 
     def depart_footnote_reference(self, node):
@@ -736,8 +763,7 @@
         self.body.append('\n')
 
     def visit_image(self, node):
-        self.document.reporter.warning('"image" not supported',
-                base_node=node)
+        self.document.reporter.warning('"image" not supported', base_node=node)
         text = []
         if 'alt' in node.attributes:
             text.append(node.attributes['alt'])
@@ -753,11 +779,11 @@
 
     def visit_label(self, node):
         # footnote and citation
-        if (isinstance(node.parent, nodes.footnote)
-            or isinstance(node.parent, nodes.citation)):
+        if isinstance(node.parent, nodes.footnote) or isinstance(
+            node.parent, nodes.citation
+        ):
             raise nodes.SkipNode()
-        self.document.reporter.warning('"unsupported "label"',
-                base_node=node)
+        self.document.reporter.warning('"unsupported "label"', base_node=node)
         self.body.append('[')
 
     def depart_label(self, node):
@@ -794,9 +820,10 @@
 
     def visit_list_item(self, node):
         # man 7 man argues to use ".IP" instead of ".TP"
-        self.body.append('.IP %s %d\n' % (
-                next(self._list_char[-1]),
-                self._list_char[-1].get_width(),))
+        self.body.append(
+            '.IP %s %d\n'
+            % (next(self._list_char[-1]), self._list_char[-1].get_width(),)
+        )
 
     def depart_list_item(self, node):
         pass
@@ -855,9 +882,9 @@
         # options with parameter bold italic, .BI, -f file
         #
         # we do not know if .B or .BI
-        self.context.append('.B')           # blind guess
-        self.context.append(len(self.body)) # to be able to insert later
-        self.context.append(0)              # option counter
+        self.context.append('.B')  # blind guess
+        self.context.append(len(self.body))  # to be able to insert later
+        self.context.append(0)  # option counter
 
     def depart_option_group(self, node):
         self.context.pop()  # the counter
@@ -885,7 +912,7 @@
         pass
 
     def visit_option_argument(self, node):
-        self.context[-3] = '.BI' # bold/italic alternate
+        self.context[-3] = '.BI'  # bold/italic alternate
         if node['delimiter'] != ' ':
             self.body.append('\\fB%s ' % node['delimiter'])
         elif self.body[len(self.body) - 1].endswith('='):
@@ -968,8 +995,9 @@
         raise nodes.SkipNode()
 
     def visit_substitution_reference(self, node):
-        self.document.reporter.warning('"substitution_reference" not supported',
-                base_node=node)
+        self.document.reporter.warning(
+            '"substitution_reference" not supported', base_node=node
+        )
 
     def visit_subtitle(self, node):
         if isinstance(node.parent, nodes.sidebar):
@@ -981,11 +1009,11 @@
 
     def depart_subtitle(self, node):
         # document subtitle calls SkipNode
-        self.body.append(self.defs['strong'][1]+'\n.PP\n')
+        self.body.append(self.defs['strong'][1] + '\n.PP\n')
 
     def visit_system_message(self, node):
         # TODO add report_level
-        #if node['level'] < self.document.reporter['writer'].report_level:
+        # if node['level'] < self.document.reporter['writer'].report_level:
         #    Level is too low to display:
         #    raise nodes.SkipNode
         attr = {}
@@ -995,8 +1023,10 @@
             line = ', line %s' % node['line']
         else:
             line = ''
-        self.body.append('.IP "System Message: %s/%s (%s:%s)"\n'
-                         % (node['type'], node['level'], node['source'], line))
+        self.body.append(
+            '.IP "System Message: %s/%s (%s:%s)"\n'
+            % (node['type'], node['level'], node['source'], line)
+        )
 
     def depart_system_message(self, node):
         pass
@@ -1111,7 +1141,9 @@
     depart_warning = depart_admonition
 
     def unimplemented_visit(self, node):
-        raise NotImplementedError('visiting unimplemented node type: %s'
-                                  % node.__class__.__name__)
+        raise NotImplementedError(
+            'visiting unimplemented node type: %s' % node.__class__.__name__
+        )
+
 
 # vim: set fileencoding=utf-8 et ts=4 ai :
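
The hgmanpage.py hunks collapse hand-aligned dict literals into black's canonical shape: one key per line at a single indent, no space before the colon, and a trailing comma. A minimal sketch with hypothetical entries:

# Hand-aligned original shape:
# defs = {
#         'literal' : ('\\fB', '\\fP'),
#         'strong'  : ('\\fB', '\\fP'),
#             }

# Black's canonical layout:
defs = {
    'literal': ('\\fB', '\\fP'),
    'strong': ('\\fB', '\\fP'),
}
print(defs)
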
--- a/hgdemandimport/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgdemandimport/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,10 +30,10 @@
     '_imp',
     '_xmlplus',
     'fcntl',
-    'nt', # pathlib2 tests the existence of built-in 'nt' module
+    'nt',  # pathlib2 tests the existence of built-in 'nt' module
     'win32com.gen_py',
-    'win32com.shell', # 'appdirs' tries to import win32com.shell
-    '_winreg', # 2.7 mimetypes needs immediate ImportError
+    'win32com.shell',  # 'appdirs' tries to import win32com.shell
+    '_winreg',  # 2.7 mimetypes needs immediate ImportError
     'pythoncom',
     # imported by tarfile, not available under Windows
     'pwd',
@@ -46,16 +46,16 @@
     # setuptools' pkg_resources.py expects "from __main__ import x" to
     # raise ImportError if x not defined
     '__main__',
-    '_ssl', # conditional imports in the stdlib, issue1964
-    '_sre', # issue4920
+    '_ssl',  # conditional imports in the stdlib, issue1964
+    '_sre',  # issue4920
     'rfc822',
     'mimetools',
-    'sqlalchemy.events', # has import-time side effects (issue5085)
+    'sqlalchemy.events',  # has import-time side effects (issue5085)
     # setuptools 8 expects this module to explode early when not on windows
     'distutils.msvc9compiler',
     '__builtin__',
     'builtins',
-    'urwid.command_map', # for pudb
+    'urwid.command_map',  # for pudb
 }
 
 _pypy = '__pypy__' in sys.builtin_module_names
@@ -71,8 +71,11 @@
 disable = demandimport.disable
 deactivated = demandimport.deactivated
 
+
 def enable():
     # chg pre-imports modules so do not enable demandimport for it
-    if ('CHGINTERNALMARK' not in os.environ
-        and os.environ.get('HGDEMANDIMPORT') != 'disable'):
+    if (
+        'CHGINTERNALMARK' not in os.environ
+        and os.environ.get('HGDEMANDIMPORT') != 'disable'
+    ):
         demandimport.enable()
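
The hgdemandimport hunk above wraps a long condition by parenthesizing the whole test and starting each continuation line with the boolean operator. A runnable sketch; the environment-variable names are examples only:

import os

# A parenthesized condition with operator-first continuation lines replaces
# any backslash continuation or over-long single line.
if (
    'EXAMPLE_MARK' not in os.environ
    and os.environ.get('EXAMPLE_FLAG') != 'disable'
):
    print('would enable')
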
--- a/hgdemandimport/demandimportpy2.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgdemandimport/demandimportpy2.py	Sun Oct 06 09:45:02 2019 -0400
@@ -38,6 +38,7 @@
 
 nothing = object()
 
+
 def _hgextimport(importfunc, name, globals, *args, **kwargs):
     try:
         return importfunc(name, globals, *args, **kwargs)
@@ -53,6 +54,7 @@
         # retry to import with "hgext_" prefix
         return importfunc(hgextname, globals, *args, **kwargs)
 
+
 class _demandmod(object):
     """module demand-loader and proxy
 
@@ -67,8 +69,9 @@
         else:
             head = name
             after = []
-        object.__setattr__(self, r"_data",
-                           (head, globals, locals, after, level, set()))
+        object.__setattr__(
+            self, r"_data", (head, globals, locals, after, level, set())
+        )
         object.__setattr__(self, r"_module", None)
 
     def _extend(self, name):
@@ -91,7 +94,8 @@
             with tracing.log('demandimport %s', self._data[0]):
                 head, globals, locals, after, level, modrefs = self._data
                 mod = _hgextimport(
-                    _origimport, head, globals, locals, None, level)
+                    _origimport, head, globals, locals, None, level
+                )
                 if mod is self:
                     # In this case, _hgextimport() above should imply
                     # _demandimport(). Otherwise, _hgextimport() never
@@ -115,8 +119,11 @@
                     if '.' in p:
                         h, t = p.split('.', 1)
                     if getattr(mod, h, nothing) is nothing:
-                        setattr(mod, h, _demandmod(
-                            p, mod.__dict__, mod.__dict__, level=1))
+                        setattr(
+                            mod,
+                            h,
+                            _demandmod(p, mod.__dict__, mod.__dict__, level=1),
+                        )
                     elif t:
                         subload(getattr(mod, h), t)
 
@@ -164,15 +171,17 @@
         self._load()
         return self._module.__doc__
 
+
 _pypy = '__pypy__' in sys.builtin_module_names
 
+
 def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
     if locals is None or name in ignores or fromlist == ('*',):
         # in these cases we can't really delay
         return _hgextimport(_origimport, name, globals, locals, fromlist, level)
     elif not fromlist:
         # import a [as b]
-        if '.' in name: # a.b
+        if '.' in name:  # a.b
             base, rest = name.split('.', 1)
             # email.__init__ loading email.mime
             if globals and globals.get('__name__', None) == base:
@@ -244,8 +253,9 @@
         if level >= 0:
             if name:
                 # "from a import b" or "from .a import b" style
-                rootmod = _hgextimport(_origimport, name, globals, locals,
-                                       level=level)
+                rootmod = _hgextimport(
+                    _origimport, name, globals, locals, level=level
+                )
                 mod = chainmodules(rootmod, name)
             elif _pypy:
                 # PyPy's __import__ throws an exception if invoked
@@ -260,8 +270,9 @@
                     mn = mn.rsplit('.', level - 1)[0]
                     mod = sys.modules[mn]
             else:
-                mod = _hgextimport(_origimport, name, globals, locals,
-                                   level=level)
+                mod = _hgextimport(
+                    _origimport, name, globals, locals, level=level
+                )
 
             for x in fromlist:
                 processfromitem(mod, x)
@@ -278,23 +289,29 @@
 
         return mod
 
+
 ignores = set()
 
+
 def init(ignoreset):
     global ignores
     ignores = ignoreset
 
+
 def isenabled():
     return builtins.__import__ == _demandimport
 
+
 def enable():
     "enable global demand-loading of modules"
     builtins.__import__ = _demandimport
 
+
 def disable():
     "disable global demand-loading of modules"
     builtins.__import__ = _origimport
 
+
 @contextmanager
 def deactivated():
     "context manager for disabling demandimport in 'with' blocks"
--- a/hgdemandimport/demandimportpy3.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgdemandimport/demandimportpy3.py	Sun Oct 06 09:45:02 2019 -0400
@@ -36,10 +36,12 @@
 
 _deactivated = False
 
+
 class _lazyloaderex(importlib.util.LazyLoader):
     """This is a LazyLoader except it also follows the _deactivated global and
     the ignore list.
     """
+
     def exec_module(self, module):
         """Make the module load lazily."""
         with tracing.log('demandimport %s', module):
@@ -48,14 +50,18 @@
             else:
                 super().exec_module(module)
 
+
 # This is 3.6+ because with Python 3.5 it isn't possible to lazily load
 # extensions. See the discussion in https://bugs.python.org/issue26186 for more.
 _extensions_loader = _lazyloaderex.factory(
-    importlib.machinery.ExtensionFileLoader)
+    importlib.machinery.ExtensionFileLoader
+)
 _bytecode_loader = _lazyloaderex.factory(
-    importlib.machinery.SourcelessFileLoader)
+    importlib.machinery.SourcelessFileLoader
+)
 _source_loader = _lazyloaderex.factory(importlib.machinery.SourceFileLoader)
 
+
 def _makefinder(path):
     return importlib.machinery.FileFinder(
         path,
@@ -65,15 +71,19 @@
         (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
     )
 
+
 ignores = set()
 
+
 def init(ignoreset):
     global ignores
     ignores = ignoreset
 
+
 def isenabled():
     return _makefinder in sys.path_hooks and not _deactivated
 
+
 def disable():
     try:
         while True:
@@ -81,9 +91,11 @@
     except ValueError:
         pass
 
+
 def enable():
     sys.path_hooks.insert(0, _makefinder)
 
+
 @contextlib.contextmanager
 def deactivated():
     # This implementation is a bit different from Python 2's. Python 3
--- a/hgdemandimport/tracing.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgdemandimport/tracing.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,6 +14,7 @@
 _checked = False
 _session = 'none'
 
+
 def _isactive():
     global _pipe, _session, _checked
     if _pipe is None:
@@ -26,6 +27,7 @@
         _session = os.environ.get('HGCATAPULTSESSION', 'none')
     return True
 
+
 @contextlib.contextmanager
 def log(whencefmt, *whenceargs):
     if not _isactive():
@@ -48,6 +50,7 @@
         except IOError:
             pass
 
+
 def counter(label, amount, *labelargs):
     if not _isactive():
         return
--- a/hgext/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,3 +1,4 @@
 from __future__ import absolute_import
 import pkgutil
+
 __path__ = pkgutil.extend_path(__path__, __name__)
--- a/hgext/absorb.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/absorb.py	Sun Oct 06 09:45:02 2019 -0400
@@ -53,9 +53,7 @@
     scmutil,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -81,8 +79,10 @@
 
 defaultdict = collections.defaultdict
 
+
 class nullui(object):
     """blank ui object doing nothing"""
+
     debugflag = False
     verbose = False
     quiet = True
@@ -90,16 +90,20 @@
     def __getitem__(name):
         def nullfunc(*args, **kwds):
             return
+
         return nullfunc
 
+
 class emptyfilecontext(object):
     """minimal filecontext representing an empty file"""
+
     def data(self):
         return ''
 
     def node(self):
         return node.nullid
 
+
 def uniq(lst):
     """list -> list. remove duplicated items without changing the order"""
     seen = set()
@@ -110,6 +114,7 @@
             result.append(x)
     return result
 
+
 def getdraftstack(headctx, limit=None):
     """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets.
 
@@ -132,6 +137,7 @@
     result.reverse()
     return result
 
+
 def getfilestack(stack, path, seenfctxs=None):
     """([ctx], str, set) -> [fctx], {ctx: fctx}
 
@@ -179,25 +185,25 @@
     fctxs = []
     fctxmap = {}
 
-    pctx = stack[0].p1() # the public (immutable) ctx we stop at
+    pctx = stack[0].p1()  # the public (immutable) ctx we stop at
     for ctx in reversed(stack):
-        if path not in ctx: # the file is added in the next commit
+        if path not in ctx:  # the file is added in the next commit
             pctx = ctx
             break
         fctx = ctx[path]
         fctxs.append(fctx)
-        if fctx in seenfctxs: # treat fctx as the immutable one
-            pctx = None # do not add another immutable fctx
+        if fctx in seenfctxs:  # treat fctx as the immutable one
+            pctx = None  # do not add another immutable fctx
             break
-        fctxmap[ctx] = fctx # only for mutable fctxs
+        fctxmap[ctx] = fctx  # only for mutable fctxs
         copy = fctx.copysource()
         if copy:
-            path = copy # follow rename
-            if path in ctx: # but do not follow copy
+            path = copy  # follow rename
+            if path in ctx:  # but do not follow copy
                 pctx = ctx.p1()
                 break
 
-    if pctx is not None: # need an extra immutable fctx
+    if pctx is not None:  # need an extra immutable fctx
         if path in pctx:
             fctxs.append(pctx[path])
         else:
@@ -213,10 +219,12 @@
     # remove uniq and find a different way to identify fctxs.
     return uniq(fctxs), fctxmap
 
+
 class overlaystore(patch.filestore):
     """read-only, hybrid store based on a dict and ctx.
     memworkingcopy: {path: content}, overrides file contents.
     """
+
     def __init__(self, basectx, memworkingcopy):
         self.basectx = basectx
         self.memworkingcopy = memworkingcopy
@@ -234,6 +242,7 @@
         copy = fctx.copysource()
         return content, mode, copy
 
+
 def overlaycontext(memworkingcopy, ctx, parents=None, extra=None):
     """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx
     memworkingcopy overrides file contents.
@@ -249,9 +258,17 @@
     files = set(ctx.files()).union(memworkingcopy)
     store = overlaystore(ctx, memworkingcopy)
     return context.memctx(
-        repo=ctx.repo(), parents=parents, text=desc,
-        files=files, filectxfn=store, user=user, date=date,
-        branch=None, extra=extra)
+        repo=ctx.repo(),
+        parents=parents,
+        text=desc,
+        files=files,
+        filectxfn=store,
+        user=user,
+        date=date,
+        branch=None,
+        extra=extra,
+    )
+
 
 class filefixupstate(object):
     """state needed to apply fixups to a single file
@@ -294,10 +311,10 @@
             assert self._checkoutlinelog() == self.contents
 
         # following fields will be filled later
-        self.chunkstats = [0, 0] # [adopted, total : int]
-        self.targetlines = [] # [str]
-        self.fixups = [] # [(linelog rev, a1, a2, b1, b2)]
-        self.finalcontents = [] # [str]
+        self.chunkstats = [0, 0]  # [adopted, total : int]
+        self.targetlines = []  # [str]
+        self.fixups = []  # [(linelog rev, a1, a2, b1, b2)]
+        self.finalcontents = []  # [str]
         self.ctxaffected = set()
 
     def diffwith(self, targetfctx, fm=None):
@@ -319,7 +336,7 @@
         self.targetlines = blines
 
         self.linelog.annotate(self.linelog.maxrev)
-        annotated = self.linelog.annotateresult # [(linelog rev, linenum)]
+        annotated = self.linelog.annotateresult  # [(linelog rev, linenum)]
         assert len(annotated) == len(alines)
         # add a dummy end line to make insertion at the end easier
         if annotated:
@@ -329,7 +346,7 @@
         # analyse diff blocks
         for chunk in self._alldiffchunks(a, b, alines, blines):
             newfixups = self._analysediffchunk(chunk, annotated)
-            self.chunkstats[0] += bool(newfixups) # 1 or 0
+            self.chunkstats[0] += bool(newfixups)  # 1 or 0
             self.chunkstats[1] += 1
             self.fixups += newfixups
             if fm is not None:
@@ -346,9 +363,10 @@
             blines = self.targetlines[b1:b2]
             if self.ui.debugflag:
                 idx = (max(rev - 1, 0)) // 2
-                self.ui.write(_('%s: chunk %d:%d -> %d lines\n')
-                              % (node.short(self.fctxs[idx].node()),
-                                 a1, a2, len(blines)))
+                self.ui.write(
+                    _('%s: chunk %d:%d -> %d lines\n')
+                    % (node.short(self.fctxs[idx].node()), a1, a2, len(blines))
+                )
             self.linelog.replacelines(rev, a1, a2, b1, b2)
         if self.opts.get('edit_lines', False):
             self.finalcontents = self._checkoutlinelogwithedits()
@@ -382,12 +400,13 @@
         a1, a2, b1, b2 = chunk
         # find involved indexes from annotate result
         involved = annotated[a1:a2]
-        if not involved and annotated: # a1 == a2 and a is not empty
+        if not involved and annotated:  # a1 == a2 and a is not empty
             # pure insertion, check nearby lines. ignore lines belonging
             # to the public (first) changeset (i.e. annotated[i][0] == 1)
             nearbylinenums = {a2, max(0, a1 - 1)}
-            involved = [annotated[i]
-                        for i in nearbylinenums if annotated[i][0] != 1]
+            involved = [
+                annotated[i] for i in nearbylinenums if annotated[i][0] != 1
+            ]
         involvedrevs = list(set(r for r, l in involved))
         newfixups = []
         if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True):
@@ -401,9 +420,9 @@
             for i in pycompat.xrange(a1, a2):
                 rev, linenum = annotated[i]
                 if rev > 1:
-                    if b1 == b2: # deletion, simply remove that single line
+                    if b1 == b2:  # deletion, simply remove that single line
                         nb1 = nb2 = 0
-                    else: # 1:1 line mapping, change the corresponding rev
+                    else:  # 1:1 line mapping, change the corresponding rev
                         nb1 = b1 + i - a1
                         nb2 = nb1 + 1
                     fixuprev = rev + 1
@@ -448,32 +467,45 @@
         """() -> [str]. prompt all lines for edit"""
         alllines = self.linelog.getalllines()
         # header
-        editortext = (_('HG: editing %s\nHG: "y" means the line to the right '
-                        'exists in the changeset to the top\nHG:\n')
-                      % self.fctxs[-1].path())
+        editortext = (
+            _(
+                'HG: editing %s\nHG: "y" means the line to the right '
+                'exists in the changeset to the top\nHG:\n'
+            )
+            % self.fctxs[-1].path()
+        )
         # [(idx, fctx)]. hide the dummy emptyfilecontext
-        visiblefctxs = [(i, f)
-                        for i, f in enumerate(self.fctxs)
-                        if not isinstance(f, emptyfilecontext)]
+        visiblefctxs = [
+            (i, f)
+            for i, f in enumerate(self.fctxs)
+            if not isinstance(f, emptyfilecontext)
+        ]
         for i, (j, f) in enumerate(visiblefctxs):
-            editortext += (_('HG: %s/%s %s %s\n') %
-                           ('|' * i, '-' * (len(visiblefctxs) - i + 1),
-                            node.short(f.node()),
-                            f.description().split('\n',1)[0]))
+            editortext += _('HG: %s/%s %s %s\n') % (
+                '|' * i,
+                '-' * (len(visiblefctxs) - i + 1),
+                node.short(f.node()),
+                f.description().split('\n', 1)[0],
+            )
         editortext += _('HG: %s\n') % ('|' * len(visiblefctxs))
         # figure out the lifetime of a line, this is relatively inefficient,
         # but probably fine
-        lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}}
+        lineset = defaultdict(lambda: set())  # {(llrev, linenum): {llrev}}
         for i, f in visiblefctxs:
             self.linelog.annotate((i + 1) * 2)
             for l in self.linelog.annotateresult:
                 lineset[l].add(i)
         # append lines
         for l in alllines:
-            editortext += ('    %s : %s' %
-                           (''.join([('y' if i in lineset[l] else ' ')
-                                     for i, _f in visiblefctxs]),
-                            self._getline(l)))
+            editortext += '    %s : %s' % (
+                ''.join(
+                    [
+                        ('y' if i in lineset[l] else ' ')
+                        for i, _f in visiblefctxs
+                    ]
+                ),
+                self._getline(l),
+            )
         # run editor
         editedtext = self.ui.edit(editortext, '', action='absorb')
         if not editedtext:
@@ -485,11 +517,12 @@
         for l in mdiff.splitnewlines(editedtext):
             if l.startswith('HG:'):
                 continue
-            if l[colonpos - 1:colonpos + 2] != ' : ':
+            if l[colonpos - 1 : colonpos + 2] != ' : ':
                 raise error.Abort(_('malformed line: %s') % l)
-            linecontent = l[colonpos + 2:]
+            linecontent = l[colonpos + 2 :]
             for i, ch in enumerate(
-                    pycompat.bytestr(l[leftpadpos:colonpos - 1])):
+                pycompat.bytestr(l[leftpadpos : colonpos - 1])
+            ):
                 if ch == 'y':
                     contents[visiblefctxs[i][0]] += linecontent
         # chunkstats is hard to calculate if anything changes, therefore
@@ -501,9 +534,9 @@
     def _getline(self, lineinfo):
         """((rev, linenum)) -> str. convert rev+line number to line content"""
         rev, linenum = lineinfo
-        if rev & 1: # odd: original line taken from fctxs
+        if rev & 1:  # odd: original line taken from fctxs
             return self.contentlines[rev // 2][linenum]
-        else: # even: fixup line from targetfctx
+        else:  # even: fixup line from targetfctx
             return self.targetlines[linenum]
 
     def _iscontinuous(self, a1, a2, closedinterval=False):
@@ -539,8 +572,12 @@
             lastrev = pcurrentchunk[0][0]
             lasta2 = pcurrentchunk[0][2]
             lastb2 = pcurrentchunk[0][4]
-            if (a1 == lasta2 and b1 == lastb2 and rev == lastrev and
-                    self._iscontinuous(max(a1 - 1, 0), a1)):
+            if (
+                a1 == lasta2
+                and b1 == lastb2
+                and rev == lastrev
+                and self._iscontinuous(max(a1 - 1, 0), a1)
+            ):
                 # merge into currentchunk
                 pcurrentchunk[0][2] = a2
                 pcurrentchunk[0][4] = b2
@@ -551,7 +588,6 @@
         return result
 
     def _showchanges(self, fm, alines, blines, chunk, fixups):
-
         def trim(line):
             if line.endswith('\n'):
                 line = line[:-1]
@@ -568,9 +604,12 @@
                 bidxs[i - b1] = (max(idx, 1) - 1) // 2
 
         fm.startitem()
-        fm.write('hunk', '        %s\n',
-                 '@@ -%d,%d +%d,%d @@'
-                 % (a1, a2 - a1, b1, b2 - b1), label='diff.hunk')
+        fm.write(
+            'hunk',
+            '        %s\n',
+            '@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1),
+            label='diff.hunk',
+        )
         fm.data(path=self.path, linetype='hunk')
 
         def writeline(idx, diffchar, line, linetype, linelabel):
@@ -582,16 +621,24 @@
                 node = ctx.hex()
                 self.ctxaffected.add(ctx.changectx())
             fm.write('node', '%-7.7s ', node, label='absorb.node')
-            fm.write('diffchar ' + linetype, '%s%s\n', diffchar, line,
-                     label=linelabel)
+            fm.write(
+                'diffchar ' + linetype,
+                '%s%s\n',
+                diffchar,
+                line,
+                label=linelabel,
+            )
             fm.data(path=self.path, linetype=linetype)
 
         for i in pycompat.xrange(a1, a2):
-            writeline(aidxs[i - a1], '-', trim(alines[i]), 'deleted',
-                      'diff.deleted')
+            writeline(
+                aidxs[i - a1], '-', trim(alines[i]), 'deleted', 'diff.deleted'
+            )
         for i in pycompat.xrange(b1, b2):
-            writeline(bidxs[i - b1], '+', trim(blines[i]), 'inserted',
-                      'diff.inserted')
+            writeline(
+                bidxs[i - b1], '+', trim(blines[i]), 'inserted', 'diff.inserted'
+            )
+
 
 class fixupstate(object):
     """state needed to run absorb
@@ -619,13 +666,13 @@
         self.repo = stack[-1].repo().unfiltered()
 
         # following fields will be filled later
-        self.paths = [] # [str]
-        self.status = None # ctx.status output
-        self.fctxmap = {} # {path: {ctx: fctx}}
-        self.fixupmap = {} # {path: filefixupstate}
-        self.replacemap = {} # {oldnode: newnode or None}
-        self.finalnode = None # head after all fixups
-        self.ctxaffected = set() # ctx that will be absorbed into
+        self.paths = []  # [str]
+        self.status = None  # ctx.status output
+        self.fctxmap = {}  # {path: {ctx: fctx}}
+        self.fixupmap = {}  # {path: filefixupstate}
+        self.replacemap = {}  # {oldnode: newnode or None}
+        self.finalnode = None  # head after all fixups
+        self.ctxaffected = set()  # ctx that will be absorbed into
 
     def diffwith(self, targetctx, match=None, fm=None):
         """diff and prepare fixups. update self.fixupmap, self.paths"""
@@ -648,9 +695,11 @@
             targetfctx = targetctx[path]
             fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs)
             # ignore symbolic links, binary files, or unchanged files
-            if any(f.islink() or stringutil.binary(f.data())
-                   for f in [targetfctx] + fctxs
-                   if not isinstance(f, emptyfilecontext)):
+            if any(
+                f.islink() or stringutil.binary(f.data())
+                for f in [targetfctx] + fctxs
+                if not isinstance(f, emptyfilecontext)
+            ):
                 continue
             if targetfctx.data() == fctxs[-1].data() and not editopt:
                 continue
@@ -677,8 +726,10 @@
     @property
     def chunkstats(self):
         """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
-        return dict((path, state.chunkstats)
-                    for path, state in self.fixupmap.iteritems())
+        return dict(
+            (path, state.chunkstats)
+            for path, state in self.fixupmap.iteritems()
+        )
 
     def commit(self):
         """commit changes. update self.finalnode, self.replacemap"""
@@ -698,8 +749,10 @@
             # chunkstats for each file
             for path, stat in chunkstats.iteritems():
                 if stat[0]:
-                    ui.write(_('%s: %d of %d chunk(s) applied\n')
-                             % (path, stat[0], stat[1]))
+                    ui.write(
+                        _('%s: %d of %d chunk(s) applied\n')
+                        % (path, stat[0], stat[1])
+                    )
         elif not ui.quiet:
             # a summary for all files
             stats = chunkstats.values()
@@ -733,7 +786,9 @@
                 self.replacemap[ctx.node()] = lastcommitted.node()
                 if memworkingcopy:
                     msg = _('%d file(s) changed, became %s') % (
-                        len(memworkingcopy), self._ctx2str(lastcommitted))
+                        len(memworkingcopy),
+                        self._ctx2str(lastcommitted),
+                    )
                 else:
                     msg = _('became %s') % self._ctx2str(lastcommitted)
             if self.ui.verbose and msg:
@@ -754,7 +809,7 @@
         """
         result = {}
         for path in self.paths:
-            ctx2fctx = self.fctxmap[path] # {ctx: fctx}
+            ctx2fctx = self.fctxmap[path]  # {ctx: fctx}
             if ctx not in ctx2fctx:
                 continue
             fctx = ctx2fctx[ctx]
@@ -766,16 +821,19 @@
 
     def _movebookmarks(self, tr):
         repo = self.repo
-        needupdate = [(name, self.replacemap[hsh])
-                      for name, hsh in repo._bookmarks.iteritems()
-                      if hsh in self.replacemap]
+        needupdate = [
+            (name, self.replacemap[hsh])
+            for name, hsh in repo._bookmarks.iteritems()
+            if hsh in self.replacemap
+        ]
         changes = []
         for name, hsh in needupdate:
             if hsh:
                 changes.append((name, hsh))
                 if self.ui.verbose:
-                    self.ui.write(_('moving bookmark %s to %s\n')
-                                  % (name, node.hex(hsh)))
+                    self.ui.write(
+                        _('moving bookmark %s to %s\n') % (name, node.hex(hsh))
+                    )
             else:
                 changes.append((name, None))
                 if self.ui.verbose:
@@ -798,8 +856,10 @@
         restore = noop
         if util.safehasattr(dirstate, '_fsmonitorstate'):
             bak = dirstate._fsmonitorstate.invalidate
+
             def restore():
                 dirstate._fsmonitorstate.invalidate = bak
+
             dirstate._fsmonitorstate.invalidate = noop
         try:
             with dirstate.parentchange():
@@ -852,11 +912,15 @@
         return obsolete.isenabled(self.repo, obsolete.createmarkersopt)
 
     def _cleanupoldcommits(self):
-        replacements = {k: ([v] if v is not None else [])
-                        for k, v in self.replacemap.iteritems()}
+        replacements = {
+            k: ([v] if v is not None else [])
+            for k, v in self.replacemap.iteritems()
+        }
         if replacements:
-            scmutil.cleanupnodes(self.repo, replacements, operation='absorb',
-                                 fixphase=True)
+            scmutil.cleanupnodes(
+                self.repo, replacements, operation='absorb', fixphase=True
+            )
+
 
 def _parsechunk(hunk):
     """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))"""
@@ -874,6 +938,7 @@
     blines = [l[1:] for l in patchlines[1:] if not l.startswith('-')]
     return path, (a1, a2, blines)
 
+
 def overlaydiffcontext(ctx, chunks):
     """(ctx, [crecord.uihunk]) -> memctx
 
@@ -889,8 +954,8 @@
     # as we only care about applying changes to modified files, no mode
     # change, no binary diff, and no renames, it's probably okay to
     # re-invent the logic using much simpler code here.
-    memworkingcopy = {} # {path: content}
-    patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]}
+    memworkingcopy = {}  # {path: content}
+    patchmap = defaultdict(lambda: [])  # {path: [(a1, a2, [bline])]}
     for path, info in map(_parsechunk, chunks):
         if not path or not info:
             continue
@@ -905,6 +970,7 @@
         memworkingcopy[path] = ''.join(lines)
     return overlaycontext(memworkingcopy, ctx)
 
+
 def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None):
     """pick fixup chunks from targetctx, apply them to stack.
 
@@ -919,12 +985,13 @@
             raise error.Abort(_('cannot absorb into a merge'))
         stack = getdraftstack(headctx, limit)
         if limit and len(stack) >= limit:
-            ui.warn(_('absorb: only the recent %d changesets will '
-                      'be analysed\n')
-                    % limit)
+            ui.warn(
+                _('absorb: only the recent %d changesets will ' 'be analysed\n')
+                % limit
+            )
     if not stack:
         raise error.Abort(_('no mutable changeset to change'))
-    if targetctx is None: # default to working copy
+    if targetctx is None:  # default to working copy
         targetctx = repo[None]
     if pats is None:
         pats = ()
@@ -953,13 +1020,19 @@
             fm.data(linetype='changeset')
             fm.write('node', '%-7.7s ', ctx.hex(), label='absorb.node')
             descfirstline = ctx.description().splitlines()[0]
-            fm.write('descfirstline', '%s\n', descfirstline,
-                     label='absorb.description')
+            fm.write(
+                'descfirstline',
+                '%s\n',
+                descfirstline,
+                label='absorb.description',
+            )
         fm.end()
     if not opts.get('dry_run'):
-        if (not opts.get('apply_changes') and
-            state.ctxaffected and
-            ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)):
+        if (
+            not opts.get('apply_changes')
+            and state.ctxaffected
+            and ui.promptchoice("apply changes (yn)? $$ &Yes $$ &No", default=1)
+        ):
             raise error.Abort(_('absorb cancelled\n'))
 
         state.apply()
@@ -969,20 +1042,45 @@
             ui.write(_('nothing applied\n'))
     return state
 
-@command('absorb',
-         [('a', 'apply-changes', None,
-           _('apply changes without prompting for confirmation')),
-          ('p', 'print-changes', None,
-           _('always print which changesets are modified by which changes')),
-          ('i', 'interactive', None,
-           _('interactively select which chunks to apply (EXPERIMENTAL)')),
-          ('e', 'edit-lines', None,
-           _('edit what lines belong to which changesets before commit '
-             '(EXPERIMENTAL)')),
-         ] + commands.dryrunopts + commands.templateopts + commands.walkopts,
-         _('hg absorb [OPTION] [FILE]...'),
-         helpcategory=command.CATEGORY_COMMITTING,
-         helpbasic=True)
+
+@command(
+    'absorb',
+    [
+        (
+            'a',
+            'apply-changes',
+            None,
+            _('apply changes without prompting for confirmation'),
+        ),
+        (
+            'p',
+            'print-changes',
+            None,
+            _('always print which changesets are modified by which changes'),
+        ),
+        (
+            'i',
+            'interactive',
+            None,
+            _('interactively select which chunks to apply (EXPERIMENTAL)'),
+        ),
+        (
+            'e',
+            'edit-lines',
+            None,
+            _(
+                'edit what lines belong to which changesets before commit '
+                '(EXPERIMENTAL)'
+            ),
+        ),
+    ]
+    + commands.dryrunopts
+    + commands.templateopts
+    + commands.walkopts,
+    _('hg absorb [OPTION] [FILE]...'),
+    helpcategory=command.CATEGORY_COMMITTING,
+    helpbasic=True,
+)
 def absorbcmd(ui, repo, *pats, **opts):
     """incorporate corrections into the stack of draft changesets
 
--- a/hgext/acl.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/acl.py	Sun Oct 06 09:45:02 2019 -0400
@@ -224,9 +224,7 @@
     registrar,
     util,
 )
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 urlreq = util.urlreq
 
@@ -240,33 +238,29 @@
 configitem = registrar.configitem(configtable)
 
 # deprecated config: acl.config
-configitem('acl', 'config',
-    default=None,
+configitem(
+    'acl', 'config', default=None,
 )
-configitem('acl.groups', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'acl.groups', '.*', default=None, generic=True,
 )
-configitem('acl.deny.branches', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'acl.deny.branches', '.*', default=None, generic=True,
 )
-configitem('acl.allow.branches', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'acl.allow.branches', '.*', default=None, generic=True,
 )
-configitem('acl.deny', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'acl.deny', '.*', default=None, generic=True,
 )
-configitem('acl.allow', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'acl.allow', '.*', default=None, generic=True,
 )
-configitem('acl', 'sources',
-    default=lambda: ['serve'],
+configitem(
+    'acl', 'sources', default=lambda: ['serve'],
 )
 
+
 def _getusers(ui, group):
 
     # First, try to use group definition from section [acl.groups]
@@ -281,6 +275,7 @@
     except KeyError:
         raise error.Abort(_("group '%s' is undefined") % group)
 
+
 def _usermatch(ui, user, usersorgroups):
 
     if usersorgroups == '*':
@@ -293,29 +288,35 @@
             # if ug is a user  name: !username
             # if ug is a group name: !@groupname
             ug = ug[1:]
-            if (not ug.startswith('@') and user != ug
-                or ug.startswith('@') and user not in _getusers(ui, ug[1:])):
+            if (
+                not ug.startswith('@')
+                and user != ug
+                or ug.startswith('@')
+                and user not in _getusers(ui, ug[1:])
+            ):
                 return True
 
         # Test for user or group. Format:
         # if ug is a user  name: username
         # if ug is a group name: @groupname
-        elif (user == ug
-              or ug.startswith('@') and user in _getusers(ui, ug[1:])):
+        elif user == ug or ug.startswith('@') and user in _getusers(ui, ug[1:]):
             return True
 
     return False
 
+
 def buildmatch(ui, repo, user, key):
     '''return tuple of (match function, list enabled).'''
     if not ui.has_section(key):
         ui.debug('acl: %s not enabled\n' % key)
         return None
 
-    pats = [pat for pat, users in ui.configitems(key)
-            if _usermatch(ui, user, users)]
-    ui.debug('acl: %s enabled, %d entries for user %s\n' %
-             (key, len(pats), user))
+    pats = [
+        pat for pat, users in ui.configitems(key) if _usermatch(ui, user, users)
+    ]
+    ui.debug(
+        'acl: %s enabled, %d entries for user %s\n' % (key, len(pats), user)
+    )
 
     # Branch-based ACL
     if not repo:
@@ -332,6 +333,7 @@
         return match.match(repo.root, '', pats)
     return util.never
 
+
 def ensureenabled(ui):
     """make sure the extension is enabled when used as hook
 
@@ -345,16 +347,22 @@
     ui.setconfig('extensions', 'acl', '', source='internal')
     extensions.loadall(ui, ['acl'])
 
+
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
 
     ensureenabled(ui)
 
     if hooktype not in ['pretxnchangegroup', 'pretxncommit', 'prepushkey']:
         raise error.Abort(
-            _('config error - hook type "%s" cannot stop '
-              'incoming changesets, commits, nor bookmarks') % hooktype)
-    if (hooktype == 'pretxnchangegroup' and
-        source not in ui.configlist('acl', 'sources')):
+            _(
+                'config error - hook type "%s" cannot stop '
+                'incoming changesets, commits, nor bookmarks'
+            )
+            % hooktype
+        )
+    if hooktype == 'pretxnchangegroup' and source not in ui.configlist(
+        'acl', 'sources'
+    ):
         ui.debug('acl: changes have source "%s" - skipping\n' % source)
         return
 
@@ -374,6 +382,7 @@
     else:
         _txnhook(ui, repo, hooktype, node, source, user, **kwargs)
 
+
 def _pkhook(ui, repo, hooktype, node, source, user, **kwargs):
     if kwargs[r'namespace'] == 'bookmarks':
         bookmark = kwargs[r'key']
@@ -382,22 +391,38 @@
         denybookmarks = buildmatch(ui, None, user, 'acl.deny.bookmarks')
 
         if denybookmarks and denybookmarks(bookmark):
-            raise error.Abort(_('acl: user "%s" denied on bookmark "%s"'
-                               ' (changeset "%s")')
-                               % (user, bookmark, ctx))
+            raise error.Abort(
+                _('acl: user "%s" denied on bookmark "%s"' ' (changeset "%s")')
+                % (user, bookmark, ctx)
+            )
         if allowbookmarks and not allowbookmarks(bookmark):
-            raise error.Abort(_('acl: user "%s" not allowed on bookmark "%s"'
-                               ' (changeset "%s")')
-                               % (user, bookmark, ctx))
-        ui.debug('acl: bookmark access granted: "%s" on bookmark "%s"\n'
-                 % (ctx, bookmark))
+            raise error.Abort(
+                _(
+                    'acl: user "%s" not allowed on bookmark "%s"'
+                    ' (changeset "%s")'
+                )
+                % (user, bookmark, ctx)
+            )
+        ui.debug(
+            'acl: bookmark access granted: "%s" on bookmark "%s"\n'
+            % (ctx, bookmark)
+        )
+
 
 def _txnhook(ui, repo, hooktype, node, source, user, **kwargs):
     # deprecated config: acl.config
     cfg = ui.config('acl', 'config')
     if cfg:
-        ui.readconfig(cfg, sections=['acl.groups', 'acl.allow.branches',
-            'acl.deny.branches', 'acl.allow', 'acl.deny'])
+        ui.readconfig(
+            cfg,
+            sections=[
+                'acl.groups',
+                'acl.allow.branches',
+                'acl.deny.branches',
+                'acl.allow',
+                'acl.deny',
+            ],
+        )
 
     allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
     denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
@@ -408,21 +433,31 @@
         ctx = repo[rev]
         branch = ctx.branch()
         if denybranches and denybranches(branch):
-            raise error.Abort(_('acl: user "%s" denied on branch "%s"'
-                               ' (changeset "%s")')
-                               % (user, branch, ctx))
+            raise error.Abort(
+                _('acl: user "%s" denied on branch "%s"' ' (changeset "%s")')
+                % (user, branch, ctx)
+            )
         if allowbranches and not allowbranches(branch):
-            raise error.Abort(_('acl: user "%s" not allowed on branch "%s"'
-                               ' (changeset "%s")')
-                               % (user, branch, ctx))
-        ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
-        % (ctx, branch))
+            raise error.Abort(
+                _(
+                    'acl: user "%s" not allowed on branch "%s"'
+                    ' (changeset "%s")'
+                )
+                % (user, branch, ctx)
+            )
+        ui.debug(
+            'acl: branch access granted: "%s" on branch "%s"\n' % (ctx, branch)
+        )
 
         for f in ctx.files():
             if deny and deny(f):
-                raise error.Abort(_('acl: user "%s" denied on "%s"'
-                ' (changeset "%s")') % (user, f, ctx))
+                raise error.Abort(
+                    _('acl: user "%s" denied on "%s"' ' (changeset "%s")')
+                    % (user, f, ctx)
+                )
             if allow and not allow(f):
-                raise error.Abort(_('acl: user "%s" not allowed on "%s"'
-                ' (changeset "%s")') % (user, f, ctx))
+                raise error.Abort(
+                    _('acl: user "%s" not allowed on "%s"' ' (changeset "%s")')
+                    % (user, f, ctx)
+                )
         ui.debug('acl: path access granted: "%s"\n' % ctx)
--- a/hgext/amend.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/amend.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,20 +29,35 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
-@command('amend',
-    [('A', 'addremove', None,
-      _('mark new/missing files as added/removed before committing')),
-     ('e', 'edit', None, _('invoke editor on commit messages')),
-     ('i', 'interactive', None, _('use interactive mode')),
-     (b'', b'close-branch', None,
-      _(b'mark a branch as closed, hiding it from the branch list')),
-     (b's', b'secret', None, _(b'use the secret phase for committing')),
-     ('n', 'note', '', _('store a note on the amend')),
-    ] + cmdutil.walkopts + cmdutil.commitopts + cmdutil.commitopts2
-      + cmdutil.commitopts3,
+
+@command(
+    'amend',
+    [
+        (
+            'A',
+            'addremove',
+            None,
+            _('mark new/missing files as added/removed before committing'),
+        ),
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        ('i', 'interactive', None, _('use interactive mode')),
+        (
+            b'',
+            b'close-branch',
+            None,
+            _(b'mark a branch as closed, hiding it from the branch list'),
+        ),
+        (b's', b'secret', None, _(b'use the secret phase for committing')),
+        ('n', 'note', '', _('store a note on the amend')),
+    ]
+    + cmdutil.walkopts
+    + cmdutil.commitopts
+    + cmdutil.commitopts2
+    + cmdutil.commitopts3,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
-    inferrepo=True)
+    inferrepo=True,
+)
 def amend(ui, repo, *pats, **opts):
     """amend the working copy parent with all or specified outstanding changes
 
--- a/hgext/automv.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/automv.py	Sun Oct 06 09:45:02 2019 -0400
@@ -35,22 +35,23 @@
     pycompat,
     registrar,
     scmutil,
-    similar
+    similar,
 )
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('automv', 'similarity',
-    default=95,
+configitem(
+    'automv', 'similarity', default=95,
 )
 
+
 def extsetup(ui):
-    entry = extensions.wrapcommand(
-        commands.table, 'commit', mvcheck)
+    entry = extensions.wrapcommand(commands.table, 'commit', mvcheck)
     entry[1].append(
-        ('', 'no-automv', None,
-         _('disable automatic file move detection')))
+        ('', 'no-automv', None, _('disable automatic file move detection'))
+    )
+
 
 def mvcheck(orig, ui, repo, *pats, **opts):
     """Hook to check for moves at commit time"""
@@ -65,14 +66,16 @@
             match = scmutil.match(repo[None], pats, opts)
             added, removed = _interestingfiles(repo, match)
             uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
-            renames = _findrenames(repo, uipathfn, added, removed,
-                                   threshold / 100.0)
+            renames = _findrenames(
+                repo, uipathfn, added, removed, threshold / 100.0
+            )
 
     with repo.wlock():
         if renames is not None:
             scmutil._markchanges(repo, (), (), renames)
         return orig(ui, repo, *pats, **pycompat.strkwargs(opts))
 
+
 def _interestingfiles(repo, matcher):
     """Find what files were added or removed in this commit.
 
@@ -90,6 +93,7 @@
 
     return added, removed
 
+
 def _findrenames(repo, uipathfn, added, removed, similarity):
     """Find what files in added are really moved files.
 
@@ -100,11 +104,13 @@
     renames = {}
     if similarity > 0:
         for src, dst, score in similar.findrenames(
-                repo, added, removed, similarity):
+            repo, added, removed, similarity
+        ):
             if repo.ui.verbose:
                 repo.ui.status(
-                    _('detected move of %s as %s (%d%% similar)\n') % (
-                        uipathfn(src), uipathfn(dst), score * 100))
+                    _('detected move of %s as %s (%d%% similar)\n')
+                    % (uipathfn(src), uipathfn(dst), score * 100)
+                )
             renames[dst] = src
     if renames:
         repo.ui.status(_('detected move of %d files\n') % len(renames))
--- a/hgext/beautifygraph.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/beautifygraph.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,55 +28,64 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def prettyedge(before, edge, after):
     if edge == '~':
-        return '\xE2\x95\xA7' # U+2567 ╧
+        return '\xE2\x95\xA7'  # U+2567 ╧
     if edge == '/':
-        return '\xE2\x95\xB1' # U+2571 ╱
+        return '\xE2\x95\xB1'  # U+2571 ╱
     if edge == '-':
-        return '\xE2\x94\x80' # U+2500 ─
+        return '\xE2\x94\x80'  # U+2500 ─
     if edge == '|':
-        return '\xE2\x94\x82' # U+2502 │
+        return '\xE2\x94\x82'  # U+2502 │
     if edge == ':':
-        return '\xE2\x94\x86' # U+2506 ┆
+        return '\xE2\x94\x86'  # U+2506 ┆
     if edge == '\\':
-        return '\xE2\x95\xB2' # U+2572 ╲
+        return '\xE2\x95\xB2'  # U+2572 ╲
     if edge == '+':
-        if before == ' ' and not after  == ' ':
-            return '\xE2\x94\x9C' # U+251C ├
-        if after  == ' ' and not before == ' ':
-            return '\xE2\x94\xA4' # U+2524 ┤
-        return '\xE2\x94\xBC' # U+253C ┼
+        if before == ' ' and not after == ' ':
+            return '\xE2\x94\x9C'  # U+251C ├
+        if after == ' ' and not before == ' ':
+            return '\xE2\x94\xA4'  # U+2524 ┤
+        return '\xE2\x94\xBC'  # U+253C ┼
     return edge
 
+
 def convertedges(line):
     line = ' %s ' % line
     pretty = []
     for idx in pycompat.xrange(len(line) - 2):
-        pretty.append(prettyedge(line[idx:idx + 1],
-                                 line[idx + 1:idx + 2],
-                                 line[idx + 2:idx + 3]))
+        pretty.append(
+            prettyedge(
+                line[idx : idx + 1],
+                line[idx + 1 : idx + 2],
+                line[idx + 2 : idx + 3],
+            )
+        )
     return ''.join(pretty)
 
+
 def getprettygraphnode(orig, *args, **kwargs):
     node = orig(*args, **kwargs)
     if node == 'o':
-        return '\xE2\x97\x8B' # U+25CB ○
+        return '\xE2\x97\x8B'  # U+25CB ○
     if node == '@':
-        return '\xE2\x97\x8D' # U+25CD ◍
+        return '\xE2\x97\x8D'  # U+25CD ◍
     if node == '*':
-        return '\xE2\x88\x97' # U+2217 ∗
+        return '\xE2\x88\x97'  # U+2217 ∗
     if node == 'x':
-        return '\xE2\x97\x8C' # U+25CC ◌
+        return '\xE2\x97\x8C'  # U+25CC ◌
     if node == '_':
-        return '\xE2\x95\xA4' # U+2564 ╤
+        return '\xE2\x95\xA4'  # U+2564 ╤
     return node
 
+
 def outputprettygraph(orig, ui, graph, *args, **kwargs):
     (edges, text) = zip(*graph)
     graph = zip([convertedges(e) for e in edges], text)
     return orig(ui, graph, *args, **kwargs)
 
+
 def extsetup(ui):
     if ui.plain('graph'):
         return
@@ -86,8 +95,12 @@
         return
 
     if r'A' in encoding._wide:
-        ui.warn(_('beautifygraph: unsupported terminal settings, '
-                  'monospace narrow text required\n'))
+        ui.warn(
+            _(
+                'beautifygraph: unsupported terminal settings, '
+                'monospace narrow text required\n'
+            )
+        )
         return
 
     extensions.wrapfunction(graphmod, 'outputgraph', outputprettygraph)
--- a/hgext/blackbox.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/blackbox.py	Sun Oct 06 09:45:02 2019 -0400
@@ -71,30 +71,33 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('blackbox', 'dirty',
-    default=False,
+configitem(
+    'blackbox', 'dirty', default=False,
 )
-configitem('blackbox', 'maxsize',
-    default='1 MB',
+configitem(
+    'blackbox', 'maxsize', default='1 MB',
+)
+configitem(
+    'blackbox', 'logsource', default=False,
 )
-configitem('blackbox', 'logsource',
-    default=False,
+configitem(
+    'blackbox', 'maxfiles', default=7,
 )
-configitem('blackbox', 'maxfiles',
-    default=7,
+configitem(
+    'blackbox', 'track', default=lambda: ['*'],
 )
-configitem('blackbox', 'track',
-    default=lambda: ['*'],
-)
-configitem('blackbox', 'ignore',
+configitem(
+    'blackbox',
+    'ignore',
     default=lambda: ['chgserver', 'cmdserver', 'extension'],
 )
-configitem('blackbox', 'date-format',
-    default='%Y/%m/%d %H:%M:%S',
+configitem(
+    'blackbox', 'date-format', default='%Y/%m/%d %H:%M:%S',
 )
 
 _lastlogger = loggingutil.proxylogger()
 
+
 class blackboxlogger(object):
     def __init__(self, ui, repo):
         self._repo = repo
@@ -105,9 +108,9 @@
         self._inlog = False
 
     def tracked(self, event):
-        return ((b'*' in self._trackedevents
-                 and event not in self._ignoredevents)
-                or event in self._trackedevents)
+        return (
+            b'*' in self._trackedevents and event not in self._ignoredevents
+        ) or event in self._trackedevents
 
     def log(self, ui, event, msg, opts):
         # self._log() -> ctx.dirty() may create new subrepo instance, which
@@ -129,9 +132,10 @@
         changed = ''
         ctx = self._repo[None]
         parents = ctx.parents()
-        rev = ('+'.join([hex(p.node()) for p in parents]))
-        if (ui.configbool('blackbox', 'dirty') and
-            ctx.dirty(missing=True, merge=False, branch=False)):
+        rev = '+'.join([hex(p.node()) for p in parents])
+        if ui.configbool('blackbox', 'dirty') and ctx.dirty(
+            missing=True, merge=False, branch=False
+        ):
             changed = '+'
         if ui.configbool('blackbox', 'logsource'):
             src = ' [%s]' % event
@@ -141,20 +145,28 @@
             fmt = '%s %s @%s%s (%s)%s> %s'
             args = (date, user, rev, changed, pid, src, msg)
             with loggingutil.openlogfile(
-                    ui, self._repo.vfs, name='blackbox.log',
-                    maxfiles=self._maxfiles, maxsize=self._maxsize) as fp:
+                ui,
+                self._repo.vfs,
+                name='blackbox.log',
+                maxfiles=self._maxfiles,
+                maxsize=self._maxsize,
+            ) as fp:
                 fp.write(fmt % args)
         except (IOError, OSError) as err:
             # deactivate this to avoid failed logging again
             self._trackedevents.clear()
-            ui.debug('warning: cannot write to blackbox.log: %s\n' %
-                     encoding.strtolocal(err.strerror))
+            ui.debug(
+                'warning: cannot write to blackbox.log: %s\n'
+                % encoding.strtolocal(err.strerror)
+            )
             return
         _lastlogger.logger = self
 
+
 def uipopulate(ui):
     ui.setlogger(b'blackbox', _lastlogger)
 
+
 def reposetup(ui, repo):
     # During 'hg pull' an httppeer repo is created to represent the remote repo.
     # It doesn't have a .hg directory to put a blackbox in, so we don't do
@@ -174,12 +186,14 @@
 
     repo._wlockfreeprefix.add('blackbox.log')
 
-@command('blackbox',
-    [('l', 'limit', 10, _('the number of events to show')),
-    ],
+
+@command(
+    'blackbox',
+    [('l', 'limit', 10, _('the number of events to show')),],
     _('hg blackbox [OPTION]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
-    helpbasic=True)
+    helpbasic=True,
+)
 def blackbox(ui, repo, *revs, **opts):
     '''view the recent repository events
     '''
--- a/hgext/bookflow.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/bookflow.py	Sun Oct 06 09:45:02 2019 -0400
@@ -36,20 +36,26 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 def commit_hook(ui, repo, **kwargs):
     active = repo._bookmarks.active
     if active:
         if active in ui.configlist(MY_NAME, 'protect'):
             raise error.Abort(
-                _('cannot commit, bookmark %s is protected') % active)
+                _('cannot commit, bookmark %s is protected') % active
+            )
         if not cwd_at_bookmark(repo, active):
             raise error.Abort(
-       _('cannot commit, working directory out of sync with active bookmark'),
-                hint=_("run 'hg up %s'") % active)
+                _(
+                    'cannot commit, working directory out of sync with active bookmark'
+                ),
+                hint=_("run 'hg up %s'") % active,
+            )
     elif ui.configbool(MY_NAME, 'require-bookmark', True):
         raise error.Abort(_('cannot commit without an active bookmark'))
     return 0
 
+
 def bookmarks_update(orig, repo, parents, node):
     if len(parents) == 2:
         # called during commit
@@ -58,43 +64,59 @@
         # called during update
         return False
 
+
 def bookmarks_addbookmarks(
-        orig, repo, tr, names, rev=None, force=False, inactive=False):
+    orig, repo, tr, names, rev=None, force=False, inactive=False
+):
     if not rev:
         marks = repo._bookmarks
         for name in names:
             if name in marks:
-                raise error.Abort(_(
-                    "bookmark %s already exists, to move use the --rev option"
-                    ) % name)
+                raise error.Abort(
+                    _(
+                        "bookmark %s already exists, to move use the --rev option"
+                    )
+                    % name
+                )
     return orig(repo, tr, names, rev, force, inactive)
 
+
 def commands_commit(orig, ui, repo, *args, **opts):
     commit_hook(ui, repo)
     return orig(ui, repo, *args, **opts)
 
+
 def commands_pull(orig, ui, repo, *args, **opts):
     rc = orig(ui, repo, *args, **opts)
     active = repo._bookmarks.active
     if active and not cwd_at_bookmark(repo, active):
-        ui.warn(_(
-            "working directory out of sync with active bookmark, run "
-            "'hg up %s'"
-        ) % active)
+        ui.warn(
+            _(
+                "working directory out of sync with active bookmark, run "
+                "'hg up %s'"
+            )
+            % active
+        )
     return rc
 
+
 def commands_branch(orig, ui, repo, label=None, **opts):
     if label and not opts.get(r'clean') and not opts.get(r'rev'):
         raise error.Abort(
-         _("creating named branches is disabled and you should use bookmarks"),
-            hint="see 'hg help bookflow'")
+            _(
+                "creating named branches is disabled and you should use bookmarks"
+            ),
+            hint="see 'hg help bookflow'",
+        )
     return orig(ui, repo, label, **opts)
 
+
 def cwd_at_bookmark(repo, mark):
     mark_id = repo._bookmarks[mark]
     cur_id = repo.lookup('.')
     return cur_id == mark_id
 
+
 def uisetup(ui):
     extensions.wrapfunction(bookmarks, 'update', bookmarks_update)
     extensions.wrapfunction(bookmarks, 'addbookmarks', bookmarks_addbookmarks)
--- a/hgext/bugzilla.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/bugzilla.py	Sun Oct 06 09:45:02 2019 -0400
@@ -324,72 +324,81 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('bugzilla', 'apikey',
-    default='',
+configitem(
+    'bugzilla', 'apikey', default='',
 )
-configitem('bugzilla', 'bzdir',
-    default='/var/www/html/bugzilla',
+configitem(
+    'bugzilla', 'bzdir', default='/var/www/html/bugzilla',
+)
+configitem(
+    'bugzilla', 'bzemail', default=None,
 )
-configitem('bugzilla', 'bzemail',
-    default=None,
+configitem(
+    'bugzilla', 'bzurl', default='http://localhost/bugzilla/',
 )
-configitem('bugzilla', 'bzurl',
-    default='http://localhost/bugzilla/',
+configitem(
+    'bugzilla', 'bzuser', default=None,
 )
-configitem('bugzilla', 'bzuser',
-    default=None,
+configitem(
+    'bugzilla', 'db', default='bugs',
 )
-configitem('bugzilla', 'db',
-    default='bugs',
-)
-configitem('bugzilla', 'fixregexp',
-    default=(br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
-             br'(?:nos?\.?|num(?:ber)?s?)?\s*'
-             br'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
-             br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+configitem(
+    'bugzilla',
+    'fixregexp',
+    default=(
+        br'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
+        br'(?:nos?\.?|num(?:ber)?s?)?\s*'
+        br'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+        br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?'
+    ),
 )
-configitem('bugzilla', 'fixresolution',
-    default='FIXED',
+configitem(
+    'bugzilla', 'fixresolution', default='FIXED',
 )
-configitem('bugzilla', 'fixstatus',
-    default='RESOLVED',
+configitem(
+    'bugzilla', 'fixstatus', default='RESOLVED',
 )
-configitem('bugzilla', 'host',
-    default='localhost',
+configitem(
+    'bugzilla', 'host', default='localhost',
 )
-configitem('bugzilla', 'notify',
-    default=configitem.dynamicdefault,
+configitem(
+    'bugzilla', 'notify', default=configitem.dynamicdefault,
 )
-configitem('bugzilla', 'password',
-    default=None,
+configitem(
+    'bugzilla', 'password', default=None,
 )
-configitem('bugzilla', 'regexp',
-    default=(br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
-             br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
-             br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
+configitem(
+    'bugzilla',
+    'regexp',
+    default=(
+        br'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
+        br'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
+        br'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?'
+    ),
 )
-configitem('bugzilla', 'strip',
-    default=0,
+configitem(
+    'bugzilla', 'strip', default=0,
 )
-configitem('bugzilla', 'style',
-    default=None,
+configitem(
+    'bugzilla', 'style', default=None,
 )
-configitem('bugzilla', 'template',
-    default=None,
+configitem(
+    'bugzilla', 'template', default=None,
 )
-configitem('bugzilla', 'timeout',
-    default=5,
+configitem(
+    'bugzilla', 'timeout', default=5,
 )
-configitem('bugzilla', 'user',
-    default='bugs',
+configitem(
+    'bugzilla', 'user', default='bugs',
 )
-configitem('bugzilla', 'usermap',
-    default=None,
+configitem(
+    'bugzilla', 'usermap', default=None,
 )
-configitem('bugzilla', 'version',
-    default=None,
+configitem(
+    'bugzilla', 'version', default=None,
 )
 
+
 class bzaccess(object):
     '''Base class for access to Bugzilla.'''
 
@@ -434,6 +443,7 @@
         emails automatically.
         '''
 
+
 # Bugzilla via direct access to MySQL database.
 class bzmysql(bzaccess):
     '''Support for direct MySQL access to Bugzilla.
@@ -454,6 +464,7 @@
     def __init__(self, ui):
         try:
             import MySQLdb as mysql
+
             bzmysql._MySQLdb = mysql
         except ImportError as err:
             raise error.Abort(_('python mysql support not available: %s') % err)
@@ -465,12 +476,13 @@
         passwd = self.ui.config('bugzilla', 'password')
         db = self.ui.config('bugzilla', 'db')
         timeout = int(self.ui.config('bugzilla', 'timeout'))
-        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
-                     (host, db, user, '*' * len(passwd)))
-        self.conn = bzmysql._MySQLdb.connect(host=host,
-                                                   user=user, passwd=passwd,
-                                                   db=db,
-                                                   connect_timeout=timeout)
+        self.ui.note(
+            _('connecting to %s:%s as %s, password %s\n')
+            % (host, db, user, '*' * len(passwd))
+        )
+        self.conn = bzmysql._MySQLdb.connect(
+            host=host, user=user, passwd=passwd, db=db, connect_timeout=timeout
+        )
         self.cursor = self.conn.cursor()
         self.longdesc_id = self.get_longdesc_id()
         self.user_ids = {}
@@ -495,8 +507,10 @@
 
     def filter_real_bug_ids(self, bugs):
         '''filter not-existing bugs from set.'''
-        self.run('select bug_id from bugs where bug_id in %s' %
-                 bzmysql.sql_buglist(bugs.keys()))
+        self.run(
+            'select bug_id from bugs where bug_id in %s'
+            % bzmysql.sql_buglist(bugs.keys())
+        )
         existing = [id for (id,) in self.cursor.fetchall()]
         for id in bugs.keys():
             if id not in existing:
@@ -505,12 +519,16 @@
 
     def filter_cset_known_bug_ids(self, node, bugs):
         '''filter bug ids that already refer to this changeset from set.'''
-        self.run('''select bug_id from longdescs where
-                    bug_id in %s and thetext like "%%%s%%"''' %
-                 (bzmysql.sql_buglist(bugs.keys()), short(node)))
+        self.run(
+            '''select bug_id from longdescs where
+                    bug_id in %s and thetext like "%%%s%%"'''
+            % (bzmysql.sql_buglist(bugs.keys()), short(node))
+        )
         for (id,) in self.cursor.fetchall():
-            self.ui.status(_('bug %d already knows about changeset %s\n') %
-                           (id, short(node)))
+            self.ui.status(
+                _('bug %d already knows about changeset %s\n')
+                % (id, short(node))
+            )
             del bugs[id]
 
     def notify(self, bugs, committer):
@@ -534,8 +552,9 @@
             ret = fp.close()
             if ret:
                 self.ui.warn(out)
-                raise error.Abort(_('bugzilla notify command %s') %
-                                  procutil.explainexit(ret))
+                raise error.Abort(
+                    _('bugzilla notify command %s') % procutil.explainexit(ret)
+                )
         self.ui.status(_('done\n'))
 
     def get_user_id(self, user):
@@ -547,8 +566,11 @@
                 userid = int(user)
             except ValueError:
                 self.ui.note(_('looking up user %s\n') % user)
-                self.run('''select userid from profiles
-                            where login_name like %s''', user)
+                self.run(
+                    '''select userid from profiles
+                            where login_name like %s''',
+                    user,
+                )
                 all = self.cursor.fetchall()
                 if len(all) != 1:
                     raise KeyError(user)
@@ -567,13 +589,16 @@
             try:
                 defaultuser = self.ui.config('bugzilla', 'bzuser')
                 if not defaultuser:
-                    raise error.Abort(_('cannot find bugzilla user id for %s') %
-                                     user)
+                    raise error.Abort(
+                        _('cannot find bugzilla user id for %s') % user
+                    )
                 userid = self.get_user_id(defaultuser)
                 user = defaultuser
             except KeyError:
-                raise error.Abort(_('cannot find bugzilla user id for %s or %s')
-                                 % (user, defaultuser))
+                raise error.Abort(
+                    _('cannot find bugzilla user id for %s or %s')
+                    % (user, defaultuser)
+                )
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
@@ -586,22 +611,29 @@
 
         (user, userid) = self.get_bugzilla_user(committer)
         now = time.strftime(r'%Y-%m-%d %H:%M:%S')
-        self.run('''insert into longdescs
+        self.run(
+            '''insert into longdescs
                     (bug_id, who, bug_when, thetext)
                     values (%s, %s, %s, %s)''',
-                 (bugid, userid, now, text))
-        self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
+            (bugid, userid, now, text),
+        )
+        self.run(
+            '''insert into bugs_activity (bug_id, who, bug_when, fieldid)
                     values (%s, %s, %s, %s)''',
-                 (bugid, userid, now, self.longdesc_id))
+            (bugid, userid, now, self.longdesc_id),
+        )
         self.conn.commit()
 
+
 class bzmysql_2_18(bzmysql):
     '''support for bugzilla 2.18 series.'''
 
     def __init__(self, ui):
         bzmysql.__init__(self, ui)
         self.default_notify = (
-            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s")
+            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
+        )
+
 
 class bzmysql_3_0(bzmysql_2_18):
     '''support for bugzilla 3.0 series.'''
@@ -617,8 +649,10 @@
             raise error.Abort(_('unknown database schema'))
         return ids[0][0]
 
+
 # Bugzilla via XMLRPC interface.
 
+
 class cookietransportrequest(object):
     """A Transport request method that retains cookies over its lifetime.
 
@@ -636,6 +670,7 @@
     # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
 
     cookies = []
+
     def send_cookies(self, connection):
         if self.cookies:
             for cookie in self.cookies:
@@ -673,8 +708,12 @@
             self.cookies.append(cookie)
 
         if response.status != 200:
-            raise xmlrpclib.ProtocolError(host + handler, response.status,
-                                          response.reason, response.msg.headers)
+            raise xmlrpclib.ProtocolError(
+                host + handler,
+                response.status,
+                response.reason,
+                response.msg.headers,
+            )
 
         payload = response.read()
         parser, unmarshaller = self.getparser()
@@ -683,6 +722,7 @@
 
         return unmarshaller.close()
 
+
 # The explicit calls to the underlying xmlrpclib __init__() methods are
 # necessary. The xmlrpclib.Transport classes are old-style classes, and
 # it turns out their __init__() doesn't get called when doing multiple
@@ -692,11 +732,13 @@
         if util.safehasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.Transport.__init__(self, use_datetime)
 
+
 class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
     def __init__(self, use_datetime=0):
         if util.safehasattr(xmlrpclib.Transport, "__init__"):
             xmlrpclib.SafeTransport.__init__(self, use_datetime)
 
+
 class bzxmlrpc(bzaccess):
     """Support for access to Bugzilla via the Bugzilla XMLRPC API.
 
@@ -719,8 +761,9 @@
         ver = self.bzproxy.Bugzilla.version()['version'].split('.')
         self.bzvermajor = int(ver[0])
         self.bzverminor = int(ver[1])
-        login = self.bzproxy.User.login({'login': user, 'password': passwd,
-                                         'restrict_login': True})
+        login = self.bzproxy.User.login(
+            {'login': user, 'password': passwd, 'restrict_login': True}
+        )
         self.bztoken = login.get('token', '')
 
     def transport(self, uri):
@@ -731,17 +774,20 @@
 
     def get_bug_comments(self, id):
         """Return a string with all comment text for a bug."""
-        c = self.bzproxy.Bug.comments({'ids': [id],
-                                       'include_fields': ['text'],
-                                       'token': self.bztoken})
+        c = self.bzproxy.Bug.comments(
+            {'ids': [id], 'include_fields': ['text'], 'token': self.bztoken}
+        )
         return ''.join([t['text'] for t in c['bugs']['%d' % id]['comments']])
 
     def filter_real_bug_ids(self, bugs):
-        probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
-                                      'include_fields': [],
-                                      'permissive': True,
-                                      'token': self.bztoken,
-                                      })
+        probe = self.bzproxy.Bug.get(
+            {
+                'ids': sorted(bugs.keys()),
+                'include_fields': [],
+                'permissive': True,
+                'token': self.bztoken,
+            }
+        )
         for badbug in probe['faults']:
             id = badbug['id']
             self.ui.status(_('bug %d does not exist\n') % id)
@@ -750,8 +796,10 @@
     def filter_cset_known_bug_ids(self, node, bugs):
         for id in sorted(bugs.keys()):
             if self.get_bug_comments(id).find(short(node)) != -1:
-                self.ui.status(_('bug %d already knows about changeset %s\n') %
-                               (id, short(node)))
+                self.ui.status(
+                    _('bug %d already knows about changeset %s\n')
+                    % (id, short(node))
+                )
                 del bugs[id]
 
     def updatebug(self, bugid, newstate, text, committer):
@@ -761,7 +809,7 @@
 
         if self.bzvermajor >= 4:
             args['ids'] = [bugid]
-            args['comment'] = {'body' : text}
+            args['comment'] = {'body': text}
             if 'fix' in newstate:
                 args['status'] = self.fixstatus
                 args['resolution'] = self.fixresolution
@@ -769,12 +817,17 @@
             self.bzproxy.Bug.update(args)
         else:
             if 'fix' in newstate:
-                self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
-                               "to mark bugs fixed\n"))
+                self.ui.warn(
+                    _(
+                        "Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
+                        "to mark bugs fixed\n"
+                    )
+                )
             args['id'] = bugid
             args['comment'] = text
             self.bzproxy.Bug.add_comment(args)
 
+
 class bzxmlrpcemail(bzxmlrpc):
     """Read data from Bugzilla via XMLRPC, send updates via email.
 
@@ -823,15 +876,18 @@
         than the subject line, and leave a blank line after it.
         '''
         user = self.map_committer(committer)
-        matches = self.bzproxy.User.get({'match': [user],
-                                         'token': self.bztoken})
+        matches = self.bzproxy.User.get(
+            {'match': [user], 'token': self.bztoken}
+        )
         if not matches['users']:
             user = self.ui.config('bugzilla', 'user')
-            matches = self.bzproxy.User.get({'match': [user],
-                                             'token': self.bztoken})
+            matches = self.bzproxy.User.get(
+                {'match': [user], 'token': self.bztoken}
+            )
             if not matches['users']:
-                raise error.Abort(_("default bugzilla user %s email not found")
-                                  % user)
+                raise error.Abort(
+                    _("default bugzilla user %s email not found") % user
+                )
         user = matches['users'][0]['email']
         commands.append(self.makecommandline("id", bugid))
 
@@ -856,13 +912,16 @@
             cmds.append(self.makecommandline("resolution", self.fixresolution))
         self.send_bug_modify_email(bugid, cmds, text, committer)
 
+
 class NotFound(LookupError):
     pass
 
+
 class bzrestapi(bzaccess):
     """Read and write bugzilla data using the REST API available since
     Bugzilla 5.0.
     """
+
     def __init__(self, ui):
         bzaccess.__init__(self, ui)
         bz = self.ui.config('bugzilla', 'bzurl')
@@ -902,14 +961,15 @@
     def _submit(self, burl, data, method='POST'):
         data = json.dumps(data)
         if method == 'PUT':
+
             class putrequest(util.urlreq.request):
                 def get_method(self):
                     return 'PUT'
+
             request_type = putrequest
         else:
             request_type = util.urlreq.request
-        req = request_type(burl, data,
-                           {'Content-Type': 'application/json'})
+        req = request_type(burl, data, {'Content-Type': 'application/json'})
         try:
             resp = url.opener(self.ui).open(req)
             return json.loads(resp.read())
@@ -941,8 +1001,9 @@
             result = self._fetch(burl)
             comments = result['bugs'][pycompat.bytestr(bugid)]['comments']
             if any(sn in c['text'] for c in comments):
-                self.ui.status(_('bug %d already knows about changeset %s\n') %
-                               (bugid, sn))
+                self.ui.status(
+                    _('bug %d already knows about changeset %s\n') % (bugid, sn)
+                )
                 del bugs[bugid]
 
     def updatebug(self, bugid, newstate, text, committer):
@@ -969,11 +1030,10 @@
             self.ui.debug('updated bug %s\n' % bugid)
         else:
             burl = self.apiurl(('bug', bugid, 'comment'))
-            self._submit(burl, {
-                'comment': text,
-                'is_private': False,
-                'is_markdown': False,
-            })
+            self._submit(
+                burl,
+                {'comment': text, 'is_private': False, 'is_markdown': False,},
+            )
             self.ui.debug('added comment to bug %s\n' % bugid)
 
     def notify(self, bugs, committer):
@@ -984,17 +1044,18 @@
         '''
         pass
 
+
 class bugzilla(object):
     # supported versions of bugzilla. different versions have
     # different schemas.
     _versions = {
         '2.16': bzmysql,
         '2.18': bzmysql_2_18,
-        '3.0':  bzmysql_3_0,
+        '3.0': bzmysql_3_0,
         'xmlrpc': bzxmlrpc,
         'xmlrpc+email': bzxmlrpcemail,
         'restapi': bzrestapi,
-        }
+    }
 
     def __init__(self, ui, repo):
         self.ui = ui
@@ -1004,14 +1065,17 @@
         try:
             bzclass = bugzilla._versions[bzversion]
         except KeyError:
-            raise error.Abort(_('bugzilla version %s not supported') %
-                             bzversion)
+            raise error.Abort(
+                _('bugzilla version %s not supported') % bzversion
+            )
         self.bzdriver = bzclass(self.ui)
 
         self.bug_re = re.compile(
-            self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
+            self.ui.config('bugzilla', 'regexp'), re.IGNORECASE
+        )
         self.fix_re = re.compile(
-            self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
+            self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE
+        )
         self.split_re = re.compile(br'\D+')
 
     def find_bugs(self, ctx):
@@ -1084,7 +1148,7 @@
                 c = root.find('/')
                 if c == -1:
                     break
-                root = root[c + 1:]
+                root = root[c + 1 :]
                 count -= 1
             return root
 
@@ -1093,31 +1157,39 @@
         if not tmpl:
             mapfile = self.ui.config('bugzilla', 'style')
         if not mapfile and not tmpl:
-            tmpl = _('changeset {node|short} in repo {root} refers '
-                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
+            tmpl = _(
+                'changeset {node|short} in repo {root} refers '
+                'to bug {bug}.\ndetails:\n\t{desc|tabindent}'
+            )
         spec = logcmdutil.templatespec(tmpl, mapfile)
         t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
         self.ui.pushbuffer()
-        t.show(ctx, changes=ctx.changeset(),
-               bug=pycompat.bytestr(bugid),
-               hgweb=self.ui.config('web', 'baseurl'),
-               root=self.repo.root,
-               webroot=webroot(self.repo.root))
+        t.show(
+            ctx,
+            changes=ctx.changeset(),
+            bug=pycompat.bytestr(bugid),
+            hgweb=self.ui.config('web', 'baseurl'),
+            root=self.repo.root,
+            webroot=webroot(self.repo.root),
+        )
         data = self.ui.popbuffer()
-        self.bzdriver.updatebug(bugid, newstate, data,
-                                stringutil.email(ctx.user()))
+        self.bzdriver.updatebug(
+            bugid, newstate, data, stringutil.email(ctx.user())
+        )
 
     def notify(self, bugs, committer):
         '''ensure Bugzilla users are notified of bug change.'''
         self.bzdriver.notify(bugs, committer)
 
+
 def hook(ui, repo, hooktype, node=None, **kwargs):
     '''add comment to bugzilla for each changeset that refers to a
     bugzilla bug id. only add a comment once per bug, so same change
     seen multiple times does not fill bug with duplicate data.'''
     if node is None:
-        raise error.Abort(_('hook type %s does not pass a changeset id') %
-                         hooktype)
+        raise error.Abort(
+            _('hook type %s does not pass a changeset id') % hooktype
+        )
     try:
         bz = bugzilla(ui, repo)
         ctx = repo[node]
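
Two layouts dominate the bugzilla.py hunks: a call that was already
wrapped keeps its trailing comma, with a short argument list packed onto
a single indented line before the dedented closing paren, and an
expression that overflows breaks so the binary operator (% here) starts
the continuation line. A rough sketch under those assumptions; the
configitem below is a stand-in, not the real registrar API, and the
short % expression is wrapped purely for illustration:

    def configitem(section, name, default=None):
        # stand-in for registrar.configitem, illustration only
        return (section, name, default)

    configitem(
        'bugzilla', 'timeout', default=5,
    )

    host, db, user = 'localhost', 'bugs', 'bugs'
    note = (
        'connecting to %s:%s as %s\n'
        % (host, db, user)
    )
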
--- a/hgext/censor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/censor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -44,15 +44,21 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('censor',
-    [('r', 'rev', '', _('censor file from specified revision'), _('REV')),
-     ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT'))],
+
+@command(
+    'censor',
+    [
+        ('r', 'rev', '', _('censor file from specified revision'), _('REV')),
+        ('t', 'tombstone', '', _('replacement tombstone data'), _('TEXT')),
+    ],
     _('-r REV [-t TEXT] [FILE]'),
-    helpcategory=command.CATEGORY_MAINTENANCE)
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def censor(ui, repo, path, rev='', tombstone='', **opts):
     with repo.wlock(), repo.lock():
         return _docensor(ui, repo, path, rev, tombstone, **opts)
 
+
 def _docensor(ui, repo, path, rev='', tombstone='', **opts):
     if not path:
         raise error.Abort(_('must specify file path to censor'))
@@ -88,13 +94,17 @@
             heads.append(hc)
     if heads:
         headlist = ', '.join([short(c.node()) for c in heads])
-        raise error.Abort(_('cannot censor file in heads (%s)') % headlist,
-            hint=_('clean/delete and commit first'))
+        raise error.Abort(
+            _('cannot censor file in heads (%s)') % headlist,
+            hint=_('clean/delete and commit first'),
+        )
 
     wp = wctx.parents()
     if ctx.node() in [p.node() for p in wp]:
-        raise error.Abort(_('cannot censor working directory'),
-            hint=_('clean/delete/update first'))
+        raise error.Abort(
+            _('cannot censor working directory'),
+            hint=_('clean/delete/update first'),
+        )
 
     with repo.transaction(b'censor') as tr:
         flog.censorrevision(tr, fnode, tombstone=tombstone)
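
The censor.py hunk shows the decorator pattern repeated throughout this
patch: when an @command option table no longer fits on one line, the
list explodes one tuple per line, each with a trailing comma, while
tuples that fit stay intact. A self-contained sketch with a toy command
decorator (the real one lives in the registrar module):

    def command(name, options, synopsis, helpcategory=None):
        # toy stand-in for the real @command registrar
        def deco(func):
            return func
        return deco

    @command(
        'censor',
        [
            ('r', 'rev', '', 'censor file from specified revision', 'REV'),
            ('t', 'tombstone', '', 'replacement tombstone data', 'TEXT'),
        ],
        '-r REV [-t TEXT] [FILE]',
        helpcategory='maintenance',
    )
    def censor(ui, repo, path):
        return path
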
--- a/hgext/children.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/children.py	Sun Oct 06 09:45:02 2019 -0400
@@ -35,13 +35,15 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('children',
-    [('r', 'rev', '.',
-     _('show children of the specified revision'), _('REV')),
-    ] + templateopts,
+
+@command(
+    'children',
+    [('r', 'rev', '.', _('show children of the specified revision'), _('REV')),]
+    + templateopts,
     _('hg children [-r REV] [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
-    inferrepo=True)
+    inferrepo=True,
+)
 def children(ui, repo, file_=None, **opts):
     """show the children of the given or working directory revision
 
--- a/hgext/churn.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/churn.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,6 +34,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def changedlines(ui, repo, ctx1, ctx2, fns):
     added, removed = 0, 0
     fmatch = scmutil.matchfiles(repo, fns)
@@ -45,38 +46,45 @@
             removed += 1
     return (added, removed)
 
+
 def countrate(ui, repo, amap, *pats, **opts):
     """Calculate stats"""
     opts = pycompat.byteskwargs(opts)
     if opts.get('dateformat'):
+
         def getkey(ctx):
             t, tz = ctx.date()
             date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
             return encoding.strtolocal(
-                date.strftime(encoding.strfromlocal(opts['dateformat'])))
+                date.strftime(encoding.strfromlocal(opts['dateformat']))
+            )
+
     else:
         tmpl = opts.get('oldtemplate') or opts.get('template')
         tmpl = logcmdutil.maketemplater(ui, repo, tmpl)
+
         def getkey(ctx):
             ui.pushbuffer()
             tmpl.show(ctx)
             return ui.popbuffer()
 
-    progress = ui.makeprogress(_('analyzing'), unit=_('revisions'),
-                               total=len(repo))
+    progress = ui.makeprogress(
+        _('analyzing'), unit=_('revisions'), total=len(repo)
+    )
     rate = {}
     df = False
     if opts.get('date'):
         df = dateutil.matchdate(opts['date'])
 
     m = scmutil.match(repo[None], pats, opts)
+
     def prep(ctx, fns):
         rev = ctx.rev()
-        if df and not df(ctx.date()[0]): # doesn't match date format
+        if df and not df(ctx.date()[0]):  # doesn't match date format
             return
 
         key = getkey(ctx).strip()
-        key = amap.get(key, key) # alias remap
+        key = amap.get(key, key)  # alias remap
         if opts.get('changesets'):
             rate[key] = (rate.get(key, (0,))[0] + 1, 0)
         else:
@@ -99,25 +107,54 @@
     return rate
 
 
-@command('churn',
-    [('r', 'rev', [],
-     _('count rate for the specified revision or revset'), _('REV')),
-    ('d', 'date', '',
-     _('count rate for revisions matching date spec'), _('DATE')),
-    ('t', 'oldtemplate', '',
-     _('template to group changesets (DEPRECATED)'), _('TEMPLATE')),
-    ('T', 'template', '{author|email}',
-     _('template to group changesets'), _('TEMPLATE')),
-    ('f', 'dateformat', '',
-     _('strftime-compatible format for grouping by date'), _('FORMAT')),
-    ('c', 'changesets', False, _('count rate by number of changesets')),
-    ('s', 'sort', False, _('sort by key (default: sort by count)')),
-    ('', 'diffstat', False, _('display added/removed lines separately')),
-    ('', 'aliases', '', _('file with email aliases'), _('FILE')),
-    ] + cmdutil.walkopts,
+@command(
+    'churn',
+    [
+        (
+            'r',
+            'rev',
+            [],
+            _('count rate for the specified revision or revset'),
+            _('REV'),
+        ),
+        (
+            'd',
+            'date',
+            '',
+            _('count rate for revisions matching date spec'),
+            _('DATE'),
+        ),
+        (
+            't',
+            'oldtemplate',
+            '',
+            _('template to group changesets (DEPRECATED)'),
+            _('TEMPLATE'),
+        ),
+        (
+            'T',
+            'template',
+            '{author|email}',
+            _('template to group changesets'),
+            _('TEMPLATE'),
+        ),
+        (
+            'f',
+            'dateformat',
+            '',
+            _('strftime-compatible format for grouping by date'),
+            _('FORMAT'),
+        ),
+        ('c', 'changesets', False, _('count rate by number of changesets')),
+        ('s', 'sort', False, _('sort by key (default: sort by count)')),
+        ('', 'diffstat', False, _('display added/removed lines separately')),
+        ('', 'aliases', '', _('file with email aliases'), _('FILE')),
+    ]
+    + cmdutil.walkopts,
     _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]"),
     helpcategory=command.CATEGORY_MAINTENANCE,
-    inferrepo=True)
+    inferrepo=True,
+)
 def churn(ui, repo, *pats, **opts):
     '''histogram of changes to the repository
 
@@ -154,6 +191,7 @@
     a .hgchurn file will be looked for in the working directory root.
     Aliases will be split from the rightmost "=".
     '''
+
     def pad(s, l):
         return s + " " * (l - encoding.colwidth(s))
 
@@ -191,19 +229,25 @@
 
     if opts.get(r'diffstat'):
         width -= 15
+
         def format(name, diffstat):
             added, removed = diffstat
-            return "%s %15s %s%s\n" % (pad(name, maxname),
-                                       '+%d/-%d' % (added, removed),
-                                       ui.label('+' * charnum(added),
-                                                'diffstat.inserted'),
-                                       ui.label('-' * charnum(removed),
-                                                'diffstat.deleted'))
+            return "%s %15s %s%s\n" % (
+                pad(name, maxname),
+                '+%d/-%d' % (added, removed),
+                ui.label('+' * charnum(added), 'diffstat.inserted'),
+                ui.label('-' * charnum(removed), 'diffstat.deleted'),
+            )
+
     else:
         width -= 6
+
         def format(name, count):
-            return "%s %6d %s\n" % (pad(name, maxname), sum(count),
-                                    '*' * charnum(sum(count)))
+            return "%s %6d %s\n" % (
+                pad(name, maxname),
+                sum(count),
+                '*' * charnum(sum(count)),
+            )
 
     def charnum(count):
         return int(count * width // maxcount)
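
Much of the churn.py noise is blank-line management around nested
functions: the formatter puts one empty line before an inner def and
one after its body, which is why the getkey and format definitions
above gained blank lines on both sides. Roughly:

    def countrate(dateformat=None):
        if dateformat:

            def getkey(value):
                # group by formatted date
                return value.strftime(dateformat)

        else:

            def getkey(value):
                return str(value)

        return getkey
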
--- a/hgext/clonebundles.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/clonebundles.py	Sun Oct 06 09:45:02 2019 -0400
@@ -203,6 +203,7 @@
 
 testedwith = 'ships-with-hg-core'
 
+
 def capabilities(orig, repo, proto):
     caps = orig(repo, proto)
 
@@ -214,5 +215,6 @@
 
     return caps
 
+
 def extsetup(ui):
     extensions.wrapfunction(wireprotov1server, '_capabilities', capabilities)
--- a/hgext/closehead.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/closehead.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,13 +28,16 @@
 
 commitopts = cmdutil.commitopts
 commitopts2 = cmdutil.commitopts2
-commitopts3 = [('r', 'rev', [],
-               _('revision to check'), _('REV'))]
+commitopts3 = [('r', 'rev', [], _('revision to check'), _('REV'))]
+
 
-@command('close-head|close-heads', commitopts + commitopts2 + commitopts3,
+@command(
+    'close-head|close-heads',
+    commitopts + commitopts2 + commitopts3,
     _('[OPTION]... [REV]...'),
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
-    inferrepo=True)
+    inferrepo=True,
+)
 def close_branch(ui, repo, *revs, **opts):
     """close the given head revisions
 
@@ -44,10 +47,18 @@
 
     The commit message must be specified with -l or -m.
     """
+
     def docommit(rev):
-        cctx = context.memctx(repo, parents=[rev, None], text=message,
-                              files=[], filectxfn=None, user=opts.get('user'),
-                              date=opts.get('date'), extra=extra)
+        cctx = context.memctx(
+            repo,
+            parents=[rev, None],
+            text=message,
+            files=[],
+            filectxfn=None,
+            user=opts.get('user'),
+            date=opts.get('date'),
+            extra=extra,
+        )
         tr = repo.transaction('commit')
         ret = repo.commitctx(cctx, True)
         bookmarks.update(repo, [rev, None], ret)
@@ -73,7 +84,7 @@
     message = cmdutil.logmessage(ui, opts)
     if not message:
         raise error.Abort(_("no commit message specified with -l or -m"))
-    extra = { 'close': '1' }
+    extra = {'close': '1'}
 
     with repo.wlock(), repo.lock():
         for rev in revs:
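
Besides the exploded memctx call, closehead.py picks up the small
whitespace normalizations: padding inside braces and before a colon is
dropped, so { 'close': '1' } becomes {'close': '1'}. Both spellings
build the same dict:

    before = { 'close' : '1' }  # old layout
    after = {'close': '1'}      # formatted layout
    assert before == after
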
--- a/hgext/commitextras.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/commitextras.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,36 +37,46 @@
     'transplant_source',
 }
 
+
 def extsetup(ui):
     entry = extensions.wrapcommand(commands.table, 'commit', _commit)
     options = entry[1]
-    options.append(('', 'extra', [],
-        _('set a changeset\'s extra values'), _("KEY=VALUE")))
+    options.append(
+        ('', 'extra', [], _('set a changeset\'s extra values'), _("KEY=VALUE"))
+    )
+
 
 def _commit(orig, ui, repo, *pats, **opts):
     if util.safehasattr(repo, 'unfiltered'):
         repo = repo.unfiltered()
+
     class repoextra(repo.__class__):
         def commit(self, *innerpats, **inneropts):
             extras = opts.get(r'extra')
             for raw in extras:
                 if '=' not in raw:
-                    msg = _("unable to parse '%s', should follow "
-                            "KEY=VALUE format")
+                    msg = _(
+                        "unable to parse '%s', should follow "
+                        "KEY=VALUE format"
+                    )
                     raise error.Abort(msg % raw)
                 k, v = raw.split('=', 1)
                 if not k:
                     msg = _("unable to parse '%s', keys can't be empty")
                     raise error.Abort(msg % raw)
                 if re.search(br'[^\w-]', k):
-                    msg = _("keys can only contain ascii letters, digits,"
-                            " '_' and '-'")
+                    msg = _(
+                        "keys can only contain ascii letters, digits,"
+                        " '_' and '-'"
+                    )
                     raise error.Abort(msg)
                 if k in usedinternally:
-                    msg = _("key '%s' is used internally, can't be set "
-                            "manually")
+                    msg = _(
+                        "key '%s' is used internally, can't be set " "manually"
+                    )
                     raise error.Abort(msg % k)
                 inneropts[r'extra'][k] = v
             return super(repoextra, self).commit(*innerpats, **inneropts)
+
     repo.__class__ = repoextra
     return orig(ui, repo, *pats, **opts)
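
The commitextras.py hunks illustrate that the formatter rewraps
implicitly concatenated string literals but never merges them, which is
how "can't be set " "manually" ends up as two adjacent literals on a
single line. Adjacent literals are glued together by the parser, so
behavior is unchanged:

    msg = "key '%s' is used internally, can't be set " "manually"
    assert msg % 'close' == (
        "key 'close' is used internally, can't be set manually"
    )
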
--- a/hgext/convert/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,9 +10,7 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
-from mercurial import (
-    registrar,
-)
+from mercurial import registrar
 
 from . import (
     convcmd,
@@ -30,28 +28,58 @@
 
 # Commands definition was moved elsewhere to ease demandload job.
 
-@command('convert',
-    [('', 'authors', '',
-      _('username mapping filename (DEPRECATED) (use --authormap instead)'),
-      _('FILE')),
-    ('s', 'source-type', '', _('source repository type'), _('TYPE')),
-    ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
-    ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
-    ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
-    ('', 'filemap', '', _('remap file names using contents of file'),
-     _('FILE')),
-    ('', 'full', None,
-     _('apply filemap changes by converting all files again')),
-    ('', 'splicemap', '', _('splice synthesized history into place'),
-     _('FILE')),
-    ('', 'branchmap', '', _('change branch names while converting'),
-     _('FILE')),
-    ('', 'branchsort', None, _('try to sort changesets by branches')),
-    ('', 'datesort', None, _('try to sort changesets by date')),
-    ('', 'sourcesort', None, _('preserve source changesets order')),
-    ('', 'closesort', None, _('try to reorder closed revisions'))],
-   _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
-   norepo=True)
+
+@command(
+    'convert',
+    [
+        (
+            '',
+            'authors',
+            '',
+            _(
+                'username mapping filename (DEPRECATED) (use --authormap instead)'
+            ),
+            _('FILE'),
+        ),
+        ('s', 'source-type', '', _('source repository type'), _('TYPE')),
+        ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
+        ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
+        ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
+        (
+            '',
+            'filemap',
+            '',
+            _('remap file names using contents of file'),
+            _('FILE'),
+        ),
+        (
+            '',
+            'full',
+            None,
+            _('apply filemap changes by converting all files again'),
+        ),
+        (
+            '',
+            'splicemap',
+            '',
+            _('splice synthesized history into place'),
+            _('FILE'),
+        ),
+        (
+            '',
+            'branchmap',
+            '',
+            _('change branch names while converting'),
+            _('FILE'),
+        ),
+        ('', 'branchsort', None, _('try to sort changesets by branches')),
+        ('', 'datesort', None, _('try to sort changesets by date')),
+        ('', 'sourcesort', None, _('preserve source changesets order')),
+        ('', 'closesort', None, _('try to reorder closed revisions')),
+    ],
+    _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
+    norepo=True,
+)
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     """convert a foreign SCM repository to a Mercurial one.
 
@@ -454,29 +482,37 @@
     """
     return convcmd.convert(ui, src, dest, revmapfile, **opts)
 
+
 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
 def debugsvnlog(ui, **opts):
     return subversion.debugsvnlog(ui, **opts)
 
-@command('debugcvsps',
+
+@command(
+    'debugcvsps',
     [
-    # Main options shared with cvsps-2.1
-    ('b', 'branches', [], _('only return changes on specified branches')),
-    ('p', 'prefix', '', _('prefix to remove from file names')),
-    ('r', 'revisions', [],
-     _('only return changes after or between specified tags')),
-    ('u', 'update-cache', None, _("update cvs log cache")),
-    ('x', 'new-cache', None, _("create new cvs log cache")),
-    ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
-    ('', 'root', '', _('specify cvsroot')),
-    # Options specific to builtin cvsps
-    ('', 'parents', '', _('show parent changesets')),
-    ('', 'ancestors', '', _('show current changeset in ancestor branches')),
-    # Options that are ignored for compatibility with cvsps-2.1
-    ('A', 'cvs-direct', None, _('ignored for compatibility')),
+        # Main options shared with cvsps-2.1
+        ('b', 'branches', [], _('only return changes on specified branches')),
+        ('p', 'prefix', '', _('prefix to remove from file names')),
+        (
+            'r',
+            'revisions',
+            [],
+            _('only return changes after or between specified tags'),
+        ),
+        ('u', 'update-cache', None, _("update cvs log cache")),
+        ('x', 'new-cache', None, _("create new cvs log cache")),
+        ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
+        ('', 'root', '', _('specify cvsroot')),
+        # Options specific to builtin cvsps
+        ('', 'parents', '', _('show parent changesets')),
+        ('', 'ancestors', '', _('show current changeset in ancestor branches')),
+        # Options that are ignored for compatibility with cvsps-2.1
+        ('A', 'cvs-direct', None, _('ignored for compatibility')),
     ],
     _('hg debugcvsps [OPTION]... [PATH]...'),
-    norepo=True)
+    norepo=True,
+)
 def debugcvsps(ui, *args, **opts):
     '''create changeset information from CVS
 
@@ -490,34 +526,40 @@
     dates.'''
     return cvsps.debugcvsps(ui, *args, **opts)
 
+
 def kwconverted(context, mapping, name):
     ctx = context.resource(mapping, 'ctx')
     rev = ctx.extra().get('convert_revision', '')
     if rev.startswith('svn:'):
         if name == 'svnrev':
-            return (b"%d" % subversion.revsplit(rev)[2])
+            return b"%d" % subversion.revsplit(rev)[2]
         elif name == 'svnpath':
             return subversion.revsplit(rev)[1]
         elif name == 'svnuuid':
             return subversion.revsplit(rev)[0]
     return rev
 
+
 templatekeyword = registrar.templatekeyword()
 
+
 @templatekeyword('svnrev', requires={'ctx'})
 def kwsvnrev(context, mapping):
     """String. Converted subversion revision number."""
     return kwconverted(context, mapping, 'svnrev')
 
+
 @templatekeyword('svnpath', requires={'ctx'})
 def kwsvnpath(context, mapping):
     """String. Converted subversion revision project path."""
     return kwconverted(context, mapping, 'svnpath')
 
+
 @templatekeyword('svnuuid', requires={'ctx'})
 def kwsvnuuid(context, mapping):
     """String. Converted subversion revision repository identifier."""
     return kwconverted(context, mapping, 'svnuuid')
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
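
One behavior-neutral cleanup hides in kwconverted above: redundant
parentheses around a return value are removed. The parentheses never
made a tuple, so both forms return the same bytes:

    def with_parens(n):
        return (b"%d" % n)  # redundant parentheses

    def without_parens(n):
        return b"%d" % n  # what the formatter emits

    assert with_parens(7) == without_parens(7) == b"7"
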
--- a/hgext/convert/bzr.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/bzr.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,18 +12,13 @@
 import os
 
 from mercurial.i18n import _
-from mercurial import (
-    demandimport,
-    error
-)
+from mercurial import demandimport, error
 from . import common
 
 # these do not work with demandimport, blacklist
-demandimport.IGNORES.update([
-        'bzrlib.transactions',
-        'bzrlib.urlutils',
-        'ElementPath',
-    ])
+demandimport.IGNORES.update(
+    ['bzrlib.transactions', 'bzrlib.urlutils', 'ElementPath',]
+)
 
 try:
     # bazaar imports
@@ -31,6 +26,7 @@
     import bzrlib.errors
     import bzrlib.revision
     import bzrlib.revisionspec
+
     bzrdir = bzrlib.bzrdir
     errors = bzrlib.errors
     revision = bzrlib.revision
@@ -41,6 +37,7 @@
 
 supportedkinds = ('file', 'symlink')
 
+
 class bzr_source(common.converter_source):
     """Reads Bazaar repositories by using the Bazaar Python libraries"""
 
@@ -48,8 +45,9 @@
         super(bzr_source, self).__init__(ui, repotype, path, revs=revs)
 
         if not os.path.exists(os.path.join(path, '.bzr')):
-            raise common.NoRepo(_('%s does not look like a Bazaar repository')
-                              % path)
+            raise common.NoRepo(
+                _('%s does not look like a Bazaar repository') % path
+            )
 
         try:
             # access bzrlib stuff
@@ -62,8 +60,9 @@
         try:
             self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
         except errors.NoRepositoryPresent:
-            raise common.NoRepo(_('%s does not look like a Bazaar repository')
-                              % path)
+            raise common.NoRepo(
+                _('%s does not look like a Bazaar repository') % path
+            )
         self._parentids = {}
         self._saverev = ui.configbool('convert', 'bzr.saverev')
 
@@ -78,11 +77,18 @@
             except (errors.NoWorkingTree, errors.NotLocalUrl):
                 tree = None
                 branch = dir.open_branch()
-            if (tree is not None and tree.bzrdir.root_transport.base !=
-                branch.bzrdir.root_transport.base):
-                self.ui.warn(_('warning: lightweight checkouts may cause '
-                               'conversion failures, try with a regular '
-                               'branch instead.\n'))
+            if (
+                tree is not None
+                and tree.bzrdir.root_transport.base
+                != branch.bzrdir.root_transport.base
+            ):
+                self.ui.warn(
+                    _(
+                        'warning: lightweight checkouts may cause '
+                        'conversion failures, try with a regular '
+                        'branch instead.\n'
+                    )
+                )
         except Exception:
             self.ui.note(_('bzr source type could not be determined\n'))
 
@@ -119,8 +125,9 @@
                     pass
                 revid = info.rev_id
             if revid is None:
-                raise error.Abort(_('%s is not a valid revision')
-                                  % self.revs[0])
+                raise error.Abort(
+                    _('%s is not a valid revision') % self.revs[0]
+                )
             heads = [revid]
         # Empty repositories return 'null:', which cannot be retrieved
         heads = [h for h in heads if h != 'null:']
@@ -139,8 +146,9 @@
         if kind == 'symlink':
             target = revtree.get_symlink_target(fileid)
             if target is None:
-                raise error.Abort(_('%s.%s symlink has no target')
-                                 % (name, rev))
+                raise error.Abort(
+                    _('%s.%s symlink has no target') % (name, rev)
+                )
             return target, mode
         else:
             sio = revtree.get_file(fileid)
@@ -171,13 +179,15 @@
         branch = self.recode(rev.properties.get('branch-nick', u'default'))
         if branch == 'trunk':
             branch = 'default'
-        return common.commit(parents=parents,
-                date='%d %d' % (rev.timestamp, -rev.timezone),
-                author=self.recode(rev.committer),
-                desc=self.recode(rev.message),
-                branch=branch,
-                rev=version,
-                saverev=self._saverev)
+        return common.commit(
+            parents=parents,
+            date='%d %d' % (rev.timestamp, -rev.timezone),
+            author=self.recode(rev.committer),
+            desc=self.recode(rev.message),
+            branch=branch,
+            rev=version,
+            saverev=self._saverev,
+        )
 
     def gettags(self):
         bytetags = {}
@@ -216,11 +226,21 @@
 
         # Process the entries by reverse lexicographic name order to
         # handle nested renames correctly, most specific first.
-        curchanges = sorted(current.iter_changes(origin),
-                            key=lambda c: c[1][0] or c[1][1],
-                            reverse=True)
-        for (fileid, paths, changed_content, versioned, parent, name,
-            kind, executable) in curchanges:
+        curchanges = sorted(
+            current.iter_changes(origin),
+            key=lambda c: c[1][0] or c[1][1],
+            reverse=True,
+        )
+        for (
+            fileid,
+            paths,
+            changed_content,
+            versioned,
+            parent,
+            name,
+            kind,
+            executable,
+        ) in curchanges:
 
             if paths[0] == u'' or paths[1] == u'':
                 # ignore changes to tree root
@@ -260,9 +280,11 @@
                         changes.append((frompath, revid))
                         changes.append((topath, revid))
                         # add to mode cache
-                        mode = ((entry.executable and 'x')
-                                or (entry.kind == 'symlink' and 's')
-                                or '')
+                        mode = (
+                            (entry.executable and 'x')
+                            or (entry.kind == 'symlink' and 's')
+                            or ''
+                        )
                         self._modecache[(topath, revid)] = mode
                         # register the change as move
                         renames[topath] = frompath
@@ -290,8 +312,7 @@
 
             # populate the mode cache
             kind, executable = [e[1] for e in (kind, executable)]
-            mode = ((executable and 'x') or (kind == 'symlink' and 'l')
-                    or '')
+            mode = (executable and 'x') or (kind == 'symlink' and 'l') or ''
             self._modecache[(topath, revid)] = mode
             changes.append((topath, revid))
 
--- a/hgext/convert/common.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/common.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,20 +22,19 @@
     pycompat,
     util,
 )
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 pickle = util.pickle
 propertycache = util.propertycache
 
+
 def _encodeornone(d):
     if d is None:
         return
     return d.encode('latin1')
 
+
 class _shlexpy3proxy(object):
-
     def __init__(self, l):
         self._l = l
 
@@ -53,6 +52,7 @@
     def lineno(self):
         return self._l.lineno
 
+
 def shlexer(data=None, filepath=None, wordchars=None, whitespace=None):
     if data is None:
         if pycompat.ispy3:
@@ -62,7 +62,8 @@
     else:
         if filepath is not None:
             raise error.ProgrammingError(
-                'shlexer only accepts data or filepath, not both')
+                'shlexer only accepts data or filepath, not both'
+            )
         if pycompat.ispy3:
             data = data.decode('latin1')
     l = shlex.shlex(data, infile=filepath, posix=True)
@@ -81,6 +82,7 @@
         return _shlexpy3proxy(l)
     return l
 
+
 def encodeargs(args):
     def encodearg(s):
         lines = base64.encodestring(s)
@@ -90,13 +92,16 @@
     s = pickle.dumps(args)
     return encodearg(s)
 
+
 def decodeargs(s):
     s = base64.decodestring(s)
     return pickle.loads(s)
 
+
 class MissingTool(Exception):
     pass
 
+
 def checktool(exe, name=None, abort=True):
     name = name or exe
     if not procutil.findexe(exe):
@@ -106,27 +111,43 @@
             exc = MissingTool
         raise exc(_('cannot find required "%s" tool') % name)
 
+
 class NoRepo(Exception):
     pass
 
+
 SKIPREV = 'SKIP'
 
+
 class commit(object):
-    def __init__(self, author, date, desc, parents, branch=None, rev=None,
-                 extra=None, sortkey=None, saverev=True, phase=phases.draft,
-                 optparents=None, ctx=None):
+    def __init__(
+        self,
+        author,
+        date,
+        desc,
+        parents,
+        branch=None,
+        rev=None,
+        extra=None,
+        sortkey=None,
+        saverev=True,
+        phase=phases.draft,
+        optparents=None,
+        ctx=None,
+    ):
         self.author = author or 'unknown'
         self.date = date or '0 0'
         self.desc = desc
-        self.parents = parents # will be converted and used as parents
-        self.optparents = optparents or [] # will be used if already converted
+        self.parents = parents  # will be converted and used as parents
+        self.optparents = optparents or []  # will be used if already converted
         self.branch = branch
         self.rev = rev
         self.extra = extra or {}
         self.sortkey = sortkey
         self.saverev = saverev
         self.phase = phase
-        self.ctx = ctx # for hg to hg conversions
+        self.ctx = ctx  # for hg to hg conversions
+
 
 class converter_source(object):
     """Conversion source interface"""
@@ -146,8 +167,10 @@
             such format for their revision numbering
         """
         if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
-            raise error.Abort(_('%s entry %s is not a valid revision'
-                               ' identifier') % (mapname, revstr))
+            raise error.Abort(
+                _('%s entry %s is not a valid revision' ' identifier')
+                % (mapname, revstr)
+            )
 
     def before(self):
         pass
@@ -223,8 +246,9 @@
             try:
                 return s.decode("latin-1").encode("utf-8")
             except UnicodeError:
-                return s.decode(pycompat.sysstr(encoding),
-                                "replace").encode("utf-8")
+                return s.decode(pycompat.sysstr(encoding), "replace").encode(
+                    "utf-8"
+                )
 
     def getchangedfiles(self, rev, i):
         """Return the files changed by rev compared to parent[i].
@@ -275,6 +299,7 @@
         """
         return True
 
+
 class converter_sink(object):
     """Conversion sink (target) interface"""
 
@@ -301,8 +326,9 @@
         mapping equivalent authors identifiers for each system."""
         return None
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full,
-                  cleanp2):
+    def putcommit(
+        self, files, copies, parents, commit, source, revmap, full, cleanp2
+    ):
         """Create a revision with all changed files listed in 'files'
         and having listed parents. 'commit' is a commit object
         containing at a minimum the author, date, and message for this
@@ -369,6 +395,7 @@
         special cases."""
         raise NotImplementedError
 
+
 class commandline(object):
     def __init__(self, ui, command):
         self.ui = ui
@@ -403,11 +430,15 @@
 
     def _run(self, cmd, *args, **kwargs):
         def popen(cmdline):
-            p = subprocess.Popen(procutil.tonativestr(cmdline),
-                                 shell=True, bufsize=-1,
-                                 close_fds=procutil.closefds,
-                                 stdout=subprocess.PIPE)
+            p = subprocess.Popen(
+                procutil.tonativestr(cmdline),
+                shell=True,
+                bufsize=-1,
+                close_fds=procutil.closefds,
+                stdout=subprocess.PIPE,
+            )
             return p
+
         return self._dorun(popen, cmd, *args, **kwargs)
 
     def _run2(self, cmd, *args, **kwargs):
@@ -416,7 +447,7 @@
     def _run3(self, cmd, *args, **kwargs):
         return self._dorun(procutil.popen3, cmd, *args, **kwargs)
 
-    def _dorun(self, openfunc, cmd,  *args, **kwargs):
+    def _dorun(self, openfunc, cmd, *args, **kwargs):
         cmdline = self._cmdline(cmd, *args, **kwargs)
         self.ui.debug('running: %s\n' % (cmdline,))
         self.prerun()
@@ -495,6 +526,7 @@
         for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
             self.run0(cmd, *(list(args) + l), **kwargs)
 
+
 class mapfile(dict):
     def __init__(self, ui, path):
         super(mapfile, self).__init__()
@@ -523,7 +555,8 @@
             except ValueError:
                 raise error.Abort(
                     _('syntax error in %s(%d): key/value pair expected')
-                    % (self.path, i + 1))
+                    % (self.path, i + 1)
+                )
             if key not in self:
                 self.order.append(key)
             super(mapfile, self).__setitem__(key, value)
@@ -535,8 +568,9 @@
                 self.fp = open(self.path, 'ab')
             except IOError as err:
                 raise error.Abort(
-                    _('could not open map file %r: %s') %
-                    (self.path, encoding.strtolocal(err.strerror)))
+                    _('could not open map file %r: %s')
+                    % (self.path, encoding.strtolocal(err.strerror))
+                )
         self.fp.write(util.tonativeeol('%s %s\n' % (key, value)))
         self.fp.flush()
         super(mapfile, self).__setitem__(key, value)
@@ -546,9 +580,11 @@
             self.fp.close()
             self.fp = None
 
+
 def makedatetimestamp(t):
     """Like dateutil.makedate() but for time t instead of current time"""
-    delta = (datetime.datetime.utcfromtimestamp(t) -
-             datetime.datetime.fromtimestamp(t))
+    delta = datetime.datetime.utcfromtimestamp(
+        t
+    ) - datetime.datetime.fromtimestamp(t)
     tz = delta.days * 86400 + delta.seconds
     return t, tz
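
convert/common.py rounds out the signature-level rules: a def whose
parameter list overflows explodes one parameter per line with a trailing
comma (see commit.__init__ above), and inline comments are normalized to
two spaces before the # and one after. A sketch with abbreviated
parameters:

    def commit(
        author,
        date,
        desc,
        parents,
        branch=None,
        saverev=True,
    ):
        return author or 'unknown'  # two spaces before this comment
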
--- a/hgext/convert/convcmd.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/convcmd.py	Sun Oct 06 09:45:02 2019 -0400
@@ -54,12 +54,15 @@
 
 orig_encoding = 'ascii'
 
+
 def recode(s):
     if isinstance(s, pycompat.unicode):
         return s.encode(pycompat.sysstr(orig_encoding), 'replace')
     else:
         return s.decode('utf-8').encode(
-            pycompat.sysstr(orig_encoding), 'replace')
+            pycompat.sysstr(orig_encoding), 'replace'
+        )
+
 
 def mapbranch(branch, branchmap):
     '''
@@ -90,10 +93,11 @@
     branch = branchmap.get(branch or 'default', branch)
     # At some point we used "None" literal to denote the default branch,
     # attempt to use that for backward compatibility.
-    if (not branch):
+    if not branch:
         branch = branchmap.get('None', branch)
     return branch
 
+
 source_converters = [
     ('cvs', convert_cvs, 'branchsort'),
     ('git', convert_git, 'branchsort'),
@@ -104,12 +108,13 @@
     ('gnuarch', gnuarch_source, 'branchsort'),
     ('bzr', bzr_source, 'branchsort'),
     ('p4', p4_source, 'branchsort'),
-    ]
+]
 
 sink_converters = [
     ('hg', mercurial_sink),
     ('svn', svn_sink),
-    ]
+]
+
 
 def convertsource(ui, path, type, revs):
     exceptions = []
@@ -126,6 +131,7 @@
             ui.write("%s\n" % pycompat.bytestr(inst.args[0]))
     raise error.Abort(_('%s: missing or unsupported repository') % path)
 
+
 def convertsink(ui, path, type):
     if type and type not in [s[0] for s in sink_converters]:
         raise error.Abort(_('%s: invalid destination repository type') % type)
@@ -139,12 +145,14 @@
             raise error.Abort('%s\n' % inst)
     raise error.Abort(_('%s: unknown repository type') % path)
 
+
 class progresssource(object):
     def __init__(self, ui, source, filecount):
         self.ui = ui
         self.source = source
-        self.progress = ui.makeprogress(_('getting files'), unit=_('files'),
-                                        total=filecount)
+        self.progress = ui.makeprogress(
+            _('getting files'), unit=_('files'), total=filecount
+        )
 
     def getfile(self, file, rev):
         self.progress.increment(item=file)
@@ -159,6 +167,7 @@
     def close(self):
         self.progress.complete()
 
+
 class converter(object):
     def __init__(self, ui, source, dest, revmapfile, opts):
 
@@ -213,8 +222,13 @@
                 line = list(lex)
                 # check number of parents
                 if not (2 <= len(line) <= 3):
-                    raise error.Abort(_('syntax error in %s(%d): child parent1'
-                                       '[,parent2] expected') % (path, i + 1))
+                    raise error.Abort(
+                        _(
+                            'syntax error in %s(%d): child parent1'
+                            '[,parent2] expected'
+                        )
+                        % (path, i + 1)
+                    )
                 for part in line:
                     self.source.checkrevformat(part)
                 child, p1, p2 = line[0], line[1:2], line[2:]
@@ -222,13 +236,13 @@
                     m[child] = p1
                 else:
                     m[child] = p1 + p2
-         # if file does not exist or error reading, exit
+        # if file does not exist or error reading, exit
         except IOError:
-            raise error.Abort(_('splicemap file not found or error reading %s:')
-                               % path)
+            raise error.Abort(
+                _('splicemap file not found or error reading %s:') % path
+            )
         return m
 
-
     def walktree(self, heads):
         '''Return a mapping that identifies the uncommitted parents of every
         uncommitted changeset.'''
@@ -236,8 +250,9 @@
         known = set()
         parents = {}
         numcommits = self.source.numcommits()
-        progress = self.ui.makeprogress(_('scanning'), unit=_('revisions'),
-                                        total=numcommits)
+        progress = self.ui.makeprogress(
+            _('scanning'), unit=_('revisions'), total=numcommits
+        )
         while visit:
             n = visit.pop(0)
             if n in known:
@@ -266,8 +281,13 @@
             if c not in parents:
                 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
                     # Could be in source but not converted during this run
-                    self.ui.warn(_('splice map revision %s is not being '
-                                   'converted, ignoring\n') % c)
+                    self.ui.warn(
+                        _(
+                            'splice map revision %s is not being '
+                            'converted, ignoring\n'
+                        )
+                        % c
+                    )
                 continue
             pc = []
             for p in splicemap[c]:
@@ -325,6 +345,7 @@
             compression.
             """
             prev = [None]
+
             def picknext(nodes):
                 next = nodes[0]
                 for n in nodes:
@@ -333,26 +354,34 @@
                         break
                 prev[0] = next
                 return next
+
             return picknext
 
         def makesourcesorter():
             """Source specific sort."""
             keyfn = lambda n: self.commitcache[n].sortkey
+
             def picknext(nodes):
                 return sorted(nodes, key=keyfn)[0]
+
             return picknext
 
         def makeclosesorter():
             """Close order sort."""
-            keyfn = lambda n: ('close' not in self.commitcache[n].extra,
-                               self.commitcache[n].sortkey)
+            keyfn = lambda n: (
+                'close' not in self.commitcache[n].extra,
+                self.commitcache[n].sortkey,
+            )
+
             def picknext(nodes):
                 return sorted(nodes, key=keyfn)[0]
+
             return picknext
 
         def makedatesorter():
             """Sort revisions by date."""
             dates = {}
+
             def getdate(n):
                 if n not in dates:
                     dates[n] = dateutil.parsedate(self.commitcache[n].date)
@@ -390,8 +419,10 @@
                 try:
                     pendings[c].remove(n)
                 except ValueError:
-                    raise error.Abort(_('cycle detected between %s and %s')
-                                       % (recode(c), recode(n)))
+                    raise error.Abort(
+                        _('cycle detected between %s and %s')
+                        % (recode(c), recode(n))
+                    )
                 if not pendings[c]:
                     # Parents are converted, node is eligible
                     actives.insert(0, c)
@@ -408,8 +439,9 @@
             self.ui.status(_('writing author map file %s\n') % authorfile)
             ofile = open(authorfile, 'wb+')
             for author in self.authors:
-                ofile.write(util.tonativeeol("%s=%s\n"
-                                             % (author, self.authors[author])))
+                ofile.write(
+                    util.tonativeeol("%s=%s\n" % (author, self.authors[author]))
+                )
             ofile.close()
 
     def readauthormap(self, authorfile):
@@ -464,19 +496,22 @@
             for prev in commit.parents:
                 if prev not in self.commitcache:
                     self.cachecommit(prev)
-                pbranches.append((self.map[prev],
-                                  self.commitcache[prev].branch))
+                pbranches.append(
+                    (self.map[prev], self.commitcache[prev].branch)
+                )
         self.dest.setbranch(commit.branch, pbranches)
         try:
             parents = self.splicemap[rev]
-            self.ui.status(_('spliced in %s as parents of %s\n') %
-                           (_(' and ').join(parents), rev))
+            self.ui.status(
+                _('spliced in %s as parents of %s\n')
+                % (_(' and ').join(parents), rev)
+            )
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
-            parents.extend(self.map[x]
-                           for x in commit.optparents
-                           if x in self.map)
+            parents.extend(
+                self.map[x] for x in commit.optparents if x in self.map
+            )
         if len(pbranches) != 2:
             cleanp2 = set()
         if len(parents) < 3:
@@ -486,10 +521,12 @@
             # changed files N-1 times. This tweak to the number of
             # files makes it so the progress bar doesn't overflow
             # itself.
-            source = progresssource(self.ui, self.source,
-                                    len(files) * (len(parents) - 1))
-        newnode = self.dest.putcommit(files, copies, parents, commit,
-                                      source, self.map, full, cleanp2)
+            source = progresssource(
+                self.ui, self.source, len(files) * (len(parents) - 1)
+            )
+        newnode = self.dest.putcommit(
+            files, copies, parents, commit, source, self.map, full, cleanp2
+        )
         source.close()
         self.source.converted(rev, newnode)
         self.map[rev] = newnode
@@ -509,8 +546,9 @@
             c = None
 
             self.ui.status(_("converting...\n"))
-            progress = self.ui.makeprogress(_('converting'),
-                                            unit=_('revisions'), total=len(t))
+            progress = self.ui.makeprogress(
+                _('converting'), unit=_('revisions'), total=len(t)
+            )
             for i, c in enumerate(t):
                 num -= 1
                 desc = self.commitcache[c].desc
@@ -538,8 +576,11 @@
                     if nrev and tagsparent:
                         # write another hash correspondence to override the
                         # previous one so we don't end up with extra tag heads
-                        tagsparents = [e for e in self.map.iteritems()
-                                       if e[1] == tagsparent]
+                        tagsparents = [
+                            e
+                            for e in self.map.iteritems()
+                            if e[1] == tagsparent
+                        ]
                         if tagsparents:
                             self.map[tagsparents[0][0]] = nrev
 
@@ -564,6 +605,7 @@
             self.source.after()
         self.map.close()
 
+
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     opts = pycompat.byteskwargs(opts)
     global orig_encoding
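
Many hunks in this series, like the one just above, add nothing but a
bare '+' line: black enforces PEP 8's vertical-whitespace rules, two
blank lines around every top-level def or class and one inside nested
scopes, and it also drops a blank line sitting directly under a 'def'
signature (see the getfile hunk in cvs.py below). A minimal sketch of
the target shape; the names here are illustrative, not from this file:

    import shutil


    def convert_stub(src):
        """Top-level defs get two blank lines above and below."""

        def cleanup(path):
            # Nested defs get one blank line; black would delete a blank
            # line placed immediately after the enclosing 'def' line.
            shutil.rmtree(path, True)

        return cleanup(src)
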
@@ -582,8 +624,9 @@
     destc = scmutil.wrapconvertsink(destc)
 
     try:
-        srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
-                                          opts.get('rev'))
+        srcc, defaultsort = convertsource(
+            ui, src, opts.get('source_type'), opts.get('rev')
+        )
     except Exception:
         for path in destc.created:
             shutil.rmtree(path, True)
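
The rewrap just above shows black's core layout rule for calls: a call
that fits in the line limit stays on one line; otherwise black tries a
single indented continuation line; only when that still overflows does
it explode the call to one argument per line with a trailing comma. A
sketch of the three widths, with stand-in names rather than the real
convert API:

    def convertsource_stub(ui, src, source_type, rev):
        """Stand-in with the arity of the call rewrapped above."""
        return (ui, src, source_type, rev)


    ui, src = object(), 'repo'

    # Fits on one line: left alone.
    a = convertsource_stub(ui, src, None, None)

    # Too long for one line, short enough for one continuation line:
    b = convertsource_stub(
        ui, src, 'source_type_option_value', 'revision_option_value'
    )

    # Too long even for that: one argument per line, trailing comma.
    c = convertsource_stub(
        ui,
        src,
        'a_source_type_string_long_enough_to_overflow_the_line_limit',
        'a_revision_identifier_long_enough_to_overflow_the_line_limit',
    )
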
@@ -599,8 +642,9 @@
         sortmode = defaultsort
 
     if sortmode == 'sourcesort' and not srcc.hasnativeorder():
-        raise error.Abort(_('--sourcesort is not supported by this data source')
-                         )
+        raise error.Abort(
+            _('--sourcesort is not supported by this data source')
+        )
     if sortmode == 'closesort' and not srcc.hasnativeclose():
         raise error.Abort(_('--closesort is not supported by this data source'))
 
--- a/hgext/convert/cvs.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/cvs.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,6 +34,7 @@
 makedatetimestamp = common.makedatetimestamp
 NoRepo = common.NoRepo
 
+
 class convert_cvs(converter_source):
     def __init__(self, ui, repotype, path, revs=None):
         super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
@@ -63,15 +64,17 @@
         maxrev = 0
         if self.revs:
             if len(self.revs) > 1:
-                raise error.Abort(_('cvs source does not support specifying '
-                                   'multiple revs'))
+                raise error.Abort(
+                    _('cvs source does not support specifying ' 'multiple revs')
+                )
             # TODO: handle tags
             try:
                 # patchset number?
                 maxrev = int(self.revs[0])
             except ValueError:
-                raise error.Abort(_('revision %s is not a patchset number')
-                                 % self.revs[0])
+                raise error.Abort(
+                    _('revision %s is not a patchset number') % self.revs[0]
+                )
 
         d = encoding.getcwd()
         try:
@@ -81,15 +84,18 @@
             if not self.ui.configbool('convert', 'cvsps.cache'):
                 cache = None
             db = cvsps.createlog(self.ui, cache=cache)
-            db = cvsps.createchangeset(self.ui, db,
+            db = cvsps.createchangeset(
+                self.ui,
+                db,
                 fuzz=int(self.ui.config('convert', 'cvsps.fuzz')),
                 mergeto=self.ui.config('convert', 'cvsps.mergeto'),
-                mergefrom=self.ui.config('convert', 'cvsps.mergefrom'))
+                mergefrom=self.ui.config('convert', 'cvsps.mergefrom'),
+            )
 
             for cs in db:
                 if maxrev and cs.id > maxrev:
                     break
-                id = (b"%d" % cs.id)
+                id = b"%d" % cs.id
                 cs.author = self.recode(cs.author)
                 self.lastbranch[cs.branch] = id
                 cs.comment = self.recode(cs.comment)
@@ -100,14 +106,19 @@
 
                 files = {}
                 for f in cs.entries:
-                    files[f.file] = "%s%s" % ('.'.join([(b"%d" % x)
-                                                        for x in f.revision]),
-                                              ['', '(DEAD)'][f.dead])
+                    files[f.file] = "%s%s" % (
+                        '.'.join([(b"%d" % x) for x in f.revision]),
+                        ['', '(DEAD)'][f.dead],
+                    )
 
                 # add current commit to set
-                c = commit(author=cs.author, date=date,
-                           parents=[(b"%d" % p.id) for p in cs.parents],
-                           desc=cs.comment, branch=cs.branch or '')
+                c = commit(
+                    author=cs.author,
+                    date=date,
+                    parents=[(b"%d" % p.id) for p in cs.parents],
+                    desc=cs.comment,
+                    branch=cs.branch or '',
+                )
                 self.changeset[id] = c
                 self.files[id] = files
 
@@ -125,8 +136,9 @@
 
         if root.startswith(":pserver:"):
             root = root[9:]
-            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
-                         root)
+            m = re.match(
+                r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', root
+            )
             if m:
                 conntype = "pserver"
                 user, passw, serv, port, root = m.groups()
@@ -166,8 +178,18 @@
 
                 sck = socket.socket()
                 sck.connect((serv, port))
-                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
-                                    "END AUTH REQUEST", ""]))
+                sck.send(
+                    "\n".join(
+                        [
+                            "BEGIN AUTH REQUEST",
+                            root,
+                            user,
+                            passw,
+                            "END AUTH REQUEST",
+                            "",
+                        ]
+                    )
+                )
                 if sck.recv(128) != "I LOVE YOU\n":
                     raise error.Abort(_("CVS pserver authentication failed"))
 
@@ -205,16 +227,22 @@
         self.realroot = root
 
         self.writep.write("Root %s\n" % root)
-        self.writep.write("Valid-responses ok error Valid-requests Mode"
-                          " M Mbinary E Checked-in Created Updated"
-                          " Merged Removed\n")
+        self.writep.write(
+            "Valid-responses ok error Valid-requests Mode"
+            " M Mbinary E Checked-in Created Updated"
+            " Merged Removed\n"
+        )
         self.writep.write("valid-requests\n")
         self.writep.flush()
         r = self.readp.readline()
         if not r.startswith("Valid-requests"):
-            raise error.Abort(_('unexpected response from CVS server '
-                               '(expected "Valid-requests", but got %r)')
-                             % r)
+            raise error.Abort(
+                _(
+                    'unexpected response from CVS server '
+                    '(expected "Valid-requests", but got %r)'
+                )
+                % r
+            )
         if "UseUnchanged" in r:
             self.writep.write("UseUnchanged\n")
             self.writep.flush()
@@ -225,7 +253,6 @@
         return self.heads
 
     def getfile(self, name, rev):
-
         def chunkedread(fp, count):
             # file-objects returned by socket.makefile() do not handle
             # large read() requests very well.
@@ -234,8 +261,9 @@
             while count > 0:
                 data = fp.read(min(count, chunksize))
                 if not data:
-                    raise error.Abort(_("%d bytes missing from remote file")
-                                     % count)
+                    raise error.Abort(
+                        _("%d bytes missing from remote file") % count
+                    )
                 count -= len(data)
                 output.write(data)
             return output.getvalue()
@@ -256,8 +284,8 @@
         while True:
             line = self.readp.readline()
             if line.startswith("Created ") or line.startswith("Updated "):
-                self.readp.readline() # path
-                self.readp.readline() # entries
+                self.readp.readline()  # path
+                self.readp.readline()  # entries
                 mode = self.readp.readline()[:-1]
                 count = int(self.readp.readline()[:-1])
                 data = chunkedread(self.readp, count)
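
One artifact worth noting in the cvs.py hunks: black joins the physical
lines of a hand-wrapped message but never merges adjacent string
literals, which is how one-line pairs like the 'multiple revs' abort
above come about. The concatenation still happens at parse time:

    # After blackening, the two literals sit on one line but remain
    # separate tokens; Python concatenates them when parsing the file.
    msg = 'cvs source does not support specifying ' 'multiple revs'
    assert msg == 'cvs source does not support specifying multiple revs'
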
--- a/hgext/convert/cvsps.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/cvsps.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,6 +26,7 @@
 
 pickle = util.pickle
 
+
 class logentry(object):
     '''Class logentry has the following attributes:
         .author    - author name as CVS knows it
@@ -46,17 +47,22 @@
                       rlog output) or None
         .branchpoints - the branches that start at the current entry or empty
     '''
+
     def __init__(self, **entries):
         self.synthetic = False
         self.__dict__.update(entries)
 
     def __repr__(self):
-        items = (r"%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
-        return r"%s(%s)"%(type(self).__name__, r", ".join(items))
+        items = (
+            r"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
+        )
+        return r"%s(%s)" % (type(self).__name__, r", ".join(items))
+
 
 class logerror(Exception):
     pass
 
+
 def getrepopath(cvspath):
     """Return the repository path from a CVS path.
 
@@ -93,45 +99,52 @@
     if atposition != -1:
         start = atposition
 
-    repopath = parts[-1][parts[-1].find('/', start):]
+    repopath = parts[-1][parts[-1].find('/', start) :]
     return repopath
 
+
 def createlog(ui, directory=None, root="", rlog=True, cache=None):
     '''Collect the CVS rlog'''
 
     # Because we store many duplicate commit log messages, reusing strings
     # saves a lot of memory and pickle storage space.
     _scache = {}
+
     def scache(s):
         "return a shared version of a string"
         return _scache.setdefault(s, s)
 
     ui.status(_('collecting CVS rlog\n'))
 
-    log = []      # list of logentry objects containing the CVS state
+    log = []  # list of logentry objects containing the CVS state
 
     # patterns to match in CVS (r)log output, by state of use
     re_00 = re.compile(b'RCS file: (.+)$')
     re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
     re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
-    re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
-                       b"(can't create temporary directory.+)$")
+    re_03 = re.compile(
+        b"(Cannot access.+CVSROOT)|" b"(can't create temporary directory.+)$"
+    )
     re_10 = re.compile(b'Working file: (.+)$')
     re_20 = re.compile(b'symbolic names:')
     re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
     re_31 = re.compile(b'----------------------------$')
-    re_32 = re.compile(b'======================================='
-                       b'======================================$')
+    re_32 = re.compile(
+        b'======================================='
+        b'======================================$'
+    )
     re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
-    re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
-                       br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
-                       br'(\s+commitid:\s+([^;]+);)?'
-                       br'(.*mergepoint:\s+([^;]+);)?')
+    re_60 = re.compile(
+        br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
+        br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
+        br'(\s+commitid:\s+([^;]+);)?'
+        br'(.*mergepoint:\s+([^;]+);)?'
+    )
     re_70 = re.compile(b'branches: (.+);$')
 
     file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
 
-    prefix = ''   # leading path to strip of what we get from CVS
+    prefix = ''  # leading path to strip of what we get from CVS
 
     if directory is None:
         # Current working directory
@@ -151,7 +164,7 @@
 
         # Use the Root file in the sandbox, if it exists
         try:
-            root = open(os.path.join('CVS','Root'), 'rb').read().strip()
+            root = open(os.path.join('CVS', 'Root'), 'rb').read().strip()
         except IOError:
             pass
 
@@ -178,17 +191,20 @@
         # are mapped to different cache file names.
         cachefile = root.split(":") + [directory, "cache"]
         cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
-        cachefile = os.path.join(cachedir,
-                                 '.'.join([s for s in cachefile if s]))
+        cachefile = os.path.join(
+            cachedir, '.'.join([s for s in cachefile if s])
+        )
 
     if cache == 'update':
         try:
             ui.note(_('reading cvs log cache %s\n') % cachefile)
             oldlog = pickle.load(open(cachefile, 'rb'))
             for e in oldlog:
-                if not (util.safehasattr(e, 'branchpoints') and
-                        util.safehasattr(e, 'commitid') and
-                        util.safehasattr(e, 'mergepoint')):
+                if not (
+                    util.safehasattr(e, 'branchpoints')
+                    and util.safehasattr(e, 'commitid')
+                    and util.safehasattr(e, 'mergepoint')
+                ):
                     ui.status(_('ignoring old cache\n'))
                     oldlog = []
                     break
@@ -198,7 +214,7 @@
             ui.note(_('error reading cache: %r\n') % e)
 
         if oldlog:
-            date = oldlog[-1].date    # last commit date as a (time,tz) tuple
+            date = oldlog[-1].date  # last commit date as a (time,tz) tuple
             date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
 
     # build the CVS commandline
@@ -220,11 +236,11 @@
     cmd.append(directory)
 
     # state machine begins here
-    tags = {}     # dictionary of revisions on current file with their tags
-    branchmap = {} # mapping between branch names and revision numbers
+    tags = {}  # dictionary of revisions on current file with their tags
+    branchmap = {}  # mapping between branch names and revision numbers
     rcsmap = {}
     state = 0
-    store = False # set when a new record can be appended
+    store = False  # set when a new record can be appended
 
     cmd = [procutil.shellquote(arg) for arg in cmd]
     ui.note(_("running %s\n") % (' '.join(cmd)))
@@ -239,7 +255,7 @@
         peek = util.fromnativeeol(pfp.readline())
         if line.endswith('\n'):
             line = line[:-1]
-        #ui.debug('state=%d line=%r\n' % (state, line))
+        # ui.debug('state=%d line=%r\n' % (state, line))
 
         if state == 0:
             # initial state, consume input until we see 'RCS file'
@@ -250,7 +266,7 @@
                 if rlog:
                     filename = util.normpath(rcs[:-2])
                     if filename.startswith(prefix):
-                        filename = filename[len(prefix):]
+                        filename = filename[len(prefix) :]
                     if filename.startswith('/'):
                         filename = filename[1:]
                     if filename.startswith('Attic/'):
@@ -310,8 +326,9 @@
             if re_31.match(line):
                 state = 5
             else:
-                assert not re_32.match(line), _('must have at least '
-                                                'some revisions')
+                assert not re_32.match(line), _(
+                    'must have at least ' 'some revisions'
+                )
 
         elif state == 5:
             # expecting revision number and possibly (ignored) lock indication
@@ -319,15 +336,16 @@
             # as this state is re-entered for subsequent revisions of a file.
             match = re_50.match(line)
             assert match, _('expected revision number')
-            e = logentry(rcs=scache(rcs),
-                         file=scache(filename),
-                         revision=tuple([int(x) for x in
-                                         match.group(1).split('.')]),
-                         branches=[],
-                         parent=None,
-                         commitid=None,
-                         mergepoint=None,
-                         branchpoints=set())
+            e = logentry(
+                rcs=scache(rcs),
+                file=scache(filename),
+                revision=tuple([int(x) for x in match.group(1).split('.')]),
+                branches=[],
+                parent=None,
+                commitid=None,
+                mergepoint=None,
+                branchpoints=set(),
+            )
 
             state = 6
 
@@ -343,9 +361,10 @@
             if len(d.split()) != 3:
                 # cvs log dates always in GMT
                 d = d + ' UTC'
-            e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
-                                        '%Y/%m/%d %H:%M:%S',
-                                        '%Y-%m-%d %H:%M:%S'])
+            e.date = dateutil.parsedate(
+                d,
+                ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'],
+            )
             e.author = scache(match.group(2))
             e.dead = match.group(3).lower() == 'dead'
 
@@ -359,18 +378,19 @@
             else:
                 e.lines = None
 
-            if match.group(7): # cvs 1.12 commitid
+            if match.group(7):  # cvs 1.12 commitid
                 e.commitid = match.group(8)
 
-            if match.group(9): # cvsnt mergepoint
+            if match.group(9):  # cvsnt mergepoint
                 myrev = match.group(10).split('.')
-                if len(myrev) == 2: # head
+                if len(myrev) == 2:  # head
                     e.mergepoint = 'HEAD'
                 else:
                     myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                     branches = [b for b in branchmap if branchmap[b] == myrev]
-                    assert len(branches) == 1, ('unknown branch: %s'
-                                                % e.mergepoint)
+                    assert len(branches) == 1, (
+                        'unknown branch: %s' % e.mergepoint
+                    )
                     e.mergepoint = branches[0]
 
             e.comment = []
@@ -381,8 +401,10 @@
             # or store the commit log message otherwise
             m = re_70.match(line)
             if m:
-                e.branches = [tuple([int(y) for y in x.strip().split('.')])
-                                for x in m.group(1).split(';')]
+                e.branches = [
+                    tuple([int(y) for y in x.strip().split('.')])
+                    for x in m.group(1).split(';')
+                ]
                 state = 8
             elif re_31.match(line) and re_50.match(peek):
                 state = 5
@@ -417,13 +439,16 @@
         # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
         # these revisions now, but mark them synthetic so
         # createchangeset() can take care of them.
-        if (store and
-              e.dead and
-              e.revision[-1] == 1 and      # 1.1 or 1.1.x.1
-              len(e.comment) == 1 and
-              file_added_re.match(e.comment[0])):
-            ui.debug('found synthetic revision in %s: %r\n'
-                     % (e.rcs, e.comment[0]))
+        if (
+            store
+            and e.dead
+            and e.revision[-1] == 1
+            and len(e.comment) == 1  # 1.1 or 1.1.x.1
+            and file_added_re.match(e.comment[0])
+        ):
+            ui.debug(
+                'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
+            )
             e.synthetic = True
 
         if store:
@@ -442,13 +467,13 @@
             branchpoints = set()
             for branch, revision in branchmap.iteritems():
                 revparts = tuple([int(i) for i in revision.split('.')])
-                if len(revparts) < 2: # bad tags
+                if len(revparts) < 2:  # bad tags
                     continue
                 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                     # normal branch
                     if revparts[:-2] == e.revision:
                         branchpoints.add(branch)
-                elif revparts == (1, 1, 1): # vendor branch
+                elif revparts == (1, 1, 1):  # vendor branch
                     if revparts in e.branches:
                         branchpoints.add(branch)
             e.branchpoints = branchpoints
@@ -458,8 +483,9 @@
             rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
 
             if len(log) % 100 == 0:
-                ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
-                          + '\n')
+                ui.status(
+                    stringutil.ellipsis('%d %s' % (len(log), e.file), 80) + '\n'
+                )
 
     log.sort(key=lambda x: (x.rcs, x.revision))
 
@@ -487,8 +513,12 @@
             log.sort(key=lambda x: x.date)
 
             if oldlog and oldlog[-1].date >= log[0].date:
-                raise logerror(_('log cache overlaps with new log entries,'
-                                 ' re-run without cache.'))
+                raise logerror(
+                    _(
+                        'log cache overlaps with new log entries,'
+                        ' re-run without cache.'
+                    )
+                )
 
             log = oldlog + log
 
@@ -502,6 +532,7 @@
 
     encodings = ui.configlist('convert', 'cvsps.logencoding')
     if encodings:
+
         def revstr(r):
             # this is needed, because logentry.revision is a tuple of "int"
             # (e.g. (1, 2) for "1.2")
@@ -511,24 +542,33 @@
             comment = entry.comment
             for e in encodings:
                 try:
-                    entry.comment = comment.decode(
-                        pycompat.sysstr(e)).encode('utf-8')
+                    entry.comment = comment.decode(pycompat.sysstr(e)).encode(
+                        'utf-8'
+                    )
                     if ui.debugflag:
-                        ui.debug("transcoding by %s: %s of %s\n" %
-                                 (e, revstr(entry.revision), entry.file))
+                        ui.debug(
+                            "transcoding by %s: %s of %s\n"
+                            % (e, revstr(entry.revision), entry.file)
+                        )
                     break
                 except UnicodeDecodeError:
-                    pass # try next encoding
-                except LookupError as inst: # unknown encoding, maybe
-                    raise error.Abort(inst,
-                                      hint=_('check convert.cvsps.logencoding'
-                                             ' configuration'))
+                    pass  # try next encoding
+                except LookupError as inst:  # unknown encoding, maybe
+                    raise error.Abort(
+                        inst,
+                        hint=_(
+                            'check convert.cvsps.logencoding' ' configuration'
+                        ),
+                    )
             else:
-                raise error.Abort(_("no encoding can transcode"
-                                    " CVS log message for %s of %s")
-                                  % (revstr(entry.revision), entry.file),
-                                  hint=_('check convert.cvsps.logencoding'
-                                         ' configuration'))
+                raise error.Abort(
+                    _(
+                        "no encoding can transcode"
+                        " CVS log message for %s of %s"
+                    )
+                    % (revstr(entry.revision), entry.file),
+                    hint=_('check convert.cvsps.logencoding' ' configuration'),
+                )
 
     hook.hook(ui, None, "cvslog", True, log=log)
 
@@ -550,14 +590,16 @@
         .mergepoint- the branch that has been merged from or None
         .branchpoints- the branches that start at the current entry or empty
     '''
+
     def __init__(self, **entries):
         self.id = None
         self.synthetic = False
         self.__dict__.update(entries)
 
     def __repr__(self):
-        items = ("%s=%r"%(k, self.__dict__[k]) for k in sorted(self.__dict__))
-        return "%s(%s)"%(type(self).__name__, ", ".join(items))
+        items = ("%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__))
+        return "%s(%s)" % (type(self).__name__, ", ".join(items))
+
 
 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
     '''Convert log into changesets.'''
@@ -574,9 +616,17 @@
                 mindate[e.commitid] = min(e.date, mindate[e.commitid])
 
     # Merge changesets
-    log.sort(key=lambda x: (mindate.get(x.commitid, (-1, 0)),
-                            x.commitid or '', x.comment,
-                            x.author, x.branch or '', x.date, x.branchpoints))
+    log.sort(
+        key=lambda x: (
+            mindate.get(x.commitid, (-1, 0)),
+            x.commitid or '',
+            x.comment,
+            x.author,
+            x.branch or '',
+            x.date,
+            x.branchpoints,
+        )
+    )
 
     changesets = []
     files = set()
@@ -599,22 +649,35 @@
         # first changeset and bar the next and MYBRANCH and MYBRANCH2
         # should both start off of the bar changeset. No provisions are
         # made to ensure that this is, in fact, what happens.
-        if not (c and e.branchpoints == c.branchpoints and
-                (# cvs commitids
-                 (e.commitid is not None and e.commitid == c.commitid) or
-                 (# no commitids, use fuzzy commit detection
-                  (e.commitid is None or c.commitid is None) and
-                   e.comment == c.comment and
-                   e.author == c.author and
-                   e.branch == c.branch and
-                   ((c.date[0] + c.date[1]) <=
-                    (e.date[0] + e.date[1]) <=
-                    (c.date[0] + c.date[1]) + fuzz) and
-                   e.file not in files))):
-            c = changeset(comment=e.comment, author=e.author,
-                          branch=e.branch, date=e.date,
-                          entries=[], mergepoint=e.mergepoint,
-                          branchpoints=e.branchpoints, commitid=e.commitid)
+        if not (
+            c
+            and e.branchpoints == c.branchpoints
+            and (  # cvs commitids
+                (e.commitid is not None and e.commitid == c.commitid)
+                or (  # no commitids, use fuzzy commit detection
+                    (e.commitid is None or c.commitid is None)
+                    and e.comment == c.comment
+                    and e.author == c.author
+                    and e.branch == c.branch
+                    and (
+                        (c.date[0] + c.date[1])
+                        <= (e.date[0] + e.date[1])
+                        <= (c.date[0] + c.date[1]) + fuzz
+                    )
+                    and e.file not in files
+                )
+            )
+        ):
+            c = changeset(
+                comment=e.comment,
+                author=e.author,
+                branch=e.branch,
+                date=e.date,
+                entries=[],
+                mergepoint=e.mergepoint,
+                branchpoints=e.branchpoints,
+                commitid=e.commitid,
+            )
             changesets.append(c)
 
             files = set()
@@ -624,7 +687,7 @@
 
         c.entries.append(e)
         files.add(e.file)
-        c.date = e.date       # changeset date is date of latest commit in it
+        c.date = e.date  # changeset date is date of latest commit in it
 
     # Mark synthetic changesets
 
@@ -665,6 +728,7 @@
     # Sort changesets by date
 
     odd = set()
+
     def cscmp(l, r):
         d = sum(l.date) - sum(r.date)
         if d:
@@ -745,8 +809,8 @@
     if mergefrom:
         mergefrom = re.compile(mergefrom)
 
-    versions = {}    # changeset index where we saw any particular file version
-    branches = {}    # changeset index where we saw a branch
+    versions = {}  # changeset index where we saw any particular file version
+    branches = {}  # changeset index where we saw a branch
     n = len(changesets)
     i = 0
     while i < n:
@@ -777,8 +841,9 @@
 
             # Ensure no changeset has a synthetic changeset as a parent.
             while p.synthetic:
-                assert len(p.parents) <= 1, (
-                       _('synthetic changeset cannot have multiple parents'))
+                assert len(p.parents) <= 1, _(
+                    'synthetic changeset cannot have multiple parents'
+                )
                 if p.parents:
                     p = p.parents[0]
                 else:
@@ -802,9 +867,13 @@
                 try:
                     candidate = changesets[branches[m]]
                 except KeyError:
-                    ui.warn(_("warning: CVS commit message references "
-                              "non-existent branch %r:\n%s\n")
-                            % (pycompat.bytestr(m), c.comment))
+                    ui.warn(
+                        _(
+                            "warning: CVS commit message references "
+                            "non-existent branch %r:\n%s\n"
+                        )
+                        % (pycompat.bytestr(m), c.comment)
+                    )
                 if m in branches and c.branch != m and not candidate.synthetic:
                     c.parents.append(candidate)
 
@@ -816,15 +885,19 @@
                     if m == 'HEAD':
                         m = None
                 else:
-                    m = None   # if no group found then merge to HEAD
+                    m = None  # if no group found then merge to HEAD
                 if m in branches and c.branch != m:
                     # insert empty changeset for merge
                     cc = changeset(
-                        author=c.author, branch=m, date=c.date,
+                        author=c.author,
+                        branch=m,
+                        date=c.date,
                         comment='convert-repo: CVS merge from branch %s'
                         % c.branch,
-                        entries=[], tags=[],
-                        parents=[changesets[branches[m]], c])
+                        entries=[],
+                        tags=[],
+                        parents=[changesets[branches[m]], c],
+                    )
                     changesets.insert(i + 1, cc)
                     branches[m] = i + 1
 
@@ -853,8 +926,10 @@
     if odd:
         for l, r in odd:
             if l.id is not None and r.id is not None:
-                ui.warn(_('changeset %d is both before and after %d\n')
-                        % (l.id, r.id))
+                ui.warn(
+                    _('changeset %d is both before and after %d\n')
+                    % (l.id, r.id)
+                )
 
     ui.status(_('%d changeset entries\n') % len(changesets))
 
@@ -886,7 +961,7 @@
         else:
             log = createlog(ui, root=opts["root"], cache=cache)
     except logerror as e:
-        ui.write("%r\n"%e)
+        ui.write("%r\n" % e)
         return
 
     changesets = createchangeset(ui, log, opts["fuzz"])
@@ -895,14 +970,16 @@
     # Print changesets (optionally filtered)
 
     off = len(revisions)
-    branches = {}    # latest version number in each branch
-    ancestors = {}   # parent branch
+    branches = {}  # latest version number in each branch
+    ancestors = {}  # parent branch
     for cs in changesets:
 
         if opts["ancestors"]:
             if cs.branch not in branches and cs.parents and cs.parents[0].id:
-                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
-                                        cs.parents[0].id)
+                ancestors[cs.branch] = (
+                    changesets[cs.parents[0].id - 1].branch,
+                    cs.parents[0].id,
+                )
             branches[cs.branch] = cs.id
 
         # limit by branches
@@ -914,19 +991,35 @@
             #       bug-for-bug compatibility with cvsps.
             ui.write('---------------------\n')
             ui.write(('PatchSet %d \n' % cs.id))
-            ui.write(('Date: %s\n' % dateutil.datestr(cs.date,
-                                                 '%Y/%m/%d %H:%M:%S %1%2')))
+            ui.write(
+                (
+                    'Date: %s\n'
+                    % dateutil.datestr(cs.date, '%Y/%m/%d %H:%M:%S %1%2')
+                )
+            )
             ui.write(('Author: %s\n' % cs.author))
             ui.write(('Branch: %s\n' % (cs.branch or 'HEAD')))
-            ui.write(('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
-                                  ','.join(cs.tags) or '(none)')))
+            ui.write(
+                (
+                    'Tag%s: %s \n'
+                    % (
+                        ['', 's'][len(cs.tags) > 1],
+                        ','.join(cs.tags) or '(none)',
+                    )
+                )
+            )
             if cs.branchpoints:
-                ui.write(('Branchpoints: %s \n') %
-                         ', '.join(sorted(cs.branchpoints)))
+                ui.write(
+                    'Branchpoints: %s \n' % ', '.join(sorted(cs.branchpoints))
+                )
             if opts["parents"] and cs.parents:
                 if len(cs.parents) > 1:
-                    ui.write(('Parents: %s\n' %
-                             (','.join([(b"%d" % p.id) for p in cs.parents]))))
+                    ui.write(
+                        (
+                            'Parents: %s\n'
+                            % (','.join([(b"%d" % p.id) for p in cs.parents]))
+                        )
+                    )
                 else:
                     ui.write(('Parent: %d\n' % cs.parents[0].id))
 
@@ -939,28 +1032,30 @@
                 if r:
                     ui.write(('Ancestors: %s\n' % (','.join(r))))
 
-            ui.write(('Log:\n'))
+            ui.write('Log:\n')
             ui.write('%s\n\n' % cs.comment)
-            ui.write(('Members: \n'))
+            ui.write('Members: \n')
             for f in cs.entries:
                 fn = f.file
                 if fn.startswith(opts["prefix"]):
-                    fn = fn[len(opts["prefix"]):]
-                ui.write('\t%s:%s->%s%s \n' % (
+                    fn = fn[len(opts["prefix"]) :]
+                ui.write(
+                    '\t%s:%s->%s%s \n'
+                    % (
                         fn,
                         '.'.join([b"%d" % x for x in f.parent]) or 'INITIAL',
                         '.'.join([(b"%d" % x) for x in f.revision]),
-                        ['', '(DEAD)'][f.dead]))
+                        ['', '(DEAD)'][f.dead],
+                    )
+                )
             ui.write('\n')
 
         # have we seen the start tag?
         if revisions and off:
-            if (revisions[0] == (b"%d" % cs.id) or
-                revisions[0] in cs.tags):
+            if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
                 off = False
 
         # see if we reached the end tag
         if len(revisions) > 1 and not off:
-            if (revisions[1] == (b"%d" % cs.id) or
-                revisions[1] in cs.tags):
+            if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
                 break
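
The 'len(prefix) :' slices in the cvsps.py hunks are black applying
PEP 8's rule that a slice colon behaves like a binary operator: simple
bounds stay tight, but once a bound is a real expression the colon gets
a space on each side. Sketched on an illustrative filename:

    prefix = 'Attic/'
    filename = 'Attic/foo.c,v'

    # Simple bound: no spaces around the colon.
    head = filename[:5]
    assert head == 'Attic'

    # Computed bound: spaced symmetrically, as in the hunks above.
    tail = filename[len(prefix) :]
    assert tail == 'foo.c,v'
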
--- a/hgext/convert/darcs.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/darcs.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
 )
 from mercurial.utils import dateutil
 from . import common
+
 NoRepo = common.NoRepo
 
 # The naming drift of ElementTree is fun!
@@ -37,10 +38,11 @@
         except ImportError:
             try:
                 import elementtree.ElementTree.ElementTree as ElementTree
-                import elementtree.ElementTree.XMLParser  as XMLParser
+                import elementtree.ElementTree.XMLParser as XMLParser
             except ImportError:
                 pass
 
+
 class darcs_source(common.converter_source, common.commandline):
     def __init__(self, ui, repotype, path, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs=revs)
@@ -54,8 +56,9 @@
         common.checktool('darcs')
         version = self.run0('--version').splitlines()[0].strip()
         if version < '2.1':
-            raise error.Abort(_('darcs version 2.1 or newer needed (found %r)')
-                              % version)
+            raise error.Abort(
+                _('darcs version 2.1 or newer needed (found %r)') % version
+            )
 
         if "ElementTree" not in globals():
             raise error.Abort(_("Python ElementTree module is not available"))
@@ -71,19 +74,23 @@
         format = self.format()
         if format:
             if format in ('darcs-1.0', 'hashed'):
-                raise NoRepo(_("%s repository format is unsupported, "
-                               "please upgrade") % format)
+                raise NoRepo(
+                    _("%s repository format is unsupported, " "please upgrade")
+                    % format
+                )
         else:
             self.ui.warn(_('failed to detect repository format!'))
 
     def before(self):
         self.tmppath = pycompat.mkdtemp(
-            prefix='convert-' + os.path.basename(self.path) + '-')
+            prefix='convert-' + os.path.basename(self.path) + '-'
+        )
         output, status = self.run('init', repodir=self.tmppath)
         self.checkexit(status)
 
-        tree = self.xml('changes', xml_output=True, summary=True,
-                        repodir=self.path)
+        tree = self.xml(
+            'changes', xml_output=True, summary=True, repodir=self.path
+        )
         tagname = None
         child = None
         for elt in tree.findall('patch'):
@@ -135,8 +142,9 @@
 
     def manifest(self):
         man = []
-        output, status = self.run('show', 'files', no_directories=True,
-                                  repodir=self.tmppath)
+        output, status = self.run(
+            'show', 'files', no_directories=True, repodir=self.tmppath
+        )
         self.checkexit(status)
         for line in output.split('\n'):
             path = line[2:]
@@ -155,17 +163,24 @@
         # etree can return unicode objects for name, comment, and author,
         # so recode() is used to ensure str objects are emitted.
         newdateformat = '%Y-%m-%d %H:%M:%S %1%2'
-        return common.commit(author=self.recode(elt.get('author')),
-                             date=dateutil.datestr(date, newdateformat),
-                             desc=self.recode(desc).strip(),
-                             parents=self.parents[rev])
+        return common.commit(
+            author=self.recode(elt.get('author')),
+            date=dateutil.datestr(date, newdateformat),
+            desc=self.recode(desc).strip(),
+            parents=self.parents[rev],
+        )
 
     def pull(self, rev):
-        output, status = self.run('pull', self.path, all=True,
-                                  match='hash %s' % rev,
-                                  no_test=True, no_posthook=True,
-                                  external_merge='/bin/false',
-                                  repodir=self.tmppath)
+        output, status = self.run(
+            'pull',
+            self.path,
+            all=True,
+            match='hash %s' % rev,
+            no_test=True,
+            no_posthook=True,
+            external_merge='/bin/false',
+            repodir=self.tmppath,
+        )
         if status:
             if output.find('We have conflicts in') == -1:
                 self.checkexit(status, output)
@@ -196,7 +211,7 @@
                     for f in man:
                         if not f.startswith(source):
                             continue
-                        fdest = dest + '/' + f[len(source):]
+                        fdest = dest + '/' + f[len(source) :]
                         changes.append((f, rev))
                         changes.append((fdest, rev))
                         copies[fdest] = f
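
The darcs.py import hunk (the doubled space in 'XMLParser  as
XMLParser') is black's horizontal-whitespace normalization: runs of
interior spaces collapse to one, and binary operators get exactly one
space per side. A small sketch, using posixpath as a stand-in import:

    # Before blackening the import read:
    #     import elementtree.ElementTree.XMLParser  as XMLParser
    # black collapses the doubled space; the same pass normalizes
    # operator spacing.
    import posixpath as pp

    fdest = 'dest' + '/' + 'file'  # operators: one space on each side
    assert pp.join('dest', 'file') == fdest
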
--- a/hgext/convert/filemap.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/filemap.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,8 +14,10 @@
     pycompat,
 )
 from . import common
+
 SKIPREV = common.SKIPREV
 
+
 def rpairs(path):
     '''Yield tuples with path split at '/', starting with the full path.
     No leading, trailing or double '/', please.
@@ -27,10 +29,11 @@
     '''
     i = len(path)
     while i != -1:
-        yield path[:i], path[i + 1:]
+        yield path[:i], path[i + 1 :]
         i = path.rfind('/', 0, i)
     yield '.', path
 
+
 def normalize(path):
     ''' We use posixpath.normpath to support cross-platform path format.
     However, it doesn't handle None input. So we wrap it up. '''
@@ -38,6 +41,7 @@
         return None
     return posixpath.normpath(path)
 
+
 class filemapper(object):
     '''Map and filter filenames when importing.
     A name can be mapped to itself, a new name, or None (omit from new
@@ -55,25 +59,31 @@
 
     def parse(self, path):
         errs = 0
+
         def check(name, mapping, listname):
             if not name:
-                self.ui.warn(_('%s:%d: path to %s is missing\n') %
-                             (lex.infile, lex.lineno, listname))
+                self.ui.warn(
+                    _('%s:%d: path to %s is missing\n')
+                    % (lex.infile, lex.lineno, listname)
+                )
                 return 1
             if name in mapping:
-                self.ui.warn(_('%s:%d: %r already in %s list\n') %
-                             (lex.infile, lex.lineno, name, listname))
+                self.ui.warn(
+                    _('%s:%d: %r already in %s list\n')
+                    % (lex.infile, lex.lineno, name, listname)
+                )
                 return 1
-            if (name.startswith('/') or
-                name.endswith('/') or
-                '//' in name):
-                self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
-                             (lex.infile, lex.lineno, listname,
-                              pycompat.bytestr(name)))
+            if name.startswith('/') or name.endswith('/') or '//' in name:
+                self.ui.warn(
+                    _('%s:%d: superfluous / in %s %r\n')
+                    % (lex.infile, lex.lineno, listname, pycompat.bytestr(name))
+                )
                 return 1
             return 0
+
         lex = common.shlexer(
-            filepath=path, wordchars='!@#$%^&*()-=+[]{}|;:,./<>?')
+            filepath=path, wordchars='!@#$%^&*()-=+[]{}|;:,./<>?'
+        )
         cmd = lex.get_token()
         while cmd:
             if cmd == 'include':
@@ -93,8 +103,10 @@
             elif cmd == 'source':
                 errs += self.parse(normalize(lex.get_token()))
             else:
-                self.ui.warn(_('%s:%d: unknown directive %r\n') %
-                             (lex.infile, lex.lineno, pycompat.bytestr(cmd)))
+                self.ui.warn(
+                    _('%s:%d: unknown directive %r\n')
+                    % (lex.infile, lex.lineno, pycompat.bytestr(cmd))
+                )
                 errs += 1
             cmd = lex.get_token()
         return errs
@@ -157,6 +169,7 @@
     def active(self):
         return bool(self.include or self.exclude or self.rename)
 
+
 # This class does two additional things compared to a regular source:
 #
 # - Filter and rename files.  This is mostly wrapped by the filemapper
@@ -171,6 +184,7 @@
 #   touch files we're interested in, but also merges that merge two
 #   or more interesting revisions.
 
+
 class filemap_source(common.converter_source):
     def __init__(self, ui, baseconverter, filemap):
         super(filemap_source, self).__init__(ui, baseconverter.repotype)
@@ -189,8 +203,9 @@
         self.children = {}
         self.seenchildren = {}
         # experimental config: convert.ignoreancestorcheck
-        self.ignoreancestorcheck = self.ui.configbool('convert',
-                                                      'ignoreancestorcheck')
+        self.ignoreancestorcheck = self.ui.configbool(
+            'convert', 'ignoreancestorcheck'
+        )
 
     def before(self):
         self.base.before()
@@ -250,7 +265,7 @@
             try:
                 parents = self.origparents[rev]
             except KeyError:
-                continue # unknown revmap source
+                continue  # unknown revmap source
             if wanted:
                 self.mark_wanted(rev, parents)
             else:
@@ -348,8 +363,7 @@
             if p in self.wantedancestors:
                 wrev.update(self.wantedancestors[p])
             else:
-                self.ui.warn(_('warning: %s parent %s is missing\n') %
-                             (rev, p))
+                self.ui.warn(_('warning: %s parent %s is missing\n') % (rev, p))
         wrev.add(rev)
         self.wantedancestors[rev] = wrev
 
@@ -382,10 +396,13 @@
             if mp1 == SKIPREV or mp1 in knownparents:
                 continue
 
-            isancestor = (not self.ignoreancestorcheck and
-                          any(p2 for p2 in parents
-                              if p1 != p2 and mp1 != self.parentmap[p2]
-                                 and mp1 in self.wantedancestors[p2]))
+            isancestor = not self.ignoreancestorcheck and any(
+                p2
+                for p2 in parents
+                if p1 != p2
+                and mp1 != self.parentmap[p2]
+                and mp1 in self.wantedancestors[p2]
+            )
             if not isancestor and not hasbranchparent and len(parents) > 1:
                 # This could be expensive, avoid unnecessary calls.
                 if self._cachedcommit(p1).branch == branch:
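
The isancestor rewrite above is black reflowing a hand-parenthesized
boolean: the redundant outer parentheses go away (the any() call
brackets the continuation itself) and each clause of the generator
lands on its own line. A self-contained sketch of the resulting shape,
with deliberately simplified stand-in arguments:

    def isancestor_stub(p1, parents, parentmap, ignorecheck):
        """Stand-in for the reflowed expression above, simplified."""
        return not ignorecheck and any(
            p2
            for p2 in parents
            if p1 != p2 and parentmap.get(p2) == p1
        )


    assert isancestor_stub('a', ['a', 'c'], {'c': 'a'}, False)
    assert not isancestor_stub('a', ['a', 'c'], {'c': 'a'}, True)
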
--- a/hgext/convert/git.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/git.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,9 +16,8 @@
     pycompat,
 )
 
-from . import (
-    common,
-)
+from . import common
+
 
 class submodule(object):
     def __init__(self, path, node, url):
@@ -32,6 +31,7 @@
     def hgsubstate(self):
         return "%s %s" % (self.node, self.path)
 
+
 # Keys in extra fields that should not be copied if the user requests.
 bannedextrakeys = {
     # Git commit object built-ins.
@@ -44,6 +44,7 @@
     'close',
 }
 
+
 class convert_git(common.converter_source, common.commandline):
     # Windows does not support GIT_DIR= construct while other systems
     # cannot remove environment variable. Just assume none have
@@ -78,8 +79,9 @@
         if os.path.isdir(path + "/.git"):
             path += "/.git"
         if not os.path.exists(path + "/objects"):
-            raise common.NoRepo(_("%s does not look like a Git repository") %
-                                path)
+            raise common.NoRepo(
+                _("%s does not look like a Git repository") % path
+            )
 
         # The default value (50) is based on the default for 'git diff'.
         similarity = ui.configint('convert', 'git.similarity')
@@ -106,8 +108,10 @@
         self.copyextrakeys = self.ui.configlist('convert', 'git.extrakeys')
         banned = set(self.copyextrakeys) & bannedextrakeys
         if banned:
-            raise error.Abort(_('copying of extra key is forbidden: %s') %
-                              _(', ').join(sorted(banned)))
+            raise error.Abort(
+                _('copying of extra key is forbidden: %s')
+                % _(', ').join(sorted(banned))
+            )
 
         committeractions = self.ui.configlist('convert', 'git.committeractions')
 
@@ -126,19 +130,31 @@
                     messagealways = v or 'committer:'
 
         if messagedifferent and messagealways:
-            raise error.Abort(_('committeractions cannot define both '
-                                'messagedifferent and messagealways'))
+            raise error.Abort(
+                _(
+                    'committeractions cannot define both '
+                    'messagedifferent and messagealways'
+                )
+            )
 
         dropcommitter = 'dropcommitter' in committeractions
         replaceauthor = 'replaceauthor' in committeractions
 
         if dropcommitter and replaceauthor:
-            raise error.Abort(_('committeractions cannot define both '
-                                'dropcommitter and replaceauthor'))
+            raise error.Abort(
+                _(
+                    'committeractions cannot define both '
+                    'dropcommitter and replaceauthor'
+                )
+            )
 
         if dropcommitter and messagealways:
-            raise error.Abort(_('committeractions cannot define both '
-                                'dropcommitter and messagealways'))
+            raise error.Abort(
+                _(
+                    'committeractions cannot define both '
+                    'dropcommitter and messagealways'
+                )
+            )
 
         if not messagedifferent and not messagealways:
             messagedifferent = 'committer:'
@@ -172,17 +188,20 @@
     def catfile(self, rev, ftype):
         if rev == nodemod.nullhex:
             raise IOError
-        self.catfilepipe[0].write(rev+'\n')
+        self.catfilepipe[0].write(rev + '\n')
         self.catfilepipe[0].flush()
         info = self.catfilepipe[1].readline().split()
         if info[1] != ftype:
-            raise error.Abort(_('cannot read %r object at %s') % (
-                pycompat.bytestr(ftype), rev))
+            raise error.Abort(
+                _('cannot read %r object at %s')
+                % (pycompat.bytestr(ftype), rev)
+            )
         size = int(info[2])
         data = self.catfilepipe[1].read(size)
         if len(data) < size:
-            raise error.Abort(_('cannot read %r object at %s: unexpected size')
-                              % (ftype, rev))
+            raise error.Abort(
+                _('cannot read %r object at %s: unexpected size') % (ftype, rev)
+            )
         # read the trailing newline
         self.catfilepipe[1].read(1)
         return data
@@ -216,8 +235,10 @@
         self.submodules = []
         c = config.config()
         # Each item in .gitmodules starts with whitespace that cant be parsed
-        c.parse('.gitmodules', '\n'.join(line.strip() for line in
-                               content.split('\n')))
+        c.parse(
+            '.gitmodules',
+            '\n'.join(line.strip() for line in content.split('\n')),
+        )
         for sec in c.sections():
             s = c[sec]
             if 'url' in s and 'path' in s:
@@ -228,15 +249,18 @@
         if ret:
             # This can happen if a file in the repo has permissions 160000,
             # but there is no .gitmodules file.
-            self.ui.warn(_("warning: cannot read submodules config file in "
-                           "%s\n") % version)
+            self.ui.warn(
+                _("warning: cannot read submodules config file in " "%s\n")
+                % version
+            )
             return
 
         try:
             self.parsegitmodules(modules)
         except error.ParseError:
-            self.ui.warn(_("warning: unable to parse .gitmodules in %s\n")
-                         % version)
+            self.ui.warn(
+                _("warning: unable to parse .gitmodules in %s\n") % version
+            )
             return
 
         for m in self.submodules:
@@ -249,7 +273,9 @@
         if full:
             raise error.Abort(_("convert from git does not support --full"))
         self.modecache = {}
-        cmd = ['diff-tree','-z', '--root', '-m', '-r'] + self.simopt + [version]
+        cmd = (
+            ['diff-tree', '-z', '--root', '-m', '-r'] + self.simopt + [version]
+        )
         output, status = self.gitrun(*cmd)
         if status:
             raise error.Abort(_('cannot read changes in %s') % version)
@@ -264,12 +290,13 @@
         i = 0
 
         skipsubmodules = self.ui.configbool('convert', 'git.skipsubmodules')
+
         def add(entry, f, isdest):
             seen.add(f)
             h = entry[3]
-            p = (entry[1] == "100755")
-            s = (entry[1] == "120000")
-            renamesource = (not isdest and entry[4][0] == 'R')
+            p = entry[1] == "100755"
+            s = entry[1] == "120000"
+            renamesource = not isdest and entry[4][0] == 'R'
 
             if f == '.gitmodules':
                 if skipsubmodules:
@@ -330,9 +357,9 @@
         return (changes, copies, set())
 
     def getcommit(self, version):
-        c = self.catfile(version, "commit") # read the commit hash
+        c = self.catfile(version, "commit")  # read the commit hash
         end = c.find("\n\n")
-        message = c[end + 2:]
+        message = c[end + 2 :]
         message = self.recode(message)
         l = c[:end].splitlines()
         parents = []
@@ -377,18 +404,23 @@
         date = tm + " " + (b"%d" % tz)
         saverev = self.ui.configbool('convert', 'git.saverev')
 
-        c = common.commit(parents=parents, date=date, author=author,
-                          desc=message,
-                          rev=version,
-                          extra=extra,
-                          saverev=saverev)
+        c = common.commit(
+            parents=parents,
+            date=date,
+            author=author,
+            desc=message,
+            rev=version,
+            extra=extra,
+            saverev=saverev,
+        )
         return c
 
     def numcommits(self):
         output, ret = self.gitrunlines('rev-list', '--all')
         if ret:
-            raise error.Abort(_('cannot retrieve number of commits in %s')
-                              % self.path)
+            raise error.Abort(
+                _('cannot retrieve number of commits in %s') % self.path
+            )
         return len(output)
 
     def gettags(self):
@@ -408,7 +440,7 @@
             node, tag = line.split(None, 1)
             if not tag.startswith(prefix):
                 continue
-            alltags[tag[len(prefix):]] = node
+            alltags[tag[len(prefix) :]] = node
 
         # Filter out tag objects for annotated tag refs
         for tag in alltags:
@@ -425,8 +457,9 @@
     def getchangedfiles(self, version, i):
         changes = []
         if i is None:
-            output, status = self.gitrunlines('diff-tree', '--root', '-m',
-                                              '-r', version)
+            output, status = self.gitrunlines(
+                'diff-tree', '--root', '-m', '-r', version
+            )
             if status:
                 raise error.Abort(_('cannot read changes in %s') % version)
             for l in output:
@@ -435,9 +468,15 @@
                 m, f = l[:-1].split("\t")
                 changes.append(f)
         else:
-            output, status = self.gitrunlines('diff-tree', '--name-only',
-                                              '--root', '-r', version,
-                                              '%s^%d' % (version, i + 1), '--')
+            output, status = self.gitrunlines(
+                'diff-tree',
+                '--name-only',
+                '--root',
+                '-r',
+                version,
+                '%s^%d' % (version, i + 1),
+                '--',
+            )
             if status:
                 raise error.Abort(_('cannot read changes in %s') % version)
             changes = [f.rstrip('\n') for f in output]
@@ -452,7 +491,7 @@
         reftypes = [
             # (git prefix, hg prefix)
             ('refs/remotes/origin/', remoteprefix + '/'),
-            ('refs/heads/', '')
+            ('refs/heads/', ''),
         ]
 
         exclude = {
@@ -468,7 +507,7 @@
                 for gitprefix, hgprefix in reftypes:
                     if not name.startswith(gitprefix) or name in exclude:
                         continue
-                    name = '%s%s' % (hgprefix, name[len(gitprefix):])
+                    name = '%s%s' % (hgprefix, name[len(gitprefix) :])
                     bookmarks[name] = rev
         except Exception:
             pass
--- a/hgext/convert/gnuarch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/gnuarch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,8 +26,8 @@
 )
 from . import common
 
+
 class gnuarch_source(common.converter_source, common.commandline):
-
     class gnuarch_rev(object):
         def __init__(self, rev):
             self.rev = rev
@@ -45,8 +45,9 @@
         super(gnuarch_source, self).__init__(ui, repotype, path, revs=revs)
 
         if not os.path.exists(os.path.join(path, '{arch}')):
-            raise common.NoRepo(_("%s does not look like a GNU Arch repository")
-                         % path)
+            raise common.NoRepo(
+                _("%s does not look like a GNU Arch repository") % path
+            )
 
         # Could use checktool, but we want to check for baz or tla.
         self.execmd = None
@@ -74,8 +75,9 @@
 
     def before(self):
         # Get registered archives
-        self.archives = [i.rstrip('\n')
-                         for i in self.runlines0('archives', '-n')]
+        self.archives = [
+            i.rstrip('\n') for i in self.runlines0('archives', '-n')
+        ]
 
         if self.execmd == 'tla':
             output = self.run0('tree-version', self.path)
@@ -85,8 +87,9 @@
 
         # Get name of temporary directory
         version = self.treeversion.split('/')
-        self.tmppath = os.path.join(pycompat.fsencode(tempfile.gettempdir()),
-                                    'hg-%s' % version[1])
+        self.tmppath = os.path.join(
+            pycompat.fsencode(tempfile.gettempdir()), 'hg-%s' % version[1]
+        )
 
         # Generate parents dictionary
         self.parents[None] = []
@@ -97,14 +100,20 @@
 
             archive = treeversion.split('/')[0]
             if archive not in self.archives:
-                self.ui.status(_('tree analysis stopped because it points to '
-                                 'an unregistered archive %s...\n') % archive)
+                self.ui.status(
+                    _(
+                        'tree analysis stopped because it points to '
+                        'an unregistered archive %s...\n'
+                    )
+                    % archive
+                )
                 break
 
             # Get the complete list of revisions for that tree version
             output, status = self.runlines('revisions', '-r', '-f', treeversion)
-            self.checkexit(status, 'failed retrieving revisions for %s'
-                           % treeversion)
+            self.checkexit(
+                status, 'failed retrieving revisions for %s' % treeversion
+            )
 
             # No new iteration unless a revision has a continuation-of header
             treeversion = None
@@ -117,7 +126,7 @@
                 # Read author, date and summary
                 catlog, status = self.run('cat-log', '-d', self.path, rev)
                 if status:
-                    catlog  = self.run0('cat-archive-log', rev)
+                    catlog = self.run0('cat-archive-log', rev)
                 self._parsecatlog(catlog, rev)
 
                 # Populate the parents map
@@ -132,7 +141,8 @@
                 # by the continuation-of header.
                 if self.changes[rev].continuationof:
                     treeversion = '--'.join(
-                        self.changes[rev].continuationof.split('--')[:-1])
+                        self.changes[rev].continuationof.split('--')[:-1]
+                    )
                     break
 
                 # If we reached a base-0 revision w/o any continuation-of
@@ -189,9 +199,13 @@
 
     def getcommit(self, rev):
         changes = self.changes[rev]
-        return common.commit(author=changes.author, date=changes.date,
-                             desc=changes.summary, parents=self.parents[rev],
-                             rev=rev)
+        return common.commit(
+            author=changes.author,
+            date=changes.date,
+            desc=changes.summary,
+            parents=self.parents[rev],
+            rev=rev,
+        )
 
     def gettags(self):
         return self.tags
@@ -207,8 +221,7 @@
 
     def _update(self, rev):
         self.ui.debug('applying revision %s...\n' % rev)
-        changeset, status = self.runlines('replay', '-d', self.tmppath,
-                                              rev)
+        changeset, status = self.runlines('replay', '-d', self.tmppath, rev)
         if status:
             # Something went wrong while merging (baz or tla
             # issue?), get latest revision and try from there
@@ -216,8 +229,9 @@
             self._obtainrevision(rev)
         else:
             old_rev = self.parents[rev][0]
-            self.ui.debug('computing changeset between %s and %s...\n'
-                          % (old_rev, rev))
+            self.ui.debug(
+                'computing changeset between %s and %s...\n' % (old_rev, rev)
+            )
             self._parsechangeset(changeset, rev)
 
     def _getfile(self, name, rev):
@@ -286,21 +300,23 @@
 
             # Commit date
             self.changes[rev].date = dateutil.datestr(
-                dateutil.strdate(catlog['Standard-date'],
-                             '%Y-%m-%d %H:%M:%S'))
+                dateutil.strdate(catlog['Standard-date'], '%Y-%m-%d %H:%M:%S')
+            )
 
             # Commit author
             self.changes[rev].author = self.recode(catlog['Creator'])
 
             # Commit description
-            self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
-                                                    catlog.get_payload()))
+            self.changes[rev].summary = '\n\n'.join(
+                (catlog['Summary'], catlog.get_payload())
+            )
             self.changes[rev].summary = self.recode(self.changes[rev].summary)
 
             # Commit revision origin when dealing with a branch or tag
             if 'Continuation-of' in catlog:
                 self.changes[rev].continuationof = self.recode(
-                    catlog['Continuation-of'])
+                    catlog['Continuation-of']
+                )
         except Exception:
             raise error.Abort(_('could not parse cat-log of %s') % rev)
 
--- a/hgext/convert/hg.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/hg.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,14 +37,17 @@
     util,
 )
 from mercurial.utils import dateutil
+
 stringio = util.stringio
 
 from . import common
+
 mapfile = common.mapfile
 NoRepo = common.NoRepo
 
 sha1re = re.compile(br'\b[0-9a-f]{12,40}\b')
 
+
 class mercurial_sink(common.converter_sink):
     def __init__(self, ui, repotype, path):
         common.converter_sink.__init__(self, ui, repotype, path)
@@ -56,8 +59,9 @@
             try:
                 self.repo = hg.repository(self.ui, path)
                 if not self.repo.local():
-                    raise NoRepo(_('%s is not a local Mercurial repository')
-                                 % path)
+                    raise NoRepo(
+                        _('%s is not a local Mercurial repository') % path
+                    )
             except error.RepoError as err:
                 ui.traceback()
                 raise NoRepo(err.args[0])
@@ -66,13 +70,15 @@
                 ui.status(_('initializing destination %s repository\n') % path)
                 self.repo = hg.repository(self.ui, path, create=True)
                 if not self.repo.local():
-                    raise NoRepo(_('%s is not a local Mercurial repository')
-                                 % path)
+                    raise NoRepo(
+                        _('%s is not a local Mercurial repository') % path
+                    )
                 self.created.append(path)
             except error.RepoError:
                 ui.traceback()
-                raise NoRepo(_("could not create hg repository %s as sink")
-                             % path)
+                raise NoRepo(
+                    _("could not create hg repository %s as sink") % path
+                )
         self.lock = None
         self.wlock = None
         self.filemapmode = False
@@ -100,7 +106,7 @@
         if not self.clonebranches:
             return
 
-        setbranch = (branch != self.lastbranch)
+        setbranch = branch != self.lastbranch
         self.lastbranch = branch
         if not branch:
             branch = 'default'
@@ -130,8 +136,9 @@
                 pbranchpath = os.path.join(self.path, pbranch)
                 prepo = hg.peer(self.ui, {}, pbranchpath)
                 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
-                exchange.pull(self.repo, prepo,
-                              [prepo.lookup(h) for h in heads])
+                exchange.pull(
+                    self.repo, prepo, [prepo.lookup(h) for h in heads]
+                )
             self.before()
 
     def _rewritetags(self, source, revmap, data):
@@ -166,8 +173,9 @@
             if revid != nodemod.nullhex:
                 revmap = self.subrevmaps.get(subpath)
                 if revmap is None:
-                    revmap = mapfile(self.ui,
-                                     self.repo.wjoin(subpath, '.hg/shamap'))
+                    revmap = mapfile(
+                        self.ui, self.repo.wjoin(subpath, '.hg/shamap')
+                    )
                     self.subrevmaps[subpath] = revmap
 
                     # It is reasonable that one or more of the subrepos don't
@@ -184,8 +192,10 @@
                 newid = revmap.get(revid)
                 if not newid:
                     if len(revmap) > 0:
-                        self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
-                                     (revid, subpath))
+                        self.ui.warn(
+                            _("%s is missing from %s/.hg/shamap\n")
+                            % (revid, subpath)
+                        )
                 else:
                     revid = newid
 
@@ -204,8 +214,15 @@
         anc = [p1ctx.ancestor(p2ctx)]
         # Calculate what files are coming from p2
         actions, diverge, rename = mergemod.calculateupdates(
-            self.repo, p1ctx, p2ctx, anc, branchmerge=True,
-            force=True, acceptremote=False, followcopies=False)
+            self.repo,
+            p1ctx,
+            p2ctx,
+            anc,
+            branchmerge=True,
+            force=True,
+            acceptremote=False,
+            followcopies=False,
+        )
 
         for file, (action, info, msg) in actions.iteritems():
             if source.targetfilebelongstosource(file):
@@ -216,10 +233,14 @@
             # If the file requires actual merging, abort. We don't have enough
             # context to resolve merges correctly.
             if action in ['m', 'dm', 'cd', 'dc']:
-                raise error.Abort(_("unable to convert merge commit "
-                    "since target parents do not merge cleanly (file "
-                    "%s, parents %s and %s)") % (file, p1ctx,
-                                                 p2ctx))
+                raise error.Abort(
+                    _(
+                        "unable to convert merge commit "
+                        "since target parents do not merge cleanly (file "
+                        "%s, parents %s and %s)"
+                    )
+                    % (file, p1ctx, p2ctx)
+                )
             elif action == 'k':
                 # 'keep' means nothing changed from p1
                 continue
@@ -227,8 +248,9 @@
                 # Any other change means we want to take the p2 version
                 yield file
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full,
-                  cleanp2):
+    def putcommit(
+        self, files, copies, parents, commit, source, revmap, full, cleanp2
+    ):
         files = dict(files)
 
         def getfilectx(repo, memctx, f):
@@ -251,8 +273,15 @@
                 data = self._rewritetags(source, revmap, data)
             if f == '.hgsubstate':
                 data = self._rewritesubstate(source, data)
-            return context.memfilectx(self.repo, memctx, f, data, 'l' in mode,
-                                      'x' in mode, copies.get(f))
+            return context.memfilectx(
+                self.repo,
+                memctx,
+                f,
+                data,
+                'l' in mode,
+                'x' in mode,
+                copies.get(f),
+            )
 
         pl = []
         for p in parents:
@@ -277,7 +306,7 @@
             oldrev = source.lookuprev(sha1)
             newrev = revmap.get(oldrev)
             if newrev is not None:
-                text = text.replace(sha1, newrev[:len(sha1)])
+                text = text.replace(sha1, newrev[: len(sha1)])
 
         extra = commit.extra.copy()
 
@@ -285,8 +314,12 @@
         if sourcename:
             extra['convert_source'] = sourcename
 
-        for label in ('source', 'transplant_source', 'rebase_source',
-                      'intermediate-source'):
+        for label in (
+            'source',
+            'transplant_source',
+            'rebase_source',
+            'intermediate-source',
+        ):
             node = extra.get(label)
 
             if node is None:
@@ -326,13 +359,25 @@
                     p2files.add(file)
                     fileset.add(file)
 
-            ctx = context.memctx(self.repo, (p1, p2), text, fileset,
-                                 getfilectx, commit.author, commit.date, extra)
+            ctx = context.memctx(
+                self.repo,
+                (p1, p2),
+                text,
+                fileset,
+                getfilectx,
+                commit.author,
+                commit.date,
+                extra,
+            )
 
             # We won't know if the conversion changes the node until after the
             # commit, so copy the source's phase for now.
-            self.repo.ui.setconfig('phases', 'new-commit',
-                                   phases.phasenames[commit.phase], 'convert')
+            self.repo.ui.setconfig(
+                'phases',
+                'new-commit',
+                phases.phasenames[commit.phase],
+                'convert',
+            )
 
             with self.repo.transaction("convert") as tr:
                 if self.repo.ui.config('convert', 'hg.preserve-hash'):
@@ -347,8 +392,9 @@
                 if commit.rev != node:
                     ctx = self.repo[node]
                     if ctx.phase() < phases.draft:
-                        phases.registernew(self.repo, tr, phases.draft,
-                                           [ctx.node()])
+                        phases.registernew(
+                            self.repo, tr, phases.draft, [ctx.node()]
+                        )
 
             text = "(octopus merge fixup)\n"
             p2 = node
@@ -372,7 +418,8 @@
             for h in heads:
                 if '.hgtags' in self.repo[h]:
                     oldlines.update(
-                        set(self.repo[h]['.hgtags'].data().splitlines(True)))
+                        set(self.repo[h]['.hgtags'].data().splitlines(True))
+                    )
         oldlines = sorted(list(oldlines))
 
         newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
@@ -398,15 +445,23 @@
             return None, None
 
         data = "".join(newlines)
+
         def getfilectx(repo, memctx, f):
             return context.memfilectx(repo, memctx, f, data, False, False, None)
 
         self.ui.status(_("updating tags\n"))
         date = "%d 0" % int(time.mktime(time.gmtime()))
         extra = {'branch': self.tagsbranch}
-        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
-                             [".hgtags"], getfilectx, "convert-repo", date,
-                             extra)
+        ctx = context.memctx(
+            self.repo,
+            (tagparent, None),
+            "update tags",
+            [".hgtags"],
+            getfilectx,
+            "convert-repo",
+            date,
+            extra,
+        )
         node = self.repo.commitctx(ctx)
         return nodemod.hex(node), nodemod.hex(tagparent)
 
@@ -423,8 +478,10 @@
             tr = self.repo.transaction('bookmark')
             self.ui.status(_("updating bookmarks\n"))
             destmarks = self.repo._bookmarks
-            changes = [(bookmark, nodemod.bin(updatedbookmark[bookmark]))
-                       for bookmark in updatedbookmark]
+            changes = [
+                (bookmark, nodemod.bin(updatedbookmark[bookmark]))
+                for bookmark in updatedbookmark
+            ]
             destmarks.applychanges(self.repo, tr, changes)
             tr.close()
         finally:
@@ -436,11 +493,17 @@
 
     def hascommitforsplicemap(self, rev):
         if rev not in self.repo and self.clonebranches:
-            raise error.Abort(_('revision %s not found in destination '
-                               'repository (lookups with clonebranches=true '
-                               'are not implemented)') % rev)
+            raise error.Abort(
+                _(
+                    'revision %s not found in destination '
+                    'repository (lookups with clonebranches=true '
+                    'are not implemented)'
+                )
+                % rev
+            )
         return rev in self.repo
 
+
 class mercurial_source(common.converter_source):
     def __init__(self, ui, repotype, path, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs)
@@ -468,8 +531,9 @@
                 try:
                     startnode = self.repo.lookup(startnode)
                 except error.RepoError:
-                    raise error.Abort(_('%s is not a valid start revision')
-                                     % startnode)
+                    raise error.Abort(
+                        _('%s is not a valid start revision') % startnode
+                    )
                 startrev = self.repo.changelog.rev(startnode)
                 children = {startnode: 1}
                 for r in self.repo.changelog.descendants([startrev]):
@@ -483,8 +547,9 @@
                 self._heads = self.repo.heads()
         else:
             if revs or startnode is not None:
-                raise error.Abort(_('hg.revs cannot be combined with '
-                                   'hg.startrev or --rev'))
+                raise error.Abort(
+                    _('hg.revs cannot be combined with ' 'hg.startrev or --rev')
+                )
             nodes = set()
             parents = set()
             for r in scmutil.revrange(self.repo, [hgrevs]):
@@ -580,29 +645,38 @@
         optparents = [p.hex() for p in ctx.parents() if p and p not in _parents]
         crev = rev
 
-        return common.commit(author=ctx.user(),
-                             date=dateutil.datestr(ctx.date(),
-                                               '%Y-%m-%d %H:%M:%S %1%2'),
-                             desc=ctx.description(),
-                             rev=crev,
-                             parents=parents,
-                             optparents=optparents,
-                             branch=ctx.branch(),
-                             extra=ctx.extra(),
-                             sortkey=ctx.rev(),
-                             saverev=self.saverev,
-                             phase=ctx.phase(),
-                             ctx=ctx)
+        return common.commit(
+            author=ctx.user(),
+            date=dateutil.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
+            desc=ctx.description(),
+            rev=crev,
+            parents=parents,
+            optparents=optparents,
+            branch=ctx.branch(),
+            extra=ctx.extra(),
+            sortkey=ctx.rev(),
+            saverev=self.saverev,
+            phase=ctx.phase(),
+            ctx=ctx,
+        )
 
     def numcommits(self):
         return len(self.repo)
 
     def gettags(self):
         # This will get written to .hgtags; filter non-global tags out.
-        tags = [t for t in self.repo.tagslist()
-                if self.repo.tagtype(t[0]) == 'global']
-        return dict([(name, nodemod.hex(node)) for name, node in tags
-                     if self.keep(node)])
+        tags = [
+            t
+            for t in self.repo.tagslist()
+            if self.repo.tagtype(t[0]) == 'global'
+        ]
+        return dict(
+            [
+                (name, nodemod.hex(node))
+                for name, node in tags
+                if self.keep(node)
+            ]
+        )
 
     def getchangedfiles(self, rev, i):
         ctx = self._changectx(rev)
--- a/hgext/convert/monotone.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/monotone.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,12 +19,17 @@
 
 from . import common
 
+
 class monotone_source(common.converter_source, common.commandline):
     def __init__(self, ui, repotype, path=None, revs=None):
         common.converter_source.__init__(self, ui, repotype, path, revs)
         if revs and len(revs) > 1:
-            raise error.Abort(_('monotone source does not support specifying '
-                               'multiple revs'))
+            raise error.Abort(
+                _(
+                    'monotone source does not support specifying '
+                    'multiple revs'
+                )
+            )
         common.commandline.__init__(self, ui, 'mtn')
 
         self.ui = ui
@@ -32,8 +37,9 @@
         self.automatestdio = False
         self.revs = revs
 
-        norepo = common.NoRepo(_("%s does not look like a monotone repository")
-                             % path)
+        norepo = common.NoRepo(
+            _("%s does not look like a monotone repository") % path
+        )
         if not os.path.exists(os.path.join(path, '_MTN')):
             # Could be a monotone repository (SQLite db file)
             try:
@@ -46,35 +52,37 @@
                 raise norepo
 
         # regular expressions for parsing monotone output
-        space    = br'\s*'
-        name     = br'\s+"((?:\\"|[^"])*)"\s*'
-        value    = name
+        space = br'\s*'
+        name = br'\s+"((?:\\"|[^"])*)"\s*'
+        value = name
         revision = br'\s+\[(\w+)\]\s*'
-        lines    = br'(?:.|\n)+'
+        lines = br'(?:.|\n)+'
 
-        self.dir_re      = re.compile(space + "dir" + name)
-        self.file_re     = re.compile(space + "file" + name +
-                                      "content" + revision)
-        self.add_file_re = re.compile(space + "add_file" + name +
-                                      "content" + revision)
-        self.patch_re    = re.compile(space + "patch" + name +
-                                      "from" + revision + "to" + revision)
-        self.rename_re   = re.compile(space + "rename" + name + "to" + name)
-        self.delete_re   = re.compile(space + "delete" + name)
-        self.tag_re      = re.compile(space + "tag" + name + "revision" +
-                                      revision)
-        self.cert_re     = re.compile(lines + space + "name" + name +
-                                      "value" + value)
+        self.dir_re = re.compile(space + "dir" + name)
+        self.file_re = re.compile(space + "file" + name + "content" + revision)
+        self.add_file_re = re.compile(
+            space + "add_file" + name + "content" + revision
+        )
+        self.patch_re = re.compile(
+            space + "patch" + name + "from" + revision + "to" + revision
+        )
+        self.rename_re = re.compile(space + "rename" + name + "to" + name)
+        self.delete_re = re.compile(space + "delete" + name)
+        self.tag_re = re.compile(space + "tag" + name + "revision" + revision)
+        self.cert_re = re.compile(
+            lines + space + "name" + name + "value" + value
+        )
 
         attr = space + "file" + lines + space + "attr" + space
-        self.attr_execute_re = re.compile(attr  + '"mtn:execute"' +
-                                          space + '"true"')
+        self.attr_execute_re = re.compile(
+            attr + '"mtn:execute"' + space + '"true"'
+        )
 
         # cached data
         self.manifest_rev = None
         self.manifest = None
         self.files = None
-        self.dirs  = None
+        self.dirs = None
 
         common.checktool('mtn', abort=False)
 
@@ -140,13 +148,19 @@
         try:
             length = pycompat.long(lengthstr[:-1])
         except TypeError:
-            raise error.Abort(_('bad mtn packet - bad packet size %s')
-                % lengthstr)
+            raise error.Abort(
+                _('bad mtn packet - bad packet size %s') % lengthstr
+            )
 
         read = self.mtnreadfp.read(length)
         if len(read) != length:
-            raise error.Abort(_("bad mtn packet - unable to read full packet "
-                "read %s of %s") % (len(read), length))
+            raise error.Abort(
+                _(
+                    "bad mtn packet - unable to read full packet "
+                    "read %s of %s"
+                )
+                % (len(read), length)
+            )
 
         return (commandnbr, stream, length, read)
 
@@ -154,14 +168,16 @@
         retval = []
         while True:
             commandnbr, stream, length, output = self.mtnstdioreadpacket()
-            self.ui.debug('mtn: read packet %s:%s:%d\n' %
-                (commandnbr, stream, length))
+            self.ui.debug(
+                'mtn: read packet %s:%s:%d\n' % (commandnbr, stream, length)
+            )
 
             if stream == 'l':
                 # End of command
                 if output != '0':
-                    raise error.Abort(_("mtn command '%s' returned %s") %
-                        (command, output))
+                    raise error.Abort(
+                        _("mtn command '%s' returned %s") % (command, output)
+                    )
                 break
             elif stream in 'ew':
                 # Error, warning output
@@ -207,8 +223,12 @@
         return name in self.dirs
 
     def mtngetcerts(self, rev):
-        certs = {"author":"<missing>", "date":"<missing>",
-            "changelog":"<missing>", "branch":"<missing>"}
+        certs = {
+            "author": "<missing>",
+            "date": "<missing>",
+            "changelog": "<missing>",
+            "branch": "<missing>",
+        }
         certlist = self.mtnrun("certs", rev)
         # mtn < 0.45:
         #   key "test@selenic.com"
@@ -237,8 +257,9 @@
 
     def getchanges(self, rev, full):
         if full:
-            raise error.Abort(_("convert from monotone does not support "
-                              "--full"))
+            raise error.Abort(
+                _("convert from monotone does not support " "--full")
+            )
         revision = self.mtnrun("get_revision", rev).split("\n\n")
         files = {}
         ignoremove = {}
@@ -278,7 +299,7 @@
                 if tofile in ignoremove:
                     continue
                 if tofile.startswith(todir + '/'):
-                    renamed[tofile] = fromdir + tofile[len(todir):]
+                    renamed[tofile] = fromdir + tofile[len(todir) :]
                     # Avoid chained moves like:
                     # d1(/a) => d3/d1(/a)
                     # d2 => d3
@@ -286,7 +307,9 @@
             for tofile, fromfile in renamed.items():
                 self.ui.debug(
                     "copying file in renamed directory from '%s' to '%s'"
-                    % (fromfile, tofile), '\n')
+                    % (fromfile, tofile),
+                    '\n',
+                )
                 files[tofile] = rev
                 copies[tofile] = fromfile
             for fromfile in renamed.values():
@@ -318,7 +341,8 @@
             rev=rev,
             parents=self.mtnrun("parents", rev).splitlines(),
             branch=certs["branch"],
-            extra=extra)
+            extra=extra,
+        )
 
     def gettags(self):
         tags = {}
@@ -339,30 +363,40 @@
             versionstr = self.mtnrunsingle("interface_version")
             version = float(versionstr)
         except Exception:
-            raise error.Abort(_("unable to determine mtn automate interface "
-                "version"))
+            raise error.Abort(
+                _("unable to determine mtn automate interface " "version")
+            )
 
         if version >= 12.0:
             self.automatestdio = True
-            self.ui.debug("mtn automate version %f - using automate stdio\n" %
-                version)
+            self.ui.debug(
+                "mtn automate version %f - using automate stdio\n" % version
+            )
 
             # launch the long-running automate stdio process
-            self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
-                '-d', self.path)
+            self.mtnwritefp, self.mtnreadfp = self._run2(
+                'automate', 'stdio', '-d', self.path
+            )
             # read the headers
             read = self.mtnreadfp.readline()
             if read != 'format-version: 2\n':
-                raise error.Abort(_('mtn automate stdio header unexpected: %s')
-                    % read)
+                raise error.Abort(
+                    _('mtn automate stdio header unexpected: %s') % read
+                )
             while read != '\n':
                 read = self.mtnreadfp.readline()
                 if not read:
-                    raise error.Abort(_("failed to reach end of mtn automate "
-                        "stdio headers"))
+                    raise error.Abort(
+                        _(
+                            "failed to reach end of mtn automate "
+                            "stdio headers"
+                        )
+                    )
         else:
-            self.ui.debug("mtn automate version %s - not using automate stdio "
-                "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
+            self.ui.debug(
+                "mtn automate version %s - not using automate stdio "
+                "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version
+            )
 
     def after(self):
         if self.automatestdio:
--- a/hgext/convert/p4.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/p4.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,6 +22,7 @@
 
 from . import common
 
+
 def loaditer(f):
     "Yield the dictionary objects generated by p4"
     try:
@@ -33,6 +34,7 @@
     except EOFError:
         pass
 
+
 def decodefilename(filename):
     """Perforce escapes special characters @, #, *, or %
     with %40, %23, %2A, or %25 respectively
@@ -47,6 +49,7 @@
         filename = filename.replace(k, v)
     return filename
 
+
 class p4_source(common.converter_source):
     def __init__(self, ui, repotype, path, revs=None):
         # avoid import cycle
@@ -55,25 +58,30 @@
         super(p4_source, self).__init__(ui, repotype, path, revs=revs)
 
         if "/" in path and not path.startswith('//'):
-            raise common.NoRepo(_('%s does not look like a P4 repository') %
-                                path)
+            raise common.NoRepo(
+                _('%s does not look like a P4 repository') % path
+            )
 
         common.checktool('p4', abort=False)
 
         self.revmap = {}
-        self.encoding = self.ui.config('convert', 'p4.encoding',
-                                       convcmd.orig_encoding)
+        self.encoding = self.ui.config(
+            'convert', 'p4.encoding', convcmd.orig_encoding
+        )
         self.re_type = re.compile(
             br"([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
-            br"(\+\w+)?$")
+            br"(\+\w+)?$"
+        )
         self.re_keywords = re.compile(
             br"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
-            br":[^$\n]*\$")
+            br":[^$\n]*\$"
+        )
         self.re_keywords_old = re.compile(br"\$(Id|Header):[^$\n]*\$")
 
         if revs and len(revs) > 1:
-            raise error.Abort(_("p4 source does not support specifying "
-                               "multiple revisions"))
+            raise error.Abort(
+                _("p4 source does not support specifying " "multiple revisions")
+            )
 
     def setrevmap(self, revmap):
         """Sets the parsed revmap dictionary.
@@ -115,7 +123,7 @@
         if "/" in path:
             p4changes.update(self._parse_view(path))
             if path.startswith("//") and path.endswith("/..."):
-                views = {path[:-3]:""}
+                views = {path[:-3]: ""}
             else:
                 views = {"//": ""}
         else:
@@ -131,7 +139,7 @@
                         sview = sview[:-3]
                         cview = cview[:-3]
                     cview = cview[2:]
-                    cview = cview[cview.find("/") + 1:]
+                    cview = cview[cview.find("/") + 1 :]
                     views[sview] = cview
 
         # list of changes that affect our source files
@@ -184,12 +192,12 @@
                 filename = None
                 for v in vieworder:
                     if oldname.lower().startswith(v.lower()):
-                        filename = decodefilename(views[v] + oldname[len(v):])
+                        filename = decodefilename(views[v] + oldname[len(v) :])
                         break
                 if filename:
                     files.append((filename, d["rev%d" % i]))
                     depotname[filename] = oldname
-                    if (d.get("action%d" % i) == "move/add"):
+                    if d.get("action%d" % i) == "move/add":
                         copiedfiles.append(filename)
                     localname[oldname] = filename
                 i += 1
@@ -198,8 +206,7 @@
             for filename in copiedfiles:
                 oldname = depotname[filename]
 
-                flcmd = ('p4 -G filelog %s'
-                         % procutil.shellquote(oldname))
+                flcmd = 'p4 -G filelog %s' % procutil.shellquote(oldname)
                 flstdout = procutil.popen(flcmd, mode='rb')
 
                 copiedfilename = None
@@ -208,8 +215,10 @@
 
                     i = 0
                     while ("change%d" % i) in d:
-                        if (d["change%d" % i] == change and
-                            d["action%d" % i] == "move/add"):
+                        if (
+                            d["change%d" % i] == change
+                            and d["action%d" % i] == "move/add"
+                        ):
                             j = 0
                             while ("file%d,%d" % (i, j)) in d:
                                 if d["how%d,%d" % (i, j)] == "moved from":
@@ -225,8 +234,10 @@
                 if copiedfilename:
                     copies[filename] = copiedfilename
                 else:
-                    ui.warn(_("cannot find source for copied file: %s@%s\n")
-                            % (filename, change))
+                    ui.warn(
+                        _("cannot find source for copied file: %s@%s\n")
+                        % (filename, change)
+                    )
 
             changeset[change] = c
             files_map[change] = files
@@ -272,8 +283,9 @@
         return self.heads
 
     def getfile(self, name, rev):
-        cmd = ('p4 -G print %s'
-               % procutil.shellquote("%s#%s" % (self.depotname[name], rev)))
+        cmd = 'p4 -G print %s' % procutil.shellquote(
+            "%s#%s" % (self.depotname[name], rev)
+        )
 
         lasterror = None
         while True:
@@ -304,8 +316,9 @@
                     p4type = self.re_type.match(d["type"])
                     if p4type:
                         mode = ""
-                        flags = ((p4type.group(1) or "")
-                               + (p4type.group(3) or ""))
+                        flags = (p4type.group(1) or "") + (
+                            p4type.group(3) or ""
+                        )
                         if "x" in flags:
                             mode = "x"
                         if p4type.group(2) == "symlink":
@@ -346,14 +359,19 @@
         `p4 describe` output
         """
         desc = self.recode(obj.get("desc", ""))
-        date = (int(obj["time"]), 0)     # timezone not set
+        date = (int(obj["time"]), 0)  # timezone not set
         if parents is None:
             parents = []
 
-        return common.commit(author=self.recode(obj["user"]),
+        return common.commit(
+            author=self.recode(obj["user"]),
             date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
-            parents=parents, desc=desc, branch=None, rev=obj['change'],
-            extra={"p4": obj['change'], "convert_revision": obj['change']})
+            parents=parents,
+            desc=desc,
+            branch=None,
+            rev=obj['change'],
+            extra={"p4": obj['change'], "convert_revision": obj['change']},
+        )
 
     def _fetch_revision(self, rev):
         """Return an output of `p4 describe` including author, commit date as
@@ -369,7 +387,8 @@
             d = self._fetch_revision(rev)
             return self._construct_commit(d, parents=None)
         raise error.Abort(
-            _("cannot find %s in the revmap or parsed changesets") % rev)
+            _("cannot find %s in the revmap or parsed changesets") % rev
+        )
 
     def gettags(self):
         return {}
--- a/hgext/convert/subversion.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/subversion.py	Sun Oct 06 09:45:02 2019 -0400
@@ -52,17 +52,20 @@
     import svn.delta
     from . import transport
     import warnings
-    warnings.filterwarnings('ignore',
-            module='svn.core',
-            category=DeprecationWarning)
-    svn.core.SubversionException # trigger import to catch error
+
+    warnings.filterwarnings(
+        'ignore', module='svn.core', category=DeprecationWarning
+    )
+    svn.core.SubversionException  # trigger import to catch error
 
 except ImportError:
     svn = None
 
+
 class SvnPathNotFound(Exception):
     pass
 
+
 def revsplit(rev):
     """Parse a revision string and return (uuid, path, revnum).
     >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
@@ -89,6 +92,7 @@
         mod = '/' + parts[1]
     return uuid, mod, revnum
 
+
 def quote(s):
     # As of svn 1.7, many svn calls expect "canonical" paths. In
     # theory, we should call svn.core.*canonicalize() on all paths
@@ -99,6 +103,7 @@
     # libsvn_subr/path.c.
     return urlreq.quote(s, "!$&'()*+,-./:=@_~")
 
+
 def geturl(path):
     try:
         return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
@@ -115,37 +120,54 @@
         path = 'file://%s' % quote(path)
     return svn.core.svn_path_canonicalize(path)
 
+
 def optrev(number):
     optrev = svn.core.svn_opt_revision_t()
     optrev.kind = svn.core.svn_opt_revision_number
     optrev.value.number = number
     return optrev
 
+
 class changedpath(object):
     def __init__(self, p):
         self.copyfrom_path = p.copyfrom_path
         self.copyfrom_rev = p.copyfrom_rev
         self.action = p.action
 
-def get_log_child(fp, url, paths, start, end, limit=0,
-                  discover_changed_paths=True, strict_node_history=False):
+
+def get_log_child(
+    fp,
+    url,
+    paths,
+    start,
+    end,
+    limit=0,
+    discover_changed_paths=True,
+    strict_node_history=False,
+):
     protocol = -1
+
     def receiver(orig_paths, revnum, author, date, message, pool):
         paths = {}
         if orig_paths is not None:
             for k, v in orig_paths.iteritems():
                 paths[k] = changedpath(v)
-        pickle.dump((paths, revnum, author, date, message),
-                    fp, protocol)
+        pickle.dump((paths, revnum, author, date, message), fp, protocol)
 
     try:
         # Use an ra of our own so that our parent can consume
         # our results without confusing the server.
         t = transport.SvnRaTransport(url=url)
-        svn.ra.get_log(t.ra, paths, start, end, limit,
-                       discover_changed_paths,
-                       strict_node_history,
-                       receiver)
+        svn.ra.get_log(
+            t.ra,
+            paths,
+            start,
+            end,
+            limit,
+            discover_changed_paths,
+            strict_node_history,
+            receiver,
+        )
     except IOError:
         # Caller may interrupt the iteration
         pickle.dump(None, fp, protocol)
@@ -159,19 +181,23 @@
     # there is no need for clean termination.
     os._exit(0)
 
+
 def debugsvnlog(ui, **opts):
     """Fetch SVN log in a subprocess and channel them back to parent to
     avoid memory collection issues.
     """
     if svn is None:
-        raise error.Abort(_('debugsvnlog could not load Subversion python '
-                           'bindings'))
+        raise error.Abort(
+            _('debugsvnlog could not load Subversion python ' 'bindings')
+        )
 
     args = decodeargs(ui.fin.read())
     get_log_child(ui.fout, *args)
 
+
 class logstream(object):
     """Interruptible revision log iterator."""
+
     def __init__(self, stdout):
         self._stdout = stdout
 
@@ -180,8 +206,12 @@
             try:
                 entry = pickle.load(self._stdout)
             except EOFError:
-                raise error.Abort(_('Mercurial failed to run itself, check'
-                                   ' hg executable is in PATH'))
+                raise error.Abort(
+                    _(
+                        'Mercurial failed to run itself, check'
+                        ' hg executable is in PATH'
+                    )
+                )
             try:
                 orig_paths, revnum, author, date, message = entry
             except (TypeError, ValueError):
@@ -195,13 +225,22 @@
             self._stdout.close()
             self._stdout = None
 
+
 class directlogstream(list):
     """Direct revision log iterator.
     This can be used for debugging and development but it will probably leak
     memory and is not suitable for real conversions."""
-    def __init__(self, url, paths, start, end, limit=0,
-                  discover_changed_paths=True, strict_node_history=False):
 
+    def __init__(
+        self,
+        url,
+        paths,
+        start,
+        end,
+        limit=0,
+        discover_changed_paths=True,
+        strict_node_history=False,
+    ):
         def receiver(orig_paths, revnum, author, date, message, pool):
             paths = {}
             if orig_paths is not None:
@@ -212,14 +251,21 @@
         # Use an ra of our own so that our parent can consume
         # our results without confusing the server.
         t = transport.SvnRaTransport(url=url)
-        svn.ra.get_log(t.ra, paths, start, end, limit,
-                       discover_changed_paths,
-                       strict_node_history,
-                       receiver)
+        svn.ra.get_log(
+            t.ra,
+            paths,
+            start,
+            end,
+            limit,
+            discover_changed_paths,
+            strict_node_history,
+            receiver,
+        )
 
     def close(self):
         pass
 
+
 # Check to see if the given path is a local Subversion repo. Verify this by
 # looking for several svn-specific files and directories in the given
 # directory.
@@ -229,6 +275,7 @@
             return False
     return True
 
+
 # Check to see if a given path is the root of an svn repo over http. We verify
 # this by requesting a version-controlled URL we know can't exist and looking
 # for the svn-specific "not found" XML.
@@ -240,9 +287,13 @@
     except urlerr.httperror as inst:
         if inst.code != 404:
             # Except for 404 we cannot know for sure this is not an svn repo
-            ui.warn(_('svn: cannot probe remote repository, assume it could '
-                      'be a subversion repository. Use --source-type if you '
-                      'know better.\n'))
+            ui.warn(
+                _(
+                    'svn: cannot probe remote repository, assume it could '
+                    'be a subversion repository. Use --source-type if you '
+                    'know better.\n'
+                )
+            )
             return True
         data = inst.fp.read()
     except Exception:
@@ -250,16 +301,24 @@
         return False
     return '<m:human-readable errcode="160013">' in data
 
-protomap = {'http': httpcheck,
-            'https': httpcheck,
-            'file': filecheck,
-            }
+
+protomap = {
+    'http': httpcheck,
+    'https': httpcheck,
+    'file': filecheck,
+}
+
+
 def issvnurl(ui, url):
     try:
         proto, path = url.split('://', 1)
         if proto == 'file':
-            if (pycompat.iswindows and path[:1] == '/'
-                  and path[1:2].isalpha() and path[2:6].lower() == '%3a/'):
+            if (
+                pycompat.iswindows
+                and path[:1] == '/'
+                and path[1:2].isalpha()
+                and path[2:6].lower() == '%3a/'
+            ):
                 path = path[:2] + ':/' + path[6:]
             path = urlreq.url2pathname(path)
     except ValueError:
@@ -274,6 +333,7 @@
         path = path.rsplit('/', 1)[0]
     return False
 
+
 # SVN conversion code stolen from bzr-svn and tailor
 #
 # Subversion looks like a versioned filesystem, branches structures
@@ -292,23 +352,38 @@
     def __init__(self, ui, repotype, url, revs=None):
         super(svn_source, self).__init__(ui, repotype, url, revs=revs)
 
-        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
-                (os.path.exists(url) and
-                 os.path.exists(os.path.join(url, '.svn'))) or
-                issvnurl(ui, url)):
-            raise NoRepo(_("%s does not look like a Subversion repository")
-                         % url)
+        if not (
+            url.startswith('svn://')
+            or url.startswith('svn+ssh://')
+            or (
+                os.path.exists(url)
+                and os.path.exists(os.path.join(url, '.svn'))
+            )
+            or issvnurl(ui, url)
+        ):
+            raise NoRepo(
+                _("%s does not look like a Subversion repository") % url
+            )
         if svn is None:
             raise MissingTool(_('could not load Subversion python bindings'))
 
         try:
             version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
             if version < (1, 4):
-                raise MissingTool(_('Subversion python bindings %d.%d found, '
-                                    '1.4 or later required') % version)
+                raise MissingTool(
+                    _(
+                        'Subversion python bindings %d.%d found, '
+                        '1.4 or later required'
+                    )
+                    % version
+                )
         except AttributeError:
-            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
-                                'or later required'))
+            raise MissingTool(
+                _(
+                    'Subversion python bindings are too old, 1.4 '
+                    'or later required'
+                )
+            )
 
         self.lastrevs = {}
 
@@ -318,12 +393,12 @@
             # deleted branches.
             at = url.rfind('@')
             if at >= 0:
-                latest = int(url[at + 1:])
+                latest = int(url[at + 1 :])
                 url = url[:at]
         except ValueError:
             pass
         self.url = geturl(url)
-        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
+        self.encoding = 'UTF-8'  # Subversion is always nominal UTF-8
         try:
             self.transport = transport.SvnRaTransport(url=self.url)
             self.ra = self.transport.ra
@@ -331,7 +406,7 @@
             self.baseurl = svn.ra.get_repos_root(self.ra)
             # Module is either empty or a repository path starting with
             # a slash and not ending with a slash.
-            self.module = urlreq.unquote(self.url[len(self.baseurl):])
+            self.module = urlreq.unquote(self.url[len(self.baseurl) :])
             self.prevmodule = None
             self.rootmodule = self.module
             self.commits = {}
@@ -339,22 +414,33 @@
             self.uuid = svn.ra.get_uuid(self.ra)
         except svn.core.SubversionException:
             ui.traceback()
-            svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
-                                       svn.core.SVN_VER_MINOR,
-                                       svn.core.SVN_VER_MICRO)
-            raise NoRepo(_("%s does not look like a Subversion repository "
-                           "to libsvn version %s")
-                         % (self.url, svnversion))
+            svnversion = '%d.%d.%d' % (
+                svn.core.SVN_VER_MAJOR,
+                svn.core.SVN_VER_MINOR,
+                svn.core.SVN_VER_MICRO,
+            )
+            raise NoRepo(
+                _(
+                    "%s does not look like a Subversion repository "
+                    "to libsvn version %s"
+                )
+                % (self.url, svnversion)
+            )
 
         if revs:
             if len(revs) > 1:
-                raise error.Abort(_('subversion source does not support '
-                                   'specifying multiple revisions'))
+                raise error.Abort(
+                    _(
+                        'subversion source does not support '
+                        'specifying multiple revisions'
+                    )
+                )
             try:
                 latest = int(revs[0])
             except ValueError:
-                raise error.Abort(_('svn: revision %s is not an integer') %
-                                 revs[0])
+                raise error.Abort(
+                    _('svn: revision %s is not an integer') % revs[0]
+                )
 
         trunkcfg = self.ui.config('convert', 'svn.trunk')
         if trunkcfg is None:
@@ -366,16 +452,16 @@
             if self.startrev < 0:
                 self.startrev = 0
         except ValueError:
-            raise error.Abort(_('svn: start revision %s is not an integer')
-                             % self.startrev)
+            raise error.Abort(
+                _('svn: start revision %s is not an integer') % self.startrev
+            )
 
         try:
             self.head = self.latest(self.module, latest)
         except SvnPathNotFound:
             self.head = None
         if not self.head:
-            raise error.Abort(_('no revision found in module %s')
-                             % self.module)
+            raise error.Abort(_('no revision found in module %s') % self.module)
         self.last_changed = self.revnum(self.head)
 
         self._changescache = (None, None)
@@ -397,14 +483,17 @@
 
     def exists(self, path, optrev):
         try:
-            svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
-                                 optrev, False, self.ctx)
+            svn.client.ls(
+                self.url.rstrip('/') + '/' + quote(path),
+                optrev,
+                False,
+                self.ctx,
+            )
             return True
         except svn.core.SubversionException:
             return False
 
     def getheads(self):
-
         def isdir(path, revnum):
             kind = self._checkpath(path, revnum)
             return kind == svn.core.svn_node_dir
@@ -419,8 +508,10 @@
                     # we are converting from inside this directory
                     return None
                 if cfgpath:
-                    raise error.Abort(_('expected %s to be at %r, but not found'
-                                       ) % (name, path))
+                    raise error.Abort(
+                        _('expected %s to be at %r, but not found')
+                        % (name, path)
+                    )
                 return None
             self.ui.note(_('found %s at %r\n') % (name, path))
             return path
@@ -438,19 +529,21 @@
             self.module += '/' + trunk
             self.head = self.latest(self.module, self.last_changed)
             if not self.head:
-                raise error.Abort(_('no revision found in module %s')
-                                 % self.module)
+                raise error.Abort(
+                    _('no revision found in module %s') % self.module
+                )
 
         # First head in the list is the module's head
         self.heads = [self.head]
         if self.tags is not None:
-            self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
+            self.tags = '%s/%s' % (oldmodule, (self.tags or 'tags'))
 
         # Check if branches bring a few more heads to the list
         if branches:
             rpath = self.url.strip('/')
-            branchnames = svn.client.ls(rpath + '/' + quote(branches),
-                                        rev, False, self.ctx)
+            branchnames = svn.client.ls(
+                rpath + '/' + quote(branches), rev, False, self.ctx
+            )
             for branch in sorted(branchnames):
                 module = '%s/%s/%s' % (oldmodule, branches, branch)
                 if not isdir(module, self.last_changed):
@@ -459,19 +552,25 @@
                 if not brevid:
                     self.ui.note(_('ignoring empty branch %s\n') % branch)
                     continue
-                self.ui.note(_('found branch %s at %d\n') %
-                             (branch, self.revnum(brevid)))
+                self.ui.note(
+                    _('found branch %s at %d\n') % (branch, self.revnum(brevid))
+                )
                 self.heads.append(brevid)
 
         if self.startrev and self.heads:
             if len(self.heads) > 1:
-                raise error.Abort(_('svn: start revision is not supported '
-                                   'with more than one branch'))
+                raise error.Abort(
+                    _(
+                        'svn: start revision is not supported '
+                        'with more than one branch'
+                    )
+                )
             revnum = self.revnum(self.heads[0])
             if revnum < self.startrev:
                 raise error.Abort(
                     _('svn: no revision found after start revision %d')
-                                 % self.startrev)
+                    % self.startrev
+                )
 
         return self.heads
 
@@ -483,10 +582,14 @@
         if full or not parents:
             # Perform a full checkout on roots
             uuid, module, revnum = revsplit(rev)
-            entries = svn.client.ls(self.baseurl + quote(module),
-                                    optrev(revnum), True, self.ctx)
-            files = [n for n, e in entries.iteritems()
-                     if e.kind == svn.core.svn_node_file]
+            entries = svn.client.ls(
+                self.baseurl + quote(module), optrev(revnum), True, self.ctx
+            )
+            files = [
+                n
+                for n, e in entries.iteritems()
+                if e.kind == svn.core.svn_node_file
+            ]
             self.removed = set()
 
         files.sort()
@@ -533,11 +636,16 @@
 
     def checkrevformat(self, revstr, mapname='splicemap'):
         """ fails if revision format does not match the correct format"""
-        if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
-                              r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
-                              r'{12,12}(.*)\@[0-9]+$',revstr):
-            raise error.Abort(_('%s entry %s is not a valid revision'
-                               ' identifier') % (mapname, revstr))
+        if not re.match(
+            r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
+            r'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
+            r'{12,12}(.*)\@[0-9]+$',
+            revstr,
+        ):
+            raise error.Abort(
+                _('%s entry %s is not a valid revision' ' identifier')
+                % (mapname, revstr)
+            )
 
     def numcommits(self):
         return int(self.head.rsplit('@', 1)[1]) - self.startrev
@@ -567,8 +675,11 @@
                 origpaths, revnum, author, date, message = entry
                 if not origpaths:
                     origpaths = []
-                copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
-                          in origpaths.iteritems() if e.copyfrom_path]
+                copies = [
+                    (e.copyfrom_path, e.copyfrom_rev, p)
+                    for p, e in origpaths.iteritems()
+                    if e.copyfrom_path
+                ]
                 # Apply moves/copies from more specific to general
                 copies.sort(reverse=True)
 
@@ -582,7 +693,7 @@
                         continue
                     for tag in pendings:
                         if tag[0].startswith(dest):
-                            tagpath = source + tag[0][len(dest):]
+                            tagpath = source + tag[0][len(dest) :]
                             tag[:2] = [tagpath, sourcerev]
                             break
                     else:
@@ -595,21 +706,28 @@
                 # Here/tags/tag.1 discarded as well as its children.
                 # It happens with tools like cvs2svn. Such tags cannot
                 # be represented in mercurial.
-                addeds = dict((p, e.copyfrom_path) for p, e
-                              in origpaths.iteritems()
-                              if e.action == 'A' and e.copyfrom_path)
+                addeds = dict(
+                    (p, e.copyfrom_path)
+                    for p, e in origpaths.iteritems()
+                    if e.action == 'A' and e.copyfrom_path
+                )
                 badroots = set()
                 for destroot in addeds:
                     for source, sourcerev, dest in pendings:
-                        if (not dest.startswith(destroot + '/')
-                            or source.startswith(addeds[destroot] + '/')):
+                        if not dest.startswith(
+                            destroot + '/'
+                        ) or source.startswith(addeds[destroot] + '/'):
                             continue
                         badroots.add(destroot)
                         break
 
                 for badroot in badroots:
-                    pendings = [p for p in pendings if p[2] != badroot
-                                and not p[2].startswith(badroot + '/')]
+                    pendings = [
+                        p
+                        for p in pendings
+                        if p[2] != badroot
+                        and not p[2].startswith(badroot + '/')
+                    ]
 
                 # Tell tag renamings from tag creations
                 renamings = []
@@ -642,10 +760,12 @@
         if not self.wc:
             return
         if self.convertfp is None:
-            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
-                                  'ab')
-        self.convertfp.write(util.tonativeeol('%s %d\n'
-                                              % (destrev, self.revnum(rev))))
+            self.convertfp = open(
+                os.path.join(self.wc, '.svn', 'hg-shamap'), 'ab'
+            )
+        self.convertfp.write(
+            util.tonativeeol('%s %d\n' % (destrev, self.revnum(rev)))
+        )
         self.convertfp.flush()
 
     def revid(self, revnum, module=None):
@@ -662,6 +782,7 @@
         reported. Return None if computed module does not belong to
         rootmodule subtree.
         """
+
         def findchanges(path, start, stop=None):
             stream = self._getlog([path], start, stop or 1)
             try:
@@ -675,12 +796,13 @@
                         break
 
                     for p in paths:
-                        if (not path.startswith(p) or
-                            not paths[p].copyfrom_path):
+                        if not path.startswith(p) or not paths[p].copyfrom_path:
                             continue
-                        newpath = paths[p].copyfrom_path + path[len(p):]
-                        self.ui.debug("branch renamed from %s to %s at %d\n" %
-                                      (path, newpath, revnum))
+                        newpath = paths[p].copyfrom_path + path[len(p) :]
+                        self.ui.debug(
+                            "branch renamed from %s to %s at %d\n"
+                            % (path, newpath, revnum)
+                        )
                         path = newpath
                         break
                 if not paths:
@@ -703,8 +825,9 @@
         except svn.core.SubversionException:
             dirent = None
         if not dirent:
-            raise SvnPathNotFound(_('%s not found up to revision %d')
-                                  % (path, stop))
+            raise SvnPathNotFound(
+                _('%s not found up to revision %d') % (path, stop)
+            )
 
         # stat() gives us the previous revision on this line of
         # development, but it might be in *another module*. Fetch the
@@ -750,8 +873,9 @@
             self.module = new_module
             self.reparent(self.module)
 
-        progress = self.ui.makeprogress(_('scanning paths'), unit=_('paths'),
-                                        total=len(paths))
+        progress = self.ui.makeprogress(
+            _('scanning paths'), unit=_('paths'), total=len(paths)
+        )
         for i, (path, ent) in enumerate(paths):
             progress.update(i, item=path)
             entrypath = self.getrelpath(path)
@@ -769,10 +893,12 @@
                 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
                 if not copyfrom_path:
                     continue
-                self.ui.debug("copied to %s from %s@%s\n" %
-                              (entrypath, copyfrom_path, ent.copyfrom_rev))
+                self.ui.debug(
+                    "copied to %s from %s@%s\n"
+                    % (entrypath, copyfrom_path, ent.copyfrom_rev)
+                )
                 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
-            elif kind == 0: # gone, but had better be a deleted *file*
+            elif kind == 0:  # gone, but had better be a deleted *file*
                 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
                 pmodule, prevnum = revsplit(parents[0])[1:]
                 parentpath = pmodule + "/" + entrypath
@@ -790,8 +916,9 @@
                         if childpath:
                             removed.add(self.recode(childpath))
                 else:
-                    self.ui.debug('unknown path in revision %d: %s\n' %
-                                  (revnum, path))
+                    self.ui.debug(
+                        'unknown path in revision %d: %s\n' % (revnum, path)
+                    )
             elif kind == svn.core.svn_node_dir:
                 if ent.action == 'M':
                     # If the directory just had a prop change,
@@ -828,14 +955,16 @@
                 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
                 if not copyfrompath:
                     continue
-                self.ui.debug("mark %s came from %s:%d\n"
-                              % (path, copyfrompath, ent.copyfrom_rev))
+                self.ui.debug(
+                    "mark %s came from %s:%d\n"
+                    % (path, copyfrompath, ent.copyfrom_rev)
+                )
                 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
                 for childpath in children:
                     childpath = self.getrelpath("/" + childpath, pmodule)
                     if not childpath:
                         continue
-                    copytopath = path + childpath[len(copyfrompath):]
+                    copytopath = path + childpath[len(copyfrompath) :]
                     copytopath = self.getrelpath(copytopath)
                     copies[self.recode(copytopath)] = self.recode(childpath)
 
@@ -853,8 +982,9 @@
             """Return the parsed commit object or None, and True if
             the revision is a branch root.
             """
-            self.ui.debug("parsing revision %d (%d changes)\n" %
-                          (revnum, len(orig_paths)))
+            self.ui.debug(
+                "parsing revision %d (%d changes)\n" % (revnum, len(orig_paths))
+            )
 
             branched = False
             rev = self.revid(revnum)
@@ -867,13 +997,14 @@
             # check whether this revision is the start of a branch or part
             # of a branch renaming
             orig_paths = sorted(orig_paths.iteritems())
-            root_paths = [(p, e) for p, e in orig_paths
-                          if self.module.startswith(p)]
+            root_paths = [
+                (p, e) for p, e in orig_paths if self.module.startswith(p)
+            ]
             if root_paths:
                 path, ent = root_paths[-1]
                 if ent.copyfrom_path:
                     branched = True
-                    newpath = ent.copyfrom_path + self.module[len(path):]
+                    newpath = ent.copyfrom_path + self.module[len(path) :]
                     # ent.copyfrom_rev may not be the actual last revision
                     previd = self.latest(newpath, ent.copyfrom_rev)
                     if previd is not None:
@@ -881,8 +1012,9 @@
                         if prevnum >= self.startrev:
                             parents = [previd]
                             self.ui.note(
-                                _('found parent of branch %s at %d: %s\n') %
-                                (self.module, prevnum, prevmodule))
+                                _('found parent of branch %s at %d: %s\n')
+                                % (self.module, prevnum, prevmodule)
+                            )
                 else:
                     self.ui.debug("no copyfrom path, don't know what to do.\n")
 
@@ -917,12 +1049,14 @@
             except IndexError:
                 branch = None
 
-            cset = commit(author=author,
-                          date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
-                          desc=log,
-                          parents=parents,
-                          branch=branch,
-                          rev=rev)
+            cset = commit(
+                author=author,
+                date=dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
+                desc=log,
+                parents=parents,
+                branch=branch,
+                rev=rev,
+            )
 
             self.commits[rev] = cset
             # The parents list is *shared* among self.paths and the
@@ -933,8 +1067,10 @@
             self.child_cset = cset
             return cset, branched
 
-        self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
-                     (self.module, from_revnum, to_revnum))
+        self.ui.note(
+            _('fetching revision log for "%s" from %d to %d\n')
+            % (self.module, from_revnum, to_revnum)
+        )
 
         try:
             firstcset = None
@@ -952,8 +1088,9 @@
                         # revision, do not try to get a parent branch
                         lastonbranch = lastonbranch or revnum == 0
                         continue
-                    cset, lastonbranch = parselogentry(paths, revnum, author,
-                                                       date, message)
+                    cset, lastonbranch = parselogentry(
+                        paths, revnum, author, date, message
+                    )
                     if cset:
                         firstcset = cset
                     if lastonbranch:
@@ -976,8 +1113,9 @@
         except svn.core.SubversionException as xxx_todo_changeme:
             (inst, num) = xxx_todo_changeme.args
             if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
-                raise error.Abort(_('svn: branch has no revision %s')
-                                 % to_revnum)
+                raise error.Abort(
+                    _('svn: branch has no revision %s') % to_revnum
+                )
             raise
 
     def getfile(self, file, rev):
@@ -1000,15 +1138,17 @@
             mode = ("svn:executable" in info) and 'x' or ''
             mode = ("svn:special" in info) and 'l' or mode
         except svn.core.SubversionException as e:
-            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
-                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
-            if e.apr_err in notfound: # File not found
+            notfound = (
+                svn.core.SVN_ERR_FS_NOT_FOUND,
+                svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND,
+            )
+            if e.apr_err in notfound:  # File not found
                 return None, None
             raise
         if mode == 'l':
             link_prefix = "link "
             if data.startswith(link_prefix):
-                data = data[len(link_prefix):]
+                data = data[len(link_prefix) :]
         return data, mode
 
     def _iterfiles(self, path, revnum):
@@ -1019,8 +1159,11 @@
         entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
         if path:
             path += '/'
-        return ((path + p) for p, e in entries.iteritems()
-                if e.kind == svn.core.svn_node_file)
+        return (
+            (path + p)
+            for p, e in entries.iteritems()
+            if e.kind == svn.core.svn_node_file
+        )
 
     def getrelpath(self, path, module=None):
         if module is None:
@@ -1032,7 +1175,7 @@
         #   "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
         # that is to say "tests/PloneTestCase.py"
         if path.startswith(module):
-            relative = path.rstrip('/')[len(module):]
+            relative = path.rstrip('/')[len(module) :]
             if relative.startswith('/'):
                 return relative[1:]
             elif relative == '':
@@ -1054,8 +1197,15 @@
             if module is not None:
                 self.reparent(prevmodule)
 
-    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
-                strict_node_history=False):
+    def _getlog(
+        self,
+        paths,
+        start,
+        end,
+        limit=0,
+        discover_changed_paths=True,
+        strict_node_history=False,
+    ):
         # Normalize path names, svn >= 1.5 only wants paths relative to
         # supplied URL
         relpaths = []
@@ -1063,8 +1213,15 @@
             if not p.startswith('/'):
                 p = self.module + '/' + p
             relpaths.append(p.strip('/'))
-        args = [self.baseurl, relpaths, start, end, limit,
-                discover_changed_paths, strict_node_history]
+        args = [
+            self.baseurl,
+            relpaths,
+            start,
+            end,
+            limit,
+            discover_changed_paths,
+            strict_node_history,
+        ]
         # developer config: convert.svn.debugsvnlog
         if not self.ui.configbool('convert', 'svn.debugsvnlog'):
             return directlogstream(*args)
@@ -1076,10 +1233,15 @@
         try:
             stdin.close()
         except IOError:
-            raise error.Abort(_('Mercurial failed to run itself, check'
-                               ' hg executable is in PATH'))
+            raise error.Abort(
+                _(
+                    'Mercurial failed to run itself, check'
+                    ' hg executable is in PATH'
+                )
+            )
         return logstream(stdout)
 
+
 pre_revprop_change = b'''#!/bin/sh
 
 REPOS="$1"
@@ -1096,6 +1258,7 @@
 exit 1
 '''
 
+
 class svn_sink(converter_sink, commandline):
     commit_re = re.compile(br'Committed revision (\d+).', re.M)
     uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
@@ -1137,8 +1300,10 @@
                 path = os.path.realpath(path)
                 if os.path.isdir(os.path.dirname(path)):
                     if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
-                        ui.status(_("initializing svn repository '%s'\n") %
-                                  os.path.basename(path))
+                        ui.status(
+                            _("initializing svn repository '%s'\n")
+                            % os.path.basename(path)
+                        )
                         commandline(ui, 'svnadmin').run0('create', path)
                         created = path
                     path = util.normpath(path)
@@ -1146,10 +1311,13 @@
                         path = '/' + path
                     path = 'file://' + path
 
-            wcpath = os.path.join(encoding.getcwd(), os.path.basename(path) +
-                                '-wc')
-            ui.status(_("initializing svn working copy '%s'\n")
-                      % os.path.basename(wcpath))
+            wcpath = os.path.join(
+                encoding.getcwd(), os.path.basename(path) + '-wc'
+            )
+            ui.status(
+                _("initializing svn working copy '%s'\n")
+                % os.path.basename(wcpath)
+            )
             self.run0('checkout', path, wcpath)
 
             self.wc = wcpath
@@ -1186,8 +1354,9 @@
             for n in e.childNodes:
                 if n.nodeType != n.ELEMENT_NODE or n.tagName != r'name':
                     continue
-                name = r''.join(c.data for c in n.childNodes
-                                if c.nodeType == c.TEXT_NODE)
+                name = r''.join(
+                    c.data for c in n.childNodes if c.nodeType == c.TEXT_NODE
+                )
                 # Entries are compared with names coming from
                 # mercurial, so bytes with undefined encoding. Our
                 # best bet is to assume they are in local
@@ -1233,7 +1402,8 @@
         exists = os.path.lexists(wdest)
         if exists:
             fd, tempname = pycompat.mkstemp(
-                prefix='hg-copy-', dir=os.path.dirname(wdest))
+                prefix='hg-copy-', dir=os.path.dirname(wdest)
+            )
             os.close(fd)
             os.unlink(tempname)
             os.rename(wdest, tempname)
@@ -1259,8 +1429,9 @@
         return dirs
 
     def add_dirs(self, files):
-        add_dirs = [d for d in sorted(self.dirs_of(files))
-                    if d not in self.manifest]
+        add_dirs = [
+            d for d in sorted(self.dirs_of(files)) if d not in self.manifest
+        ]
         if add_dirs:
             self.manifest.update(add_dirs)
             self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
@@ -1279,8 +1450,9 @@
     def revid(self, rev):
         return "svn:%s@%s" % (self.uuid, rev)
 
-    def putcommit(self, files, copies, parents, commit, source, revmap, full,
-                  cleanp2):
+    def putcommit(
+        self, files, copies, parents, commit, source, revmap, full, cleanp2
+    ):
         for parent in parents:
             try:
                 return self.revid(self.childmap[parent])
@@ -1325,10 +1497,12 @@
         fp.write(util.tonativeeol(commit.desc))
         fp.close()
         try:
-            output = self.run0('commit',
-                               username=stringutil.shortuser(commit.author),
-                               file=messagefile,
-                               encoding='utf-8')
+            output = self.run0(
+                'commit',
+                username=stringutil.shortuser(commit.author),
+                file=messagefile,
+                encoding='utf-8',
+            )
             try:
                 rev = self.commit_re.search(output).group(1)
             except AttributeError:
@@ -1338,11 +1512,21 @@
                 self.ui.warn(output)
                 raise error.Abort(_('unable to cope with svn output'))
             if commit.rev:
-                self.run('propset', 'hg:convert-rev', commit.rev,
-                         revprop=True, revision=rev)
+                self.run(
+                    'propset',
+                    'hg:convert-rev',
+                    commit.rev,
+                    revprop=True,
+                    revision=rev,
+                )
             if commit.branch and commit.branch != 'default':
-                self.run('propset', 'hg:convert-branch', commit.branch,
-                         revprop=True, revision=rev)
+                self.run(
+                    'propset',
+                    'hg:convert-branch',
+                    commit.branch,
+                    revprop=True,
+                    revision=rev,
+                )
             for parent in parents:
                 self.addchild(parent, rev)
             return self.revid(rev)
@@ -1363,6 +1547,10 @@
         # repository and childmap would not list all revisions. Too bad.
         if rev in self.childmap:
             return True
-        raise error.Abort(_('splice map revision %s not found in subversion '
-                           'child map (revision lookups are not implemented)')
-                         % rev)
+        raise error.Abort(
+            _(
+                'splice map revision %s not found in subversion '
+                'child map (revision lookups are not implemented)'
+            )
+            % rev
+        )
--- a/hgext/convert/transport.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/convert/transport.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,9 +25,7 @@
 Pool = svn.core.Pool
 SubversionException = svn.core.SubversionException
 
-from mercurial import (
-    util,
-)
+from mercurial import util
 
 # Some older versions of the Python bindings need to be
 # explicitly initialized. But what we want to do probably
@@ -36,9 +34,11 @@
 
 svn_config = None
 
+
 def _create_auth_baton(pool):
     """Create a Subversion authentication baton. """
     import svn.client
+
     # Give the client context baton a suite of authentication
     # providers.h
     providers = [
@@ -47,10 +47,11 @@
         svn.client.get_ssl_client_cert_file_provider(pool),
         svn.client.get_ssl_client_cert_pw_file_provider(pool),
         svn.client.get_ssl_server_trust_file_provider(pool),
-        ]
+    ]
     # Platform-dependent authentication methods
-    getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
-                          None)
+    getprovider = getattr(
+        svn.core, 'svn_auth_get_platform_specific_provider', None
+    )
     if getprovider:
         # Available in svn >= 1.6
         for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
@@ -64,13 +65,16 @@
 
     return svn.core.svn_auth_open(providers, pool)
 
+
 class NotBranchError(SubversionException):
     pass
 
+
 class SvnRaTransport(object):
     """
     Open an ra connection to a Subversion repository.
     """
+
     def __init__(self, url="", ra=None):
         self.pool = Pool()
         self.svn_url = url
@@ -88,13 +92,15 @@
             self.client.config = svn_config
             try:
                 self.ra = svn.client.open_ra_session(
-                    self.svn_url,
-                    self.client, self.pool)
+                    self.svn_url, self.client, self.pool
+                )
             except SubversionException as xxx_todo_changeme:
                 (inst, num) = xxx_todo_changeme.args
-                if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
-                           svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
-                           svn.core.SVN_ERR_BAD_URL):
+                if num in (
+                    svn.core.SVN_ERR_RA_ILLEGAL_URL,
+                    svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+                    svn.core.SVN_ERR_BAD_URL,
+                ):
                     raise NotBranchError(url)
                 raise
         else:
@@ -106,27 +112,46 @@
             self._reporter, self._baton = reporter_data
 
         def set_path(self, path, revnum, start_empty, lock_token, pool=None):
-            svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
-                        path, revnum, start_empty, lock_token, pool)
+            svn.ra.reporter2_invoke_set_path(
+                self._reporter,
+                self._baton,
+                path,
+                revnum,
+                start_empty,
+                lock_token,
+                pool,
+            )
 
         def delete_path(self, path, pool=None):
-            svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
-                    path, pool)
+            svn.ra.reporter2_invoke_delete_path(
+                self._reporter, self._baton, path, pool
+            )
 
-        def link_path(self, path, url, revision, start_empty, lock_token,
-                      pool=None):
-            svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
-                    path, url, revision, start_empty, lock_token,
-                    pool)
+        def link_path(
+            self, path, url, revision, start_empty, lock_token, pool=None
+        ):
+            svn.ra.reporter2_invoke_link_path(
+                self._reporter,
+                self._baton,
+                path,
+                url,
+                revision,
+                start_empty,
+                lock_token,
+                pool,
+            )
 
         def finish_report(self, pool=None):
-            svn.ra.reporter2_invoke_finish_report(self._reporter,
-                    self._baton, pool)
+            svn.ra.reporter2_invoke_finish_report(
+                self._reporter, self._baton, pool
+            )
 
         def abort_report(self, pool=None):
-            svn.ra.reporter2_invoke_abort_report(self._reporter,
-                    self._baton, pool)
+            svn.ra.reporter2_invoke_abort_report(
+                self._reporter, self._baton, pool
+            )
 
     def do_update(self, revnum, path, *args, **kwargs):
-        return self.Reporter(svn.ra.do_update(self.ra, revnum, path,
-                                              *args, **kwargs))
+        return self.Reporter(
+            svn.ra.do_update(self.ra, revnum, path, *args, **kwargs)
+        )
--- a/hgext/eol.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/eol.py	Sun Oct 06 09:45:02 2019 -0400
@@ -106,9 +106,7 @@
     scmutil,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -119,57 +117,69 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('eol', 'fix-trailing-newline',
-    default=False,
+configitem(
+    'eol', 'fix-trailing-newline', default=False,
 )
-configitem('eol', 'native',
-    default=pycompat.oslinesep,
+configitem(
+    'eol', 'native', default=pycompat.oslinesep,
 )
-configitem('eol', 'only-consistent',
-    default=True,
+configitem(
+    'eol', 'only-consistent', default=True,
 )
 
 # Matches a lone LF, i.e., one that is not part of CRLF.
 singlelf = re.compile('(^|[^\r])\n')
 
+
 def inconsistenteol(data):
     return '\r\n' in data and singlelf.search(data)
 
+
 def tolf(s, params, ui, **kwargs):
     """Filter to convert to LF EOLs."""
     if stringutil.binary(s):
         return s
     if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
         return s
-    if (ui.configbool('eol', 'fix-trailing-newline')
-        and s and not s.endswith('\n')):
+    if (
+        ui.configbool('eol', 'fix-trailing-newline')
+        and s
+        and not s.endswith('\n')
+    ):
         s = s + '\n'
     return util.tolf(s)
 
+
 def tocrlf(s, params, ui, **kwargs):
     """Filter to convert to CRLF EOLs."""
     if stringutil.binary(s):
         return s
     if ui.configbool('eol', 'only-consistent') and inconsistenteol(s):
         return s
-    if (ui.configbool('eol', 'fix-trailing-newline')
-        and s and not s.endswith('\n')):
+    if (
+        ui.configbool('eol', 'fix-trailing-newline')
+        and s
+        and not s.endswith('\n')
+    ):
         s = s + '\n'
     return util.tocrlf(s)
 
+
 def isbinary(s, params):
     """Filter to do nothing with the file."""
     return s
 
+
 filters = {
     'to-lf': tolf,
     'to-crlf': tocrlf,
     'is-binary': isbinary,
     # The following provide backwards compatibility with win32text
     'cleverencode:': tolf,
-    'cleverdecode:': tocrlf
+    'cleverdecode:': tocrlf,
 }
 
+
 class eolfile(object):
     def __init__(self, ui, root, data):
         self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
@@ -208,15 +218,17 @@
                 ui.setconfig('decode', pattern, self._decode[key], 'eol')
                 ui.setconfig('encode', pattern, self._encode[key], 'eol')
             except KeyError:
-                ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
-                        % (key, self.cfg.source('patterns', pattern)))
+                ui.warn(
+                    _("ignoring unknown EOL style '%s' from %s\n")
+                    % (key, self.cfg.source('patterns', pattern))
+                )
         # eol.only-consistent can be specified in ~/.hgrc or .hgeol
         for k, v in self.cfg.items('eol'):
             ui.setconfig('eol', k, v, 'eol')
 
     def checkrev(self, repo, ctx, files):
         failed = []
-        for f in (files or ctx.files()):
+        for f in files or ctx.files():
             if f not in ctx:
                 continue
             for pattern, key, m in self.patterns:
@@ -224,12 +236,17 @@
                     continue
                 target = self._encode[key]
                 data = ctx[f].data()
-                if (target == "to-lf" and "\r\n" in data
-                    or target == "to-crlf" and singlelf.search(data)):
+                if (
+                    target == "to-lf"
+                    and "\r\n" in data
+                    or target == "to-crlf"
+                    and singlelf.search(data)
+                ):
                     failed.append((f, target, bytes(ctx)))
                 break
         return failed
 
+
 def parseeol(ui, repo, nodes):
     try:
         for node in nodes:
@@ -244,10 +261,13 @@
             except (IOError, LookupError):
                 pass
     except errormod.ParseError as inst:
-        ui.warn(_("warning: ignoring .hgeol file due to parse error "
-                  "at %s: %s\n") % (inst.args[1], inst.args[0]))
+        ui.warn(
+            _("warning: ignoring .hgeol file due to parse error " "at %s: %s\n")
+            % (inst.args[1], inst.args[0])
+        )
     return None
 
+
 def ensureenabled(ui):
     """make sure the extension is enabled when used as hook
 
@@ -261,6 +281,7 @@
     ui.setconfig('extensions', 'eol', '', source='internal')
     extensions.loadall(ui, ['eol'])
 
+
 def _checkhook(ui, repo, node, headsonly):
     # Get revisions to check and touched files at the same time
     ensureenabled(ui)
@@ -284,34 +305,46 @@
         eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
         msgs = []
         for f, target, node in sorted(failed):
-            msgs.append(_("  %s in %s should not have %s line endings") %
-                        (f, node, eols[target]))
+            msgs.append(
+                _("  %s in %s should not have %s line endings")
+                % (f, node, eols[target])
+            )
         raise errormod.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
 
+
 def checkallhook(ui, repo, node, hooktype, **kwargs):
     """verify that files have expected EOLs"""
     _checkhook(ui, repo, node, False)
 
+
 def checkheadshook(ui, repo, node, hooktype, **kwargs):
     """verify that files have expected EOLs"""
     _checkhook(ui, repo, node, True)
 
+
 # "checkheadshook" used to be called "hook"
 hook = checkheadshook
 
+
 def preupdate(ui, repo, hooktype, parent1, parent2):
     p1node = scmutil.resolvehexnodeidprefix(repo, parent1)
     repo.loadeol([p1node])
     return False
 
+
 def uisetup(ui):
     ui.setconfig('hooks', 'preupdate.eol', preupdate, 'eol')
 
+
 def extsetup(ui):
     try:
         extensions.find('win32text')
-        ui.warn(_("the eol extension is incompatible with the "
-                  "win32text extension\n"))
+        ui.warn(
+            _(
+                "the eol extension is incompatible with the "
+                "win32text extension\n"
+            )
+        )
     except KeyError:
         pass
 
@@ -327,7 +360,6 @@
     ui.setconfig('patch', 'eol', 'auto', 'eol')
 
     class eolrepo(repo.__class__):
-
         def loadeol(self, nodes):
             eol = parseeol(self.ui, self, nodes)
             if eol is None:
@@ -414,8 +446,10 @@
                     # have all non-binary files taken care of.
                     continue
                 if inconsistenteol(data):
-                    raise errormod.Abort(_("inconsistent newline style "
-                                           "in %s\n") % f)
+                    raise errormod.Abort(
+                        _("inconsistent newline style " "in %s\n") % f
+                    )
             return super(eolrepo, self).commitctx(ctx, error, origctx)
+
     repo.__class__ = eolrepo
     repo._hgcleardirstate()
--- a/hgext/extdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/extdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -117,22 +117,20 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('extdiff', br'opts\..*',
-    default='',
-    generic=True,
+configitem(
+    'extdiff', br'opts\..*', default='', generic=True,
 )
 
-configitem('extdiff', br'gui\..*',
-    generic=True,
+configitem(
+    'extdiff', br'gui\..*', generic=True,
 )
 
-configitem('diff-tools', br'.*\.diffargs$',
-    default=None,
-    generic=True,
+configitem(
+    'diff-tools', br'.*\.diffargs$', default=None, generic=True,
 )
 
-configitem('diff-tools', br'.*\.gui$',
-    generic=True,
+configitem(
+    'diff-tools', br'.*\.gui$', generic=True,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -141,6 +139,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
     '''snapshot files as of some revision
     if not using snapshot, -I/-X does not work and recursive diff
@@ -155,18 +154,27 @@
     fnsandstat = []
 
     if node is not None:
-        ui.note(_('making snapshot of %d files from rev %s\n') %
-                (len(files), short(node)))
+        ui.note(
+            _('making snapshot of %d files from rev %s\n')
+            % (len(files), short(node))
+        )
     else:
-        ui.note(_('making snapshot of %d files from working directory\n') %
-            (len(files)))
+        ui.note(
+            _('making snapshot of %d files from working directory\n')
+            % (len(files))
+        )
 
     if files:
         repo.ui.setconfig("ui", "archivemeta", False)
 
-        archival.archive(repo, base, node, 'files',
-                         match=scmutil.matchfiles(repo, files),
-                         subrepos=listsubrepos)
+        archival.archive(
+            repo,
+            base,
+            node,
+            'files',
+            match=scmutil.matchfiles(repo, files),
+            subrepos=listsubrepos,
+        )
 
         for fn in sorted(files):
             wfn = util.pconvert(fn)
@@ -178,15 +186,32 @@
                 fnsandstat.append((dest, repo.wjoin(fn), os.lstat(dest)))
     return dirname, fnsandstat
 
-def formatcmdline(cmdline, repo_root, do3way,
-                  parent1, plabel1, parent2, plabel2, child, clabel):
+
+def formatcmdline(
+    cmdline,
+    repo_root,
+    do3way,
+    parent1,
+    plabel1,
+    parent2,
+    plabel2,
+    child,
+    clabel,
+):
     # Function to quote file/dir names in the argument string.
     # When not operating in 3-way mode, an empty string is
     # returned for parent2
-    replace = {'parent': parent1, 'parent1': parent1, 'parent2': parent2,
-               'plabel1': plabel1, 'plabel2': plabel2,
-               'child': child, 'clabel': clabel,
-               'root': repo_root}
+    replace = {
+        'parent': parent1,
+        'parent1': parent1,
+        'parent2': parent2,
+        'plabel1': plabel1,
+        'plabel2': plabel2,
+        'child': child,
+        'clabel': clabel,
+        'root': repo_root,
+    }
+
     def quote(match):
         pre = match.group(2)
         key = match.group(3)
@@ -195,28 +220,48 @@
         return pre + procutil.shellquote(replace[key])
 
     # Match parent2 first, so 'parent1?' will match both parent1 and parent
-    regex = (br'''(['"]?)([^\s'"$]*)'''
-             br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1')
+    regex = (
+        br'''(['"]?)([^\s'"$]*)'''
+        br'\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)\1'
+    )
     if not do3way and not re.search(regex, cmdline):
         cmdline += ' $parent1 $child'
     return re.sub(regex, quote, cmdline)
 
+
 def _systembackground(cmd, environ=None, cwd=None):
     ''' like 'procutil.system', but returns the Popen object directly
         so we don't have to wait on it.
     '''
     cmd = procutil.quotecommand(cmd)
     env = procutil.shellenviron(environ)
-    proc = subprocess.Popen(procutil.tonativestr(cmd),
-                            shell=True, close_fds=procutil.closefds,
-                            env=procutil.tonativeenv(env),
-                            cwd=pycompat.rapply(procutil.tonativestr, cwd))
+    proc = subprocess.Popen(
+        procutil.tonativestr(cmd),
+        shell=True,
+        close_fds=procutil.closefds,
+        env=procutil.tonativeenv(env),
+        cwd=pycompat.rapply(procutil.tonativestr, cwd),
+    )
     return proc
 
-def _runperfilediff(cmdline, repo_root, ui, guitool, do3way, confirm,
-                    commonfiles, tmproot, dir1a, dir1b,
-                    dir2root, dir2,
-                    rev1a, rev1b, rev2):
+
+def _runperfilediff(
+    cmdline,
+    repo_root,
+    ui,
+    guitool,
+    do3way,
+    confirm,
+    commonfiles,
+    tmproot,
+    dir1a,
+    dir1b,
+    dir2root,
+    dir2,
+    rev1a,
+    rev1b,
+    rev2,
+):
     # Note that we need to sort the list of files because it was
     # built in an "unstable" way and it's annoying to get files in a
     # random order, especially when "confirm" mode is enabled.
@@ -241,31 +286,42 @@
 
         if confirm:
             # Prompt before showing this diff
-            difffiles = _('diff %s (%d of %d)') % (commonfile, idx + 1,
-                                                   totalfiles)
-            responses = _('[Yns?]'
-                          '$$ &Yes, show diff'
-                          '$$ &No, skip this diff'
-                          '$$ &Skip remaining diffs'
-                          '$$ &? (display help)')
+            difffiles = _('diff %s (%d of %d)') % (
+                commonfile,
+                idx + 1,
+                totalfiles,
+            )
+            responses = _(
+                '[Yns?]'
+                '$$ &Yes, show diff'
+                '$$ &No, skip this diff'
+                '$$ &Skip remaining diffs'
+                '$$ &? (display help)'
+            )
             r = ui.promptchoice('%s %s' % (difffiles, responses))
-            if r == 3: # ?
+            if r == 3:  # ?
                 while r == 3:
                     for c, t in ui.extractchoices(responses)[1]:
                         ui.write('%s - %s\n' % (c, encoding.lower(t)))
                     r = ui.promptchoice('%s %s' % (difffiles, responses))
-            if r == 0: # yes
+            if r == 0:  # yes
                 pass
-            elif r == 1: # no
+            elif r == 1:  # no
                 continue
-            elif r == 2: # skip
+            elif r == 2:  # skip
                 break
 
         curcmdline = formatcmdline(
-            cmdline, repo_root, do3way=do3way,
-            parent1=path1a, plabel1=label1a,
-            parent2=path1b, plabel2=label1b,
-            child=path2, clabel=label2)
+            cmdline,
+            repo_root,
+            do3way=do3way,
+            parent1=path1a,
+            plabel1=label1a,
+            parent2=path1b,
+            plabel2=label1b,
+            child=path2,
+            clabel=label2,
+        )
 
         if confirm or not guitool:
             # Run the comparison program and wait for it to exit
@@ -274,15 +330,18 @@
             # from the user between each invocation, or because, as far
             # as we know, the tool doesn't have a GUI, in which case
             # we can't run multiple CLI programs at the same time.
-            ui.debug('running %r in %s\n' %
-                     (pycompat.bytestr(curcmdline), tmproot))
+            ui.debug(
+                'running %r in %s\n' % (pycompat.bytestr(curcmdline), tmproot)
+            )
             ui.system(curcmdline, cwd=tmproot, blockedtag='extdiff')
         else:
             # Run the comparison program but don't wait, as we're
             # going to rapid-fire each file diff and then wait on
             # the whole group.
-            ui.debug('running %r in %s (backgrounded)\n' %
-                     (pycompat.bytestr(curcmdline), tmproot))
+            ui.debug(
+                'running %r in %s (backgrounded)\n'
+                % (pycompat.bytestr(curcmdline), tmproot)
+            )
             proc = _systembackground(curcmdline, cwd=tmproot)
             waitprocs.append(proc)
 
@@ -291,6 +350,7 @@
             for proc in waitprocs:
                 proc.wait()
 
+
 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
     '''Do the actual diff:
 
@@ -329,7 +389,7 @@
         if node1b == nullid:
             do3way = False
 
-    subrepos=opts.get('subrepos')
+    subrepos = opts.get('subrepos')
 
     matcher = scmutil.match(repo[node2], pats, opts)
 
@@ -341,12 +401,14 @@
         if node2 is None:
             raise error.Abort(_('--patch requires two revisions'))
     else:
-        mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher,
-                                                   listsubrepos=subrepos)[:3])
+        mod_a, add_a, rem_a = map(
+            set, repo.status(node1a, node2, matcher, listsubrepos=subrepos)[:3]
+        )
         if do3way:
-            mod_b, add_b, rem_b = map(set,
-                                      repo.status(node1b, node2, matcher,
-                                                  listsubrepos=subrepos)[:3])
+            mod_b, add_b, rem_b = map(
+                set,
+                repo.status(node1b, node2, matcher, listsubrepos=subrepos)[:3],
+            )
         else:
             mod_b, add_b, rem_b = set(), set(), set()
         modadd = mod_a | add_a | mod_b | add_b
@@ -359,13 +421,15 @@
         if not opts.get('patch'):
             # Always make a copy of node1a (and node1b, if applicable)
             dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
-            dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot,
-                             subrepos)[0]
+            dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot, subrepos)[
+                0
+            ]
             rev1a = '@%d' % repo[node1a].rev()
             if do3way:
                 dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
-                dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot,
-                                 subrepos)[0]
+                dir1b = snapshot(
+                    ui, repo, dir1b_files, node1b, tmproot, subrepos
+                )[0]
                 rev1b = '@%d' % repo[node1b].rev()
             else:
                 dir1b = None
@@ -380,12 +444,13 @@
                 dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
                 rev2 = '@%d' % repo[node2].rev()
             elif len(common) > 1:
-                #we only actually need to get the files to copy back to
-                #the working dir in this case (because the other cases
-                #are: diffing 2 revisions or single file -- in which case
-                #the file is already directly passed to the diff tool).
-                dir2, fnsandstat = snapshot(ui, repo, modadd, None, tmproot,
-                                            subrepos)
+                # we only actually need to get the files to copy back to
+                # the working dir in this case (because the other cases
+                # are: diffing 2 revisions or single file -- in which case
+                # the file is already directly passed to the diff tool).
+                dir2, fnsandstat = snapshot(
+                    ui, repo, modadd, None, tmproot, subrepos
+                )
             else:
                 # This lets the diff tool open the changed file directly
                 dir2 = ''
@@ -413,10 +478,13 @@
         else:
             template = 'hg-%h.patch'
             with formatter.nullformatter(ui, 'extdiff', {}) as fm:
-                cmdutil.export(repo, [repo[node1a].rev(), repo[node2].rev()],
-                               fm,
-                               fntemplate=repo.vfs.reljoin(tmproot, template),
-                               match=matcher)
+                cmdutil.export(
+                    repo,
+                    [repo[node1a].rev(), repo[node2].rev()],
+                    fm,
+                    fntemplate=repo.vfs.reljoin(tmproot, template),
+                    match=matcher,
+                )
             label1a = cmdutil.makefilename(repo[node1a], template)
             label2 = cmdutil.makefilename(repo[node2], template)
             dir1a = repo.vfs.reljoin(tmproot, label1a)
@@ -428,21 +496,39 @@
         if not perfile:
             # Run the external tool on the 2 temp directories or the patches
             cmdline = formatcmdline(
-                cmdline, repo.root, do3way=do3way,
-                parent1=dir1a, plabel1=label1a,
-                parent2=dir1b, plabel2=label1b,
-                child=dir2, clabel=label2)
-            ui.debug('running %r in %s\n' % (pycompat.bytestr(cmdline),
-                                             tmproot))
+                cmdline,
+                repo.root,
+                do3way=do3way,
+                parent1=dir1a,
+                plabel1=label1a,
+                parent2=dir1b,
+                plabel2=label1b,
+                child=dir2,
+                clabel=label2,
+            )
+            ui.debug(
+                'running %r in %s\n' % (pycompat.bytestr(cmdline), tmproot)
+            )
             ui.system(cmdline, cwd=tmproot, blockedtag='extdiff')
         else:
             # Run the external tool once for each pair of files
             _runperfilediff(
-                cmdline, repo.root, ui, guitool=guitool,
-                do3way=do3way, confirm=confirm,
-                commonfiles=common, tmproot=tmproot, dir1a=dir1a, dir1b=dir1b,
-                dir2root=dir2root, dir2=dir2,
-                rev1a=rev1a, rev1b=rev1b, rev2=rev2)
+                cmdline,
+                repo.root,
+                ui,
+                guitool=guitool,
+                do3way=do3way,
+                confirm=confirm,
+                commonfiles=common,
+                tmproot=tmproot,
+                dir1a=dir1a,
+                dir1b=dir1b,
+                dir2root=dir2root,
+                dir2=dir2,
+                rev1a=rev1a,
+                rev1b=rev1b,
+                rev2=rev2,
+            )
 
         for copy_fn, working_fn, st in fnsandstat:
             cpstat = os.lstat(copy_fn)
@@ -453,11 +539,15 @@
             # copyfile() carries over the permission, so the mode check could
             # be in an 'elif' branch, but for the case where the file has
             # changed without affecting mtime or size.
-            if (cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
+            if (
+                cpstat[stat.ST_MTIME] != st[stat.ST_MTIME]
                 or cpstat.st_size != st.st_size
-                or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)):
-                ui.debug('file changed while diffing. '
-                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
+                or (cpstat.st_mode & 0o100) != (st.st_mode & 0o100)
+            ):
+                ui.debug(
+                    'file changed while diffing. '
+                    'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)
+                )
                 util.copyfile(copy_fn, working_fn)
 
         return 1
@@ -465,24 +555,39 @@
         ui.note(_('cleaning up temp directory\n'))
         shutil.rmtree(tmproot)
 
-extdiffopts = [
-    ('o', 'option', [],
-     _('pass option to comparison program'), _('OPT')),
-    ('r', 'rev', [], _('revision'), _('REV')),
-    ('c', 'change', '', _('change made by revision'), _('REV')),
-    ('', 'per-file', False,
-     _('compare each file instead of revision snapshots')),
-    ('', 'confirm', False,
-     _('prompt user before each external program invocation')),
-    ('', 'patch', None, _('compare patches for two revisions'))
-    ] + cmdutil.walkopts + cmdutil.subrepoopts
 
-@command('extdiff',
-    [('p', 'program', '', _('comparison program to run'), _('CMD')),
-     ] + extdiffopts,
+extdiffopts = (
+    [
+        ('o', 'option', [], _('pass option to comparison program'), _('OPT')),
+        ('r', 'rev', [], _('revision'), _('REV')),
+        ('c', 'change', '', _('change made by revision'), _('REV')),
+        (
+            '',
+            'per-file',
+            False,
+            _('compare each file instead of revision snapshots'),
+        ),
+        (
+            '',
+            'confirm',
+            False,
+            _('prompt user before each external program invocation'),
+        ),
+        ('', 'patch', None, _('compare patches for two revisions')),
+    ]
+    + cmdutil.walkopts
+    + cmdutil.subrepoopts
+)
+
+
+@command(
+    'extdiff',
+    [('p', 'program', '', _('comparison program to run'), _('CMD')),]
+    + extdiffopts,
     _('hg extdiff [OPT]... [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
-    inferrepo=True)
+    inferrepo=True,
+)
 def extdiff(ui, repo, *pats, **opts):
     '''use external program to diff repository (or selected files)
 
@@ -523,6 +628,7 @@
     cmdline = ' '.join(map(procutil.shellquote, [program] + option))
     return dodiff(ui, repo, cmdline, pats, opts)
 
+
 class savedcmd(object):
     """use external program to diff repository (or selected files)
 
@@ -552,8 +658,10 @@
         options = ' '.join(map(procutil.shellquote, opts['option']))
         if options:
             options = ' ' + options
-        return dodiff(ui, repo, self._cmdline + options, pats, opts,
-                      guitool=self._isgui)
+        return dodiff(
+            ui, repo, self._cmdline + options, pats, opts, guitool=self._isgui
+        )
+
 
 def uisetup(ui):
     for cmd, path in ui.configitems('extdiff'):
@@ -594,9 +702,14 @@
                     if isgui is None:
                         isgui = ui.configbool(section, cmd + '.gui') or False
                     break
-        command(cmd, extdiffopts[:], _('hg %s [OPTION]... [FILE]...') % cmd,
-                helpcategory=command.CATEGORY_FILE_CONTENTS,
-                inferrepo=True)(savedcmd(path, cmdline, isgui))
+        command(
+            cmd,
+            extdiffopts[:],
+            _('hg %s [OPTION]... [FILE]...') % cmd,
+            helpcategory=command.CATEGORY_FILE_CONTENTS,
+            inferrepo=True,
+        )(savedcmd(path, cmdline, isgui))
+
 
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = [savedcmd]
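
For readers unfamiliar with the tables being reflowed above: Mercurial option entries are tuples of (short flag, long flag, default, help text[, value label]), and @command attaches a table plus a synopsis to a function. As a rough standalone sketch, not part of this patch and not Mercurial's actual parser, here is how such a table could drive argparse; all names below are illustrative:

    import argparse

    # hypothetical subset of the extdiff option table, plain strings instead of _()
    opts = [
        ('o', 'option', [], 'pass option to comparison program', 'OPT'),
        ('r', 'rev', [], 'revision', 'REV'),
        ('c', 'change', '', 'change made by revision', 'REV'),
    ]

    parser = argparse.ArgumentParser(prog='hg extdiff')
    for short, name, default, helptext, label in opts:
        flags = (['-' + short] if short else []) + ['--' + name]
        if isinstance(default, list):
            # a list default means the flag may be repeated
            parser.add_argument(*flags, action='append', default=[],
                                help=helptext, metavar=label)
        else:
            parser.add_argument(*flags, default=default, help=helptext,
                                metavar=label)

    print(parser.parse_args(['-r', 'tip']))
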
--- a/hgext/factotum.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/factotum.py	Sun Oct 06 09:45:02 2019 -0400
@@ -49,9 +49,7 @@
 
 import os
 from mercurial.i18n import _
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 from mercurial import (
     error,
     httpconnection,
@@ -70,16 +68,17 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('factotum', 'executable',
-    default='/bin/auth/factotum',
+configitem(
+    'factotum', 'executable', default='/bin/auth/factotum',
 )
-configitem('factotum', 'mountpoint',
-    default='/mnt/factotum',
+configitem(
+    'factotum', 'mountpoint', default='/mnt/factotum',
 )
-configitem('factotum', 'service',
-    default='hg',
+configitem(
+    'factotum', 'service', default='hg',
 )
 
+
 def auth_getkey(self, params):
     if not self.ui.interactive():
         raise error.Abort(_('factotum not interactive'))
@@ -88,6 +87,7 @@
     params = '%s !password?' % params
     os.system(procutil.tonativestr("%s -g '%s'" % (_executable, params)))
 
+
 def auth_getuserpasswd(self, getkey, params):
     params = 'proto=pass %s' % params
     while True:
@@ -111,12 +111,15 @@
             os.close(fd)
         getkey(self, params)
 
+
 def monkeypatch_method(cls):
     def decorator(func):
         setattr(cls, func.__name__, func)
         return func
+
     return decorator
 
+
 @monkeypatch_method(passwordmgr)
 def find_user_password(self, realm, authuri):
     user, passwd = self.passwddb.find_user_password(realm, authuri)
@@ -142,6 +145,7 @@
     self._writedebug(user, passwd)
     return (user, passwd)
 
+
 def uisetup(ui):
     global _executable
     _executable = ui.config('factotum', 'executable')
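
The monkeypatch_method decorator reformatted above swaps a function in as a method on an existing class. A self-contained sketch of the same pattern, with a made-up Greeter class standing in for passwordmgr:

    def monkeypatch_method(cls):
        def decorator(func):
            setattr(cls, func.__name__, func)
            return func

        return decorator

    class Greeter(object):
        def greet(self):
            return 'hello'

    @monkeypatch_method(Greeter)
    def greet(self):
        # replaces Greeter.greet for every instance, old or new
        return 'patched hello'

    assert Greeter().greet() == 'patched hello'
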
--- a/hgext/fastannotate/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -149,9 +149,11 @@
             commands.registercommand()
         elif name == 'hgweb':
             from . import support
+
             support.replacehgwebannotate()
         elif name == 'fctx':
             from . import support
+
             support.replacefctxannotate()
             commands.wrapdefault()
         else:
@@ -160,11 +162,13 @@
     if ui.configbool('fastannotate', 'server'):
         protocol.serveruisetup(ui)
 
+
 def extsetup(ui):
     # fastannotate has its own locking, without depending on repo lock
     # TODO: avoid mutating this unless the specific repo has it enabled
     localrepo.localrepository._wlockfreeprefix.add('fastannotate/')
 
+
 def reposetup(ui, repo):
     if ui.configbool('fastannotate', 'client'):
         protocol.clientreposetup(ui, repo)
--- a/hgext/fastannotate/commands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/commands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,6 +31,7 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 def _matchpaths(repo, rev, pats, opts, aopts=facontext.defaultopts):
     """generate paths matching given patterns"""
     perfhack = repo.ui.configbool('fastannotate', 'perfhack')
@@ -45,16 +46,23 @@
         reldir = os.path.relpath(encoding.getcwd(), reporoot)
         if reldir == '.':
             reldir = ''
-        if any(opts.get(o[1]) for o in commands.walkopts): # a)
+        if any(opts.get(o[1]) for o in commands.walkopts):  # a)
             perfhack = False
-        else: # b)
-            relpats = [os.path.relpath(p, reporoot) if os.path.isabs(p) else p
-                       for p in pats]
+        else:  # b)
+            relpats = [
+                os.path.relpath(p, reporoot) if os.path.isabs(p) else p
+                for p in pats
+            ]
             # disable perfhack on '..' since it allows escaping from the repo
-            if any(('..' in f or
-                    not os.path.isfile(
-                        facontext.pathhelper(repo, f, aopts).linelogpath))
-                   for f in relpats):
+            if any(
+                (
+                    '..' in f
+                    or not os.path.isfile(
+                        facontext.pathhelper(repo, f, aopts).linelogpath
+                    )
+                )
+                for f in relpats
+            ):
                 perfhack = False
 
     # perfhack: emit paths directly without checking with manifest
@@ -63,13 +71,16 @@
         for p in relpats:
             yield os.path.join(reldir, p)
     else:
+
         def bad(x, y):
             raise error.Abort("%s: %s" % (x, y))
+
         ctx = scmutil.revsingle(repo, rev)
         m = scmutil.match(ctx, pats, opts, badfn=bad)
         for p in ctx.walk(m):
             yield p
 
+
 fastannotatecommandargs = {
     r'options': [
         ('r', 'rev', '.', _('annotate the specified revision'), _('REV')),
@@ -78,21 +89,40 @@
         ('d', 'date', None, _('list the date (short with -q)')),
         ('n', 'number', None, _('list the revision number (default)')),
         ('c', 'changeset', None, _('list the changeset')),
-        ('l', 'line-number', None, _('show line number at the first '
-                                     'appearance')),
+        (
+            'l',
+            'line-number',
+            None,
+            _('show line number at the first ' 'appearance'),
+        ),
         ('e', 'deleted', None, _('show deleted lines (slow) (EXPERIMENTAL)')),
         ('', 'no-content', None, _('do not show file content (EXPERIMENTAL)')),
         ('', 'no-follow', None, _("don't follow copies and renames")),
-        ('', 'linear', None, _('enforce linear history, ignore second parent '
-                               'of merges (EXPERIMENTAL)')),
+        (
+            '',
+            'linear',
+            None,
+            _(
+                'enforce linear history, ignore second parent '
+                'of merges (EXPERIMENTAL)'
+            ),
+        ),
         ('', 'long-hash', None, _('show long changeset hash (EXPERIMENTAL)')),
-        ('', 'rebuild', None, _('rebuild cache even if it exists '
-                                '(EXPERIMENTAL)')),
-    ] + commands.diffwsopts + commands.walkopts + commands.formatteropts,
+        (
+            '',
+            'rebuild',
+            None,
+            _('rebuild cache even if it exists ' '(EXPERIMENTAL)'),
+        ),
+    ]
+    + commands.diffwsopts
+    + commands.walkopts
+    + commands.formatteropts,
     r'synopsis': _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
     r'inferrepo': True,
 }
 
+
 def fastannotate(ui, repo, *pats, **opts):
     """show changeset information by line for each file
 
@@ -136,15 +166,18 @@
     rev = opts.get('rev', '.')
     rebuild = opts.get('rebuild', False)
 
-    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
-                                     whitespace=True)
+    diffopts = patch.difffeatureopts(
+        ui, opts, section='annotate', whitespace=True
+    )
     aopts = facontext.annotateopts(
         diffopts=diffopts,
         followmerge=not opts.get('linear', False),
-        followrename=not opts.get('no_follow', False))
+        followrename=not opts.get('no_follow', False),
+    )
 
-    if not any(opts.get(s)
-               for s in ['user', 'date', 'file', 'number', 'changeset']):
+    if not any(
+        opts.get(s) for s in ['user', 'date', 'file', 'number', 'changeset']
+    ):
         # default 'number' for compatibility. but fastannotate is more
         # efficient with "changeset", "line-number" and "no-content".
         for name in ui.configlist('fastannotate', 'defaultformat', ['number']):
@@ -175,20 +208,24 @@
         while True:
             try:
                 with facontext.annotatecontext(repo, path, aopts, rebuild) as a:
-                    result = a.annotate(rev, master=master, showpath=showpath,
-                                        showlines=(showlines and
-                                                   not showdeleted))
+                    result = a.annotate(
+                        rev,
+                        master=master,
+                        showpath=showpath,
+                        showlines=(showlines and not showdeleted),
+                    )
                     if showdeleted:
                         existinglines = set((l[0], l[1]) for l in result)
                         result = a.annotatealllines(
-                            rev, showpath=showpath, showlines=showlines)
+                            rev, showpath=showpath, showlines=showlines
+                        )
                 break
             except (faerror.CannotReuseError, faerror.CorruptedFileError):
                 # happens if master moves backwards, or the file was deleted
                 # and readded, or renamed to an existing name, or corrupted.
-                if rebuild: # give up since we have tried rebuild already
+                if rebuild:  # give up since we have tried rebuild already
                     raise
-                else: # try a second time rebuilding the cache (slow)
+                else:  # try a second time rebuilding the cache (slow)
                     rebuild = True
                     continue
 
@@ -198,9 +235,13 @@
         formatter.write(result, lines, existinglines=existinglines)
     formatter.end()
 
+
 _newopts = set()
-_knownopts = {opt[1].replace('-', '_') for opt in
-              (fastannotatecommandargs[r'options'] + commands.globalopts)}
+_knownopts = {
+    opt[1].replace('-', '_')
+    for opt in (fastannotatecommandargs[r'options'] + commands.globalopts)
+}
+
 
 def _annotatewrapper(orig, ui, repo, *pats, **opts):
     """used by wrapdefault"""
@@ -220,19 +261,24 @@
 
     return orig(ui, repo, *pats, **opts)
 
+
 def registercommand():
     """register the fastannotate command"""
     name = 'fastannotate|fastblame|fa'
     command(name, helpbasic=True, **fastannotatecommandargs)(fastannotate)
 
+
 def wrapdefault():
     """wrap the default annotate command, to be aware of the protocol"""
     extensions.wrapcommand(commands.table, 'annotate', _annotatewrapper)
 
-@command('debugbuildannotatecache',
-         [('r', 'rev', '', _('build up to the specific revision'), _('REV'))
-         ] + commands.walkopts,
-         _('[-r REV] FILE...'))
+
+@command(
+    'debugbuildannotatecache',
+    [('r', 'rev', '', _('build up to the specific revision'), _('REV'))]
+    + commands.walkopts,
+    _('[-r REV] FILE...'),
+)
 def debugbuildannotatecache(ui, repo, *pats, **opts):
     """incrementally build fastannotate cache up to REV for specified files
 
@@ -247,8 +293,10 @@
     opts = pycompat.byteskwargs(opts)
     rev = opts.get('REV') or ui.config('fastannotate', 'mainbranch')
     if not rev:
-        raise error.Abort(_('you need to provide a revision'),
-                          hint=_('set fastannotate.mainbranch or use --rev'))
+        raise error.Abort(
+            _('you need to provide a revision'),
+            hint=_('set fastannotate.mainbranch or use --rev'),
+        )
     if ui.configbool('fastannotate', 'unfilteredrepo'):
         repo = repo.unfiltered()
     ctx = scmutil.revsingle(repo, rev)
@@ -272,14 +320,20 @@
                 except (faerror.CannotReuseError, faerror.CorruptedFileError):
                     # the cache is broken (could happen with renaming so the
                     # file history gets invalidated). rebuild and try again.
-                    ui.debug('fastannotate: %s: rebuilding broken cache\n'
-                             % path)
+                    ui.debug(
+                        'fastannotate: %s: rebuilding broken cache\n' % path
+                    )
                     actx.rebuild()
                     try:
                         actx.annotate(rev, rev)
                     except Exception as ex:
                         # possibly a bug, but should not stop us from building
                         # cache for other files.
-                        ui.warn(_('fastannotate: %s: failed to '
-                                  'build cache: %r\n') % (path, ex))
+                        ui.warn(
+                            _(
+                                'fastannotate: %s: failed to '
+                                'build cache: %r\n'
+                            )
+                            % (path, ex)
+                        )
         progress.complete()
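
The perfhack branch reflowed above only trusts patterns that cannot escape the repository; '..' components defeat it. A standalone approximation of that containment test (POSIX paths assumed; the real code checks for '..' as a substring and also requires the linelog file to exist):

    import os.path

    def escapes_repo(reporoot, pat):
        # mirror the relpath normalization above, then reject '..' components
        rel = os.path.relpath(pat, reporoot) if os.path.isabs(pat) else pat
        return '..' in rel.split('/')

    assert escapes_repo('/repo', '/repo/../etc/passwd')
    assert not escapes_repo('/repo', 'src/module.py')
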
--- a/hgext/fastannotate/context.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/context.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,9 +23,7 @@
     scmutil,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 from . import (
     error as faerror,
@@ -37,6 +35,7 @@
 def _getflog(repo, path):
     return repo.file(path)
 
+
 # extracted from mercurial.context.basefilectx.annotate
 def _parents(f, follow=True):
     # Cut _descendantrev here to mitigate the penalty of lazy linkrev
@@ -58,6 +57,7 @@
 
     return pl
 
+
 # extracted from mercurial.context.basefilectx.annotate. slightly modified
 # so it takes a fctx instead of a pair of text and fctx.
 def _decorate(fctx):
@@ -67,6 +67,7 @@
         linecount += 1
     return ([(fctx, i) for i in pycompat.xrange(linecount)], text)
 
+
 # extracted from mercurial.context.basefilectx.annotate. slightly modified
 # so it takes an extra "blocks" parameter calculated elsewhere, instead of
 # calculating diff here.
@@ -78,10 +79,12 @@
             child[0][b1:b2] = parent[0][a1:a2]
     return child
 
+
 # like scmutil.revsingle, but with lru cache, so their states (like manifests)
 # could be reused
 _revsingle = util.lrucachefunc(scmutil.revsingle)
 
+
 def resolvefctx(repo, rev, path, resolverev=False, adjustctx=None):
     """(repo, str, str) -> fctx
 
@@ -125,23 +128,27 @@
             fctx._changectx = repo[introrev]
     return fctx
 
+
 # like mercurial.store.encodedir, but use linelog suffixes: .m, .l, .lock
 def encodedir(path):
-    return (path
-            .replace('.hg/', '.hg.hg/')
-            .replace('.l/', '.l.hg/')
-            .replace('.m/', '.m.hg/')
-            .replace('.lock/', '.lock.hg/'))
+    return (
+        path.replace('.hg/', '.hg.hg/')
+        .replace('.l/', '.l.hg/')
+        .replace('.m/', '.m.hg/')
+        .replace('.lock/', '.lock.hg/')
+    )
+
 
 def hashdiffopts(diffopts):
-    diffoptstr = stringutil.pprint(sorted(
-        (k, getattr(diffopts, k))
-        for k in mdiff.diffopts.defaults
-    ))
+    diffoptstr = stringutil.pprint(
+        sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
+    )
     return node.hex(hashlib.sha1(diffoptstr).digest())[:6]
 
+
 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
 
+
 class annotateopts(object):
     """like mercurial.mdiff.diffopts, but is for annotate
 
@@ -175,8 +182,10 @@
                 result += 'i' + diffopthash
         return result or 'default'
 
+
 defaultopts = annotateopts()
 
+
 class _annotatecontext(object):
     """do not use this class directly as it does not use lock to protect
     writes. use "with annotatecontext(...)" instead.
@@ -191,7 +200,7 @@
         self.revmappath = revmappath
         self._linelog = None
         self._revmap = None
-        self._node2path = {} # {str: str}
+        self._node2path = {}  # {str: str}
 
     @property
     def linelog(self):
@@ -298,23 +307,27 @@
         directly, revfctx = self.canannotatedirectly(rev)
         if directly:
             if self.ui.debugflag:
-                self.ui.debug('fastannotate: %s: using fast path '
-                              '(resolved fctx: %s)\n'
-                              % (self.path,
-                                 stringutil.pprint(util.safehasattr(revfctx,
-                                                                    'node'))))
+                self.ui.debug(
+                    'fastannotate: %s: using fast path '
+                    '(resolved fctx: %s)\n'
+                    % (
+                        self.path,
+                        stringutil.pprint(util.safehasattr(revfctx, 'node')),
+                    )
+                )
             return self.annotatedirectly(revfctx, showpath, showlines)
 
         # resolve master
         masterfctx = None
         if master:
             try:
-                masterfctx = self._resolvefctx(master, resolverev=True,
-                                               adjustctx=True)
-            except LookupError: # master does not have the file
+                masterfctx = self._resolvefctx(
+                    master, resolverev=True, adjustctx=True
+                )
+            except LookupError:  # master does not have the file
                 pass
             else:
-                if masterfctx in self.revmap: # no need to update linelog
+                if masterfctx in self.revmap:  # no need to update linelog
                     masterfctx = None
 
         #                  ... - @ <- rev (can be an arbitrary changeset,
@@ -342,18 +355,20 @@
         initvisit = [revfctx]
         if masterfctx:
             if masterfctx.rev() is None:
-                raise error.Abort(_('cannot update linelog to wdir()'),
-                                  hint=_('set fastannotate.mainbranch'))
+                raise error.Abort(
+                    _('cannot update linelog to wdir()'),
+                    hint=_('set fastannotate.mainbranch'),
+                )
             initvisit.append(masterfctx)
         visit = initvisit[:]
         pcache = {}
         needed = {revfctx: 1}
-        hist = {} # {fctx: ([(llrev or fctx, linenum)], text)}
+        hist = {}  # {fctx: ([(llrev or fctx, linenum)], text)}
         while visit:
             f = visit.pop()
             if f in pcache or f in hist:
                 continue
-            if f in self.revmap: # in the old main branch, it's a joint
+            if f in self.revmap:  # in the old main branch, it's a joint
                 llrev = self.revmap.hsh2rev(f.node())
                 self.linelog.annotate(llrev)
                 result = self.linelog.annotateresult
@@ -387,19 +402,24 @@
 
         if self.ui.debugflag:
             if newmainbranch:
-                self.ui.debug('fastannotate: %s: %d new changesets in the main'
-                              ' branch\n' % (self.path, len(newmainbranch)))
-            elif not hist: # no joints, no updates
-                self.ui.debug('fastannotate: %s: linelog cannot help in '
-                              'annotating this revision\n' % self.path)
+                self.ui.debug(
+                    'fastannotate: %s: %d new changesets in the main'
+                    ' branch\n' % (self.path, len(newmainbranch))
+                )
+            elif not hist:  # no joints, no updates
+                self.ui.debug(
+                    'fastannotate: %s: linelog cannot help in '
+                    'annotating this revision\n' % self.path
+                )
 
         # prepare annotateresult so we can update linelog incrementally
         self.linelog.annotate(self.linelog.maxrev)
 
         # 3rd DFS does the actual annotate
         visit = initvisit[:]
-        progress = self.ui.makeprogress(('building cache'),
-                                        total=len(newmainbranch))
+        progress = self.ui.makeprogress(
+            'building cache', total=len(newmainbranch)
+        )
         while visit:
             f = visit[-1]
             if f in hist:
@@ -416,8 +436,8 @@
                 continue
 
             visit.pop()
-            blocks = None # mdiff blocks, used for appending linelog
-            ismainbranch = (f in newmainbranch)
+            blocks = None  # mdiff blocks, used for appending linelog
+            ismainbranch = f in newmainbranch
             # curr is the same as the traditional annotate algorithm,
             # if we only care about linear history (do not follow merge),
             # then curr is not actually used.
@@ -437,22 +457,23 @@
             hist[f] = curr
             del pcache[f]
 
-            if ismainbranch: # need to write to linelog
+            if ismainbranch:  # need to write to linelog
                 progress.increment()
                 bannotated = None
-                if len(pl) == 2 and self.opts.followmerge: # merge
+                if len(pl) == 2 and self.opts.followmerge:  # merge
                     bannotated = curr[0]
-                if blocks is None: # no parents, add an empty one
+                if blocks is None:  # no parents, add an empty one
                     blocks = list(self._diffblocks('', curr[1]))
                 self._appendrev(f, blocks, bannotated)
-            elif showpath: # not append linelog, but we need to record path
+            elif showpath:  # not append linelog, but we need to record path
                 self._node2path[f.node()] = f.path()
 
         progress.complete()
 
         result = [
             ((self.revmap.rev2hsh(fr) if isinstance(fr, int) else fr.node()), l)
-            for fr, l in hist[revfctx][0]] # [(node, linenumber)]
+            for fr, l in hist[revfctx][0]
+        ]  # [(node, linenumber)]
         return self._refineannotateresult(result, revfctx, showpath, showlines)
 
     def canannotatedirectly(self, rev):
@@ -557,7 +578,7 @@
                         continue
                     hsh = annotateresult[idxs[0]][0]
                     break
-            except StopIteration: # no more unresolved lines
+            except StopIteration:  # no more unresolved lines
                 return result
             if hsh is None:
                 # the remaining key2idxs are not in main branch, resolving them
@@ -567,9 +588,11 @@
                     if rev not in revlines:
                         hsh = annotateresult[idxs[0]][0]
                         if self.ui.debugflag:
-                            self.ui.debug('fastannotate: reading %s line #%d '
-                                          'to resolve lines %r\n'
-                                          % (node.short(hsh), linenum, idxs))
+                            self.ui.debug(
+                                'fastannotate: reading %s line #%d '
+                                'to resolve lines %r\n'
+                                % (node.short(hsh), linenum, idxs)
+                            )
                         fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
                         lines = mdiff.splitnewlines(fctx.data())
                         revlines[rev] = lines
@@ -579,8 +602,9 @@
                 return result
 
             # run the annotate and the lines should match to the file content
-            self.ui.debug('fastannotate: annotate %s to resolve lines\n'
-                          % node.short(hsh))
+            self.ui.debug(
+                'fastannotate: annotate %s to resolve lines\n' % node.short(hsh)
+            )
             linelog.annotate(rev)
             fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
             annotated = linelog.annotateresult
@@ -608,14 +632,17 @@
             hsh = f.node()
         llrev = self.revmap.hsh2rev(hsh)
         if not llrev:
-            raise faerror.CorruptedFileError('%s is not in revmap'
-                                             % node.hex(hsh))
+            raise faerror.CorruptedFileError(
+                '%s is not in revmap' % node.hex(hsh)
+            )
         if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
-            raise faerror.CorruptedFileError('%s is not in revmap mainbranch'
-                                             % node.hex(hsh))
+            raise faerror.CorruptedFileError(
+                '%s is not in revmap mainbranch' % node.hex(hsh)
+            )
         self.linelog.annotate(llrev)
-        result = [(self.revmap.rev2hsh(r), l)
-                  for r, l in self.linelog.annotateresult]
+        result = [
+            (self.revmap.rev2hsh(r), l) for r, l in self.linelog.annotateresult
+        ]
         return self._refineannotateresult(result, f, showpath, showlines)
 
     def _refineannotateresult(self, result, f, showpath, showlines):
@@ -625,13 +652,13 @@
         if showpath:
             result = self._addpathtoresult(result)
         if showlines:
-            if isinstance(f, bytes): # f: node or fctx
+            if isinstance(f, bytes):  # f: node or fctx
                 llrev = self.revmap.hsh2rev(f)
                 fctx = self._resolvefctx(f, self.revmap.rev2path(llrev))
             else:
                 fctx = f
             lines = mdiff.splitnewlines(fctx.data())
-            if len(lines) != len(result): # linelog is probably corrupted
+            if len(lines) != len(result):  # linelog is probably corrupted
                 raise faerror.CorruptedFileError()
             result = (result, lines)
         return result
@@ -660,15 +687,18 @@
 
         # append sidebranch revisions to revmap
         siderevs = []
-        siderevmap = {} # node: int
+        siderevmap = {}  # node: int
         if bannotated is not None:
             for (a1, a2, b1, b2), op in blocks:
                 if op != '=':
                     # f could be either a linelog rev, or a fctx.
-                    siderevs += [f for f, l in bannotated[b1:b2]
-                                 if not isinstance(f, int)]
+                    siderevs += [
+                        f
+                        for f, l in bannotated[b1:b2]
+                        if not isinstance(f, int)
+                    ]
         siderevs = set(siderevs)
-        if fctx in siderevs: # mainnode must be appended seperately
+        if fctx in siderevs:  # mainnode must be appended separately
             siderevs.remove(fctx)
         for f in siderevs:
             siderevmap[f] = getllrev(f)
@@ -683,8 +713,10 @@
             if bannotated is None:
                 linelog.replacelines(llrev, a1, a2, b1, b2)
             else:
-                blines = [((r if isinstance(r, int) else siderevmap[r]), l)
-                          for r, l in bannotated[b1:b2]]
+                blines = [
+                    ((r if isinstance(r, int) else siderevmap[r]), l)
+                    for r, l in bannotated[b1:b2]
+                ]
                 linelog.replacelines_vec(llrev, a1, a2, blines)
 
     def _addpathtoresult(self, annotateresult, revmap=None):
@@ -717,11 +749,13 @@
         """-> (fctx) -> [fctx]"""
         followrename = self.opts.followrename
         followmerge = self.opts.followmerge
+
         def parents(f):
             pl = _parents(f, follow=followrename)
             if not followmerge:
                 pl = pl[:1]
             return pl
+
         return parents
 
     @util.propertycache
@@ -731,6 +765,7 @@
     def _resolvefctx(self, rev, path=None, **kwds):
         return resolvefctx(self.repo, rev, (path or self.path), **kwds)
 
+
 def _unlinkpaths(paths):
     """silent, best-effort unlink"""
     for path in paths:
@@ -739,13 +774,15 @@
         except OSError:
             pass
 
+
 class pathhelper(object):
     """helper for getting paths for lockfile, linelog and revmap"""
 
     def __init__(self, repo, path, opts=defaultopts):
         # different options use different directories
-        self._vfspath = os.path.join('fastannotate',
-                                     opts.shortstr, encodedir(path))
+        self._vfspath = os.path.join(
+            'fastannotate', opts.shortstr, encodedir(path)
+        )
         self._repo = repo
 
     @property
@@ -763,6 +800,7 @@
     def revmappath(self):
         return self._repo.vfs.join(self._vfspath + '.m')
 
+
 @contextlib.contextmanager
 def annotatecontext(repo, path, opts=defaultopts, rebuild=False):
     """context needed to perform (fast) annotate on a file
@@ -799,6 +837,7 @@
         if actx is not None:
             actx.close()
 
+
 def fctxannotatecontext(fctx, follow=True, diffopts=None, rebuild=False):
     """like annotatecontext but get the context from a fctx. convenient when
     used in fctx.annotate
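
encodedir, reindented above, escapes directory names that would collide with the .l/.m/.lock cache suffixes. Its effect, checked standalone (str instead of bytes for brevity):

    def encodedir(path):
        return (
            path.replace('.hg/', '.hg.hg/')
            .replace('.l/', '.l.hg/')
            .replace('.m/', '.m.hg/')
            .replace('.lock/', '.lock.hg/')
        )

    assert encodedir('a.hg/b') == 'a.hg.hg/b'
    assert encodedir('x.l/y.m/z') == 'x.l.hg/y.m.hg/z'
    assert encodedir('plain/path') == 'plain/path'
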
--- a/hgext/fastannotate/error.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/error.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,8 +6,10 @@
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import
 
+
 class CorruptedFileError(Exception):
     pass
 
+
 class CannotReuseError(Exception):
     """cannot reuse or update the cache incrementally"""
--- a/hgext/fastannotate/formatter.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/formatter.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,7 @@
     templatefilters,
     util,
 )
-from mercurial.utils import (
-        dateutil,
-)
+from mercurial.utils import dateutil
 
 # imitating mercurial.commands.annotate, not using the vanilla formatter since
 # the data structures are a bit different, and we have some fast paths.
@@ -41,28 +39,34 @@
             wnode = hexfunc(repo['.'].node()) + '+'
             wrev = '%d' % repo['.'].rev()
             wrevpad = ''
-            if not opts.get('changeset'): # only show + if changeset is hidden
+            if not opts.get('changeset'):  # only show + if changeset is hidden
                 wrev += '+'
                 wrevpad = ' '
             revenc = lambda x: wrev if x is None else ('%d' % x) + wrevpad
+
             def csetenc(x):
                 if x is None:
                     return wnode
                 return pycompat.bytestr(x) + ' '
+
         else:
             revenc = csetenc = pycompat.bytestr
 
         # opt name, separator, raw value (for json/plain), encoder (for plain)
-        opmap = [('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
-                 ('number', ' ', lambda x: getctx(x).rev(), revenc),
-                 ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
-                 ('date', ' ', lambda x: getctx(x).date(), datefunc),
-                 ('file', ' ', lambda x: x[2], pycompat.bytestr),
-                 ('line_number', ':', lambda x: x[1] + 1, pycompat.bytestr)]
+        opmap = [
+            ('user', ' ', lambda x: getctx(x).user(), ui.shortuser),
+            ('number', ' ', lambda x: getctx(x).rev(), revenc),
+            ('changeset', ' ', lambda x: hexfunc(x[0]), csetenc),
+            ('date', ' ', lambda x: getctx(x).date(), datefunc),
+            ('file', ' ', lambda x: x[2], pycompat.bytestr),
+            ('line_number', ':', lambda x: x[1] + 1, pycompat.bytestr),
+        ]
         fieldnamemap = {'number': 'rev', 'changeset': 'node'}
-        funcmap = [(get, sep, fieldnamemap.get(op, op), enc)
-                   for op, sep, get, enc in opmap
-                   if opts.get(op)]
+        funcmap = [
+            (get, sep, fieldnamemap.get(op, op), enc)
+            for op, sep, get, enc in opmap
+            if opts.get(op)
+        ]
         # no separator for first column
         funcmap[0] = list(funcmap[0])
         funcmap[0][1] = ''
@@ -72,17 +76,17 @@
         """(annotateresult, [str], set([rev, linenum])) -> None. write output.
         annotateresult can be [(node, linenum, path)], or [(node, linenum)]
         """
-        pieces = [] # [[str]]
-        maxwidths = [] # [int]
+        pieces = []  # [[str]]
+        maxwidths = []  # [int]
 
         # calculate padding
         for f, sep, name, enc in self.funcmap:
             l = [enc(f(x)) for x in annotatedresult]
             pieces.append(l)
-            if name in ['node', 'date']: # node and date has fixed size
+            if name in ['node', 'date']:  # node and date has fixed size
                 l = l[:1]
             widths = pycompat.maplist(encoding.colwidth, set(l))
-            maxwidth = (max(widths) if widths else 0)
+            maxwidth = max(widths) if widths else 0
             maxwidths.append(maxwidth)
 
         # buffered output
@@ -95,13 +99,14 @@
             if lines:
                 if existinglines is None:
                     result += ': ' + lines[i]
-                else: # extra formatting showing whether a line exists
+                else:  # extra formatting showing whether a line exists
                     key = (annotatedresult[i][0], annotatedresult[i][1])
                     if key in existinglines:
                         result += ':  ' + lines[i]
                     else:
-                        result += ': ' + self.ui.label('-' + lines[i],
-                                                       'diff.deleted')
+                        result += ': ' + self.ui.label(
+                            '-' + lines[i], 'diff.deleted'
+                        )
 
             if result[-1:] != '\n':
                 result += '\n'
@@ -118,6 +123,7 @@
     def end(self):
         pass
 
+
 class jsonformatter(defaultformatter):
     def __init__(self, ui, repo, opts):
         super(jsonformatter, self).__init__(ui, repo, opts)
@@ -128,8 +134,10 @@
         if annotatedresult:
             self._writecomma()
 
-        pieces = [(name, pycompat.maplist(f, annotatedresult))
-                  for f, sep, name, enc in self.funcmap]
+        pieces = [
+            (name, pycompat.maplist(f, annotatedresult))
+            for f, sep, name, enc in self.funcmap
+        ]
         if lines is not None:
             pieces.append(('line', lines))
         pieces.sort()
@@ -142,9 +150,11 @@
             result += '\n {\n'
             for j, p in enumerate(pieces):
                 k, vs = p
-                result += ('  "%s": %s%s\n'
-                           % (k, templatefilters.json(vs[i], paranoid=False),
-                              seps[j]))
+                result += '  "%s": %s%s\n' % (
+                    k,
+                    templatefilters.json(vs[i], paranoid=False),
+                    seps[j],
+                )
             result += ' }%s' % ('' if i == lasti else ',')
         if lasti >= 0:
             self.needcomma = True
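
The padding logic reformatted above computes one maximum display width per column, then joins padded cells. The same layout pass reduced to plain Python (sample rows invented; the real code measures with encoding.colwidth, not len):

    rows = [('alice', '3'), ('bob', '12')]
    widths = [max(len(cell) for cell in col) for col in zip(*rows)]
    for row in rows:
        print(' '.join(cell.rjust(width) for cell, width in zip(row, widths)))
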
--- a/hgext/fastannotate/protocol.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/protocol.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,28 +22,37 @@
 
 # common
 
+
 def _getmaster(ui):
     """get the mainbranch, and enforce it is set"""
     master = ui.config('fastannotate', 'mainbranch')
     if not master:
-        raise error.Abort(_('fastannotate.mainbranch is required '
-                            'for both the client and the server'))
+        raise error.Abort(
+            _(
+                'fastannotate.mainbranch is required '
+                'for both the client and the server'
+            )
+        )
     return master
 
+
 # server-side
 
+
 def _capabilities(orig, repo, proto):
     result = orig(repo, proto)
     result.append('getannotate')
     return result
 
+
 def _getannotate(repo, proto, path, lastnode):
     # output:
     #   FILE := vfspath + '\0' + str(size) + '\0' + content
     #   OUTPUT := '' | FILE + OUTPUT
     result = ''
-    buildondemand = repo.ui.configbool('fastannotate', 'serverbuildondemand',
-                                       True)
+    buildondemand = repo.ui.configbool(
+        'fastannotate', 'serverbuildondemand', True
+    )
     with context.annotatecontext(repo, path) as actx:
         if buildondemand:
             # update before responding to the client
@@ -57,7 +66,7 @@
                 try:
                     actx.annotate(master, master)
                 except Exception:
-                    actx.rebuild() # delete files
+                    actx.rebuild()  # delete files
             finally:
                 # although the "with" context will also do a close/flush, we
                 # need to do it early so we can send the correct response to
                 # the client.
                 result += '%s\0%d\0%s' % (relpath, len(content), content)
     return result
 
+
 def _registerwireprotocommand():
     if 'getannotate' in wireprotov1server.commands:
         return
-    wireprotov1server.wireprotocommand(
-        'getannotate', 'path lastnode')(_getannotate)
+    wireprotov1server.wireprotocommand('getannotate', 'path lastnode')(
+        _getannotate
+    )
+
 
 def serveruisetup(ui):
     _registerwireprotocommand()
     extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
 
+
 # client-side
 
+
 def _parseresponse(payload):
     result = {}
     i = 0
     l = len(payload) - 1
-    state = 0 # 0: vfspath, 1: size
+    state = 0  # 0: vfspath, 1: size
     vfspath = size = ''
     while i < l:
-        ch = payload[i:i + 1]
+        ch = payload[i : i + 1]
         if ch == '\0':
             if state == 1:
-                result[vfspath] = payload[i + 1:i + 1 + int(size)]
+                result[vfspath] = payload[i + 1 : i + 1 + int(size)]
                 i += int(size)
                 state = 0
                 vfspath = size = ''
@@ -114,6 +128,7 @@
         i += 1
     return result
 
+
 def peersetup(ui, peer):
     class fastannotatepeer(peer.__class__):
         @wireprotov1peer.batchable
@@ -126,14 +141,17 @@
                 f = wireprotov1peer.future()
                 yield args, f
                 yield _parseresponse(f.value)
+
     peer.__class__ = fastannotatepeer
 
+
 @contextlib.contextmanager
 def annotatepeer(repo):
     ui = repo.ui
 
     remotepath = ui.expandpath(
-        ui.config('fastannotate', 'remotepath', 'default'))
+        ui.config('fastannotate', 'remotepath', 'default')
+    )
     peer = hg.peer(ui, {}, remotepath)
 
     try:
@@ -141,6 +159,7 @@
     finally:
         peer.close()
 
+
 def clientfetch(repo, paths, lastnodemap=None, peer=None):
     """download annotate cache from the server for paths"""
     if not paths:
@@ -158,9 +177,11 @@
     with peer.commandexecutor() as batcher:
         ui.debug('fastannotate: requesting %d files\n' % len(paths))
         for p in paths:
-            results.append(batcher.callcommand(
-                'getannotate',
-                {'path': p, 'lastnode':lastnodemap.get(p)}))
+            results.append(
+                batcher.callcommand(
+                    'getannotate', {'path': p, 'lastnode': lastnodemap.get(p)}
+                )
+            )
 
         for result in results:
             r = result.result()
@@ -168,18 +189,22 @@
             r = {util.pconvert(p): v for p, v in r.iteritems()}
             for path in sorted(r):
                 # ignore malicious paths
-                if (not path.startswith('fastannotate/')
-                    or '/../' in (path + '/')):
+                if not path.startswith('fastannotate/') or '/../' in (
+                    path + '/'
+                ):
                     ui.debug('fastannotate: ignored malicious path %s\n' % path)
                     continue
                 content = r[path]
                 if ui.debugflag:
-                    ui.debug('fastannotate: writing %d bytes to %s\n'
-                             % (len(content), path))
+                    ui.debug(
+                        'fastannotate: writing %d bytes to %s\n'
+                        % (len(content), path)
+                    )
                 repo.vfs.makedirs(os.path.dirname(path))
                 with repo.vfs(path, 'wb') as f:
                     f.write(content)
 
+
 def _filterfetchpaths(repo, paths):
     """return a subset of paths whose history is long and need to fetch linelog
     from the server. works with remotefilelog and non-remotefilelog repos.
@@ -193,11 +218,12 @@
         try:
             if len(repo.file(path)) >= threshold:
                 result.append(path)
-        except Exception: # file not found etc.
+        except Exception:  # file not found etc.
             result.append(path)
 
     return result
 
+
 def localreposetup(ui, repo):
     class fastannotaterepo(repo.__class__):
         def prefetchfastannotate(self, paths, peer=None):
@@ -215,8 +241,10 @@
             except Exception as ex:
                 # could be directory not writable or so, not fatal
                 self.ui.debug('fastannotate: prefetch failed: %r\n' % ex)
+
     repo.__class__ = fastannotaterepo
 
+
 def clientreposetup(ui, repo):
     _registerwireprotocommand()
     if repo.local():
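
The getannotate wire format documented above is a flat run of vfspath NUL size NUL content records. A standalone decoder for that framing (sample payload invented; the real _parseresponse walks the payload byte by byte):

    def parseresponse(payload):
        # walk the flat byte string record by record
        result = {}
        i = 0
        while i < len(payload):
            j = payload.index(b'\0', i)      # end of vfspath
            k = payload.index(b'\0', j + 1)  # end of decimal size
            size = int(payload[j + 1 : k])
            result[payload[i:j]] = payload[k + 1 : k + 1 + size]
            i = k + 1 + size
        return result

    payload = (b'fastannotate/default/a.l\x005\x00hello'
               b'fastannotate/default/a.m\x003\x00foo')
    assert parseresponse(payload) == {
        b'fastannotate/default/a.l': b'hello',
        b'fastannotate/default/a.m': b'foo',
    }
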
--- a/hgext/fastannotate/revmap.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/revmap.py	Sun Oct 06 09:45:02 2019 -0400
@@ -47,6 +47,7 @@
 # len(mercurial.node.nullid)
 _hshlen = 20
 
+
 class revmap(object):
     """trivial hg bin hash - linelog rev bidirectional map
 
@@ -157,15 +158,16 @@
         """write the state down to the file"""
         if not self.path:
             return
-        if self._lastmaxrev == -1: # write the entire file
+        if self._lastmaxrev == -1:  # write the entire file
             with open(self.path, 'wb') as f:
                 f.write(self.HEADER)
                 for i in pycompat.xrange(1, len(self._rev2hsh)):
                     self._writerev(i, f)
-        else: # append incrementally
+        else:  # append incrementally
             with open(self.path, 'ab') as f:
-                for i in pycompat.xrange(self._lastmaxrev + 1,
-                                         len(self._rev2hsh)):
+                for i in pycompat.xrange(
+                    self._lastmaxrev + 1, len(self._rev2hsh)
+                ):
                     self._writerev(i, f)
         self._lastmaxrev = self.maxrev
 
@@ -217,7 +219,7 @@
         buf = ''
         while True:
             ch = f.read(1)
-            if not ch: # unexpected eof
+            if not ch:  # unexpected eof
                 raise error.CorruptedFileError()
             if ch == '\0':
                 break
@@ -229,9 +231,9 @@
         test if (node, path) is in the map, and is not in a side branch.
         f can be either a tuple of (node, path), or a fctx.
         """
-        if isinstance(f, tuple): # f: (node, path)
+        if isinstance(f, tuple):  # f: (node, path)
             hsh, path = f
-        else: # f: fctx
+        else:  # f: fctx
             hsh, path = f.node(), f.path()
         rev = self.hsh2rev(hsh)
         if rev is None:
@@ -240,6 +242,7 @@
             return False
         return (self.rev2flag(rev) & sidebranchflag) == 0
 
+
 def getlastnode(path):
     """return the last hash in a revmap, without loading its full content.
     this is equivalent to `m = revmap(path); m.rev2hsh(m.maxrev)`, but faster.
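
revmap, touched up above, is at heart a dense bidirectional map between 1-based linelog revisions and 20-byte hashes. The essential invariant in miniature (flags, paths and the on-disk format omitted):

    class tinyrevmap(object):
        def __init__(self):
            self._rev2hsh = [None]  # rev 0 is reserved/invalid
            self._hsh2rev = {}

        def append(self, hsh):
            rev = len(self._rev2hsh)
            self._rev2hsh.append(hsh)
            self._hsh2rev[hsh] = rev
            return rev

        def rev2hsh(self, rev):
            return self._rev2hsh[rev]

        def hsh2rev(self, hsh):
            return self._hsh2rev.get(hsh)

    m = tinyrevmap()
    r = m.append(b'\x11' * 20)
    assert m.rev2hsh(r) == b'\x11' * 20 and m.hsh2rev(b'\x11' * 20) == 1
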
--- a/hgext/fastannotate/support.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fastannotate/support.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,6 +21,7 @@
     revmap,
 )
 
+
 class _lazyfctx(object):
     """delegates to fctx but do not construct fctx when unnecessary"""
 
@@ -42,6 +43,7 @@
     def __getattr__(self, name):
         return getattr(self._fctx, name)
 
+
 def _convertoutputs(repo, annotated, contents):
     """convert fastannotate outputs to vanilla annotate format"""
     # fastannotate returns: [(nodeid, linenum, path)], [linecontent]
@@ -59,10 +61,12 @@
         results.append(annotateline(fctx=fctx, lineno=lineno, text=line))
     return results
 
+
 def _getmaster(fctx):
     """(fctx) -> str"""
     return fctx._repo.ui.config('fastannotate', 'mainbranch') or 'default'
 
+
 def _doannotate(fctx, follow=True, diffopts=None):
     """like the vanilla fctx.annotate, but do it via fastannotate, and make
     the output format compatible with the vanilla fctx.annotate.
@@ -73,49 +77,61 @@
 
     with context.fctxannotatecontext(fctx, follow, diffopts) as ac:
         try:
-            annotated, contents = ac.annotate(fctx.rev(), master=master,
-                                              showpath=True, showlines=True)
+            annotated, contents = ac.annotate(
+                fctx.rev(), master=master, showpath=True, showlines=True
+            )
         except Exception:
-            ac.rebuild() # try rebuild once
-            fctx._repo.ui.debug('fastannotate: %s: rebuilding broken cache\n'
-                                % fctx._path)
+            ac.rebuild()  # try rebuild once
+            fctx._repo.ui.debug(
+                'fastannotate: %s: rebuilding broken cache\n' % fctx._path
+            )
             try:
-                annotated, contents = ac.annotate(fctx.rev(), master=master,
-                                                  showpath=True, showlines=True)
+                annotated, contents = ac.annotate(
+                    fctx.rev(), master=master, showpath=True, showlines=True
+                )
             except Exception:
                 raise
 
     assert annotated and contents
     return _convertoutputs(fctx._repo, annotated, contents)
 
+
 def _hgwebannotate(orig, fctx, ui):
-    diffopts = patch.difffeatureopts(ui, untrusted=True,
-                                     section='annotate', whitespace=True)
+    diffopts = patch.difffeatureopts(
+        ui, untrusted=True, section='annotate', whitespace=True
+    )
     return _doannotate(fctx, diffopts=diffopts)
 
-def _fctxannotate(orig, self, follow=False, linenumber=False, skiprevs=None,
-                  diffopts=None):
+
+def _fctxannotate(
+    orig, self, follow=False, linenumber=False, skiprevs=None, diffopts=None
+):
     if skiprevs:
         # skiprevs is not supported yet
-        return orig(self, follow, linenumber, skiprevs=skiprevs,
-                    diffopts=diffopts)
+        return orig(
+            self, follow, linenumber, skiprevs=skiprevs, diffopts=diffopts
+        )
     try:
         return _doannotate(self, follow, diffopts)
     except Exception as ex:
-        self._repo.ui.debug('fastannotate: falling back to the vanilla '
-                            'annotate: %r\n' % ex)
-        return orig(self, follow=follow, skiprevs=skiprevs,
-                    diffopts=diffopts)
+        self._repo.ui.debug(
+            'fastannotate: falling back to the vanilla ' 'annotate: %r\n' % ex
+        )
+        return orig(self, follow=follow, skiprevs=skiprevs, diffopts=diffopts)
+
 
 def _remotefctxannotate(orig, self, follow=False, skiprevs=None, diffopts=None):
     # skipset: a set-like used to test if a fctx needs to be downloaded
     with context.fctxannotatecontext(self, follow, diffopts) as ac:
         skipset = revmap.revmap(ac.revmappath)
-    return orig(self, follow, skiprevs=skiprevs, diffopts=diffopts,
-                prefetchskip=skipset)
+    return orig(
+        self, follow, skiprevs=skiprevs, diffopts=diffopts, prefetchskip=skipset
+    )
+
 
 def replacehgwebannotate():
     extensions.wrapfunction(hgweb.webutil, 'annotate', _hgwebannotate)
 
+
 def replacefctxannotate():
     extensions.wrapfunction(hgcontext.basefilectx, 'annotate', _fctxannotate)
--- a/hgext/fetch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fetch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,9 +10,7 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
-from mercurial.node import (
-    short,
-)
+from mercurial.node import short
 from mercurial import (
     cmdutil,
     error,
@@ -34,15 +32,27 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('fetch',
-    [('r', 'rev', [],
-     _('a specific revision you would like to pull'), _('REV')),
-    ('', 'edit', None, _('invoke editor on commit messages')),
-    ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
-    ('', 'switch-parent', None, _('switch parents when merging')),
-    ] + cmdutil.commitopts + cmdutil.commitopts2 + cmdutil.remoteopts,
+
+@command(
+    'fetch',
+    [
+        (
+            'r',
+            'rev',
+            [],
+            _('a specific revision you would like to pull'),
+            _('REV'),
+        ),
+        ('', 'edit', None, _('invoke editor on commit messages')),
+        ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
+        ('', 'switch-parent', None, _('switch parents when merging')),
+    ]
+    + cmdutil.commitopts
+    + cmdutil.commitopts2
+    + cmdutil.remoteopts,
     _('hg fetch [SOURCE]'),
-    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT)
+    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
+)
 def fetch(ui, repo, source='default', **opts):
     '''pull changes from a remote repository, merge new changes if needed.
 
@@ -75,8 +85,10 @@
     except error.RepoLookupError:
         branchnode = None
     if parent != branchnode:
-        raise error.Abort(_('working directory not at branch tip'),
-                         hint=_("use 'hg update' to check out branch tip"))
+        raise error.Abort(
+            _('working directory not at branch tip'),
+            hint=_("use 'hg update' to check out branch tip"),
+        )
 
     wlock = lock = None
     try:
@@ -88,19 +100,26 @@
         bheads = repo.branchheads(branch)
         bheads = [head for head in bheads if len(repo[head].children()) == 0]
         if len(bheads) > 1:
-            raise error.Abort(_('multiple heads in this branch '
-                               '(use "hg heads ." and "hg merge" to merge)'))
+            raise error.Abort(
+                _(
+                    'multiple heads in this branch '
+                    '(use "hg heads ." and "hg merge" to merge)'
+                )
+            )
 
         other = hg.peer(repo, opts, ui.expandpath(source))
-        ui.status(_('pulling from %s\n') %
-                  util.hidepassword(ui.expandpath(source)))
+        ui.status(
+            _('pulling from %s\n') % util.hidepassword(ui.expandpath(source))
+        )
         revs = None
         if opts['rev']:
             try:
                 revs = [other.lookup(rev) for rev in opts['rev']]
             except error.CapabilityError:
-                err = _("other repository doesn't support revision lookup, "
-                        "so a rev cannot be specified.")
+                err = _(
+                    "other repository doesn't support revision lookup, "
+                    "so a rev cannot be specified."
+                )
                 raise error.Abort(err)
 
         # Are there any changes at all?
@@ -125,9 +144,13 @@
             hg.clean(repo, newparent)
         newheads = [n for n in newheads if n != newparent]
         if len(newheads) > 1:
-            ui.status(_('not merging with %d other new branch heads '
-                        '(use "hg heads ." and "hg merge" to merge them)\n') %
-                      (len(newheads) - 1))
+            ui.status(
+                _(
+                    'not merging with %d other new branch heads '
+                    '(use "hg heads ." and "hg merge" to merge them)\n'
+                )
+                % (len(newheads) - 1)
+            )
             return 1
 
         if not newheads:
@@ -143,25 +166,29 @@
                 firstparent, secondparent = newparent, newheads[0]
             else:
                 firstparent, secondparent = newheads[0], newparent
-                ui.status(_('updating to %d:%s\n') %
-                          (repo.changelog.rev(firstparent),
-                           short(firstparent)))
+                ui.status(
+                    _('updating to %d:%s\n')
+                    % (repo.changelog.rev(firstparent), short(firstparent))
+                )
             hg.clean(repo, firstparent)
-            ui.status(_('merging with %d:%s\n') %
-                      (repo.changelog.rev(secondparent), short(secondparent)))
+            ui.status(
+                _('merging with %d:%s\n')
+                % (repo.changelog.rev(secondparent), short(secondparent))
+            )
             err = hg.merge(repo, secondparent, remind=False)
 
         if not err:
             # we don't translate commit messages
-            message = (cmdutil.logmessage(ui, opts) or
-                       ('Automated merge with %s' %
-                        util.removeauth(other.url())))
+            message = cmdutil.logmessage(ui, opts) or (
+                'Automated merge with %s' % util.removeauth(other.url())
+            )
             editopt = opts.get('edit') or opts.get('force_editor')
             editor = cmdutil.getcommiteditor(edit=editopt, editform='fetch')
             n = repo.commit(message, opts['user'], opts['date'], editor=editor)
-            ui.status(_('new changeset %d:%s merges remote changes '
-                        'with local\n') % (repo.changelog.rev(n),
-                                           short(n)))
+            ui.status(
+                _('new changeset %d:%s merges remote changes ' 'with local\n')
+                % (repo.changelog.rev(n), short(n))
+            )
 
         return err
 
--- a/hgext/fix.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fix.py	Sun Oct 06 09:45:02 2019 -0400
@@ -189,26 +189,43 @@
 # problem.
 configitem('fix', 'failure', default='continue')
 
+
 def checktoolfailureaction(ui, message, hint=None):
     """Abort with 'message' if fix.failure=abort"""
     action = ui.config('fix', 'failure')
     if action not in ('continue', 'abort'):
-        raise error.Abort(_('unknown fix.failure action: %s') % (action,),
-                          hint=_('use "continue" or "abort"'))
+        raise error.Abort(
+            _('unknown fix.failure action: %s') % (action,),
+            hint=_('use "continue" or "abort"'),
+        )
     if action == 'abort':
         raise error.Abort(message, hint=hint)
 
+
 allopt = ('', 'all', False, _('fix all non-public non-obsolete revisions'))
-baseopt = ('', 'base', [], _('revisions to diff against (overrides automatic '
-                             'selection, and applies to every revision being '
-                             'fixed)'), _('REV'))
+baseopt = (
+    '',
+    'base',
+    [],
+    _(
+        'revisions to diff against (overrides automatic '
+        'selection, and applies to every revision being '
+        'fixed)'
+    ),
+    _('REV'),
+)
 revopt = ('r', 'rev', [], _('revisions to fix'), _('REV'))
 wdiropt = ('w', 'working-dir', False, _('fix the working directory'))
 wholeopt = ('', 'whole', False, _('always fix every line of a file'))
 usage = _('[OPTION]... [FILE]...')
 
-@command('fix', [allopt, baseopt, revopt, wdiropt, wholeopt], usage,
-        helpcategory=command.CATEGORY_FILE_CONTENTS)
+
+@command(
+    'fix',
+    [allopt, baseopt, revopt, wdiropt, wholeopt],
+    usage,
+    helpcategory=command.CATEGORY_FILE_CONTENTS,
+)
 def fix(ui, repo, *pats, **opts):
     """rewrite file content in changesets or working directory
 
@@ -241,8 +258,9 @@
     with repo.wlock(), repo.lock(), repo.transaction('fix'):
         revstofix = getrevstofix(ui, repo, opts)
         basectxs = getbasectxs(repo, opts, revstofix)
-        workqueue, numitems = getworkqueue(ui, repo, pats, opts, revstofix,
-                                           basectxs)
+        workqueue, numitems = getworkqueue(
+            ui, repo, pats, opts, revstofix, basectxs
+        )
         fixers = getfixers(ui)
 
         # There are no data dependencies between the workers fixing each file
@@ -251,14 +269,21 @@
             for rev, path in items:
                 ctx = repo[rev]
                 olddata = ctx[path].data()
-                metadata, newdata = fixfile(ui, repo, opts, fixers, ctx, path,
-                                            basectxs[rev])
+                metadata, newdata = fixfile(
+                    ui, repo, opts, fixers, ctx, path, basectxs[rev]
+                )
                 # Don't waste memory/time passing unchanged content back, but
                 # produce one result per item either way.
-                yield (rev, path, metadata,
-                       newdata if newdata != olddata else None)
-        results = worker.worker(ui, 1.0, getfixes, tuple(), workqueue,
-                                threadsafe=False)
+                yield (
+                    rev,
+                    path,
+                    metadata,
+                    newdata if newdata != olddata else None,
+                )
+
+        results = worker.worker(
+            ui, 1.0, getfixes, tuple(), workqueue, threadsafe=False
+        )
 
         # We have to hold on to the data for each successor revision in memory
         # until all its parents are committed. We ensure this by committing and
@@ -271,8 +296,9 @@
         replacements = {}
         wdirwritten = False
         commitorder = sorted(revstofix, reverse=True)
-        with ui.makeprogress(topic=_('fixing'), unit=_('files'),
-                             total=sum(numitems.values())) as progress:
+        with ui.makeprogress(
+            topic=_('fixing'), unit=_('files'), total=sum(numitems.values())
+        ) as progress:
             for rev, path, filerevmetadata, newdata in results:
                 progress.increment(item=path)
                 for fixername, fixermetadata in filerevmetadata.items():
@@ -280,12 +306,15 @@
                 if newdata is not None:
                     filedata[rev][path] = newdata
                     hookargs = {
-                      'rev': rev,
-                      'path': path,
-                      'metadata': filerevmetadata,
+                        'rev': rev,
+                        'path': path,
+                        'metadata': filerevmetadata,
                     }
-                    repo.hook('postfixfile', throw=False,
-                              **pycompat.strkwargs(hookargs))
+                    repo.hook(
+                        'postfixfile',
+                        throw=False,
+                        **pycompat.strkwargs(hookargs)
+                    )
                 numitems[rev] -= 1
                 # Apply the fixes for this and any other revisions that are
                 # ready and sitting at the front of the queue. Using a loop here
@@ -309,6 +338,7 @@
         }
         repo.hook('postfix', throw=True, **pycompat.strkwargs(hookargs))
 
+
 def cleanup(repo, replacements, wdirwritten):
     """Calls scmutil.cleanupnodes() with the given replacements.
 
@@ -325,6 +355,7 @@
     replacements = {prec: [succ] for prec, succ in replacements.iteritems()}
     scmutil.cleanupnodes(repo, replacements, 'fix', fixphase=True)
 
+
 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
     """"Constructs the list of files to be fixed at specific revisions
 
@@ -348,19 +379,23 @@
     for rev in sorted(revstofix):
         fixctx = repo[rev]
         match = scmutil.match(fixctx, pats, opts)
-        for path in sorted(pathstofix(
-                        ui, repo, pats, opts, match, basectxs[rev], fixctx)):
+        for path in sorted(
+            pathstofix(ui, repo, pats, opts, match, basectxs[rev], fixctx)
+        ):
             fctx = fixctx[path]
             if fctx.islink():
                 continue
             if fctx.size() > maxfilesize:
-                ui.warn(_('ignoring file larger than %s: %s\n') %
-                        (util.bytecount(maxfilesize), path))
+                ui.warn(
+                    _('ignoring file larger than %s: %s\n')
+                    % (util.bytecount(maxfilesize), path)
+                )
                 continue
             workqueue.append((rev, path))
             numitems[rev] += 1
     return workqueue, numitems
 
+
 def getrevstofix(ui, repo, opts):
     """Returns the set of revision numbers that should be fixed"""
     revs = set(scmutil.revrange(repo, opts['rev']))
@@ -375,27 +410,35 @@
             raise error.Abort('unresolved conflicts', hint="use 'hg resolve'")
     if not revs:
         raise error.Abort(
-            'no changesets specified', hint='use --rev or --working-dir')
+            'no changesets specified', hint='use --rev or --working-dir'
+        )
     return revs
 
+
 def checknodescendants(repo, revs):
-    if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
-        repo.revs('(%ld::) - (%ld)', revs, revs)):
-        raise error.Abort(_('can only fix a changeset together '
-                            'with all its descendants'))
+    if not obsolete.isenabled(repo, obsolete.allowunstableopt) and repo.revs(
+        '(%ld::) - (%ld)', revs, revs
+    ):
+        raise error.Abort(
+            _('can only fix a changeset together ' 'with all its descendants')
+        )
+
 
 def checkfixablectx(ui, repo, ctx):
     """Aborts if the revision shouldn't be replaced with a fixed one."""
     if not ctx.mutable():
-        raise error.Abort('can\'t fix immutable changeset %s' %
-                          (scmutil.formatchangeid(ctx),))
+        raise error.Abort(
+            'can\'t fix immutable changeset %s' % (scmutil.formatchangeid(ctx),)
+        )
     if ctx.obsolete():
         # It would be better to actually check if the revision has a successor.
-        allowdivergence = ui.configbool('experimental',
-                                        'evolution.allowdivergence')
+        allowdivergence = ui.configbool(
+            'experimental', 'evolution.allowdivergence'
+        )
         if not allowdivergence:
             raise error.Abort('fixing obsolete revision could cause divergence')
 
+
 def pathstofix(ui, repo, pats, opts, match, basectxs, fixctx):
     """Returns the set of files that should be fixed in a context
 
@@ -405,13 +448,19 @@
     """
     files = set()
     for basectx in basectxs:
-        stat = basectx.status(fixctx, match=match, listclean=bool(pats),
-                              listunknown=bool(pats))
+        stat = basectx.status(
+            fixctx, match=match, listclean=bool(pats), listunknown=bool(pats)
+        )
         files.update(
-            set(itertools.chain(stat.added, stat.modified, stat.clean,
-                                stat.unknown)))
+            set(
+                itertools.chain(
+                    stat.added, stat.modified, stat.clean, stat.unknown
+                )
+            )
+        )
     return files
 
+
 def lineranges(opts, path, basectxs, fixctx, content2):
     """Returns the set of line ranges that should be fixed in a file
 
@@ -439,6 +488,7 @@
         rangeslist.extend(difflineranges(content1, content2))
     return unionranges(rangeslist)
 
+
 def unionranges(rangeslist):
     """Return the union of some closed intervals
 
@@ -473,6 +523,7 @@
             unioned[-1] = (c, max(b, d))
     return unioned
 
+
 def difflineranges(content1, content2):
     """Return list of line number ranges in content2 that differ from content1.
 
@@ -519,6 +570,7 @@
             ranges.append((firstline + 1, lastline))
     return ranges
 
+
 def getbasectxs(repo, opts, revstofix):
     """Returns a map of the base contexts for each revision
 
@@ -548,6 +600,7 @@
                 basectxs[rev].add(pctx)
     return basectxs
 
+
 def fixfile(ui, repo, opts, fixers, fixctx, path, basectxs):
     """Run any configured fixers that should affect the file in this context
 
@@ -575,7 +628,8 @@
                 cwd=repo.root,
                 stdin=subprocess.PIPE,
                 stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
+                stderr=subprocess.PIPE,
+            )
             stdout, stderr = proc.communicate(newdata)
             if stderr:
                 showstderr(ui, fixctx.rev(), fixername, stderr)
@@ -585,8 +639,10 @@
                     metadatajson, newerdata = stdout.split('\0', 1)
                     metadata[fixername] = json.loads(metadatajson)
                 except ValueError:
-                    ui.warn(_('ignored invalid output from fixer tool: %s\n') %
-                            (fixername,))
+                    ui.warn(
+                        _('ignored invalid output from fixer tool: %s\n')
+                        % (fixername,)
+                    )
                     continue
             else:
                 metadata[fixername] = None
@@ -597,11 +653,16 @@
                     message = _('exited with status %d\n') % (proc.returncode,)
                     showstderr(ui, fixctx.rev(), fixername, message)
                 checktoolfailureaction(
-                    ui, _('no fixes will be applied'),
-                    hint=_('use --config fix.failure=continue to apply any '
-                           'successful fixes anyway'))
+                    ui,
+                    _('no fixes will be applied'),
+                    hint=_(
+                        'use --config fix.failure=continue to apply any '
+                        'successful fixes anyway'
+                    ),
+                )
     return metadata, newdata
 
+
 def showstderr(ui, rev, fixername, stderr):
     """Writes the lines of the stderr string as warnings on the ui
 
@@ -612,12 +673,13 @@
     """
     for line in re.split('[\r\n]+', stderr):
         if line:
-            ui.warn(('['))
+            ui.warn('[')
             if rev is None:
                 ui.warn(_('wdir'), label='evolve.rev')
             else:
                 ui.warn((str(rev)), label='evolve.rev')
-            ui.warn(('] %s: %s\n') % (fixername, line))
+            ui.warn('] %s: %s\n' % (fixername, line))
+
 
 def writeworkingdir(repo, ctx, filedata, replacements):
     """Write new content to the working copy and check out the new p1 if any
@@ -640,6 +702,7 @@
     if newparentnodes != oldparentnodes:
         repo.setparents(*newparentnodes)
 
+
 def replacerev(ui, repo, ctx, filedata, replacements):
     """Commit a new revision like the given one, but with file content changes
 
@@ -671,9 +734,11 @@
     # intervention to evolve. We can't rely on commit() to avoid creating the
     # un-needed revision because the extra field added below produces a new hash
     # regardless of file content changes.
-    if (not filedata and
-        p1ctx.node() not in replacements and
-        p2ctx.node() not in replacements):
+    if (
+        not filedata
+        and p1ctx.node() not in replacements
+        and p2ctx.node() not in replacements
+    ):
         return
 
     def filectxfn(repo, memctx, path):
@@ -688,7 +753,8 @@
             data=filedata.get(path, fctx.data()),
             islink=fctx.islink(),
             isexec=fctx.isexec(),
-            copysource=copysource)
+            copysource=copysource,
+        )
 
     extra = ctx.extra().copy()
     extra['fix_source'] = ctx.hex()
@@ -703,7 +769,8 @@
         date=ctx.date(),
         extra=extra,
         branch=ctx.branch(),
-        editor=None)
+        editor=None,
+    )
     sucnode = memctx.commit()
     prenode = ctx.node()
     if prenode == sucnode:
@@ -711,6 +778,7 @@
     else:
         replacements[ctx.node()] = sucnode
 
+
 def getfixers(ui):
     """Returns a map of configured fixer tools indexed by their names
 
@@ -722,8 +790,11 @@
         fixers[name] = Fixer()
         attrs = ui.configsuboptions('fix', name)[1]
         for key, default in FIXER_ATTRS.items():
-            setattr(fixers[name], pycompat.sysstr('_' + key),
-                    attrs.get(key, default))
+            setattr(
+                fixers[name],
+                pycompat.sysstr('_' + key),
+                attrs.get(key, default),
+            )
         fixers[name]._priority = int(fixers[name]._priority)
         fixers[name]._metadata = stringutil.parsebool(fixers[name]._metadata)
         fixers[name]._skipclean = stringutil.parsebool(fixers[name]._skipclean)
@@ -734,14 +805,16 @@
         # default.
         if fixers[name]._pattern is None:
             ui.warn(
-                _('fixer tool has no pattern configuration: %s\n') % (name,))
+                _('fixer tool has no pattern configuration: %s\n') % (name,)
+            )
             del fixers[name]
         elif not fixers[name]._enabled:
             ui.debug('ignoring disabled fixer tool: %s\n' % (name,))
             del fixers[name]
     return collections.OrderedDict(
-        sorted(fixers.items(), key=lambda item: item[1]._priority,
-               reverse=True))
+        sorted(fixers.items(), key=lambda item: item[1]._priority, reverse=True)
+    )
+
 
 def fixernames(ui):
     """Returns the names of [fix] config options that have suboptions"""
@@ -751,13 +824,15 @@
             names.add(k.split(':', 1)[0])
     return names
 
+
 class Fixer(object):
     """Wraps the raw config values for a fixer with methods"""
 
     def affects(self, opts, fixctx, path):
         """Should this fixer run on the file at the given path and context?"""
-        return (self._pattern is not None and
-                scmutil.match(fixctx, [self._pattern], opts)(path))
+        return self._pattern is not None and scmutil.match(
+            fixctx, [self._pattern], opts
+        )(path)
 
     def shouldoutputmetadata(self):
         """Should the stdout of this fixer start with JSON and a null byte?"""
@@ -770,13 +845,19 @@
         parameters.
         """
         expand = cmdutil.rendercommandtemplate
-        parts = [expand(ui, self._command,
-                        {'rootpath': path, 'basename': os.path.basename(path)})]
+        parts = [
+            expand(
+                ui,
+                self._command,
+                {'rootpath': path, 'basename': os.path.basename(path)},
+            )
+        ]
         if self._linerange:
             if self._skipclean and not ranges:
                 # No line ranges to fix, so don't run the fixer.
                 return None
             for first, last in ranges:
-                parts.append(expand(ui, self._linerange,
-                                    {'first': first, 'last': last}))
+                parts.append(
+                    expand(ui, self._linerange, {'first': first, 'last': last})
+                )
         return ' '.join(parts)
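
The hunks above follow one mechanical pattern: a call whose arguments no
longer fit on one line is exploded so the parentheses stand alone, and two
blank lines now separate top-level definitions. A minimal runnable sketch of
the wrapping shapes, using hypothetical names rather than anything from the
diff:

def report(topic, unit, total, verbose, color):
    # Stand-in for long calls such as ui.makeprogress() in the hunks above.
    return (topic, unit, total, verbose, color)


# Hand-wrapped continuation style that the hunks above remove:
r1 = report('fixing', 'files', 100, True,
            'auto')

# Shape introduced when the whole argument list fits on one indented line:
r2 = report(
    'fixing', 'files', 100, True, 'auto'
)

# Shape used when even that is too long: one argument per line plus a
# trailing comma.
r3 = report(
    'fixing',
    'files',
    100,
    True,
    'auto',
)
assert r1 == r2 == r3
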
--- a/hgext/fsmonitor/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fsmonitor/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -116,9 +116,7 @@
 import weakref
 
 from mercurial.i18n import _
-from mercurial.node import (
-    hex,
-)
+from mercurial.node import hex
 
 from mercurial import (
     context,
@@ -150,50 +148,60 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('fsmonitor', 'mode',
-    default='on',
+configitem(
+    'fsmonitor', 'mode', default='on',
 )
-configitem('fsmonitor', 'walk_on_invalidate',
-    default=False,
+configitem(
+    'fsmonitor', 'walk_on_invalidate', default=False,
 )
-configitem('fsmonitor', 'timeout',
-    default='2',
+configitem(
+    'fsmonitor', 'timeout', default='2',
 )
-configitem('fsmonitor', 'blacklistusers',
-    default=list,
+configitem(
+    'fsmonitor', 'blacklistusers', default=list,
 )
-configitem('fsmonitor', 'watchman_exe',
-    default='watchman',
+configitem(
+    'fsmonitor', 'watchman_exe', default='watchman',
 )
-configitem('fsmonitor', 'verbose',
-    default=True,
-    experimental=True,
+configitem(
+    'fsmonitor', 'verbose', default=True, experimental=True,
 )
-configitem('experimental', 'fsmonitor.transaction_notify',
-    default=False,
+configitem(
+    'experimental', 'fsmonitor.transaction_notify', default=False,
 )
 
 # This extension is incompatible with the following blacklisted extensions
 # and will disable itself when encountering one of these:
 _blacklist = ['largefiles', 'eol']
 
+
 def debuginstall(ui, fm):
-    fm.write("fsmonitor-watchman",
-             _("fsmonitor checking for watchman binary... (%s)\n"),
-               ui.configpath("fsmonitor", "watchman_exe"))
+    fm.write(
+        "fsmonitor-watchman",
+        _("fsmonitor checking for watchman binary... (%s)\n"),
+        ui.configpath("fsmonitor", "watchman_exe"),
+    )
     root = tempfile.mkdtemp()
     c = watchmanclient.client(ui, root)
     err = None
     try:
         v = c.command("version")
-        fm.write("fsmonitor-watchman-version",
-                 _(" watchman binary version %s\n"), v["version"])
+        fm.write(
+            "fsmonitor-watchman-version",
+            _(" watchman binary version %s\n"),
+            v["version"],
+        )
     except watchmanclient.Unavailable as e:
         err = str(e)
-    fm.condwrite(err, "fsmonitor-watchman-error",
-                 _(" watchman binary missing or broken: %s\n"), err)
+    fm.condwrite(
+        err,
+        "fsmonitor-watchman-error",
+        _(" watchman binary missing or broken: %s\n"),
+        err,
+    )
     return 1 if err else 0
 
+
 def _handleunavailable(ui, state, ex):
     """Exception handler for Watchman interaction exceptions"""
     if isinstance(ex, watchmanclient.Unavailable):
@@ -209,6 +217,7 @@
     else:
         ui.log('fsmonitor', 'Watchman exception: %s\n', ex)
 
+
 def _hashignore(ignore):
     """Calculate hash for ignore patterns and filenames
 
@@ -221,10 +230,12 @@
     sha1.update(repr(ignore))
     return sha1.hexdigest()
 
+
 _watchmanencoding = pywatchman.encoding.get_local_encoding()
 _fsencoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
 _fixencoding = codecs.lookup(_watchmanencoding) != codecs.lookup(_fsencoding)
 
+
 def _watchmantofsencoding(path):
     """Fix path to match watchman and local filesystem encoding
 
@@ -243,12 +254,14 @@
 
     return encoded
 
+
 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
     '''Replacement for dirstate.walk, hooking into Watchman.
 
     Whenever full is False, ignored is False, and the Watchman client is
     available, use Watchman combined with saved state to possibly return only a
     subset of files.'''
+
     def bail(reason):
         self._ui.debug('fsmonitor: fallback to core status, %s\n' % reason)
         return orig(match, subrepos, unknown, ignored, full=True)
@@ -335,18 +348,19 @@
         # Add a little slack over the top of the user query to allow for
         # overheads while transferring the data
         self._watchmanclient.settimeout(state.timeout + 0.1)
-        result = self._watchmanclient.command('query', {
-            'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
-            'since': clock,
-            'expression': [
-                'not', [
-                    'anyof', ['dirname', '.hg'],
-                    ['name', '.hg', 'wholename']
-                ]
-            ],
-            'sync_timeout': int(state.timeout * 1000),
-            'empty_on_fresh_instance': state.walk_on_invalidate,
-        })
+        result = self._watchmanclient.command(
+            'query',
+            {
+                'fields': ['mode', 'mtime', 'size', 'exists', 'name'],
+                'since': clock,
+                'expression': [
+                    'not',
+                    ['anyof', ['dirname', '.hg'], ['name', '.hg', 'wholename']],
+                ],
+                'sync_timeout': int(state.timeout * 1000),
+                'empty_on_fresh_instance': state.walk_on_invalidate,
+            },
+        )
     except Exception as ex:
         _handleunavailable(self._ui, state, ex)
         self._watchmanclient.clearconnection()
@@ -398,8 +412,11 @@
             # record, mark it as deleted.  If we already have an entry
             # for fname then it was either part of walkexplicit or was
             # an earlier result that was a case change
-            if fname not in results and fname in dmap and (
-                    matchalways or matchfn(fname)):
+            if (
+                fname not in results
+                and fname in dmap
+                and (matchalways or matchfn(fname))
+            ):
                 results[fname] = None
         elif kind == dirkind:
             if fname in dmap and (matchalways or matchfn(fname)):
@@ -418,29 +435,43 @@
     if normalize:
         # any notable files that have changed case will already be handled
         # above, so just check membership in the foldmap
-        notefiles = set((normalize(f, True, True) for f in notefiles
-                         if normcase(f) not in foldmap))
-    visit = set((f for f in notefiles if (f not in results and matchfn(f)
-                                          and (f in dmap or not ignore(f)))))
+        notefiles = set(
+            (
+                normalize(f, True, True)
+                for f in notefiles
+                if normcase(f) not in foldmap
+            )
+        )
+    visit = set(
+        (
+            f
+            for f in notefiles
+            if (
+                f not in results and matchfn(f) and (f in dmap or not ignore(f))
+            )
+        )
+    )
 
     if not fresh_instance:
         if matchalways:
             visit.update(f for f in nonnormalset if f not in results)
             visit.update(f for f in copymap if f not in results)
         else:
-            visit.update(f for f in nonnormalset
-                         if f not in results and matchfn(f))
-            visit.update(f for f in copymap
-                         if f not in results and matchfn(f))
+            visit.update(
+                f for f in nonnormalset if f not in results and matchfn(f)
+            )
+            visit.update(f for f in copymap if f not in results and matchfn(f))
     else:
         if matchalways:
             visit.update(f for f, st in dmap.iteritems() if f not in results)
             visit.update(f for f in copymap if f not in results)
         else:
-            visit.update(f for f, st in dmap.iteritems()
-                         if f not in results and matchfn(f))
-            visit.update(f for f in copymap
-                         if f not in results and matchfn(f))
+            visit.update(
+                f
+                for f, st in dmap.iteritems()
+                if f not in results and matchfn(f)
+            )
+            visit.update(f for f in copymap if f not in results and matchfn(f))
 
     audit = pathutil.pathauditor(self._root, cached=True).check
     auditpass = [f for f in visit if audit(f)]
@@ -460,9 +491,18 @@
     del results['.hg']
     return results
 
+
 def overridestatus(
-        orig, self, node1='.', node2=None, match=None, ignored=False,
-        clean=False, unknown=False, listsubrepos=False):
+    orig,
+    self,
+    node1='.',
+    node2=None,
+    match=None,
+    ignored=False,
+    clean=False,
+    unknown=False,
+    listsubrepos=False,
+):
     listignored = ignored
     listclean = clean
     listunknown = unknown
@@ -508,10 +548,12 @@
     # HG_PENDING is set in the environment when the dirstate is being updated
     # in the middle of a transaction; we must not update our state in that
     # case, or we risk forgetting about changes in the working copy.
-    updatestate = (parentworking and match.always() and
-                   not isinstance(ctx2, (context.workingcommitctx,
-                                         context.memctx)) and
-                   'HG_PENDING' not in encoding.environ)
+    updatestate = (
+        parentworking
+        and match.always()
+        and not isinstance(ctx2, (context.workingcommitctx, context.memctx))
+        and 'HG_PENDING' not in encoding.environ
+    )
 
     try:
         if self._fsmonitorstate.walk_on_invalidate:
@@ -528,15 +570,21 @@
             # and return the initial clock.  In this mode we assume that
             # the filesystem will be slower than parsing a potentially
             # very large Watchman result set.
-            self._watchmanclient.settimeout(
-                self._fsmonitorstate.timeout + 0.1)
+            self._watchmanclient.settimeout(self._fsmonitorstate.timeout + 0.1)
         startclock = self._watchmanclient.getcurrentclock()
     except Exception as ex:
         self._watchmanclient.clearconnection()
         _handleunavailable(self.ui, self._fsmonitorstate, ex)
         # boo, Watchman failed. bail
-        return orig(node1, node2, match, listignored, listclean,
-                    listunknown, listsubrepos)
+        return orig(
+            node1,
+            node2,
+            match,
+            listignored,
+            listclean,
+            listunknown,
+            listsubrepos,
+        )
 
     if updatestate:
         # We need info about unknown files. This may make things slower the
@@ -549,8 +597,9 @@
         ps = poststatus(startclock)
         self.addpostdsstatus(ps)
 
-    r = orig(node1, node2, match, listignored, listclean, stateunknown,
-             listsubrepos)
+    r = orig(
+        node1, node2, match, listignored, listclean, stateunknown, listsubrepos
+    )
     modified, added, removed, deleted, unknown, ignored, clean = r
 
     if not listunknown:
@@ -570,8 +619,14 @@
 
         try:
             rv2 = orig(
-                node1, node2, match, listignored, listclean, listunknown,
-                listsubrepos)
+                node1,
+                node2,
+                match,
+                listignored,
+                listclean,
+                listunknown,
+                listsubrepos,
+            )
         finally:
             self.dirstate._fsmonitordisable = False
             self.ui.quiet = quiet
@@ -581,11 +636,14 @@
         with self.wlock():
             _cmpsets(
                 [modified, added, removed, deleted, unknown, ignored, clean],
-                rv2)
+                rv2,
+            )
         modified, added, removed, deleted, unknown, ignored, clean = rv2
 
     return scmutil.status(
-        modified, added, removed, deleted, unknown, ignored, clean)
+        modified, added, removed, deleted, unknown, ignored, clean
+    )
+
 
 class poststatus(object):
     def __init__(self, startclock):
@@ -594,10 +652,16 @@
     def __call__(self, wctx, status):
         clock = wctx.repo()._fsmonitorstate.getlastclock() or self._startclock
         hashignore = _hashignore(wctx.repo().dirstate._ignore)
-        notefiles = (status.modified + status.added + status.removed +
-                     status.deleted + status.unknown)
+        notefiles = (
+            status.modified
+            + status.added
+            + status.removed
+            + status.deleted
+            + status.unknown
+        )
         wctx.repo()._fsmonitorstate.set(clock, hashignore, notefiles)
 
+
 def makedirstate(repo, dirstate):
     class fsmonitordirstate(dirstate.__class__):
         def _fsmonitorinit(self, repo):
@@ -624,6 +688,7 @@
     dirstate.__class__ = fsmonitordirstate
     dirstate._fsmonitorinit(repo)
 
+
 def wrapdirstate(orig, self):
     ds = orig(self)
     # only override the dirstate when Watchman is available for the repo
@@ -631,15 +696,18 @@
         makedirstate(self, ds)
     return ds
 
+
 def extsetup(ui):
     extensions.wrapfilecache(
-        localrepo.localrepository, 'dirstate', wrapdirstate)
+        localrepo.localrepository, 'dirstate', wrapdirstate
+    )
     if pycompat.isdarwin:
         # An assist for avoiding the dangling-symlink fsevents bug
         extensions.wrapfunction(os, 'symlink', wrapsymlink)
 
     extensions.wrapfunction(merge, 'update', wrapupdate)
 
+
 def wrapsymlink(orig, source, link_name):
     ''' if we create a dangling symlink, also touch the parent dir
     to encourage fsevents notifications to work more correctly '''
@@ -651,6 +719,7 @@
         except OSError:
             pass
 
+
 class state_update(object):
     ''' This context manager is responsible for dispatching the state-enter
         and state-leave signals to the watchman service. The enter and leave
@@ -660,8 +729,15 @@
         leave, respectively. Similarly, if the distance is none, it will be
         calculated based on the oldnode and newnode in the leave method.'''
 
-    def __init__(self, repo, name, oldnode=None, newnode=None, distance=None,
-                 partial=False):
+    def __init__(
+        self,
+        repo,
+        name,
+        oldnode=None,
+        newnode=None,
+        distance=None,
+        partial=False,
+    ):
         self.repo = repo.unfiltered()
         self.name = name
         self.oldnode = oldnode
@@ -687,9 +763,7 @@
                 self._lock = self.repo.wlocknostateupdate()
             else:
                 self._lock = self.repo.wlock()
-        self.need_leave = self._state(
-            'state-enter',
-            hex(self.oldnode))
+        self.need_leave = self._state('state-enter', hex(self.oldnode))
         return self
 
     def __exit__(self, type_, value, tb):
@@ -704,11 +778,9 @@
                     self.newnode = self.repo['.'].node()
                 if self.distance is None:
                     self.distance = calcdistance(
-                        self.repo, self.oldnode, self.newnode)
-                self._state(
-                    'state-leave',
-                    hex(self.newnode),
-                    status=status)
+                        self.repo, self.oldnode, self.newnode
+                    )
+                self._state('state-leave', hex(self.newnode), status=status)
         finally:
             self.need_leave = False
             if self._lock:
@@ -718,39 +790,57 @@
         if not util.safehasattr(self.repo, '_watchmanclient'):
             return False
         try:
-            self.repo._watchmanclient.command(cmd, {
-                'name': self.name,
-                'metadata': {
-                    # the target revision
-                    'rev': commithash,
-                    # approximate number of commits between current and target
-                    'distance': self.distance if self.distance else 0,
-                    # success/failure (only really meaningful for state-leave)
-                    'status': status,
-                    # whether the working copy parent is changing
-                    'partial': self.partial,
-            }})
+            self.repo._watchmanclient.command(
+                cmd,
+                {
+                    'name': self.name,
+                    'metadata': {
+                        # the target revision
+                        'rev': commithash,
+                        # approximate number of commits between current and target
+                        'distance': self.distance if self.distance else 0,
+                        # success/failure (only really meaningful for state-leave)
+                        'status': status,
+                        # whether the working copy parent is changing
+                        'partial': self.partial,
+                    },
+                },
+            )
             return True
         except Exception as e:
             # Swallow any errors; fire and forget
             self.repo.ui.log(
-                'watchman', 'Exception %s while running %s\n', e, cmd)
+                'watchman', 'Exception %s while running %s\n', e, cmd
+            )
             return False
 
+
 # Estimate the distance between two nodes
 def calcdistance(repo, oldnode, newnode):
     anc = repo.changelog.ancestor(oldnode, newnode)
     ancrev = repo[anc].rev()
-    distance = (abs(repo[oldnode].rev() - ancrev)
-        + abs(repo[newnode].rev() - ancrev))
+    distance = abs(repo[oldnode].rev() - ancrev) + abs(
+        repo[newnode].rev() - ancrev
+    )
     return distance
 
+
 # Bracket working copy updates with calls to the watchman state-enter
 # and state-leave commands.  This allows clients to perform more intelligent
 # settling during bulk file change scenarios
 # https://facebook.github.io/watchman/docs/cmd/subscribe.html#advanced-settling
-def wrapupdate(orig, repo, node, branchmerge, force, ancestor=None,
-               mergeancestor=False, labels=None, matcher=None, **kwargs):
+def wrapupdate(
+    orig,
+    repo,
+    node,
+    branchmerge,
+    force,
+    ancestor=None,
+    mergeancestor=False,
+    labels=None,
+    matcher=None,
+    **kwargs
+):
 
     distance = 0
     partial = True
@@ -760,11 +850,26 @@
         partial = False
         distance = calcdistance(repo.unfiltered(), oldnode, newnode)
 
-    with state_update(repo, name="hg.update", oldnode=oldnode, newnode=newnode,
-                      distance=distance, partial=partial):
+    with state_update(
+        repo,
+        name="hg.update",
+        oldnode=oldnode,
+        newnode=newnode,
+        distance=distance,
+        partial=partial,
+    ):
         return orig(
-            repo, node, branchmerge, force, ancestor, mergeancestor,
-            labels, matcher, **kwargs)
+            repo,
+            node,
+            branchmerge,
+            force,
+            ancestor,
+            mergeancestor,
+            labels,
+            matcher,
+            **kwargs
+        )
+
 
 def repo_has_depth_one_nested_repo(repo):
     for f in repo.wvfs.listdir():
@@ -774,13 +879,19 @@
             return True
     return False
 
+
 def reposetup(ui, repo):
     # We don't work with largefiles or inotify
     exts = extensions.enabled()
     for ext in _blacklist:
         if ext in exts:
-            ui.warn(_('The fsmonitor extension is incompatible with the %s '
-                      'extension and has been disabled.\n') % ext)
+            ui.warn(
+                _(
+                    'The fsmonitor extension is incompatible with the %s '
+                    'extension and has been disabled.\n'
+                )
+                % ext
+            )
             return
 
     if repo.local():
@@ -824,7 +935,8 @@
             def wlock(self, *args, **kwargs):
                 l = super(fsmonitorrepo, self).wlock(*args, **kwargs)
                 if not ui.configbool(
-                    "experimental", "fsmonitor.transaction_notify"):
+                    "experimental", "fsmonitor.transaction_notify"
+                ):
                     return l
                 if l.held != 1:
                     return l
@@ -844,8 +956,7 @@
                     l.releasefn = staterelease
                 except Exception as e:
                     # Swallow any errors; fire and forget
-                    self.ui.log(
-                        'watchman', 'Exception in state update %s\n', e)
+                    self.ui.log('watchman', 'Exception in state update %s\n', e)
                 return l
 
         repo.__class__ = fsmonitorrepo
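
Long boolean conditions get the same treatment as long argument lists in the
hunks above (see checknodescendants and the updatestate assignment): the
chain is wrapped in parentheses with one operand per line, each continuation
line starting with the operator. A runnable sketch with made-up names:

parentworking = True
matchalways = True
pending = False

# Continuation style removed above:
updatestate = (parentworking and matchalways and
               not pending)

# Parenthesized, operator-first style introduced above:
updatestate = (
    parentworking
    and matchalways
    and not pending
)
assert updatestate
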
--- a/hgext/fsmonitor/state.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fsmonitor/state.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,6 +21,7 @@
 _version = 4
 _versionformat = ">I"
 
+
 class state(object):
     def __init__(self, repo):
         self._vfs = repo.vfs
@@ -31,7 +32,8 @@
 
         self.mode = self._ui.config('fsmonitor', 'mode')
         self.walk_on_invalidate = self._ui.configbool(
-            'fsmonitor', 'walk_on_invalidate')
+            'fsmonitor', 'walk_on_invalidate'
+        )
         self.timeout = float(self._ui.config('fsmonitor', 'timeout'))
 
     def get(self):
@@ -48,8 +50,10 @@
         versionbytes = file.read(4)
         if len(versionbytes) < 4:
             self._ui.log(
-                'fsmonitor', 'fsmonitor: state file only has %d bytes, '
-                'nuking state\n' % len(versionbytes))
+                'fsmonitor',
+                'fsmonitor: state file only has %d bytes, '
+                'nuking state\n' % len(versionbytes),
+            )
             self.invalidate()
             return None, None, None
         try:
@@ -57,8 +61,10 @@
             if diskversion != _version:
                 # different version, nuke state and start over
                 self._ui.log(
-                    'fsmonitor', 'fsmonitor: version switch from %d to '
-                    '%d, nuking state\n' % (diskversion, _version))
+                    'fsmonitor',
+                    'fsmonitor: version switch from %d to '
+                    '%d, nuking state\n' % (diskversion, _version),
+                )
                 self.invalidate()
                 return None, None, None
 
@@ -67,17 +73,23 @@
             # followed by a \0
             if len(state) < 3:
                 self._ui.log(
-                    'fsmonitor', 'fsmonitor: state file truncated (expected '
-                    '3 chunks, found %d), nuking state\n', len(state))
+                    'fsmonitor',
+                    'fsmonitor: state file truncated (expected '
+                    '3 chunks, found %d), nuking state\n',
+                    len(state),
+                )
                 self.invalidate()
                 return None, None, None
             diskhostname = state[0]
             hostname = socket.gethostname()
             if diskhostname != hostname:
                 # file got moved to a different host
-                self._ui.log('fsmonitor', 'fsmonitor: stored hostname "%s" '
-                             'different from current "%s", nuking state\n' %
-                             (diskhostname, hostname))
+                self._ui.log(
+                    'fsmonitor',
+                    'fsmonitor: stored hostname "%s" '
+                    'different from current "%s", nuking state\n'
+                    % (diskhostname, hostname),
+                )
                 self.invalidate()
                 return None, None, None
 
@@ -104,8 +116,9 @@
             return
 
         try:
-            file = self._vfs('fsmonitor.state', 'wb', atomictemp=True,
-                checkambig=True)
+            file = self._vfs(
+                'fsmonitor.state', 'wb', atomictemp=True, checkambig=True
+            )
         except (IOError, OSError):
             self._ui.warn(_("warning: unable to write out fsmonitor state\n"))
             return
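
Parenthesized imports of a single name collapse to one line, as in the
'from mercurial.node import hex' hunk earlier and the
'from mercurial.utils import procutil' hunk below. A self-contained sketch
using a standard-library name:

# Multi-line form used when a lone name sat inside parentheses:
#     from os.path import (
#         basename,
#     )

# Collapsed single-line form:
from os.path import basename

assert basename('/tmp/example.txt') == 'example.txt'
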
--- a/hgext/fsmonitor/watchmanclient.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/fsmonitor/watchmanclient.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,7 @@
 
 from . import pywatchman
 
+
 class Unavailable(Exception):
     def __init__(self, msg, warn=True, invalidate=False):
         self.msg = msg
@@ -27,11 +28,13 @@
         else:
             return 'Watchman unavailable: %s' % self.msg
 
+
 class WatchmanNoRoot(Unavailable):
     def __init__(self, root, msg):
         self.root = root
         super(WatchmanNoRoot, self).__init__(msg)
 
+
 class client(object):
     def __init__(self, ui, root, timeout=1.0):
         err = None
@@ -59,8 +62,9 @@
     def getcurrentclock(self):
         result = self.command('clock')
         if not util.safehasattr(result, 'clock'):
-            raise Unavailable('clock result is missing clock value',
-                              invalidate=True)
+            raise Unavailable(
+                'clock result is missing clock value', invalidate=True
+            )
         return result.clock
 
     def clearconnection(self):
@@ -86,7 +90,8 @@
                 self._watchmanclient = pywatchman.client(
                     timeout=self._timeout,
                     useImmutableBser=True,
-                    watchman_exe=watchman_exe)
+                    watchman_exe=watchman_exe,
+                )
             return self._watchmanclient.query(*watchmanargs)
         except pywatchman.CommandError as ex:
             if 'unable to resolve root' in ex.msg:
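
A subtler whitespace rule shows up in the checkout hunk below: when a slice
bound is a compound expression such as sepindex + 1, a space appears before
the colon, following PEP 8's advice to treat the slice colon like a binary
operator and space it symmetrically around complex operands. A tiny runnable
example:

args = ['rev', '--', 'file1', 'file2']
sepindex = args.index('--')

# Old spelling removed below:      args[sepindex + 1:]
# New spelling introduced below:   args[sepindex + 1 :]
paths = args[sepindex + 1 :]
assert paths == ['file1', 'file2']
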
--- a/hgext/githelp.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/githelp.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,9 +29,7 @@
     registrar,
     scmutil,
 )
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -42,6 +40,7 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 def convert(s):
     if s.startswith("origin/"):
         return s[7:]
@@ -51,30 +50,37 @@
     s = re.sub('~$', '~1', s)
     return s
 
-@command('githelp|git', [
-    ], _('hg githelp'),
-    helpcategory=command.CATEGORY_HELP, helpbasic=True)
+
+@command(
+    'githelp|git',
+    [],
+    _('hg githelp'),
+    helpcategory=command.CATEGORY_HELP,
+    helpbasic=True,
+)
 def githelp(ui, repo, *args, **kwargs):
     '''suggests the Mercurial equivalent of the given git command
 
     Usage: hg githelp -- <git command>
     '''
 
-    if len(args) == 0 or (len(args) == 1 and args[0] =='git'):
-        raise error.Abort(_('missing git command - '
-                            'usage: hg githelp -- <git command>'))
+    if len(args) == 0 or (len(args) == 1 and args[0] == 'git'):
+        raise error.Abort(
+            _('missing git command - ' 'usage: hg githelp -- <git command>')
+        )
 
     if args[0] == 'git':
         args = args[1:]
 
     cmd = args[0]
     if not cmd in gitcommands:
-        raise error.Abort(_("error: unknown git command %s") % (cmd))
+        raise error.Abort(_("error: unknown git command %s") % cmd)
 
     ui.pager('githelp')
     args = args[1:]
     return gitcommands[cmd](ui, repo, *args, **kwargs)
 
+
 def parseoptions(ui, cmdoptions, args):
     cmdoptions = list(cmdoptions)
     opts = {}
@@ -91,24 +97,32 @@
             elif (r'-' + ex.opt) in ex.msg:
                 flag = '-' + pycompat.bytestr(ex.opt)
             else:
-                raise error.Abort(_("unknown option %s") %
-                                  pycompat.bytestr(ex.opt))
+                raise error.Abort(
+                    _("unknown option %s") % pycompat.bytestr(ex.opt)
+                )
             try:
                 args.remove(flag)
             except Exception:
                 msg = _("unknown option '%s' packed with other options")
                 hint = _("please try passing the option as its own flag: -%s")
-                raise error.Abort(msg % pycompat.bytestr(ex.opt),
-                                  hint=hint % pycompat.bytestr(ex.opt))
+                raise error.Abort(
+                    msg % pycompat.bytestr(ex.opt),
+                    hint=hint % pycompat.bytestr(ex.opt),
+                )
 
             ui.warn(_("ignoring unknown option %s\n") % flag)
 
     args = list([convert(x) for x in args])
-    opts = dict([(k, convert(v)) if isinstance(v, str) else (k, v)
-                                 for k, v in opts.iteritems()])
+    opts = dict(
+        [
+            (k, convert(v)) if isinstance(v, str) else (k, v)
+            for k, v in opts.iteritems()
+        ]
+    )
 
     return args, opts
 
+
 class Command(object):
     def __init__(self, name):
         self.name = name
@@ -149,6 +163,7 @@
     def __and__(self, other):
         return AndCommand(self, other)
 
+
 class AndCommand(object):
     def __init__(self, left, right):
         self.left = left
@@ -160,6 +175,7 @@
     def __and__(self, other):
         return AndCommand(self, other)
 
+
 def add(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('A', 'all', None, ''),
@@ -167,9 +183,13 @@
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if (opts.get('patch')):
-        ui.status(_("note: Mercurial will commit when complete, "
-                    "as there is no staging area in Mercurial\n\n"))
+    if opts.get('patch'):
+        ui.status(
+            _(
+                "note: Mercurial will commit when complete, "
+                "as there is no staging area in Mercurial\n\n"
+            )
+        )
         cmd = Command('commit --interactive')
     else:
         cmd = Command("add")
@@ -177,18 +197,23 @@
         if not opts.get('all'):
             cmd.extend(args)
         else:
-            ui.status(_("note: use hg addremove to remove files that have "
-                        "been deleted\n\n"))
+            ui.status(
+                _(
+                    "note: use hg addremove to remove files that have "
+                    "been deleted\n\n"
+                )
+            )
 
     ui.status((bytes(cmd)), "\n")
 
+
 def am(ui, repo, *args, **kwargs):
-    cmdoptions=[
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
     cmd = Command('import')
     ui.status(bytes(cmd), "\n")
 
+
 def apply(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('p', 'p', int, ''),
@@ -197,7 +222,7 @@
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('import --no-commit')
-    if (opts.get('p')):
+    if opts.get('p'):
         cmd['-p'] = opts.get('p')
     if opts.get('directory'):
         cmd['--prefix'] = opts.get('directory')
@@ -205,17 +230,19 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def bisect(ui, repo, *args, **kwargs):
     ui.status(_("see 'hg help bisect' for how to use bisect\n\n"))
 
+
 def blame(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
     cmd = Command('annotate -udl')
     cmd.extend([convert(v) for v in args])
     ui.status((bytes(cmd)), "\n")
 
+
 def branch(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'set-upstream', None, ''),
@@ -259,6 +286,7 @@
             cmd.append(args[0])
     ui.status((bytes(cmd)), "\n")
 
+
 def ispath(repo, string):
     """
     The first argument to git checkout can either be a revision or a path. Let's
@@ -287,6 +315,7 @@
 
     return didexist
 
+
 def checkout(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('b', 'branch', '', ''),
@@ -297,7 +326,7 @@
     paths = []
     if '--' in args:
         sepindex = args.index('--')
-        paths.extend(args[sepindex + 1:])
+        paths.extend(args[sepindex + 1 :])
         args = args[:sepindex]
 
     args, opts = parseoptions(ui, cmdoptions, args)
@@ -350,6 +379,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def cherrypick(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'continue', None, ''),
@@ -372,6 +402,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def clean(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('d', 'd', None, ''),
@@ -387,6 +418,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def clone(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'bare', None, ''),
@@ -405,8 +437,12 @@
 
     if opts.get('bare'):
         cmd['-U'] = None
-        ui.status(_("note: Mercurial does not have bare clones. "
-                    "-U will clone the repo without checking out a commit\n\n"))
+        ui.status(
+            _(
+                "note: Mercurial does not have bare clones. "
+                "-U will clone the repo without checking out a commit\n\n"
+            )
+        )
     elif opts.get('no_checkout'):
         cmd['-U'] = None
 
@@ -417,6 +453,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def commit(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('a', 'all', None, ''),
@@ -448,9 +485,13 @@
         cmd['-m'] = "'%s'" % (opts.get('message'),)
 
     if opts.get('all'):
-        ui.status(_("note: Mercurial doesn't have a staging area, "
-                    "so there is no --all. -A will add and remove files "
-                    "for you though.\n\n"))
+        ui.status(
+            _(
+                "note: Mercurial doesn't have a staging area, "
+                "so there is no --all. -A will add and remove files "
+                "for you though.\n\n"
+            )
+        )
 
     if opts.get('file'):
         cmd['-l'] = opts.get('file')
@@ -465,9 +506,15 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def deprecated(ui, repo, *args, **kwargs):
-    ui.warn(_('this command has been deprecated in the git project, '
-              'thus isn\'t supported by this tool\n\n'))
+    ui.warn(
+        _(
+            'this command has been deprecated in the git project, '
+            'thus isn\'t supported by this tool\n\n'
+        )
+    )
+
 
 def diff(ui, repo, *args, **kwargs):
     cmdoptions = [
@@ -480,8 +527,12 @@
     cmd = Command('diff')
 
     if opts.get('cached'):
-        ui.status(_('note: Mercurial has no concept of a staging area, '
-                    'so --cached does nothing\n\n'))
+        ui.status(
+            _(
+                'note: Mercurial has no concept of a staging area, '
+                'so --cached does nothing\n\n'
+            )
+        )
 
     if opts.get('reverse'):
         cmd['--reverse'] = None
@@ -496,14 +547,20 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def difftool(ui, repo, *args, **kwargs):
-    ui.status(_('Mercurial does not enable external difftool by default. You '
-        'need to enable the extdiff extension in your .hgrc file by adding\n'
-        'extdiff =\n'
-        'to the [extensions] section and then running\n\n'
-        'hg extdiff -p <program>\n\n'
-        'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
-        'information.\n'))
+    ui.status(
+        _(
+            'Mercurial does not enable external difftool by default. You '
+            'need to enable the extdiff extension in your .hgrc file by adding\n'
+            'extdiff =\n'
+            'to the [extensions] section and then running\n\n'
+            'hg extdiff -p <program>\n\n'
+            'See \'hg help extdiff\' and \'hg help -e extdiff\' for more '
+            'information.\n'
+        )
+    )
+
 
 def fetch(ui, repo, *args, **kwargs):
     cmdoptions = [
@@ -517,10 +574,14 @@
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
-            ui.status(_("note: Mercurial doesn't have refspecs. "
-                        "-r can be used to specify which commits you want to "
-                        "pull. -B can be used to specify which bookmark you "
-                        "want to pull.\n\n"))
+            ui.status(
+                _(
+                    "note: Mercurial doesn't have refspecs. "
+                    "-r can be used to specify which commits you want to "
+                    "pull. -B can be used to specify which bookmark you "
+                    "want to pull.\n\n"
+                )
+            )
             for v in args[1:]:
                 if v in repo._bookmarks:
                     cmd['-B'] = v
@@ -529,9 +590,9 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def grep(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('grep')
@@ -542,9 +603,9 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def init(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('init')
@@ -554,6 +615,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def log(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'follow', None, ''),
@@ -568,10 +630,18 @@
         ('p', 'patch', None, ''),
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
-    ui.status(_('note: -v prints the entire commit message like Git does. To '
-                'print just the first line, drop the -v.\n\n'))
-    ui.status(_("note: see hg help revset for information on how to filter "
-                "log output\n\n"))
+    ui.status(
+        _(
+            'note: -v prints the entire commit message like Git does. To '
+            'print just the first line, drop the -v.\n\n'
+        )
+    )
+    ui.status(
+        _(
+            "note: see hg help revset for information on how to filter "
+            "log output\n\n"
+        )
+    )
 
     cmd = Command('log')
     cmd['-v'] = None
@@ -590,13 +660,21 @@
     if opts.get('pretty') or opts.get('format') or opts.get('oneline'):
         format = opts.get('format', '')
         if 'format:' in format:
-            ui.status(_("note: --format format:??? equates to Mercurial's "
-                        "--template. See hg help templates for more info.\n\n"))
+            ui.status(
+                _(
+                    "note: --format format:??? equates to Mercurial's "
+                    "--template. See hg help templates for more info.\n\n"
+                )
+            )
             cmd['--template'] = '???'
         else:
-            ui.status(_("note: --pretty/format/oneline equate to Mercurial's "
-                        "--style or --template. See hg help templates for "
-                        "more info.\n\n"))
+            ui.status(
+                _(
+                    "note: --pretty/format/oneline equate to Mercurial's "
+                    "--style or --template. See hg help templates for "
+                    "more info.\n\n"
+                )
+            )
             cmd['--style'] = '???'
 
     if len(args) > 0:
@@ -608,6 +686,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def lsfiles(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('c', 'cached', None, ''),
@@ -620,8 +699,12 @@
     ]
     args, opts = parseoptions(ui, cmdoptions, args)
 
-    if (opts.get('modified') or opts.get('deleted')
-        or opts.get('others') or opts.get('ignored')):
+    if (
+        opts.get('modified')
+        or opts.get('deleted')
+        or opts.get('others')
+        or opts.get('ignored')
+    ):
         cmd = Command('status')
         if opts.get('deleted'):
             cmd['-d'] = None
@@ -634,8 +717,12 @@
     else:
         cmd = Command('files')
     if opts.get('stage'):
-        ui.status(_("note: Mercurial doesn't have a staging area, ignoring "
-                  "--stage\n"))
+        ui.status(
+            _(
+                "note: Mercurial doesn't have a staging area, ignoring "
+                "--stage\n"
+            )
+        )
     if opts.get('_zero'):
         cmd['-0'] = None
     cmd.append('.')
@@ -644,9 +731,9 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def merge(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('merge')
@@ -656,6 +743,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def mergebase(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
@@ -663,13 +751,17 @@
     if len(args) != 2:
         args = ['A', 'B']
 
-    cmd = Command("log -T '{node}\\n' -r 'ancestor(%s,%s)'"
-                  % (args[0], args[1]))
+    cmd = Command(
+        "log -T '{node}\\n' -r 'ancestor(%s,%s)'" % (args[0], args[1])
+    )
 
-    ui.status(_('note: ancestors() is part of the revset language\n'),
-              _("(learn more about revsets with 'hg help revsets')\n\n"))
+    ui.status(
+        _('note: ancestors() is part of the revset language\n'),
+        _("(learn more about revsets with 'hg help revsets')\n\n"),
+    )
     ui.status((bytes(cmd)), "\n")
 
+
 def mergetool(ui, repo, *args, **kwargs):
     cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
@@ -681,6 +773,7 @@
     cmd.extend(args)
     ui.status((bytes(cmd)), "\n")
 
+
 def mv(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('f', 'force', None, ''),
@@ -698,6 +791,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def pull(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'all', None, ''),
@@ -712,10 +806,14 @@
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
-            ui.status(_("note: Mercurial doesn't have refspecs. "
-                        "-r can be used to specify which commits you want to "
-                        "pull. -B can be used to specify which bookmark you "
-                        "want to pull.\n\n"))
+            ui.status(
+                _(
+                    "note: Mercurial doesn't have refspecs. "
+                    "-r can be used to specify which commits you want to "
+                    "pull. -B can be used to specify which bookmark you "
+                    "want to pull.\n\n"
+                )
+            )
             for v in args[1:]:
                 if v in repo._bookmarks:
                     cmd['-B'] = v
@@ -724,6 +822,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def push(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'all', None, ''),
@@ -736,10 +835,14 @@
     if len(args) > 0:
         cmd.append(args[0])
         if len(args) > 1:
-            ui.status(_("note: Mercurial doesn't have refspecs. "
-                        "-r can be used to specify which commits you want "
-                        "to push. -B can be used to specify which bookmark "
-                        "you want to push.\n\n"))
+            ui.status(
+                _(
+                    "note: Mercurial doesn't have refspecs. "
+                    "-r can be used to specify which commits you want "
+                    "to push. -B can be used to specify which bookmark "
+                    "you want to push.\n\n"
+                )
+            )
             for v in args[1:]:
                 if v in repo._bookmarks:
                     cmd['-B'] = v
@@ -751,6 +854,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def rebase(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'all', None, ''),
@@ -763,12 +867,20 @@
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if opts.get('interactive'):
-        ui.status(_("note: hg histedit does not perform a rebase. "
-                    "It just edits history.\n\n"))
+        ui.status(
+            _(
+                "note: hg histedit does not perform a rebase. "
+                "It just edits history.\n\n"
+            )
+        )
         cmd = Command('histedit')
         if len(args) > 0:
-            ui.status(_("also note: 'hg histedit' will automatically detect"
-                      " your stack, so no second argument is necessary\n\n"))
+            ui.status(
+                _(
+                    "also note: 'hg histedit' will automatically detect"
+                    " your stack, so no second argument is necessary\n\n"
+                )
+            )
         ui.status((bytes(cmd)), "\n")
         return
 
@@ -784,9 +896,13 @@
         cmd['--abort'] = None
 
     if opts.get('onto'):
-        ui.status(_("note: if you're trying to lift a commit off one branch, "
-                    "try hg rebase -d <destination commit> -s <commit to be "
-                    "lifted>\n\n"))
+        ui.status(
+            _(
+                "note: if you're trying to lift a commit off one branch, "
+                "try hg rebase -d <destination commit> -s <commit to be "
+                "lifted>\n\n"
+            )
+        )
         cmd['-d'] = convert(opts.get('onto'))
         if len(args) < 2:
             raise error.Abort(_("expected format: git rebase --onto X Y Z"))
@@ -800,6 +916,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def reflog(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'all', None, ''),
@@ -813,8 +930,13 @@
         cmd.append(args[0])
 
     ui.status(bytes(cmd), "\n\n")
-    ui.status(_("note: in hg commits can be deleted from repo but we always"
-              " have backups\n"))
+    ui.status(
+        _(
+            "note: in hg commits can be deleted from repo but we always"
+            " have backups\n"
+        )
+    )
+
 
 def reset(ui, repo, *args, **kwargs):
     cmdoptions = [
@@ -828,11 +950,19 @@
     hard = opts.get('hard')
 
     if opts.get('mixed'):
-        ui.status(_('note: --mixed has no meaning since Mercurial has no '
-                    'staging area\n\n'))
+        ui.status(
+            _(
+                'note: --mixed has no meaning since Mercurial has no '
+                'staging area\n\n'
+            )
+        )
     if opts.get('soft'):
-        ui.status(_('note: --soft has no meaning since Mercurial has no '
-                    'staging area\n\n'))
+        ui.status(
+            _(
+                'note: --soft has no meaning since Mercurial has no '
+                'staging area\n\n'
+            )
+        )
 
     cmd = Command('update')
     if hard:
@@ -842,14 +972,18 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def revert(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if len(args) > 1:
-        ui.status(_("note: hg backout doesn't support multiple commits at "
-                    "once\n\n"))
+        ui.status(
+            _(
+                "note: hg backout doesn't support multiple commits at "
+                "once\n\n"
+            )
+        )
 
     cmd = Command('backout')
     if args:
@@ -857,6 +991,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def revparse(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'show-cdup', None, ''),
@@ -872,6 +1007,7 @@
     else:
         ui.status(_("note: see hg help revset for how to refer to commits\n"))
 
+
 def rm(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('f', 'force', None, ''),
@@ -889,6 +1025,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def show(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'name-status', None, ''),
@@ -921,6 +1058,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def stash(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('p', 'patch', None, ''),
@@ -956,8 +1094,12 @@
         if action == 'apply':
             cmd['--keep'] = None
     elif action == 'branch' or action == 'create':
-        ui.status(_("note: Mercurial doesn't have equivalents to the "
-                    "git stash branch or create actions\n\n"))
+        ui.status(
+            _(
+                "note: Mercurial doesn't have equivalents to the "
+                "git stash branch or create actions\n\n"
+            )
+        )
         return
     else:
         if len(args) > 0:
@@ -968,6 +1110,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def status(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('', 'ignored', None, ''),
@@ -982,28 +1125,29 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def svn(ui, repo, *args, **kwargs):
     if not args:
         raise error.Abort(_('missing svn command'))
     svncmd = args[0]
     if svncmd not in gitsvncommands:
-        raise error.Abort(_('unknown git svn command "%s"') % (svncmd))
+        raise error.Abort(_('unknown git svn command "%s"') % svncmd)
 
     args = args[1:]
     return gitsvncommands[svncmd](ui, repo, *args, **kwargs)
 
+
 def svndcommit(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('push')
 
     ui.status((bytes(cmd)), "\n")
 
+
 def svnfetch(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     cmd = Command('pull')
@@ -1011,9 +1155,9 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def svnfindrev(ui, repo, *args, **kwargs):
-    cmdoptions = [
-    ]
+    cmdoptions = []
     args, opts = parseoptions(ui, cmdoptions, args)
 
     if not args:
@@ -1024,6 +1168,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def svnrebase(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('l', 'local', None, ''),
@@ -1039,6 +1184,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 def tag(ui, repo, *args, **kwargs):
     cmdoptions = [
         ('f', 'force', None, ''),
@@ -1067,6 +1213,7 @@
 
     ui.status((bytes(cmd)), "\n")
 
+
 gitcommands = {
     'add': add,
     'am': am,
--- a/hgext/gpg.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/gpg.py	Sun Oct 06 09:45:02 2019 -0400
@@ -36,25 +36,24 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('gpg', 'cmd',
-    default='gpg',
+configitem(
+    'gpg', 'cmd', default='gpg',
 )
-configitem('gpg', 'key',
-    default=None,
+configitem(
+    'gpg', 'key', default=None,
 )
-configitem('gpg', '.*',
-    default=None,
-    generic=True,
+configitem(
+    'gpg', '.*', default=None, generic=True,
 )
 
 # Custom help category
 _HELP_CATEGORY = 'gpg'
 help.CATEGORY_ORDER.insert(
-    help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP),
-    _HELP_CATEGORY
+    help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP), _HELP_CATEGORY
 )
 help.CATEGORY_NAMES[_HELP_CATEGORY] = 'Signing changes (GPG)'
 
+
 class gpg(object):
     def __init__(self, path, key=None):
         self.path = path
@@ -77,8 +76,11 @@
             fp = os.fdopen(fd, r'wb')
             fp.write(data)
             fp.close()
-            gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
-                      "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
+            gpgcmd = "%s --logger-fd 1 --status-fd 1 --verify " "\"%s\" \"%s\"" % (
+                self.path,
+                sigfile,
+                datafile,
+            )
             ret = procutil.filter("", gpgcmd)
         finally:
             for f in (sigfile, datafile):
@@ -102,10 +104,12 @@
                 key = l.split(" ", 3)[:2]
                 key.append("")
                 fingerprint = None
-            elif (l.startswith("GOODSIG") or
-                  l.startswith("EXPSIG") or
-                  l.startswith("EXPKEYSIG") or
-                  l.startswith("BADSIG")):
+            elif (
+                l.startswith("GOODSIG")
+                or l.startswith("EXPSIG")
+                or l.startswith("EXPKEYSIG")
+                or l.startswith("BADSIG")
+            ):
                 if key is not None:
                     keys.append(key + [fingerprint])
                 key = l.split(" ", 2)
@@ -114,6 +118,7 @@
             keys.append(key + [fingerprint])
         return keys
 
+
 def newgpg(ui, **opts):
     """create a new gpg instance"""
     gpgpath = ui.config("gpg", "cmd")
@@ -122,11 +127,13 @@
         gpgkey = ui.config("gpg", "key")
     return gpg(gpgpath, gpgkey)
 
+
 def sigwalk(repo):
     """
     walk over every sigs, yields a couple
     ((node, version, sig), (filename, linenumber))
     """
+
     def parsefile(fileiter, context):
         ln = 1
         for l in fileiter:
@@ -149,6 +156,7 @@
     except IOError:
         pass
 
+
 def getkeys(ui, repo, mygpg, sigdata, context):
     """get the keys who signed a data"""
     fn, ln = context
@@ -170,14 +178,19 @@
             ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
             continue
         if key[0] == "EXPSIG":
-            ui.write(_("%s Note: Signature has expired"
-                       " (signed by: \"%s\")\n") % (prefix, key[2]))
+            ui.write(
+                _("%s Note: Signature has expired" " (signed by: \"%s\")\n")
+                % (prefix, key[2])
+            )
         elif key[0] == "EXPKEYSIG":
-            ui.write(_("%s Note: This key has expired"
-                       " (signed by: \"%s\")\n") % (prefix, key[2]))
+            ui.write(
+                _("%s Note: This key has expired" " (signed by: \"%s\")\n")
+                % (prefix, key[2])
+            )
         validkeys.append((key[1], key[2], key[3]))
     return validkeys
 
+
 @command("sigs", [], _('hg sigs'), helpcategory=_HELP_CATEGORY)
 def sigs(ui, repo):
     """list signed changesets"""
@@ -203,6 +216,7 @@
             r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
             ui.write("%-30s %s\n" % (keystr(ui, k), r))
 
+
 @command("sigcheck", [], _('hg sigcheck REV'), helpcategory=_HELP_CATEGORY)
 def sigcheck(ui, repo, rev):
     """verify all the signatures there may be for a particular revision"""
@@ -227,6 +241,7 @@
     for key in keys:
         ui.write(" %s\n" % keystr(ui, key))
 
+
 def keystr(ui, key):
     """associate a string to a key (username, comment)"""
     keyid, user, fingerprint = key
@@ -236,18 +251,21 @@
     else:
         return user
 
-@command("sign",
-         [('l', 'local', None, _('make the signature local')),
-          ('f', 'force', None, _('sign even if the sigfile is modified')),
-          ('', 'no-commit', None, _('do not commit the sigfile after signing')),
-          ('k', 'key', '',
-           _('the key id to sign with'), _('ID')),
-          ('m', 'message', '',
-           _('use text as commit message'), _('TEXT')),
-          ('e', 'edit', False, _('invoke editor on commit messages')),
-         ] + cmdutil.commitopts2,
-         _('hg sign [OPTION]... [REV]...'),
-         helpcategory=_HELP_CATEGORY)
+
+@command(
+    "sign",
+    [
+        ('l', 'local', None, _('make the signature local')),
+        ('f', 'force', None, _('sign even if the sigfile is modified')),
+        ('', 'no-commit', None, _('do not commit the sigfile after signing')),
+        ('k', 'key', '', _('the key id to sign with'), _('ID')),
+        ('m', 'message', '', _('use text as commit message'), _('TEXT')),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+    ]
+    + cmdutil.commitopts2,
+    _('hg sign [OPTION]... [REV]...'),
+    helpcategory=_HELP_CATEGORY,
+)
 def sign(ui, repo, *revs, **opts):
     """add a signature for the current or given revision
 
@@ -262,6 +280,7 @@
     with repo.wlock():
         return _dosign(ui, repo, *revs, **opts)
 
+
 def _dosign(ui, repo, *revs, **opts):
     mygpg = newgpg(ui, **opts)
     opts = pycompat.byteskwargs(opts)
@@ -275,18 +294,21 @@
     if revs:
         nodes = [repo.lookup(n) for n in revs]
     else:
-        nodes = [node for node in repo.dirstate.parents()
-                 if node != hgnode.nullid]
+        nodes = [
+            node for node in repo.dirstate.parents() if node != hgnode.nullid
+        ]
         if len(nodes) > 1:
-            raise error.Abort(_('uncommitted merge - please provide a '
-                               'specific revision'))
+            raise error.Abort(
+                _('uncommitted merge - please provide a ' 'specific revision')
+            )
         if not nodes:
             nodes = [repo.changelog.tip()]
 
     for n in nodes:
         hexnode = hgnode.hex(n)
-        ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
-                                         hgnode.short(n)))
+        ui.write(
+            _("signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n))
+        )
         # build data
         data = node2txt(repo, n, sigver)
         sig = mygpg.sign(data)
@@ -304,8 +326,10 @@
     if not opts["force"]:
         msigs = match.exact(['.hgsigs'])
         if any(repo.status(match=msigs, unknown=True, ignored=True)):
-            raise error.Abort(_("working copy of .hgsigs is changed "),
-                             hint=_("please commit .hgsigs manually"))
+            raise error.Abort(
+                _("working copy of .hgsigs is changed "),
+                hint=_("please commit .hgsigs manually"),
+            )
 
     sigsfile = repo.wvfs(".hgsigs", "ab")
     sigsfile.write(sigmessage)
@@ -320,17 +344,23 @@
     message = opts['message']
     if not message:
         # we don't translate commit messages
-        message = "\n".join(["Added signature for changeset %s"
-                             % hgnode.short(n)
-                             for n in nodes])
+        message = "\n".join(
+            [
+                "Added signature for changeset %s" % hgnode.short(n)
+                for n in nodes
+            ]
+        )
     try:
-        editor = cmdutil.getcommiteditor(editform='gpg.sign',
-                                         **pycompat.strkwargs(opts))
-        repo.commit(message, opts['user'], opts['date'], match=msigs,
-                    editor=editor)
+        editor = cmdutil.getcommiteditor(
+            editform='gpg.sign', **pycompat.strkwargs(opts)
+        )
+        repo.commit(
+            message, opts['user'], opts['date'], match=msigs, editor=editor
+        )
     except ValueError as inst:
         raise error.Abort(pycompat.bytestr(inst))
 
+
 def node2txt(repo, node, ver):
     """map a manifest into some text"""
     if ver == "0":
@@ -338,9 +368,10 @@
     else:
         raise error.Abort(_("unknown signature version"))
 
+
 def extsetup(ui):
     # Add our category before "Repository maintenance".
     help.CATEGORY_ORDER.insert(
-        help.CATEGORY_ORDER.index(command.CATEGORY_MAINTENANCE),
-        _HELP_CATEGORY)
+        help.CATEGORY_ORDER.index(command.CATEGORY_MAINTENANCE), _HELP_CATEGORY
+    )
     help.CATEGORY_NAMES[_HELP_CATEGORY] = 'GPG signing'
--- a/hgext/graphlog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/graphlog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -32,30 +32,67 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('glog',
-    [('f', 'follow', None,
-     _('follow changeset history, or file history across copies and renames')),
-    ('', 'follow-first', None,
-     _('only follow the first parent of merge changesets (DEPRECATED)')),
-    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
-    ('C', 'copies', None, _('show copied files')),
-    ('k', 'keyword', [],
-     _('do case-insensitive search for a given text'), _('TEXT')),
-    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
-    ('', 'removed', None, _('include revisions where files were removed')),
-    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
-    ('u', 'user', [], _('revisions committed by user'), _('USER')),
-    ('', 'only-branch', [],
-     _('show only changesets within the given named branch (DEPRECATED)'),
-     _('BRANCH')),
-    ('b', 'branch', [],
-     _('show changesets within the given named branch'), _('BRANCH')),
-    ('P', 'prune', [],
-     _('do not display revision or any of its ancestors'), _('REV')),
-    ] + cmdutil.logopts + cmdutil.walkopts,
+
+@command(
+    'glog',
+    [
+        (
+            'f',
+            'follow',
+            None,
+            _(
+                'follow changeset history, or file history across copies and renames'
+            ),
+        ),
+        (
+            '',
+            'follow-first',
+            None,
+            _('only follow the first parent of merge changesets (DEPRECATED)'),
+        ),
+        ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
+        ('C', 'copies', None, _('show copied files')),
+        (
+            'k',
+            'keyword',
+            [],
+            _('do case-insensitive search for a given text'),
+            _('TEXT'),
+        ),
+        ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
+        ('', 'removed', None, _('include revisions where files were removed')),
+        ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
+        ('u', 'user', [], _('revisions committed by user'), _('USER')),
+        (
+            '',
+            'only-branch',
+            [],
+            _(
+                'show only changesets within the given named branch (DEPRECATED)'
+            ),
+            _('BRANCH'),
+        ),
+        (
+            'b',
+            'branch',
+            [],
+            _('show changesets within the given named branch'),
+            _('BRANCH'),
+        ),
+        (
+            'P',
+            'prune',
+            [],
+            _('do not display revision or any of its ancestors'),
+            _('REV'),
+        ),
+    ]
+    + cmdutil.logopts
+    + cmdutil.walkopts,
     _('[OPTION]... [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
-    inferrepo=True)
+    inferrepo=True,
+)
 def glog(ui, repo, *pats, **opts):
     """show revision history alongside an ASCII revision graph
 
--- a/hgext/hgk.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/hgk.py	Sun Oct 06 09:45:02 2019 -0400
@@ -64,19 +64,24 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('hgk', 'path',
-    default='hgk',
+configitem(
+    'hgk', 'path', default='hgk',
 )
 
-@command('debug-diff-tree',
-    [('p', 'patch', None, _('generate patch')),
-    ('r', 'recursive', None, _('recursive')),
-    ('P', 'pretty', None, _('pretty')),
-    ('s', 'stdin', None, _('stdin')),
-    ('C', 'copy', None, _('detect copies')),
-    ('S', 'search', "", _('search'))],
-    ('[OPTION]... NODE1 NODE2 [FILE]...'),
-    inferrepo=True)
+
+@command(
+    'debug-diff-tree',
+    [
+        ('p', 'patch', None, _('generate patch')),
+        ('r', 'recursive', None, _('recursive')),
+        ('P', 'pretty', None, _('pretty')),
+        ('s', 'stdin', None, _('stdin')),
+        ('C', 'copy', None, _('detect copies')),
+        ('S', 'search', "", _('search')),
+    ],
+    '[OPTION]... NODE1 NODE2 [FILE]...',
+    inferrepo=True,
+)
 def difftree(ui, repo, node1=None, node2=None, *files, **opts):
     """diff trees from two commits"""
 
@@ -87,19 +92,26 @@
         mmap = repo[node1].manifest()
         mmap2 = repo[node2].manifest()
         m = scmutil.match(repo[node1], files)
-        modified, added, removed  = repo.status(node1, node2, m)[:3]
+        modified, added, removed = repo.status(node1, node2, m)[:3]
         empty = short(nullid)
 
         for f in modified:
             # TODO get file permissions
-            ui.write((":100664 100664 %s %s M\t%s\t%s\n") %
-                     (short(mmap[f]), short(mmap2[f]), f, f))
+            ui.write(
+                ":100664 100664 %s %s M\t%s\t%s\n"
+                % (short(mmap[f]), short(mmap2[f]), f, f)
+            )
         for f in added:
-            ui.write((":000000 100664 %s %s N\t%s\t%s\n") %
-                     (empty, short(mmap2[f]), f, f))
+            ui.write(
+                ":000000 100664 %s %s N\t%s\t%s\n"
+                % (empty, short(mmap2[f]), f, f)
+            )
         for f in removed:
-            ui.write((":100664 000000 %s %s D\t%s\t%s\n") %
-                     (short(mmap[f]), empty, f, f))
+            ui.write(
+                ":100664 000000 %s %s D\t%s\t%s\n"
+                % (short(mmap[f]), empty, f, f)
+            )
+
     ##
 
     while True:
@@ -125,8 +137,7 @@
             m = scmutil.match(repo[node1], files)
             diffopts = patch.difffeatureopts(ui)
             diffopts.git = True
-            chunks = patch.diff(repo, node1, node2, match=m,
-                                opts=diffopts)
+            chunks = patch.diff(repo, node1, node2, match=m, opts=diffopts)
             for chunk in chunks:
                 ui.write(chunk)
         else:
@@ -134,6 +145,7 @@
         if not opts[r'stdin']:
             break
 
+
 def catcommit(ui, repo, n, prefix, ctx=None):
     nlprefix = '\n' + prefix
     if ctx is None:
@@ -154,17 +166,19 @@
     ui.write(("branch %s\n" % ctx.branch()))
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
         if ctx.obsolete():
-            ui.write(("obsolete\n"))
+            ui.write("obsolete\n")
     ui.write(("phase %s\n\n" % ctx.phasestr()))
 
     if prefix != "":
-        ui.write("%s%s\n" % (prefix,
-                             description.replace('\n', nlprefix).strip()))
+        ui.write(
+            "%s%s\n" % (prefix, description.replace('\n', nlprefix).strip())
+        )
     else:
         ui.write(description + "\n")
     if prefix:
         ui.write('\0')
 
+
 @command('debug-merge-base', [], _('REV REV'))
 def base(ui, repo, node1, node2):
     """output common ancestor information"""
@@ -173,10 +187,13 @@
     n = repo.changelog.ancestor(node1, node2)
     ui.write(short(n) + "\n")
 
-@command('debug-cat-file',
+
+@command(
+    'debug-cat-file',
     [('s', 'stdin', None, _('stdin'))],
     _('[OPTION]... TYPE FILE'),
-    inferrepo=True)
+    inferrepo=True,
+)
 def catfile(ui, repo, type=None, r=None, **opts):
     """cat a specific revision"""
     # in stdin mode, every line except the commit is prefixed with two
@@ -209,6 +226,7 @@
         else:
             break
 
+
 # git rev-tree is a confusing thing.  You can supply a number of
 # commit sha1s on the command line, and it walks the commit history
 # telling you which commits are reachable from the supplied ones via
@@ -229,12 +247,12 @@
 
             for x in pycompat.xrange(chunk):
                 if i + x >= count:
-                    l[chunk - x:] = [0] * (chunk - x)
+                    l[chunk - x :] = [0] * (chunk - x)
                     break
                 if full is not None:
                     if (i + x) in repo:
                         l[x] = repo[i + x]
-                        l[x].changeset() # force reading
+                        l[x].changeset()  # force reading
                 else:
                     if (i + x) in repo:
                         l[x] = 1
@@ -324,15 +342,20 @@
                 break
             count += 1
 
+
 # git rev-list tries to order things by date, and has the ability to stop
 # at a given commit without walking the whole repo.  TODO add the stop
 # parameter
-@command('debug-rev-list',
-    [('H', 'header', None, _('header')),
-    ('t', 'topo-order', None, _('topo-order')),
-    ('p', 'parents', None, _('parents')),
-    ('n', 'max-count', 0, _('max-count'))],
-    ('[OPTION]... REV...'))
+@command(
+    'debug-rev-list',
+    [
+        ('H', 'header', None, _('header')),
+        ('t', 'topo-order', None, _('topo-order')),
+        ('p', 'parents', None, _('parents')),
+        ('n', 'max-count', 0, _('max-count')),
+    ],
+    '[OPTION]... REV...',
+)
 def revlist(ui, repo, *revs, **opts):
     """print revisions"""
     if opts['header']:
@@ -342,11 +365,13 @@
     copy = [x for x in revs]
     revtree(ui, copy, repo, full, opts[r'max_count'], opts[r'parents'])
 
-@command('view',
-    [('l', 'limit', '',
-     _('limit number of changes displayed'), _('NUM'))],
+
+@command(
+    'view',
+    [('l', 'limit', '', _('limit number of changes displayed'), _('NUM'))],
     _('[-l LIMIT] [REVRANGE]'),
-    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
+    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
+)
 def view(ui, repo, *etc, **opts):
     "start interactive history viewer"
     opts = pycompat.byteskwargs(opts)
--- a/hgext/highlight/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/highlight/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -45,6 +45,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def pygmentize(web, field, fctx, tmpl):
     style = web.config('web', 'pygments_style', 'colorful')
     expr = web.config('web', 'highlightfiles', "size('<5M')")
@@ -53,8 +54,10 @@
     ctx = fctx.changectx()
     m = ctx.matchfileset(expr)
     if m(fctx.path()):
-        highlight.pygmentize(field, fctx, style, tmpl,
-                guessfilenameonly=filenameonly)
+        highlight.pygmentize(
+            field, fctx, style, tmpl, guessfilenameonly=filenameonly
+        )
+
 
 def filerevision_highlight(orig, web, fctx):
     mt = web.res.headers['Content-Type']
@@ -70,6 +73,7 @@
 
     return orig(web, fctx)
 
+
 def annotate_highlight(orig, web):
     mt = web.res.headers['Content-Type']
     if 'html' in mt:
@@ -78,21 +82,28 @@
 
     return orig(web)
 
+
 def generate_css(web):
     pg_style = web.config('web', 'pygments_style', 'colorful')
     fmter = highlight.HtmlFormatter(style=pycompat.sysstr(pg_style))
     web.res.headers['Content-Type'] = 'text/css'
     style_defs = fmter.get_style_defs(pycompat.sysstr(''))
-    web.res.setbodybytes(''.join([
-        '/* pygments_style = %s */\n\n' % pg_style,
-        pycompat.bytestr(style_defs),
-    ]))
+    web.res.setbodybytes(
+        ''.join(
+            [
+                '/* pygments_style = %s */\n\n' % pg_style,
+                pycompat.bytestr(style_defs),
+            ]
+        )
+    )
     return web.res.sendresponse()
 
+
 def extsetup(ui):
     # monkeypatch in the new version
-    extensions.wrapfunction(webcommands, '_filerevision',
-                            filerevision_highlight)
+    extensions.wrapfunction(
+        webcommands, '_filerevision', filerevision_highlight
+    )
     extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
     webcommands.highlightcss = generate_css
     webcommands.__all__.append('highlightcss')
--- a/hgext/highlight/highlight.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/highlight/highlight.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,6 +11,7 @@
 from __future__ import absolute_import
 
 from mercurial import demandimport
+
 demandimport.IGNORES.update(['pkgutil', 'pkg_resources', '__main__'])
 
 from mercurial import (
@@ -18,9 +19,7 @@
     pycompat,
 )
 
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 with demandimport.deactivated():
     import pygments
@@ -39,8 +38,10 @@
 TextLexer = pygments.lexers.TextLexer
 HtmlFormatter = pygments.formatters.HtmlFormatter
 
-SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
-              'type="text/css" />')
+SYNTAX_CSS = (
+    '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
+)
+
 
 def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
 
@@ -67,8 +68,7 @@
     # To get multi-line strings right, we can't format line-by-line
     try:
         path = pycompat.sysstr(fctx.path())
-        lexer = guess_lexer_for_filename(path, text[:1024],
-                                         stripnl=False)
+        lexer = guess_lexer_for_filename(path, text[:1024], stripnl=False)
     except (ClassNotFound, ValueError):
         # guess_lexer will return a lexer if *any* lexer matches. There is
         # no way to specify a minimum match score. This can give a high rate of
@@ -89,8 +89,10 @@
     formatter = HtmlFormatter(nowrap=True, style=pycompat.sysstr(style))
 
     colorized = highlight(text, lexer, formatter)
-    coloriter = (s.encode(pycompat.sysstr(encoding.encoding), 'replace')
-                 for s in colorized.splitlines())
+    coloriter = (
+        s.encode(pycompat.sysstr(encoding.encoding), 'replace')
+        for s in colorized.splitlines()
+    )
 
     tmpl._filters['colorize'] = lambda x: next(coloriter)
 
--- a/hgext/histedit.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/histedit.py	Sun Oct 06 09:45:02 2019 -0400
@@ -240,26 +240,25 @@
 
 configtable = {}
 configitem = registrar.configitem(configtable)
-configitem('experimental', 'histedit.autoverb',
-    default=False,
+configitem(
+    'experimental', 'histedit.autoverb', default=False,
 )
-configitem('histedit', 'defaultrev',
-    default=None,
+configitem(
+    'histedit', 'defaultrev', default=None,
 )
-configitem('histedit', 'dropmissing',
-    default=False,
+configitem(
+    'histedit', 'dropmissing', default=False,
 )
-configitem('histedit', 'linelen',
-    default=80,
-)
-configitem('histedit', 'singletransaction',
-    default=False,
+configitem(
+    'histedit', 'linelen', default=80,
 )
-configitem('ui', 'interface.histedit',
-    default=None,
+configitem(
+    'histedit', 'singletransaction', default=False,
 )
-configitem('histedit', 'summary-template',
-           default='{rev} {desc|firstline}')
+configitem(
+    'ui', 'interface.histedit', default=None,
+)
+configitem('histedit', 'summary-template', default='{rev} {desc|firstline}')
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -273,6 +272,7 @@
 tertiaryactions = set()
 internalactions = set()
 
+
 def geteditcomment(ui, first, last):
     """ construct the editor comment
     The comment includes::
@@ -284,15 +284,18 @@
 
     Commands are only included once.
     """
-    intro = _("""Edit history between %s and %s
+    intro = _(
+        """Edit history between %s and %s
 
 Commits are listed from least to most recent
 
 You can reorder changesets by reordering the lines
 
 Commands:
-""")
+"""
+    )
     actions = []
+
     def addverb(v):
         a = actiontable[v]
         lines = a.message.split("\n")
@@ -302,22 +305,25 @@
         actions.extend(['  %s' for l in lines[1:]])
 
     for v in (
-         sorted(primaryactions) +
-         sorted(secondaryactions) +
-         sorted(tertiaryactions)
-        ):
+        sorted(primaryactions)
+        + sorted(secondaryactions)
+        + sorted(tertiaryactions)
+    ):
         addverb(v)
     actions.append('')
 
     hints = []
     if ui.configbool('histedit', 'dropmissing'):
-        hints.append("Deleting a changeset from the list "
-                     "will DISCARD it from the edited history!")
+        hints.append(
+            "Deleting a changeset from the list "
+            "will DISCARD it from the edited history!"
+        )
 
     lines = (intro % (first, last)).split('\n') + actions + hints
 
     return ''.join(['# %s\n' % l if l else '#\n' for l in lines])
 
+
 class histeditstate(object):
     def __init__(self, repo):
         self.repo = repo
@@ -357,14 +363,23 @@
             backupfile = None
         rules = "\n".join(["%s %s" % (verb, rest) for [verb, rest] in rules])
 
-        return {'parentctxnode': parentctxnode, "rules": rules, "keep": keep,
-                "topmost": topmost, "replacements": replacements,
-                "backupfile": backupfile}
+        return {
+            'parentctxnode': parentctxnode,
+            "rules": rules,
+            "keep": keep,
+            "topmost": topmost,
+            "replacements": replacements,
+            "backupfile": backupfile,
+        }
 
     def write(self, tr=None):
         if tr:
-            tr.addfilegenerator('histedit-state', ('histedit-state',),
-                                self._write, location='plain')
+            tr.addfilegenerator(
+                'histedit-state',
+                ('histedit-state',),
+                self._write,
+                location='plain',
+            )
         else:
             with self.repo.vfs("histedit-state", "w") as f:
                 self._write(f)
@@ -379,8 +394,13 @@
             fp.write('%s\n' % action.tostate())
         fp.write('%d\n' % len(self.replacements))
         for replacement in self.replacements:
-            fp.write('%s%s\n' % (node.hex(replacement[0]), ''.join(node.hex(r)
-                for r in replacement[1])))
+            fp.write(
+                '%s%s\n'
+                % (
+                    node.hex(replacement[0]),
+                    ''.join(node.hex(r) for r in replacement[1]),
+                )
+            )
         backupfile = self.backupfile
         if not backupfile:
             backupfile = ''
@@ -391,7 +411,7 @@
         lines = [l[:-1] for l in fp.readlines()]
 
         index = 0
-        lines[index] # version number
+        lines[index]  # version number
         index += 1
 
         parentctxnode = node.bin(lines[index])
@@ -421,8 +441,10 @@
         for i in pycompat.xrange(replacementlen):
             replacement = lines[index]
             original = node.bin(replacement[:40])
-            succ = [node.bin(replacement[i:i + 40]) for i in
-                    range(40, len(replacement), 40)]
+            succ = [
+                node.bin(replacement[i : i + 40])
+                for i in range(40, len(replacement), 40)
+            ]
             replacements.append((original, succ))
             index += 1
 
@@ -477,13 +499,16 @@
     def _verifynodeconstraints(self, prev, expected, seen):
         # by default command need a node in the edited list
         if self.node not in expected:
-            raise error.ParseError(_('%s "%s" changeset was not a candidate')
-                                   % (self.verb, node.short(self.node)),
-                                   hint=_('only use listed changesets'))
+            raise error.ParseError(
+                _('%s "%s" changeset was not a candidate')
+                % (self.verb, node.short(self.node)),
+                hint=_('only use listed changesets'),
+            )
         # and only one command per node
         if self.node in seen:
-            raise error.ParseError(_('duplicated command for changeset %s') %
-                                   node.short(self.node))
+            raise error.ParseError(
+                _('duplicated command for changeset %s') % node.short(self.node)
+            )
 
     def torule(self):
         """build a histedit rule line for an action
@@ -493,14 +518,18 @@
         """
         ctx = self.repo[self.node]
         ui = self.repo.ui
-        summary = cmdutil.rendertemplate(
-            ctx, ui.config('histedit', 'summary-template')) or ''
+        summary = (
+            cmdutil.rendertemplate(
+                ctx, ui.config('histedit', 'summary-template')
+            )
+            or ''
+        )
         summary = summary.splitlines()[0]
         line = '%s %s %s' % (self.verb, ctx, summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
         maxlen = self.repo.ui.configint('histedit', 'linelen')
-        maxlen = max(maxlen, 22) # avoid truncating hash
+        maxlen = max(maxlen, 22)  # avoid truncating hash
         return stringutil.ellipsis(line, maxlen)
 
     def tostate(self):
@@ -528,9 +557,10 @@
         repo.dirstate.setbranch(rulectx.branch())
         if stats.unresolvedcount:
             raise error.InterventionRequired(
-                _('Fix up the change (%s %s)') %
-                (self.verb, node.short(self.node)),
-                hint=_('hg histedit --continue to resume'))
+                _('Fix up the change (%s %s)')
+                % (self.verb, node.short(self.node)),
+                hint=_('hg histedit --continue to resume'),
+            )
 
     def continuedirty(self):
         """Continues the action when changes have been applied to the working
@@ -544,8 +574,13 @@
             date = dateutil.makedate()
         else:
             date = rulectx.date()
-        commit(text=rulectx.description(), user=rulectx.user(),
-               date=date, extra=rulectx.extra(), editor=editor)
+        commit(
+            text=rulectx.description(),
+            user=rulectx.user(),
+            date=date,
+            extra=rulectx.extra(),
+            editor=editor,
+        )
 
     def commiteditor(self):
         """The editor to be used to edit the commit message."""
@@ -557,14 +592,17 @@
         rulectx."""
         ctx = self.repo['.']
         if ctx.node() == self.state.parentctxnode:
-            self.repo.ui.warn(_('%s: skipping changeset (no changes)\n') %
-                              node.short(self.node))
+            self.repo.ui.warn(
+                _('%s: skipping changeset (no changes)\n')
+                % node.short(self.node)
+            )
             return ctx, [(self.node, tuple())]
         if ctx.node() == self.node:
             # Nothing changed
             return ctx, []
         return ctx, [(self.node, (ctx.node(),))]
 
+
 def commitfuncfor(repo, src):
     """Build a commit function for the replacement of <src>
 
@@ -576,6 +614,7 @@
     different and not easily factored out of the fold method.
     """
     phasemin = src.phase()
+
     def commitfunc(**kwargs):
         overrides = {('phases', 'new-commit'): phasemin}
         with repo.ui.configoverride(overrides, 'histedit'):
@@ -583,8 +622,10 @@
             extra['histedit_source'] = src.hex()
             kwargs[r'extra'] = extra
             return repo.commit(**kwargs)
+
     return commitfunc
 
+
 def applychanges(ui, repo, ctx, opts):
     """Merge changeset from ctx (only) in the current working directory"""
     wcpar = repo.dirstate.p1()
@@ -598,13 +639,15 @@
     else:
         try:
             # ui.forcemerge is an internal variable, do not document
-            repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
-                              'histedit')
+            repo.ui.setconfig(
+                'ui', 'forcemerge', opts.get('tool', ''), 'histedit'
+            )
             stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'histedit'])
         finally:
             repo.ui.setconfig('ui', 'forcemerge', '', 'histedit')
     return stats
 
+
 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
     """collapse the set of revisions from first to last as new one.
 
@@ -621,7 +664,8 @@
     for c in ctxs:
         if not c.mutable():
             raise error.ParseError(
-                _("cannot fold into public change %s") % node.short(c.node()))
+                _("cannot fold into public change %s") % node.short(c.node())
+            )
     base = firstctx.p1()
 
     # commit a new version of the old changeset, including the update
@@ -637,15 +681,20 @@
     files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
     # commit version of these files as defined by head
     headmf = lastctx.manifest()
+
     def filectxfn(repo, ctx, path):
         if path in headmf:
             fctx = lastctx[path]
             flags = fctx.flags()
-            mctx = context.memfilectx(repo, ctx,
-                                      fctx.path(), fctx.data(),
-                                      islink='l' in flags,
-                                      isexec='x' in flags,
-                                      copysource=copied.get(path))
+            mctx = context.memfilectx(
+                repo,
+                ctx,
+                fctx.path(),
+                fctx.data(),
+                islink='l' in flags,
+                isexec='x' in flags,
+                copysource=copied.get(path),
+            )
             return mctx
         return None
 
@@ -661,24 +710,33 @@
     editor = None
     if not skipprompt:
         editor = cmdutil.getcommiteditor(edit=True, editform='histedit.fold')
-    new = context.memctx(repo,
-                         parents=parents,
-                         text=message,
-                         files=files,
-                         filectxfn=filectxfn,
-                         user=user,
-                         date=date,
-                         extra=extra,
-                         editor=editor)
+    new = context.memctx(
+        repo,
+        parents=parents,
+        text=message,
+        files=files,
+        filectxfn=filectxfn,
+        user=user,
+        date=date,
+        extra=extra,
+        editor=editor,
+    )
     return repo.commitctx(new)
 
+
 def _isdirtywc(repo):
     return repo[None].dirty(missing=True)
 
+
 def abortdirty():
-    raise error.Abort(_('working copy has pending changes'),
-        hint=_('amend, commit, or revert them and run histedit '
-            '--continue, or abort with histedit --abort'))
+    raise error.Abort(
+        _('working copy has pending changes'),
+        hint=_(
+            'amend, commit, or revert them and run histedit '
+            '--continue, or abort with histedit --abort'
+        ),
+    )
+
 
 def action(verbs, message, priority=False, internal=False):
     def wrap(cls):
@@ -699,11 +757,11 @@
         for verb in verbs:
             actiontable[verb] = cls
         return cls
+
     return wrap
 
-@action(['pick', 'p'],
-        _('use commit'),
-        priority=True)
+
+@action(['pick', 'p'], _('use commit'), priority=True)
 class pick(histeditaction):
     def run(self):
         rulectx = self.repo[self.node]
@@ -713,9 +771,8 @@
 
         return super(pick, self).run()
 
-@action(['edit', 'e'],
-        _('use commit, but stop for amending'),
-        priority=True)
+
+@action(['edit', 'e'], _('use commit, but stop for amending'), priority=True)
 class edit(histeditaction):
     def run(self):
         repo = self.repo
@@ -725,13 +782,14 @@
         raise error.InterventionRequired(
             _('Editing (%s), you may commit or record as needed now.')
             % node.short(self.node),
-            hint=_('hg histedit --continue to resume'))
+            hint=_('hg histedit --continue to resume'),
+        )
 
     def commiteditor(self):
         return cmdutil.getcommiteditor(edit=True, editform='histedit.edit')
 
-@action(['fold', 'f'],
-        _('use commit, but combine it with the one above'))
+
+@action(['fold', 'f'], _('use commit, but combine it with the one above'))
 class fold(histeditaction):
     def verify(self, prev, expected, seen):
         """ Verifies semantic correctness of the fold rule"""
@@ -745,17 +803,20 @@
             c = repo[prev.node]
         if not c.mutable():
             raise error.ParseError(
-                _("cannot fold into public change %s") % node.short(c.node()))
-
+                _("cannot fold into public change %s") % node.short(c.node())
+            )
 
     def continuedirty(self):
         repo = self.repo
         rulectx = repo[self.node]
 
         commit = commitfuncfor(repo, rulectx)
-        commit(text='fold-temp-revision %s' % node.short(self.node),
-               user=rulectx.user(), date=rulectx.date(),
-               extra=rulectx.extra())
+        commit(
+            text='fold-temp-revision %s' % node.short(self.node),
+            user=rulectx.user(),
+            date=rulectx.date(),
+            extra=rulectx.extra(),
+        )
 
     def continueclean(self):
         repo = self.repo
@@ -763,25 +824,30 @@
         rulectx = repo[self.node]
         parentctxnode = self.state.parentctxnode
         if ctx.node() == parentctxnode:
-            repo.ui.warn(_('%s: empty changeset\n') %
-                              node.short(self.node))
+            repo.ui.warn(_('%s: empty changeset\n') % node.short(self.node))
             return ctx, [(self.node, (parentctxnode,))]
 
         parentctx = repo[parentctxnode]
-        newcommits = set(c.node() for c in repo.set('(%d::. - %d)',
-                                                    parentctx.rev(),
-                                                    parentctx.rev()))
+        newcommits = set(
+            c.node()
+            for c in repo.set('(%d::. - %d)', parentctx.rev(), parentctx.rev())
+        )
         if not newcommits:
-            repo.ui.warn(_('%s: cannot fold - working copy is not a '
-                           'descendant of previous commit %s\n') %
-                           (node.short(self.node), node.short(parentctxnode)))
+            repo.ui.warn(
+                _(
+                    '%s: cannot fold - working copy is not a '
+                    'descendant of previous commit %s\n'
+                )
+                % (node.short(self.node), node.short(parentctxnode))
+            )
             return ctx, [(self.node, (ctx.node(),))]
 
         middlecommits = newcommits.copy()
         middlecommits.discard(ctx.node())
 
-        return self.finishfold(repo.ui, repo, parentctx, rulectx, ctx.node(),
-                               middlecommits)
+        return self.finishfold(
+            repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
+        )
 
     def skipprompt(self):
         """Returns true if the rule should skip the message editor.
@@ -818,10 +884,14 @@
         if not self.mergedescs():
             newmessage = ctx.description()
         else:
-            newmessage = '\n***\n'.join(
-                [ctx.description()] +
-                [repo[r].description() for r in internalchanges] +
-                [oldctx.description()]) + '\n'
+            newmessage = (
+                '\n***\n'.join(
+                    [ctx.description()]
+                    + [repo[r].description() for r in internalchanges]
+                    + [oldctx.description()]
+                )
+                + '\n'
+            )
         commitopts['message'] = newmessage
         # date
         if self.firstdate():
@@ -841,23 +911,31 @@
         phasemin = max(ctx.phase(), oldctx.phase())
         overrides = {('phases', 'new-commit'): phasemin}
         with repo.ui.configoverride(overrides, 'histedit'):
-            n = collapse(repo, ctx, repo[newnode], commitopts,
-                         skipprompt=self.skipprompt())
+            n = collapse(
+                repo,
+                ctx,
+                repo[newnode],
+                commitopts,
+                skipprompt=self.skipprompt(),
+            )
         if n is None:
             return ctx, []
         hg.updaterepo(repo, n, overwrite=False)
-        replacements = [(oldctx.node(), (newnode,)),
-                        (ctx.node(), (n,)),
-                        (newnode, (n,)),
-                       ]
+        replacements = [
+            (oldctx.node(), (newnode,)),
+            (ctx.node(), (n,)),
+            (newnode, (n,)),
+        ]
         for ich in internalchanges:
             replacements.append((ich, (n,)))
         return repo[n], replacements
 
-@action(['base', 'b'],
-        _('checkout changeset and apply further changesets from there'))
+
+@action(
+    ['base', 'b'],
+    _('checkout changeset and apply further changesets from there'),
+)
 class base(histeditaction):
-
     def run(self):
         if self.repo['.'].node() != self.node:
             mergemod.update(self.repo, self.node, branchmerge=False, force=True)
@@ -876,25 +954,33 @@
             msg = _('%s "%s" changeset was an edited list candidate')
             raise error.ParseError(
                 msg % (self.verb, node.short(self.node)),
-                hint=_('base must only use unlisted changesets'))
-
-@action(['_multifold'],
-        _(
-    """fold subclass used for when multiple folds happen in a row
+                hint=_('base must only use unlisted changesets'),
+            )
+
+
+@action(
+    ['_multifold'],
+    _(
+        """fold subclass used for when multiple folds happen in a row
 
     We only want to fire the editor for the folded message once when
     (say) four changes are folded down into a single change. This is
     similar to rollup, but we should preserve both messages so that
     when the last fold operation runs we can show the user all the
     commit messages in their editor.
-    """),
-        internal=True)
+    """
+    ),
+    internal=True,
+)
 class _multifold(fold):
     def skipprompt(self):
         return True
 
-@action(["roll", "r"],
-        _("like fold, but discard this commit's description and date"))
+
+@action(
+    ["roll", "r"],
+    _("like fold, but discard this commit's description and date"),
+)
 class rollup(fold):
     def mergedescs(self):
         return False
@@ -905,20 +991,24 @@
     def firstdate(self):
         return True
 
-@action(["drop", "d"],
-        _('remove commit from history'))
+
+@action(["drop", "d"], _('remove commit from history'))
 class drop(histeditaction):
     def run(self):
         parentctx = self.repo[self.state.parentctxnode]
         return parentctx, [(self.node, tuple())]
 
-@action(["mess", "m"],
-        _('edit commit message without changing commit content'),
-        priority=True)
+
+@action(
+    ["mess", "m"],
+    _('edit commit message without changing commit content'),
+    priority=True,
+)
 class message(histeditaction):
     def commiteditor(self):
         return cmdutil.getcommiteditor(edit=True, editform='histedit.mess')
 
+
 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
     """utility function to find the first outgoing changeset
 
@@ -945,6 +1035,7 @@
         raise error.Abort(msg, hint=hint)
     return repo[roots[0]].node()
 
+
 # Curses Support
 try:
     import curses
@@ -957,7 +1048,7 @@
     'roll': '^roll',
 }
 
-COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT  = 1, 2, 3, 4, 5
+COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
 
 E_QUIT, E_HISTEDIT = 1, 2
@@ -966,52 +1057,53 @@
 
 KEYTABLE = {
     'global': {
-        'h':         'next-action',
+        'h': 'next-action',
         'KEY_RIGHT': 'next-action',
-        'l':         'prev-action',
-        'KEY_LEFT':  'prev-action',
-        'q':         'quit',
-        'c':         'histedit',
-        'C':         'histedit',
-        'v':         'showpatch',
-        '?':         'help',
+        'l': 'prev-action',
+        'KEY_LEFT': 'prev-action',
+        'q': 'quit',
+        'c': 'histedit',
+        'C': 'histedit',
+        'v': 'showpatch',
+        '?': 'help',
     },
     MODE_RULES: {
-        'd':         'action-drop',
-        'e':         'action-edit',
-        'f':         'action-fold',
-        'm':         'action-mess',
-        'p':         'action-pick',
-        'r':         'action-roll',
-        ' ':         'select',
-        'j':         'down',
-        'k':         'up',
-        'KEY_DOWN':  'down',
-        'KEY_UP':    'up',
-        'J':         'move-down',
-        'K':         'move-up',
+        'd': 'action-drop',
+        'e': 'action-edit',
+        'f': 'action-fold',
+        'm': 'action-mess',
+        'p': 'action-pick',
+        'r': 'action-roll',
+        ' ': 'select',
+        'j': 'down',
+        'k': 'up',
+        'KEY_DOWN': 'down',
+        'KEY_UP': 'up',
+        'J': 'move-down',
+        'K': 'move-up',
         'KEY_NPAGE': 'move-down',
         'KEY_PPAGE': 'move-up',
-        '0':         'goto',  # Used for 0..9
+        '0': 'goto',  # Used for 0..9
     },
     MODE_PATCH: {
-        ' ':         'page-down',
+        ' ': 'page-down',
         'KEY_NPAGE': 'page-down',
         'KEY_PPAGE': 'page-up',
-        'j':         'line-down',
-        'k':         'line-up',
-        'KEY_DOWN':  'line-down',
-        'KEY_UP':    'line-up',
-        'J':         'down',
-        'K':         'up',
+        'j': 'line-down',
+        'k': 'line-up',
+        'KEY_DOWN': 'line-down',
+        'KEY_UP': 'line-up',
+        'J': 'down',
+        'K': 'up',
     },
-    MODE_HELP: {
-    },
+    MODE_HELP: {},
 }
 
+
 def screen_size():
     return struct.unpack('hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '    '))
 
+
 class histeditrule(object):
     def __init__(self, ctx, pos, action='pick'):
         self.ctx = ctx
@@ -1039,7 +1131,8 @@
         if self.action == 'roll':
             desc = ''
         return "#{0:<2} {1:<6} {2}:{3}   {4}".format(
-                self.origpos, action, r, h, desc)
+            self.origpos, action, r, h, desc
+        )
 
     def checkconflicts(self, other):
         if other.pos > self.pos and other.origpos <= self.origpos:
@@ -1051,6 +1144,7 @@
             self.conflicts.remove(other)
         return self.conflicts
 
+
 # ============ EVENTS ===============
 def movecursor(state, oldpos, newpos):
     '''Change the rule/changeset that the cursor is pointing to, regardless of
@@ -1071,15 +1165,18 @@
     # Reset the patch view region to the top of the new patch.
     state['modes'][MODE_PATCH]['line_offset'] = 0
 
+
 def changemode(state, mode):
     curmode, _ = state['mode']
     state['mode'] = (mode, curmode)
     if mode == MODE_PATCH:
         state['modes'][MODE_PATCH]['patchcontents'] = patchcontents(state)
 
+
 def makeselection(state, pos):
     state['selected'] = pos
 
+
 def swap(state, oldpos, newpos):
     """Swap two positions and calculate necessary conflicts in
     O(|newpos-oldpos|) time"""
@@ -1102,12 +1199,14 @@
     if state['selected']:
         makeselection(state, newpos)
 
+
 def changeaction(state, pos, action):
     """Change the action state on the given position to the new action"""
     rules = state['rules']
     assert 0 <= pos < len(rules)
     rules[pos].action = action
 
+
 def cycleaction(state, pos, next=False):
     """Changes the action state the next or the previous action from
     the action list"""
@@ -1124,6 +1223,7 @@
         index -= 1
     changeaction(state, pos, KEY_LIST[index % len(KEY_LIST)])
 
+
 def changeview(state, delta, unit):
     '''Change the region of whatever is being viewed (a patch or the list of
     changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
@@ -1139,6 +1239,7 @@
     newline = mode_state['line_offset'] + delta * unit
     mode_state['line_offset'] = max(0, min(max_offset, newline))
 
+
 def event(state, ch):
     """Change state based on the current character input
 
@@ -1201,6 +1302,7 @@
     elif action == 'line-up':
         return E_LINEUP
 
+
 def makecommands(rules):
     """Returns a list of commands consumable by histedit --commands based on
     our list of rules"""
@@ -1209,6 +1311,7 @@
         commands.append("{0} {1}\n".format(rules.action, rules.ctx))
     return commands
 
+
 def addln(win, y, x, line, color=None):
     """Add a line to the given window left padding but 100% filled with
     whitespace characters, so that the color appears on the whole line"""
@@ -1224,27 +1327,32 @@
     else:
         win.addstr(y, x, line)
 
+
 def _trunc_head(line, n):
     if len(line) <= n:
         return line
-    return '> ' + line[-(n - 2):]
+    return '> ' + line[-(n - 2) :]
+
+
 def _trunc_tail(line, n):
     if len(line) <= n:
         return line
-    return line[:n - 2] + ' >'
+    return line[: n - 2] + ' >'
+
 
 def patchcontents(state):
     repo = state['repo']
     rule = state['rules'][state['pos']]
-    displayer = logcmdutil.changesetdisplayer(repo.ui, repo, {
-        "patch": True,  "template": "status"
-    }, buffered=True)
-    overrides = {('ui',  'verbose'): True}
+    displayer = logcmdutil.changesetdisplayer(
+        repo.ui, repo, {"patch": True, "template": "status"}, buffered=True
+    )
+    overrides = {('ui', 'verbose'): True}
     with repo.ui.configoverride(overrides, source='histedit'):
         displayer.show(rule.ctx)
         displayer.close()
     return displayer.hunk[rule.ctx.rev()].splitlines()
 
+
 def _chisteditmain(repo, rules, stdscr):
     try:
         curses.use_default_colors()
@@ -1362,8 +1470,13 @@
             if y + start == selected:
                 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
             elif y + start == pos:
-                addln(rulesscr, y, 2, rule,
-                      curses.color_pair(COLOR_CURRENT) | curses.A_BOLD)
+                addln(
+                    rulesscr,
+                    y,
+                    2,
+                    rule,
+                    curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
+                )
             else:
                 addln(rulesscr, y, 2, rule)
         rulesscr.noutrefresh()
@@ -1376,13 +1489,14 @@
             if diffcolors:
                 if line and line[0] == '+':
                     win.addstr(
-                        y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE))
+                        y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
+                    )
                 elif line and line[0] == '-':
                     win.addstr(
-                        y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE))
+                        y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
+                    )
                 elif line.startswith('@@ '):
-                    win.addstr(
-                        y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
+                    win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
                 else:
                     win.addstr(y, 0, line)
             else:
@@ -1415,12 +1529,8 @@
         'mode': (MODE_INIT, MODE_INIT),
         'page_height': None,
         'modes': {
-            MODE_RULES: {
-                'line_offset': 0,
-            },
-            MODE_PATCH: {
-                'line_offset': 0,
-            }
+            MODE_RULES: {'line_offset': 0,},
+            MODE_PATCH: {'line_offset': 0,},
         },
         'repo': repo,
     }
@@ -1488,6 +1598,7 @@
         except curses.error:
             pass
 
+
 def _chistedit(ui, repo, *freeargs, **opts):
     """interactively edit changeset history via a curses interface
 
@@ -1507,8 +1618,12 @@
         cmdutil.bailifchanged(repo)
 
         if os.path.exists(os.path.join(repo.path, 'histedit-state')):
-            raise error.Abort(_('history edit already in progress, try '
-                               '--continue or --abort'))
+            raise error.Abort(
+                _(
+                    'history edit already in progress, try '
+                    '--continue or --abort'
+                )
+            )
         revs.extend(freeargs)
         if not revs:
             defaultrev = destutil.desthistedit(ui, repo)
@@ -1516,19 +1631,26 @@
                 revs.append(defaultrev)
         if len(revs) != 1:
             raise error.Abort(
-                _('histedit requires exactly one ancestor revision'))
+                _('histedit requires exactly one ancestor revision')
+            )
 
         rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
         if len(rr) != 1:
-            raise error.Abort(_('The specified revisions must have '
-                'exactly one common root'))
+            raise error.Abort(
+                _(
+                    'The specified revisions must have '
+                    'exactly one common root'
+                )
+            )
         root = rr[0].node()
 
         topmost = repo.dirstate.p1()
         revs = between(repo, root, topmost, keep)
         if not revs:
-            raise error.Abort(_('%s is not an ancestor of working directory') %
-                             node.short(root))
+            raise error.Abort(
+                _('%s is not an ancestor of working directory')
+                % node.short(root)
+            )
 
         ctxs = []
         for i, r in enumerate(revs):
@@ -1556,21 +1678,34 @@
         pass
     return -1
 
-@command('histedit',
-    [('', 'commands', '',
-      _('read history edits from the specified file'), _('FILE')),
-     ('c', 'continue', False, _('continue an edit already in progress')),
-     ('', 'edit-plan', False, _('edit remaining actions list')),
-     ('k', 'keep', False,
-      _("don't strip old nodes after edit is complete")),
-     ('', 'abort', False, _('abort an edit in progress')),
-     ('o', 'outgoing', False, _('changesets not found in destination')),
-     ('f', 'force', False,
-      _('force outgoing even for unrelated repositories')),
-     ('r', 'rev', [], _('first revision to be edited'), _('REV'))] +
-    cmdutil.formatteropts,
-     _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+
+@command(
+    'histedit',
+    [
+        (
+            '',
+            'commands',
+            '',
+            _('read history edits from the specified file'),
+            _('FILE'),
+        ),
+        ('c', 'continue', False, _('continue an edit already in progress')),
+        ('', 'edit-plan', False, _('edit remaining actions list')),
+        ('k', 'keep', False, _("don't strip old nodes after edit is complete")),
+        ('', 'abort', False, _('abort an edit in progress')),
+        ('o', 'outgoing', False, _('changesets not found in destination')),
+        (
+            'f',
+            'force',
+            False,
+            _('force outgoing even for unrelated repositories'),
+        ),
+        ('r', 'rev', [], _('first revision to be edited'), _('REV')),
+    ]
+    + cmdutil.formatteropts,
+    _("[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def histedit(ui, repo, *freeargs, **opts):
     """interactively edit changeset history
 
@@ -1673,11 +1808,14 @@
     # kludge: _chistedit only works for starting an edit, not aborting
     # or continuing, so fall back to regular _texthistedit for those
     # operations.
-    if ui.interface('histedit') == 'curses' and  _getgoal(
-            pycompat.byteskwargs(opts)) == goalnew:
+    if (
+        ui.interface('histedit') == 'curses'
+        and _getgoal(pycompat.byteskwargs(opts)) == goalnew
+    ):
         return _chistedit(ui, repo, *freeargs, **opts)
     return _texthistedit(ui, repo, *freeargs, **opts)
 
+
 def _texthistedit(ui, repo, *freeargs, **opts):
     state = histeditstate(repo)
     with repo.wlock() as wlock, repo.lock() as lock:
@@ -1685,11 +1823,13 @@
         state.lock = lock
         _histedit(ui, repo, state, *freeargs, **opts)
 
+
 goalcontinue = 'continue'
 goalabort = 'abort'
 goaleditplan = 'edit-plan'
 goalnew = 'new'
 
+
 def _getgoal(opts):
     if opts.get(b'continue'):
         return goalcontinue
@@ -1699,6 +1839,7 @@
         return goaleditplan
     return goalnew
 
+
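
_getgoal resolves the mutually exclusive flags in a fixed order,
falling through to 'new' when none is set. A sketch consistent with
the checks visible above (assuming the elided middle of the hunk
tests --abort the same way):

    def getgoal(opts):
        # first flag that is set wins; with none set the goal is 'new'
        for flag in (b'continue', b'abort', b'edit-plan'):
            if opts.get(flag):
                return flag.decode()
        return 'new'

    assert getgoal({b'abort': True}) == 'abort'
    assert getgoal({}) == 'new'
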
 def _readfile(ui, path):
     if path == '-':
         with ui.timeblockedsection('histedit'):
@@ -1707,6 +1848,7 @@
         with open(path, 'rb') as f:
             return f.read()
 
+
 def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
     # TODO only abort if we try to histedit mq patches, not just
     # blanket if mq patches are applied somewhere
@@ -1729,18 +1871,24 @@
             raise error.Abort(_('no arguments allowed with --abort'))
     elif goal == 'edit-plan':
         if any((outg, revs, freeargs)):
-            raise error.Abort(_('only --commands argument allowed with '
-                               '--edit-plan'))
+            raise error.Abort(
+                _('only --commands argument allowed with ' '--edit-plan')
+            )
     else:
         if state.inprogress():
-            raise error.Abort(_('history edit already in progress, try '
-                               '--continue or --abort'))
+            raise error.Abort(
+                _(
+                    'history edit already in progress, try '
+                    '--continue or --abort'
+                )
+            )
         if outg:
             if revs:
                 raise error.Abort(_('no revisions allowed with --outgoing'))
             if len(freeargs) > 1:
                 raise error.Abort(
-                    _('only one repo argument allowed with --outgoing'))
+                    _('only one repo argument allowed with --outgoing')
+                )
         else:
             revs.extend(freeargs)
             if len(revs) == 0:
@@ -1750,7 +1898,9 @@
 
             if len(revs) != 1:
                 raise error.Abort(
-                    _('histedit requires exactly one ancestor revision'))
+                    _('histedit requires exactly one ancestor revision')
+                )
+
 
 def _histedit(ui, repo, state, *freeargs, **opts):
     opts = pycompat.byteskwargs(opts)
@@ -1773,10 +1923,14 @@
             if not hastags:
                 hastags = len(tags)
     if hastags:
-        if ui.promptchoice(_('warning: tags associated with the given'
-                             ' changeset will be lost after histedit.\n'
-                             'do you want to continue (yN)? $$ &Yes $$ &No'),
-                           default=1):
+        if ui.promptchoice(
+            _(
+                'warning: tags associated with the given'
+                ' changeset will be lost after histedit.\n'
+                'do you want to continue (yN)? $$ &Yes $$ &No'
+            ),
+            default=1,
+        ):
             raise error.Abort(_('histedit cancelled\n'))
     # rebuild state
     if goal == goalcontinue:
@@ -1796,6 +1950,7 @@
     _finishhistedit(ui, repo, state, fm)
     fm.end()
 
+
 def _continuehistedit(ui, repo, state):
     """This function runs after either:
     - bootstrapcontinue (if the goal is 'continue')
@@ -1804,8 +1959,7 @@
     # preprocess rules so that we can hide inner folds from the user
     # and only show one editor
     actions = state.actions[:]
-    for idx, (action, nextact) in enumerate(
-            zip(actions, actions[1:] + [None])):
+    for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
         if action.verb == 'fold' and nextact and nextact.verb == 'fold':
             state.actions[idx].__class__ = _multifold
 
@@ -1822,15 +1976,17 @@
         # and reopen a transaction. For example, if the action executes an
         # external process it may choose to commit the transaction first.
         tr = repo.transaction('histedit')
-    progress = ui.makeprogress(_("editing"), unit=_('changes'),
-                               total=len(state.actions))
+    progress = ui.makeprogress(
+        _("editing"), unit=_('changes'), total=len(state.actions)
+    )
     with progress, util.acceptintervention(tr):
         while state.actions:
             state.write(tr=tr)
             actobj = state.actions[0]
             progress.increment(item=actobj.torule())
-            ui.debug('histedit: processing %s %s\n' % (actobj.verb,
-                                                       actobj.torule()))
+            ui.debug(
+                'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
+            )
             parentctx, replacement_ = actobj.run()
             state.parentctxnode = parentctx.node()
             state.replacements.extend(replacement_)
@@ -1838,6 +1994,7 @@
 
     state.write()
 
+
 def _finishhistedit(ui, repo, state, fm):
     """This action runs when histedit is finishing its session"""
     hg.updaterepo(repo, state.parentctxnode, overwrite=False)
@@ -1848,8 +2005,10 @@
             if not succs:
                 ui.debug('histedit: %s is dropped\n' % node.short(prec))
             else:
-                ui.debug('histedit: %s is replaced by %s\n' % (
-                    node.short(prec), node.short(succs[0])))
+                ui.debug(
+                    'histedit: %s is replaced by %s\n'
+                    % (node.short(prec), node.short(succs[0]))
+                )
                 if len(succs) > 1:
                     m = 'histedit:                            %s'
                     for n in succs[1:]:
@@ -1868,15 +2027,23 @@
 
     # remove entries about unknown nodes
     nodemap = repo.unfiltered().changelog.nodemap
-    mapping = {k: v for k, v in mapping.items()
-               if k in nodemap and all(n in nodemap for n in v)}
+    mapping = {
+        k: v
+        for k, v in mapping.items()
+        if k in nodemap and all(n in nodemap for n in v)
+    }
     scmutil.cleanupnodes(repo, mapping, 'histedit')
     hf = fm.hexfunc
     fl = fm.formatlist
     fd = fm.formatdict
-    nodechanges = fd({hf(oldn): fl([hf(n) for n in newn], name='node')
-                      for oldn, newn in mapping.iteritems()},
-                     key="oldnode", value="newnodes")
+    nodechanges = fd(
+        {
+            hf(oldn): fl([hf(n) for n in newn], name='node')
+            for oldn, newn in mapping.iteritems()
+        },
+        key="oldnode",
+        value="newnodes",
+    )
     fm.data(nodechanges=nodechanges)
 
     state.clear()
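
The dict comprehension above drops replacement entries whose old or
new nodes the changelog no longer knows about; the same filter in
isolation, with a plain set standing in for the real nodemap:

    nodemap = {b'old1', b'new1'}  # stand-in for changelog.nodemap
    mapping = {b'old1': [b'new1'], b'old2': [b'new2']}
    mapping = {
        k: v
        for k, v in mapping.items()
        if k in nodemap and all(n in nodemap for n in v)
    }
    assert mapping == {b'old1': [b'new1']}
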
@@ -1885,12 +2052,12 @@
     if repo.vfs.exists('histedit-last-edit.txt'):
         repo.vfs.unlink('histedit-last-edit.txt')
 
+
 def _aborthistedit(ui, repo, state, nobackup=False):
     try:
         state.read()
         __, leafs, tmpnodes, __ = processreplacement(state)
-        ui.debug('restore wc to old parent %s\n'
-                % node.short(state.topmost))
+        ui.debug('restore wc to old parent %s\n' % node.short(state.topmost))
 
         # Recover our old commits if necessary
         if not state.topmost in repo and state.backupfile:
@@ -1898,25 +2065,34 @@
             f = hg.openpath(ui, backupfile)
             gen = exchange.readbundle(ui, f, backupfile)
             with repo.transaction('histedit.abort') as tr:
-                bundle2.applybundle(repo, gen, tr, source='histedit',
-                                    url='bundle:' + backupfile)
+                bundle2.applybundle(
+                    repo, gen, tr, source='histedit', url='bundle:' + backupfile
+                )
 
             os.remove(backupfile)
 
         # check whether we should update away
-        if repo.unfiltered().revs('parents() and (%n  or %ln::)',
-                                state.parentctxnode, leafs | tmpnodes):
+        if repo.unfiltered().revs(
+            'parents() and (%n  or %ln::)',
+            state.parentctxnode,
+            leafs | tmpnodes,
+        ):
             hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
         cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
         cleanupnode(ui, repo, leafs, nobackup=nobackup)
     except Exception:
         if state.inprogress():
-            ui.warn(_('warning: encountered an exception during histedit '
-                '--abort; the repository may not have been completely '
-                'cleaned up\n'))
+            ui.warn(
+                _(
+                    'warning: encountered an exception during histedit '
+                    '--abort; the repository may not have been completely '
+                    'cleaned up\n'
+                )
+            )
         raise
     finally:
-            state.clear()
+        state.clear()
+
 
 def hgaborthistedit(ui, repo):
     state = histeditstate(repo)
@@ -1926,22 +2102,23 @@
         state.lock = lock
         _aborthistedit(ui, repo, state, nobackup=nobackup)
 
+
 def _edithisteditplan(ui, repo, state, rules):
     state.read()
     if not rules:
-        comment = geteditcomment(ui,
-                                 node.short(state.parentctxnode),
-                                 node.short(state.topmost))
+        comment = geteditcomment(
+            ui, node.short(state.parentctxnode), node.short(state.topmost)
+        )
         rules = ruleeditor(repo, ui, state.actions, comment)
     else:
         rules = _readfile(ui, rules)
     actions = parserules(rules, state)
-    ctxs = [repo[act.node]
-            for act in state.actions if act.node]
+    ctxs = [repo[act.node] for act in state.actions if act.node]
     warnverifyactions(ui, repo, actions, state, ctxs)
     state.actions = actions
     state.write()
 
+
 def _newhistedit(ui, repo, state, revs, freeargs, opts):
     outg = opts.get('outgoing')
     rules = opts.get('commands', '')
@@ -1960,14 +2137,19 @@
     else:
         rr = list(repo.set('roots(%ld)', scmutil.revrange(repo, revs)))
         if len(rr) != 1:
-            raise error.Abort(_('The specified revisions must have '
-                'exactly one common root'))
+            raise error.Abort(
+                _(
+                    'The specified revisions must have '
+                    'exactly one common root'
+                )
+            )
         root = rr[0].node()
 
     revs = between(repo, root, topmost, state.keep)
     if not revs:
-        raise error.Abort(_('%s is not an ancestor of working directory') %
-                         node.short(root))
+        raise error.Abort(
+            _('%s is not an ancestor of working directory') % node.short(root)
+        )
 
     ctxs = [repo[r] for r in revs]
 
@@ -1983,15 +2165,24 @@
     for c in [ctxs[0].p1()] + ctxs:
         try:
             mergemod.calculateupdates(
-                repo, wctx, c, ancs,
+                repo,
+                wctx,
+                c,
+                ancs,
                 # These parameters were determined by print-debugging
                 # what happens later on inside histedit.
-                branchmerge=False, force=False, acceptremote=False,
-                followcopies=False)
+                branchmerge=False,
+                force=False,
+                acceptremote=False,
+                followcopies=False,
+            )
         except error.Abort:
             raise error.Abort(
-       _("untracked files in working directory conflict with files in %s") % (
-           c))
+                _(
+                    "untracked files in working directory conflict with files in %s"
+                )
+                % c
+            )
 
     if not rules:
         comment = geteditcomment(ui, node.short(root), node.short(topmost))
@@ -2009,16 +2200,22 @@
     state.topmost = topmost
     state.replacements = []
 
-    ui.log("histedit", "%d actions to histedit\n", len(actions),
-           histedit_num_actions=len(actions))
+    ui.log(
+        "histedit",
+        "%d actions to histedit\n",
+        len(actions),
+        histedit_num_actions=len(actions),
+    )
 
     # Create a backup so we can always abort completely.
     backupfile = None
     if not obsolete.isenabled(repo, obsolete.createmarkersopt):
-        backupfile = repair.backupbundle(repo, [parentctxnode],
-                                         [topmost], root, 'histedit')
+        backupfile = repair.backupbundle(
+            repo, [parentctxnode], [topmost], root, 'histedit'
+        )
     state.backupfile = backupfile
 
+
 def _getsummary(ctx):
     # a common pattern is to extract the summary but default to the empty
     # string
@@ -2027,6 +2224,7 @@
         summary = summary.splitlines()[0]
     return summary
 
+
 def bootstrapcontinue(ui, state, opts):
     repo = state.repo
 
@@ -2048,24 +2246,33 @@
 
     return state
 
+
 def between(repo, old, new, keep):
     """select and validate the set of revision to edit
 
     When keep is false, the specified set can't have children."""
     revs = repo.revs('%n::%n', old, new)
     if revs and not keep:
-        if (not obsolete.isenabled(repo, obsolete.allowunstableopt) and
-            repo.revs('(%ld::) - (%ld)', revs, revs)):
-            raise error.Abort(_('can only histedit a changeset together '
-                                'with all its descendants'))
+        if not obsolete.isenabled(
+            repo, obsolete.allowunstableopt
+        ) and repo.revs('(%ld::) - (%ld)', revs, revs):
+            raise error.Abort(
+                _(
+                    'can only histedit a changeset together '
+                    'with all its descendants'
+                )
+            )
         if repo.revs('(%ld) and merge()', revs):
             raise error.Abort(_('cannot edit history that contains merges'))
         root = repo[revs.first()]  # list is already sorted by repo.revs()
         if not root.mutable():
-            raise error.Abort(_('cannot edit public changeset: %s') % root,
-                             hint=_("see 'hg help phases' for details"))
+            raise error.Abort(
+                _('cannot edit public changeset: %s') % root,
+                hint=_("see 'hg help phases' for details"),
+            )
     return pycompat.maplist(repo.changelog.node, revs)
 
+
 def ruleeditor(repo, ui, actions, editcomment=""):
     """open an editor to edit rules
 
@@ -2085,7 +2292,7 @@
                 if fword in primaryactions | secondaryactions | tertiaryactions:
                     act.verb = fword
                     # get the target summary
-                    tsum = summary[len(fword) + 1:].lstrip()
+                    tsum = summary[len(fword) + 1 :].lstrip()
                     # safe but slow: reverse iterate over the actions so we
                     # don't clash on two commits having the same summary
                     for na, l in reversed(list(newact.iteritems())):
@@ -2108,8 +2315,13 @@
     rules = '\n'.join([act.torule() for act in actions])
     rules += '\n\n'
     rules += editcomment
-    rules = ui.edit(rules, ui.username(), {'prefix': 'histedit'},
-                    repopath=repo.path, action='histedit')
+    rules = ui.edit(
+        rules,
+        ui.username(),
+        {'prefix': 'histedit'},
+        repopath=repo.path,
+        action='histedit',
+    )
 
     # Save edit rules in .hg/histedit-last-edit.txt in case
     # the user needs to ask for help after something
@@ -2119,10 +2331,14 @@
 
     return rules
 
+
 def parserules(rules, state):
     """Read the histedit rules string and return list of action objects """
-    rules = [l for l in (r.strip() for r in rules.splitlines())
-                if l and not l.startswith('#')]
+    rules = [
+        l
+        for l in (r.strip() for r in rules.splitlines())
+        if l and not l.startswith('#')
+    ]
     actions = []
     for r in rules:
         if ' ' not in r:
@@ -2136,15 +2352,21 @@
         actions.append(action)
     return actions
 
+
 def warnverifyactions(ui, repo, actions, state, ctxs):
     try:
         verifyactions(actions, state, ctxs)
     except error.ParseError:
         if repo.vfs.exists('histedit-last-edit.txt'):
-            ui.warn(_('warning: histedit rules saved '
-                      'to: .hg/histedit-last-edit.txt\n'))
+            ui.warn(
+                _(
+                    'warning: histedit rules saved '
+                    'to: .hg/histedit-last-edit.txt\n'
+                )
+            )
         raise
 
+
 def verifyactions(actions, state, ctxs):
     """Verify that there exists exactly one action per given changeset and
     other constraints.
@@ -2157,8 +2379,9 @@
     prev = None
 
     if actions and actions[0].verb in ['roll', 'fold']:
-        raise error.ParseError(_('first changeset cannot use verb "%s"') %
-                               actions[0].verb)
+        raise error.ParseError(
+            _('first changeset cannot use verb "%s"') % actions[0].verb
+        )
 
     for action in actions:
         action.verify(prev, expected, seen)
@@ -2169,19 +2392,25 @@
 
     if state.repo.ui.configbool('histedit', 'dropmissing'):
         if len(actions) == 0:
-            raise error.ParseError(_('no rules provided'),
-                    hint=_('use strip extension to remove commits'))
+            raise error.ParseError(
+                _('no rules provided'),
+                hint=_('use strip extension to remove commits'),
+            )
 
         drops = [drop(state, n) for n in missing]
         # put them at the beginning so they execute immediately and
         # don't show in the edit-plan in the future
         actions[:0] = drops
     elif missing:
-        raise error.ParseError(_('missing rules for changeset %s') %
-                node.short(missing[0]),
-                hint=_('use "drop %s" to discard, see also: '
-                       "'hg help -e histedit.config'")
-                       % node.short(missing[0]))
+        raise error.ParseError(
+            _('missing rules for changeset %s') % node.short(missing[0]),
+            hint=_(
+                'use "drop %s" to discard, see also: '
+                "'hg help -e histedit.config'"
+            )
+            % node.short(missing[0]),
+        )
+
 
 def adjustreplacementsfrommarkers(repo, oldreplacements):
     """Adjust replacements from obsolescence markers
@@ -2199,7 +2428,9 @@
     newreplacements = list(oldreplacements)
     oldsuccs = [r[1] for r in oldreplacements]
     # successors that have already been added to succstocheck once
-    seensuccs = set().union(*oldsuccs) # create a set from an iterable of tuples
+    seensuccs = set().union(
+        *oldsuccs
+    )  # create a set from an iterable of tuples
     succstocheck = list(seensuccs)
     while succstocheck:
         n = succstocheck.pop()
@@ -2218,6 +2449,7 @@
 
     return newreplacements
 
+
 def processreplacement(state):
     """process the list of replacements to return
 
@@ -2279,6 +2511,7 @@
 
     return final, tmpnodes, new, newtopmost
 
+
 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
     """Move bookmark from oldtopmost to newly created topmost
 
@@ -2296,6 +2529,7 @@
                 changes.append((name, newtopmost))
             marks.applychanges(repo, tr, changes)
 
+
 def cleanupnode(ui, repo, nodes, nobackup=False):
     """strip a group of nodes from the repository
 
@@ -2314,22 +2548,28 @@
             backup = not nobackup
             repair.strip(ui, repo, roots, backup=backup)
 
+
 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
     if isinstance(nodelist, str):
         nodelist = [nodelist]
     state = histeditstate(repo)
     if state.inprogress():
         state.read()
-        histedit_nodes = {action.node for action
-                          in state.actions if action.node}
+        histedit_nodes = {
+            action.node for action in state.actions if action.node
+        }
         common_nodes = histedit_nodes & set(nodelist)
         if common_nodes:
-            raise error.Abort(_("histedit in progress, can't strip %s")
-                             % ', '.join(node.short(x) for x in common_nodes))
+            raise error.Abort(
+                _("histedit in progress, can't strip %s")
+                % ', '.join(node.short(x) for x in common_nodes)
+            )
     return orig(ui, repo, nodelist, *args, **kwargs)
 
+
 extensions.wrapfunction(repair, 'strip', stripwrapper)
 
+
 def summaryhook(ui, repo):
     state = histeditstate(repo)
     if not state.inprogress():
@@ -2337,11 +2577,21 @@
     state.read()
     if state.actions:
         # i18n: column positioning for "hg summary"
-        ui.write(_('hist:   %s (histedit --continue)\n') %
-                 (ui.label(_('%d remaining'), 'histedit.remaining') %
-                  len(state.actions)))
+        ui.write(
+            _('hist:   %s (histedit --continue)\n')
+            % (
+                ui.label(_('%d remaining'), 'histedit.remaining')
+                % len(state.actions)
+            )
+        )
+
 
 def extsetup(ui):
     cmdutil.summaryhooks.add('histedit', summaryhook)
-    statemod.addunfinished('histedit', fname='histedit-state', allowcommit=True,
-                            continueflag=True, abortfunc=hgaborthistedit)
+    statemod.addunfinished(
+        'histedit',
+        fname='histedit-state',
+        allowcommit=True,
+        continueflag=True,
+        abortfunc=hgaborthistedit,
+    )
--- a/hgext/infinitepush/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -143,38 +143,38 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('infinitepush', 'server',
-    default=False,
+configitem(
+    'infinitepush', 'server', default=False,
 )
-configitem('infinitepush', 'storetype',
-    default='',
+configitem(
+    'infinitepush', 'storetype', default='',
 )
-configitem('infinitepush', 'indextype',
-    default='',
+configitem(
+    'infinitepush', 'indextype', default='',
 )
-configitem('infinitepush', 'indexpath',
-    default='',
+configitem(
+    'infinitepush', 'indexpath', default='',
 )
-configitem('infinitepush', 'storeallparts',
-    default=False,
+configitem(
+    'infinitepush', 'storeallparts', default=False,
 )
-configitem('infinitepush', 'reponame',
-    default='',
+configitem(
+    'infinitepush', 'reponame', default='',
 )
-configitem('scratchbranch', 'storepath',
-    default='',
+configitem(
+    'scratchbranch', 'storepath', default='',
 )
-configitem('infinitepush', 'branchpattern',
-    default='',
+configitem(
+    'infinitepush', 'branchpattern', default='',
 )
-configitem('infinitepush', 'pushtobundlestore',
-    default=False,
+configitem(
+    'infinitepush', 'pushtobundlestore', default=False,
 )
-configitem('experimental', 'server-bundlestore-bookmark',
-    default='',
+configitem(
+    'experimental', 'server-bundlestore-bookmark', default='',
 )
-configitem('experimental', 'infinitepush-scratchpush',
-    default=False,
+configitem(
+    'experimental', 'infinitepush-scratchpush', default=False,
 )
 
 experimental = 'experimental'
@@ -187,6 +187,7 @@
 _scratchbranchmatcher = lambda x: False
 _maybehash = re.compile(r'^[a-f0-9]+$').search
 
+
 def _buildexternalbundlestore(ui):
     put_args = ui.configlist('infinitepush', 'put_args', [])
     put_binary = ui.config('infinitepush', 'put_binary')
@@ -197,8 +198,10 @@
     if not get_binary:
         raise error.Abort('get binary is not specified')
     from . import store
+
     return store.externalbundlestore(put_binary, put_args, get_binary, get_args)
 
+
 def _buildsqlindex(ui):
     sqlhost = ui.config('infinitepush', 'sqlhost')
     if not sqlhost:
@@ -212,10 +215,20 @@
     waittimeout = ui.configint('infinitepush', 'waittimeout', 300)
     locktimeout = ui.configint('infinitepush', 'locktimeout', 120)
     from . import sqlindexapi
+
     return sqlindexapi.sqlindexapi(
-        reponame, host, port, db, user, password,
-        logfile, _getloglevel(ui), waittimeout=waittimeout,
-        locktimeout=locktimeout)
+        reponame,
+        host,
+        port,
+        db,
+        user,
+        password,
+        logfile,
+        _getloglevel(ui),
+        waittimeout=waittimeout,
+        locktimeout=locktimeout,
+    )
+
 
 def _getloglevel(ui):
     loglevel = ui.config('infinitepush', 'loglevel', 'DEBUG')
@@ -224,6 +237,7 @@
         raise error.Abort(_('invalid log level %s') % loglevel)
     return numeric_loglevel
 
+
 def _tryhoist(ui, remotebookmark):
     '''returns a bookmark with the hoisted part removed
 
@@ -236,39 +250,47 @@
     if common.isremotebooksenabled(ui):
         hoist = ui.config('remotenames', 'hoistedpeer') + '/'
         if remotebookmark.startswith(hoist):
-            return remotebookmark[len(hoist):]
+            return remotebookmark[len(hoist) :]
     return remotebookmark
 
+
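
The hoisting in _tryhoist is plain prefix stripping on the remote
bookmark name. A minimal sketch, assuming the hoisted peer is
configured as 'default' (the real name comes from the
remotenames.hoistedpeer config read above):

    hoist = 'default' + '/'

    def tryhoist(remotebookmark):
        if remotebookmark.startswith(hoist):
            return remotebookmark[len(hoist):]
        return remotebookmark

    assert tryhoist('default/mybook') == 'mybook'
    assert tryhoist('other/mybook') == 'other/mybook'
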
 class bundlestore(object):
     def __init__(self, repo):
         self._repo = repo
         storetype = self._repo.ui.config('infinitepush', 'storetype')
         if storetype == 'disk':
             from . import store
+
             self.store = store.filebundlestore(self._repo.ui, self._repo)
         elif storetype == 'external':
             self.store = _buildexternalbundlestore(self._repo.ui)
         else:
             raise error.Abort(
-                _('unknown infinitepush store type specified %s') % storetype)
+                _('unknown infinitepush store type specified %s') % storetype
+            )
 
         indextype = self._repo.ui.config('infinitepush', 'indextype')
         if indextype == 'disk':
             from . import fileindexapi
+
             self.index = fileindexapi.fileindexapi(self._repo)
         elif indextype == 'sql':
             self.index = _buildsqlindex(self._repo.ui)
         else:
             raise error.Abort(
-                _('unknown infinitepush index type specified %s') % indextype)
+                _('unknown infinitepush index type specified %s') % indextype
+            )
+
 
 def _isserver(ui):
     return ui.configbool('infinitepush', 'server')
 
+
 def reposetup(ui, repo):
     if _isserver(ui) and repo.local():
         repo.bundlestore = bundlestore(repo)
 
+
 def extsetup(ui):
     commonsetup(ui)
     if _isserver(ui):
@@ -276,43 +298,59 @@
     else:
         clientextsetup(ui)
 
+
 def commonsetup(ui):
     wireprotov1server.commands['listkeyspatterns'] = (
-        wireprotolistkeyspatterns, 'namespace patterns')
+        wireprotolistkeyspatterns,
+        'namespace patterns',
+    )
     scratchbranchpat = ui.config('infinitepush', 'branchpattern')
     if scratchbranchpat:
         global _scratchbranchmatcher
-        kind, pat, _scratchbranchmatcher = (
-                stringutil.stringmatcher(scratchbranchpat))
+        kind, pat, _scratchbranchmatcher = stringutil.stringmatcher(
+            scratchbranchpat
+        )
+
 
 def serverextsetup(ui):
     origpushkeyhandler = bundle2.parthandlermapping['pushkey']
 
     def newpushkeyhandler(*args, **kwargs):
         bundle2pushkey(origpushkeyhandler, *args, **kwargs)
+
     newpushkeyhandler.params = origpushkeyhandler.params
     bundle2.parthandlermapping['pushkey'] = newpushkeyhandler
 
     orighandlephasehandler = bundle2.parthandlermapping['phase-heads']
     newphaseheadshandler = lambda *args, **kwargs: bundle2handlephases(
-        orighandlephasehandler, *args, **kwargs)
+        orighandlephasehandler, *args, **kwargs
+    )
     newphaseheadshandler.params = orighandlephasehandler.params
     bundle2.parthandlermapping['phase-heads'] = newphaseheadshandler
 
-    extensions.wrapfunction(localrepo.localrepository, 'listkeys',
-                            localrepolistkeys)
+    extensions.wrapfunction(
+        localrepo.localrepository, 'listkeys', localrepolistkeys
+    )
     wireprotov1server.commands['lookup'] = (
-        _lookupwrap(wireprotov1server.commands['lookup'][0]), 'key')
+        _lookupwrap(wireprotov1server.commands['lookup'][0]),
+        'key',
+    )
     extensions.wrapfunction(exchange, 'getbundlechunks', getbundlechunks)
 
     extensions.wrapfunction(bundle2, 'processparts', processparts)
 
+
 def clientextsetup(ui):
     entry = extensions.wrapcommand(commands.table, 'push', _push)
 
     entry[1].append(
-        ('', 'bundle-store', None,
-         _('force push to go to bundle store (EXPERIMENTAL)')))
+        (
+            '',
+            'bundle-store',
+            None,
+            _('force push to go to bundle store (EXPERIMENTAL)'),
+        )
+    )
 
     extensions.wrapcommand(commands.table, 'pull', _pull)
 
@@ -323,18 +361,22 @@
     partorder = exchange.b2partsgenorder
     index = partorder.index('changeset')
     partorder.insert(
-        index, partorder.pop(partorder.index(scratchbranchparttype)))
+        index, partorder.pop(partorder.index(scratchbranchparttype))
+    )
+
 
 def _checkheads(orig, pushop):
     if pushop.ui.configbool(experimental, configscratchpush, False):
         return
     return orig(pushop)
 
+
 def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
     patterns = wireprototypes.decodelist(patterns)
     d = repo.listkeys(encoding.tolocal(namespace), patterns).iteritems()
     return pushkey.encodekeys(d)
 
+
 def localrepolistkeys(orig, self, namespace, patterns=None):
     if namespace == 'bookmarks' and patterns:
         index = self.bundlestore.index
@@ -352,6 +394,7 @@
     else:
         return orig(self, namespace)
 
+
 @wireprotov1peer.batchable
 def listkeyspatterns(self, namespace, patterns):
     if not self.capable('pushkey'):
@@ -360,16 +403,17 @@
     self.ui.debug('preparing listkeys for "%s"\n' % namespace)
     yield {
         'namespace': encoding.fromlocal(namespace),
-        'patterns': wireprototypes.encodelist(patterns)
+        'patterns': wireprototypes.encodelist(patterns),
     }, f
     d = f.value
-    self.ui.debug('received listkey for "%s": %i bytes\n'
-                  % (namespace, len(d)))
+    self.ui.debug('received listkey for "%s": %i bytes\n' % (namespace, len(d)))
     yield pushkey.decodekeys(d)
 
+
 def _readbundlerevs(bundlerepo):
     return list(bundlerepo.revs('bundle()'))
 
+
 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
     '''Tells remotefilelog to include all changed files in the changegroup
 
@@ -387,7 +431,7 @@
     changedfiles = '\0'.join(changedfiles)
     newcaps = []
     appended = False
-    for cap in (bundlecaps or []):
+    for cap in bundlecaps or []:
         if cap.startswith('excludepattern='):
             newcaps.append('\0'.join((cap, changedfiles)))
             appended = True
@@ -399,6 +443,7 @@
 
     return newcaps
 
+
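
The bundlecaps rewrite above appends the changed-file list to an
existing excludepattern capability with NUL separators; the string
mechanics in isolation (the pattern below is invented for
illustration):

    cap = 'excludepattern=scratch/*'
    changedfiles = '\0'.join(['a.txt', 'b.txt'])
    newcap = '\0'.join((cap, changedfiles))
    assert newcap == 'excludepattern=scratch/*\x00a.txt\x00b.txt'
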
 def _rebundle(bundlerepo, bundleroots, unknownhead):
     '''
     Bundle may include more revisions than the user requested. For example,
@@ -408,8 +453,9 @@
     parts = []
 
     version = '02'
-    outgoing = discovery.outgoing(bundlerepo, commonheads=bundleroots,
-                                  missingheads=[unknownhead])
+    outgoing = discovery.outgoing(
+        bundlerepo, commonheads=bundleroots, missingheads=[unknownhead]
+    )
     cgstream = changegroup.makestream(bundlerepo, outgoing, version, 'pull')
     cgstream = util.chunkbuffer(cgstream).read()
     cgpart = bundle2.bundlepart('changegroup', data=cgstream)
@@ -418,6 +464,7 @@
 
     return parts
 
+
 def _getbundleroots(oldrepo, bundlerepo, bundlerevs):
     cl = bundlerepo.changelog
     bundleroots = []
@@ -431,10 +478,13 @@
                 bundleroots.append(parent)
     return bundleroots
 
+
 def _needsrebundling(head, bundlerepo):
     bundleheads = list(bundlerepo.revs('heads(bundle())'))
-    return not (len(bundleheads) == 1 and
-                bundlerepo[bundleheads[0]].node() == head)
+    return not (
+        len(bundleheads) == 1 and bundlerepo[bundleheads[0]].node() == head
+    )
+
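
_needsrebundling encodes the fast path: the stored bundle can be
streamed back as-is only when it has exactly one head and that head
is the one the client asked for. The predicate in isolation (node
lookup abstracted away):

    def needsrebundling(bundleheads, requested):
        return not (len(bundleheads) == 1 and bundleheads[0] == requested)

    assert not needsrebundling(['abc123'], 'abc123')        # stream as-is
    assert needsrebundling(['abc123', 'def456'], 'abc123')  # rebundle
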
 
 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
     '''generates a bundle that will be sent to the user
@@ -446,8 +496,9 @@
         with util.posixfile(bundlefile, "rb") as f:
             unbundler = exchange.readbundle(bundlerepo.ui, f, bundlefile)
             if isinstance(unbundler, changegroup.cg1unpacker):
-                part = bundle2.bundlepart('changegroup',
-                                          data=unbundler._stream.read())
+                part = bundle2.bundlepart(
+                    'changegroup', data=unbundler._stream.read()
+                )
                 part.addparam('version', '01')
                 parts.append(part)
             elif isinstance(unbundler, bundle2.unbundle20):
@@ -462,9 +513,10 @@
 
                 if not haschangegroup:
                     raise error.Abort(
-                        'unexpected bundle without changegroup part, ' +
-                        'head: %s' % hex(head),
-                        hint='report to administrator')
+                        'unexpected bundle without changegroup part, '
+                        + 'head: %s' % hex(head),
+                        hint='report to administrator',
+                    )
             else:
                 raise error.Abort('unknown bundle type')
     else:
@@ -472,6 +524,7 @@
 
     return parts
 
+
 def getbundlechunks(orig, repo, source, heads=None, bundlecaps=None, **kwargs):
     heads = heads or []
     # newheads are parents of roots of scratch bundles that were requested
@@ -492,17 +545,22 @@
                     allbundlestocleanup.append((bundlerepo, newbundlefile))
                     bundlerevs = set(_readbundlerevs(bundlerepo))
                     bundlecaps = _includefilelogstobundle(
-                        bundlecaps, bundlerepo, bundlerevs, repo.ui)
+                        bundlecaps, bundlerepo, bundlerevs, repo.ui
+                    )
                     cl = bundlerepo.changelog
                     bundleroots = _getbundleroots(repo, bundlerepo, bundlerevs)
                     for rev in bundlerevs:
                         node = cl.node(rev)
                         newphases[hex(node)] = str(phases.draft)
-                        nodestobundle[node] = (bundlerepo, bundleroots,
-                                               newbundlefile)
+                        nodestobundle[node] = (
+                            bundlerepo,
+                            bundleroots,
+                            newbundlefile,
+                        )
 
                 scratchbundles.append(
-                    _generateoutputparts(head, *nodestobundle[head]))
+                    _generateoutputparts(head, *nodestobundle[head])
+                )
                 newheads.extend(bundleroots)
                 scratchheads.append(head)
     finally:
@@ -520,6 +578,7 @@
     wrappedlistkeys = False
     oldchangegrouppart = exchange.getbundle2partsmapping['changegroup']
     try:
+
         def _changegrouppart(bundler, *args, **kwargs):
             # Order is important here. First add non-scratch part
             # and only then add parts with scratch bundles because
@@ -542,20 +601,24 @@
                 origvalues.update(newphases)
             return origvalues
 
-        extensions.wrapfunction(localrepo.localrepository, 'listkeys',
-                                _listkeys)
+        extensions.wrapfunction(
+            localrepo.localrepository, 'listkeys', _listkeys
+        )
         wrappedlistkeys = True
         heads = list((set(newheads) | set(heads)) - set(scratchheads))
-        result = orig(repo, source, heads=heads,
-                      bundlecaps=bundlecaps, **kwargs)
+        result = orig(
+            repo, source, heads=heads, bundlecaps=bundlecaps, **kwargs
+        )
     finally:
         if wrappedchangegrouppart:
             exchange.getbundle2partsmapping['changegroup'] = oldchangegrouppart
         if wrappedlistkeys:
-            extensions.unwrapfunction(localrepo.localrepository, 'listkeys',
-                                      _listkeys)
+            extensions.unwrapfunction(
+                localrepo.localrepository, 'listkeys', _listkeys
+            )
     return result
 
+
 def _lookupwrap(orig):
     def _lookup(repo, proto, key):
         localkey = encoding.tolocal(key)
@@ -576,8 +639,10 @@
                 else:
                     r = stringutil.forcebytestr(inst)
                     return "%d %s\n" % (0, r)
+
     return _lookup
 
+
 def _pull(orig, ui, repo, source="default", **opts):
     opts = pycompat.byteskwargs(opts)
     # Copy-paste from `pull` command
@@ -603,11 +668,13 @@
         if scratchbookmarks:
             other = hg.peer(repo, opts, source)
             fetchedbookmarks = other.listkeyspatterns(
-                'bookmarks', patterns=scratchbookmarks)
+                'bookmarks', patterns=scratchbookmarks
+            )
             for bookmark in scratchbookmarks:
                 if bookmark not in fetchedbookmarks:
-                    raise error.Abort('remote bookmark %s not found!' %
-                                      bookmark)
+                    raise error.Abort(
+                        'remote bookmark %s not found!' % bookmark
+                    )
                 scratchbookmarks[bookmark] = fetchedbookmarks[bookmark]
                 revs.append(fetchedbookmarks[bookmark])
         opts['bookmark'] = bookmarks
@@ -615,8 +682,9 @@
 
     if scratchbookmarks or unknownnodes:
         # Set anyincoming to True
-        extensions.wrapfunction(discovery, 'findcommonincoming',
-                                _findcommonincoming)
+        extensions.wrapfunction(
+            discovery, 'findcommonincoming', _findcommonincoming
+        )
     try:
         # Remote scratch bookmarks will be deleted because remotenames doesn't
         # know about them. Let's save them before the pull and restore after
@@ -635,6 +703,7 @@
         if scratchbookmarks:
             extensions.unwrapfunction(discovery, 'findcommonincoming')
 
+
 def _readscratchremotebookmarks(ui, repo, other):
     if common.isremotebooksenabled(ui):
         remotenamesext = extensions.find('remotenames')
@@ -648,14 +717,16 @@
         for remotebookmark in repo.names['remotebookmarks'].listnames(repo):
             path, bookname = remotenamesext.splitremotename(remotebookmark)
             if path == remotepath and _scratchbranchmatcher(bookname):
-                nodes = repo.names['remotebookmarks'].nodes(repo,
-                                                            remotebookmark)
+                nodes = repo.names['remotebookmarks'].nodes(
+                    repo, remotebookmark
+                )
                 if nodes:
                     result[bookname] = hex(nodes[0])
         return result
     else:
         return {}
 
+
 def _saveremotebookmarks(repo, newbookmarks, remote):
     remotenamesext = extensions.find('remotenames')
     remotepath = remotenamesext.activepath(repo.ui, remote)
@@ -680,6 +751,7 @@
         bookmarks[bookmark] = hexnode
     remotenamesext.saveremotenames(repo, remotepath, branches, bookmarks)
 
+
 def _savelocalbookmarks(repo, bookmarks):
     if not bookmarks:
         return
@@ -690,10 +762,12 @@
             changes.append((scratchbook, changectx.node()))
         repo._bookmarks.applychanges(repo, tr, changes)
 
+
 def _findcommonincoming(orig, *args, **kwargs):
     common, inc, remoteheads = orig(*args, **kwargs)
     return common, True, remoteheads
 
+
 def _push(orig, ui, repo, dest=None, *args, **opts):
     opts = pycompat.byteskwargs(opts)
     bookmark = opts.get('bookmark')
@@ -719,14 +793,16 @@
             # rather that should be stored in the bundlestore
             opts['bookmark'] = []
             ui.setconfig(experimental, configscratchpush, True)
-            oldphasemove = extensions.wrapfunction(exchange,
-                                                   '_localphasemove',
-                                                   _phasemove)
+            oldphasemove = extensions.wrapfunction(
+                exchange, '_localphasemove', _phasemove
+            )
         # Copy-paste from `push` command
         path = ui.paths.getpath(dest, default=('default-push', 'default'))
         if not path:
-            raise error.Abort(_('default repository not configured!'),
-                             hint=_("see 'hg help config.paths'"))
+            raise error.Abort(
+                _('default repository not configured!'),
+                hint=_("see 'hg help config.paths'"),
+            )
         destpath = path.pushloc or path.loc
         # Remote scratch bookmarks will be deleted because remotenames doesn't
         # know about them. Let's save them before the push and restore after
@@ -735,14 +811,16 @@
         if common.isremotebooksenabled(ui):
             if bookmark and scratchpush:
                 other = hg.peer(repo, opts, destpath)
-                fetchedbookmarks = other.listkeyspatterns('bookmarks',
-                                                          patterns=[bookmark])
+                fetchedbookmarks = other.listkeyspatterns(
+                    'bookmarks', patterns=[bookmark]
+                )
                 remotescratchbookmarks.update(fetchedbookmarks)
             _saveremotebookmarks(repo, remotescratchbookmarks, destpath)
     if oldphasemove:
         exchange._localphasemove = oldphasemove
     return result
 
+
 def _deleteinfinitepushbookmarks(ui, repo, path, names):
     """Prune remote names by removing the bookmarks we don't want anymore,
     then writing the result back to disk
@@ -754,16 +832,24 @@
     nametype_idx = 1
     remote_idx = 2
     name_idx = 3
-    remotenames = [remotename for remotename in
-                   remotenamesext.readremotenames(repo)
-                   if remotename[remote_idx] == path]
-    remote_bm_names = [remotename[name_idx] for remotename in
-                       remotenames if remotename[nametype_idx] == "bookmarks"]
+    remotenames = [
+        remotename
+        for remotename in remotenamesext.readremotenames(repo)
+        if remotename[remote_idx] == path
+    ]
+    remote_bm_names = [
+        remotename[name_idx]
+        for remotename in remotenames
+        if remotename[nametype_idx] == "bookmarks"
+    ]
 
     for name in names:
         if name not in remote_bm_names:
-            raise error.Abort(_("infinitepush bookmark '{}' does not exist "
-                                "in path '{}'").format(name, path))
+            raise error.Abort(
+                _(
+                    "infinitepush bookmark '{}' does not exist " "in path '{}'"
+                ).format(name, path)
+            )
 
     bookmarks = {}
     branches = collections.defaultdict(list)
@@ -776,6 +862,7 @@
 
     remotenamesext.saveremotenames(repo, path, branches, bookmarks)
 
+
 def _phasemove(orig, pushop, nodes, phase=phases.public):
     """prevent commits from being marked public
 
@@ -785,6 +872,7 @@
     if phase != phases.public:
         orig(pushop, nodes, phase)
 
+
 @exchange.b2partsgenerator(scratchbranchparttype)
 def partgen(pushop, bundler):
     bookmark = pushop.ui.config(experimental, configbookmark)
@@ -806,11 +894,9 @@
     # code path.
     bundler.addparam("infinitepush", "True")
 
-    scratchparts = bundleparts.getscratchbranchparts(pushop.repo,
-                                                     pushop.remote,
-                                                     pushop.outgoing,
-                                                     pushop.ui,
-                                                     bookmark)
+    scratchparts = bundleparts.getscratchbranchparts(
+        pushop.repo, pushop.remote, pushop.outgoing, pushop.ui, bookmark
+    )
 
     for scratchpart in scratchparts:
         bundler.addpart(scratchpart)
@@ -821,8 +907,10 @@
 
     return handlereply
 
+
 bundle2.capabilities[bundleparts.scratchbranchparttype] = ()
 
+
 def _getrevs(bundle, oldnode, force, bookmark):
     'extracts and validates the revs to be imported'
     revs = [bundle[r] for r in bundle.revs('sort(bundle())')]
@@ -837,20 +925,30 @@
 
     return revs
 
+
 @contextlib.contextmanager
 def logservicecall(logger, service, **kwargs):
     start = time.time()
     logger(service, eventtype='start', **kwargs)
     try:
         yield
-        logger(service, eventtype='success',
-               elapsedms=(time.time() - start) * 1000, **kwargs)
+        logger(
+            service,
+            eventtype='success',
+            elapsedms=(time.time() - start) * 1000,
+            **kwargs
+        )
     except Exception as e:
-        logger(service, eventtype='failure',
-               elapsedms=(time.time() - start) * 1000, errormsg=str(e),
-               **kwargs)
+        logger(
+            service,
+            eventtype='failure',
+            elapsedms=(time.time() - start) * 1000,
+            errormsg=str(e),
+            **kwargs
+        )
         raise
 
+
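
logservicecall wraps a service call with start/success/failure events
and a millisecond duration, as storebundle uses it further down for
'bundlestore'. A hypothetical usage with a print-based logger standing
in for the functools.partial over ui.log:

    def logger(service, **kwargs):
        print(service, kwargs)

    with logservicecall(logger, 'bundlestore', bundlesize=1024):
        pass  # do the actual store; success or failure is logged on exit
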
 def _getorcreateinfinitepushlogger(op):
     logger = op.records['infinitepushlogger']
     if not logger:
@@ -866,15 +964,20 @@
         random.seed()
         requestid = random.randint(0, 2000000000)
         hostname = socket.gethostname()
-        logger = functools.partial(ui.log, 'infinitepush', user=username,
-                                   requestid=requestid, hostname=hostname,
-                                   reponame=ui.config('infinitepush',
-                                                      'reponame'))
+        logger = functools.partial(
+            ui.log,
+            'infinitepush',
+            user=username,
+            requestid=requestid,
+            hostname=hostname,
+            reponame=ui.config('infinitepush', 'reponame'),
+        )
         op.records.add('infinitepushlogger', logger)
     else:
         logger = logger[0]
     return logger
 
+
 def storetobundlestore(orig, repo, op, unbundler):
     """stores the incoming bundle coming from push command to the bundlestore
     instead of applying on the revlogs"""
@@ -898,13 +1001,12 @@
                 if part.type in ('pushkey', 'changegroup'):
                     if op.reply is not None:
                         rpart = op.reply.newpart('reply:%s' % part.type)
-                        rpart.addparam('in-reply-to', b'%d' % part.id,
-                                       mandatory=False)
+                        rpart.addparam(
+                            'in-reply-to', b'%d' % part.id, mandatory=False
+                        )
                         rpart.addparam('return', '1', mandatory=False)
 
-            op.records.add(part.type, {
-                'return': 1,
-            })
+            op.records.add(part.type, {'return': 1,})
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -925,6 +1027,7 @@
             # we would rather see the original exception
             pass
 
+
 def processparts(orig, repo, op, unbundler):
 
     # make sure we don't wrap processparts in case of `hg unbundle`
@@ -964,8 +1067,9 @@
                 # the part.
                 if not handleallparts:
                     op.records.add(scratchbranchparttype + '_skippushkey', True)
-                    op.records.add(scratchbranchparttype + '_skipphaseheads',
-                                   True)
+                    op.records.add(
+                        scratchbranchparttype + '_skipphaseheads', True
+                    )
             else:
                 if handleallparts:
                     # Ideally we would not process any parts, and instead just
@@ -980,16 +1084,15 @@
                     if part.type == 'pushkey':
                         if op.reply is not None:
                             rpart = op.reply.newpart('reply:pushkey')
-                            rpart.addparam('in-reply-to', str(part.id),
-                                           mandatory=False)
+                            rpart.addparam(
+                                'in-reply-to', str(part.id), mandatory=False
+                            )
                             rpart.addparam('return', '1', mandatory=False)
                 else:
                     bundle2._processpart(op, part)
 
             if handleallparts:
-                op.records.add(part.type, {
-                    'return': 1,
-                })
+                op.records.add(part.type, {'return': 1,})
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1011,6 +1114,7 @@
                 # we would rather see the original exception
                 pass
 
+
 def storebundle(op, params, bundlefile):
     log = _getorcreateinfinitepushlogger(op)
     parthandlerstart = time.time()
@@ -1035,7 +1139,8 @@
         bundleheads = bundle.revs('heads(bundle())')
         if bookmark and len(bundleheads) > 1:
             raise error.Abort(
-                _('cannot push more than one head to a scratch branch'))
+                _('cannot push more than one head to a scratch branch')
+            )
 
         revs = _getrevs(bundle, oldnode, force, bookmark)
 
@@ -1045,12 +1150,12 @@
         maxoutput = 10
         for i in range(0, min(len(revs), maxoutput)):
             firstline = bundle[revs[i]].description().split('\n')[0][:50]
-            op.repo.ui.warn(("    %s  %s\n") % (revs[i], firstline))
+            op.repo.ui.warn("    %s  %s\n" % (revs[i], firstline))
 
         if len(revs) > maxoutput + 1:
-            op.repo.ui.warn(("    ...\n"))
+            op.repo.ui.warn("    ...\n")
             firstline = bundle[revs[-1]].description().split('\n')[0][:50]
-            op.repo.ui.warn(("    %s  %s\n") % (revs[-1], firstline))
+            op.repo.ui.warn("    %s  %s\n" % (revs[-1], firstline))
 
         nodesctx = [bundle[rev] for rev in revs]
         inindex = lambda rev: bool(index.getbundle(bundle[rev].hex()))
@@ -1067,12 +1172,15 @@
         if newheadscount:
             with open(bundlefile, 'rb') as f:
                 bundledata = f.read()
-                with logservicecall(log, 'bundlestore',
-                                    bundlesize=len(bundledata)):
+                with logservicecall(
+                    log, 'bundlestore', bundlesize=len(bundledata)
+                ):
                     bundlesizelimit = 100 * 1024 * 1024  # 100 MB
                     if len(bundledata) > bundlesizelimit:
-                        error_msg = ('bundle is too big: %d bytes. ' +
-                                     'max allowed size is 100 MB')
+                        error_msg = (
+                            'bundle is too big: %d bytes. '
+                            + 'max allowed size is 100 MB'
+                        )
                         raise error.Abort(error_msg % (len(bundledata),))
                     key = store.write(bundledata)
 
@@ -1081,23 +1189,32 @@
                 index.addbundle(key, nodesctx)
             if bookmark:
                 index.addbookmark(bookmark, bookmarknode)
-                _maybeaddpushbackpart(op, bookmark, bookmarknode,
-                                      bookprevnode, params)
-        log(scratchbranchparttype, eventtype='success',
-            elapsedms=(time.time() - parthandlerstart) * 1000)
+                _maybeaddpushbackpart(
+                    op, bookmark, bookmarknode, bookprevnode, params
+                )
+        log(
+            scratchbranchparttype,
+            eventtype='success',
+            elapsedms=(time.time() - parthandlerstart) * 1000,
+        )
 
     except Exception as e:
-        log(scratchbranchparttype, eventtype='failure',
+        log(
+            scratchbranchparttype,
+            eventtype='failure',
             elapsedms=(time.time() - parthandlerstart) * 1000,
-            errormsg=str(e))
+            errormsg=str(e),
+        )
         raise
     finally:
         if bundle:
             bundle.close()
 
-@bundle2.parthandler(scratchbranchparttype,
-                     ('bookmark', 'bookprevnode', 'force',
-                      'pushbackbookmarks', 'cgversion'))
+
+@bundle2.parthandler(
+    scratchbranchparttype,
+    ('bookmark', 'bookprevnode', 'force', 'pushbackbookmarks', 'cgversion'),
+)
 def bundle2scratchbranch(op, part):
     '''unbundle a bundle2 part containing a changegroup to store'''
 
@@ -1125,6 +1242,7 @@
 
     return 1
 
+
 def _maybeaddpushbackpart(op, bookmark, newnode, oldnode, params):
     if params.get('pushbackbookmarks'):
         if op.reply and 'pushback' in op.reply.capabilities:
@@ -1136,6 +1254,7 @@
             }
             op.reply.newpart('pushkey', mandatoryparams=params.iteritems())
 
+
 def bundle2pushkey(orig, op, part):
     '''Wrapper of bundle2.handlepushkey()
 
@@ -1151,6 +1270,7 @@
 
     return orig(op, part)
 
+
 def bundle2handlephases(orig, op, part):
     '''Wrapper of bundle2.handlephases()
 
@@ -1163,6 +1283,7 @@
 
     return orig(op, part)
 
+
 def _asyncsavemetadata(root, nodes):
     '''starts a separate process that fills metadata for the nodes
 
@@ -1178,9 +1299,18 @@
         nodesargs.append('--node')
         nodesargs.append(node)
     with open(os.devnull, 'w+b') as devnull:
-        cmdline = [util.hgexecutable(), 'debugfillinfinitepushmetadata',
-                   '-R', root] + nodesargs
+        cmdline = [
+            util.hgexecutable(),
+            'debugfillinfinitepushmetadata',
+            '-R',
+            root,
+        ] + nodesargs
         # Process will run in background. We don't care about the return code
-        subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmdline),
-                         close_fds=True, shell=False,
-                         stdin=devnull, stdout=devnull, stderr=devnull)
+        subprocess.Popen(
+            pycompat.rapply(procutil.tonativestr, cmdline),
+            close_fds=True,
+            shell=False,
+            stdin=devnull,
+            stdout=devnull,
+            stderr=devnull,
+        )
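
The infinitepush hunks above are dominated by two mechanical rules: a call that no longer fits on one line is exploded to one argument per line with a trailing comma, and top-level definitions are separated by exactly two blank lines. A minimal standalone sketch of the exploded-call shape; the helper name is hypothetical, and subprocess.DEVNULL stands in for the patch's devnull file handles:

import subprocess


def run_detached(cmdline):
    # One argument per line plus a trailing comma: the shape produced once a
    # call overflows the line limit (compare the Popen hunk above).
    return subprocess.Popen(
        cmdline,
        close_fds=True,
        shell=False,
        stdin=subprocess.DEVNULL,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
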
--- a/hgext/infinitepush/bundleparts.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/bundleparts.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,6 +23,7 @@
 
 scratchbranchparttype = 'b2x:infinitepush'
 
+
 def getscratchbranchparts(repo, peer, outgoing, ui, bookmark):
     if not outgoing.missing:
         raise error.Abort(_('no commits to push'))
@@ -30,8 +31,9 @@
     if scratchbranchparttype not in bundle2.bundle2caps(peer):
         raise error.Abort(_('no server support for %r') % scratchbranchparttype)
 
-    _validaterevset(repo, revsetlang.formatspec('%ln', outgoing.missing),
-                    bookmark)
+    _validaterevset(
+        repo, revsetlang.formatspec('%ln', outgoing.missing), bookmark
+    )
 
     supportedversions = changegroup.supportedoutgoingversions(repo)
     # Explicitly avoid using '01' changegroup version in infinitepush to
@@ -60,13 +62,17 @@
 
     # .upper() marks this as a mandatory part: server will abort if there's no
     #  handler
-    parts.append(bundle2.bundlepart(
-        scratchbranchparttype.upper(),
-        advisoryparams=params.iteritems(),
-        data=cg))
+    parts.append(
+        bundle2.bundlepart(
+            scratchbranchparttype.upper(),
+            advisoryparams=params.iteritems(),
+            data=cg,
+        )
+    )
 
     return parts
 
+
 def _validaterevset(repo, revset, bookmark):
     """Abort if the revs to be pushed aren't valid for a scratch branch."""
     if not repo.revs(revset):
@@ -76,7 +82,9 @@
         heads = repo.revs('heads(%r)', revset)
         if len(heads) > 1:
             raise error.Abort(
-                _('cannot push more than one head to a scratch branch'))
+                _('cannot push more than one head to a scratch branch')
+            )
+
 
 def _handlelfs(repo, missing):
     '''Special case if lfs is enabled
@@ -91,6 +99,7 @@
         # Ignore if lfs extension is not enabled
         return
 
+
 class copiedpart(object):
     """a copy of unbundlepart content that can be consumed later"""
 
--- a/hgext/infinitepush/common.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/common.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,12 @@
     pycompat,
 )
 
+
 def isremotebooksenabled(ui):
-    return ('remotenames' in extensions._extensions and
-            ui.configbool('remotenames', 'bookmarks'))
+    return 'remotenames' in extensions._extensions and ui.configbool(
+        'remotenames', 'bookmarks'
+    )
+
 
 def downloadbundle(repo, unknownbinhead):
     index = repo.bundlestore.index
@@ -28,6 +31,7 @@
     bundleraw = store.read(bundleid)
     return _makebundlefromraw(bundleraw)
 
+
 def _makebundlefromraw(data):
     fp = None
     fd, bundlefile = pycompat.mkstemp()
--- a/hgext/infinitepush/fileindexapi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/fileindexapi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,6 +21,7 @@
 
 from . import indexapi
 
+
 class fileindexapi(indexapi.indexapi):
     def __init__(self, repo):
         super(fileindexapi, self).__init__()
--- a/hgext/infinitepush/indexapi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/indexapi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,6 +7,7 @@
 
 from __future__ import absolute_import
 
+
 class indexapi(object):
     """Class that manages access to infinitepush index.
 
@@ -66,5 +67,6 @@
         """Saves optional metadata for a given node"""
         raise NotImplementedError()
 
+
 class indexexception(Exception):
     pass
--- a/hgext/infinitepush/sqlindexapi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/sqlindexapi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,6 +16,7 @@
 
 from . import indexapi
 
+
 def _convertbookmarkpattern(pattern):
     pattern = pattern.replace('_', '\\_')
     pattern = pattern.replace('%', '\\%')
@@ -23,14 +24,25 @@
         pattern = pattern[:-1] + '%'
     return pattern
 
+
 class sqlindexapi(indexapi.indexapi):
     '''
     Sql backend for infinitepush index. See schema.sql
     '''
 
-    def __init__(self, reponame, host, port,
-                 database, user, password, logfile, loglevel,
-                 waittimeout=300, locktimeout=120):
+    def __init__(
+        self,
+        reponame,
+        host,
+        port,
+        database,
+        user,
+        password,
+        logfile,
+        loglevel,
+        waittimeout=300,
+        locktimeout=120,
+    ):
         super(sqlindexapi, self).__init__()
         self.reponame = reponame
         self.sqlargs = {
@@ -55,8 +67,9 @@
         if self.sqlconn:
             raise indexapi.indexexception("SQL connection already open")
         if self.sqlcursor:
-            raise indexapi.indexexception("SQL cursor already open without"
-                                          " connection")
+            raise indexapi.indexexception(
+                "SQL cursor already open without" " connection"
+            )
         retry = 3
         while True:
             try:
@@ -81,8 +94,9 @@
 
         self.sqlcursor = self.sqlconn.cursor()
         self.sqlcursor.execute("SET wait_timeout=%s" % waittimeout)
-        self.sqlcursor.execute("SET innodb_lock_wait_timeout=%s" %
-                               self._locktimeout)
+        self.sqlcursor.execute(
+            "SET innodb_lock_wait_timeout=%s" % self._locktimeout
+        )
         self._connected = True
 
     def close(self):
@@ -110,14 +124,16 @@
             self.sqlconnect()
         self.log.info("ADD BUNDLE %r %r" % (self.reponame, bundleid))
         self.sqlcursor.execute(
-            "INSERT INTO bundles(bundle, reponame) VALUES "
-            "(%s, %s)", params=(bundleid, self.reponame))
+            "INSERT INTO bundles(bundle, reponame) VALUES " "(%s, %s)",
+            params=(bundleid, self.reponame),
+        )
         for ctx in nodesctx:
             self.sqlcursor.execute(
                 "INSERT INTO nodestobundle(node, bundle, reponame) "
                 "VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE "
                 "bundle=VALUES(bundle)",
-                params=(ctx.hex(), bundleid, self.reponame))
+                params=(ctx.hex(), bundleid, self.reponame),
+            )
 
             extra = ctx.extra()
             author_name = ctx.user()
@@ -129,10 +145,17 @@
                 "author, committer, author_date, committer_date, "
                 "reponame) VALUES "
                 "(%s, %s, %s, %s, %s, %s, %s, %s, %s)",
-                params=(ctx.hex(), ctx.description(),
-                        ctx.p1().hex(), ctx.p2().hex(), author_name,
-                        committer_name, author_date, committer_date,
-                        self.reponame)
+                params=(
+                    ctx.hex(),
+                    ctx.description(),
+                    ctx.p1().hex(),
+                    ctx.p2().hex(),
+                    author_name,
+                    committer_name,
+                    author_date,
+                    committer_date,
+                    self.reponame,
+                ),
             )
 
     def addbookmark(self, bookmark, node):
@@ -141,12 +164,14 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "ADD BOOKMARKS %r bookmark: %r node: %r" %
-            (self.reponame, bookmark, node))
+            "ADD BOOKMARKS %r bookmark: %r node: %r"
+            % (self.reponame, bookmark, node)
+        )
         self.sqlcursor.execute(
             "INSERT INTO bookmarkstonode(bookmark, node, reponame) "
             "VALUES (%s, %s, %s) ON DUPLICATE KEY UPDATE node=VALUES(node)",
-            params=(bookmark, node, self.reponame))
+            params=(bookmark, node, self.reponame),
+        )
 
     def addmanybookmarks(self, bookmarks):
         if not self._connected:
@@ -161,7 +186,8 @@
         self.sqlcursor.execute(
             "INSERT INTO bookmarkstonode(bookmark, node, reponame) "
             "VALUES %s ON DUPLICATE KEY UPDATE node=VALUES(node)" % args,
-            params=values)
+            params=values,
+        )
 
     def deletebookmarks(self, patterns):
         """Accepts list of bookmark patterns and deletes them.
@@ -176,7 +202,8 @@
             self.sqlcursor.execute(
                 "DELETE from bookmarkstonode WHERE bookmark LIKE (%s) "
                 "and reponame = %s",
-                params=(pattern, self.reponame))
+                params=(pattern, self.reponame),
+            )
 
     def getbundle(self, node):
         """Returns the bundleid for the bundle that contains the given node."""
@@ -185,7 +212,9 @@
         self.log.info("GET BUNDLE %r %r" % (self.reponame, node))
         self.sqlcursor.execute(
             "SELECT bundle from nodestobundle "
-            "WHERE node = %s AND reponame = %s", params=(node, self.reponame))
+            "WHERE node = %s AND reponame = %s",
+            params=(node, self.reponame),
+        )
         result = self.sqlcursor.fetchall()
         if len(result) != 1 or len(result[0]) != 1:
             self.log.info("No matching node")
@@ -199,10 +228,13 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark))
+            "GET NODE reponame: %r bookmark: %r" % (self.reponame, bookmark)
+        )
         self.sqlcursor.execute(
             "SELECT node from bookmarkstonode WHERE "
-            "bookmark = %s AND reponame = %s", params=(bookmark, self.reponame))
+            "bookmark = %s AND reponame = %s",
+            params=(bookmark, self.reponame),
+        )
         result = self.sqlcursor.fetchall()
         if len(result) != 1 or len(result[0]) != 1:
             self.log.info("No matching bookmark")
@@ -215,12 +247,14 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            "QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query))
+            "QUERY BOOKMARKS reponame: %r query: %r" % (self.reponame, query)
+        )
         query = _convertbookmarkpattern(query)
         self.sqlcursor.execute(
             "SELECT bookmark, node from bookmarkstonode WHERE "
             "reponame = %s AND bookmark LIKE %s",
-            params=(self.reponame, query))
+            params=(self.reponame, query),
+        )
         result = self.sqlcursor.fetchall()
         bookmarks = {}
         for row in result:
@@ -234,18 +268,24 @@
         if not self._connected:
             self.sqlconnect()
         self.log.info(
-            ("INSERT METADATA, QUERY BOOKMARKS reponame: %r " +
-             "node: %r, jsonmetadata: %s") %
-            (self.reponame, node, jsonmetadata))
+            (
+                "INSERT METADATA, QUERY BOOKMARKS reponame: %r "
+                + "node: %r, jsonmetadata: %s"
+            )
+            % (self.reponame, node, jsonmetadata)
+        )
 
         self.sqlcursor.execute(
             "UPDATE nodesmetadata SET optional_json_metadata=%s WHERE "
             "reponame=%s AND node=%s",
-            params=(jsonmetadata, self.reponame, node))
+            params=(jsonmetadata, self.reponame, node),
+        )
+
 
 class CustomConverter(mysql.connector.conversion.MySQLConverter):
     """Ensure that all values being returned are returned as python string
     (versus the default byte arrays)."""
+
     def _STRING_to_python(self, value, dsc=None):
         return str(value)
 
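Two details stand out in the sqlindexapi hunks: an overlong signature is exploded to one parameter per line, and implicitly concatenated string literals are reflowed but never merged, which is why the odd "SQL cursor already open without" " connection" split survives the reformat. A hypothetical sketch of both patterns (names invented for illustration):

def connect_sketch(
    reponame,
    host,
    port,
    waittimeout=300,
    locktimeout=120,
):
    # Adjacent string literals stay as written; only the code around them is
    # reflowed, so the awkward split below is preserved verbatim.
    raise RuntimeError("SQL cursor already open without" " connection")
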
--- a/hgext/infinitepush/store.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/infinitepush/store.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,18 +15,19 @@
     node,
     pycompat,
 )
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 NamedTemporaryFile = tempfile.NamedTemporaryFile
 
+
 class BundleWriteException(Exception):
     pass
 
+
 class BundleReadException(Exception):
     pass
 
+
 class abstractbundlestore(object):
     """Defines the interface for bundle stores.
 
@@ -35,6 +36,7 @@
     be any Python object understood by the corresponding bundle index (see
     ``abstractbundleindex`` below).
     """
+
     __metaclass__ = abc.ABCMeta
 
     @abc.abstractmethod
@@ -56,18 +58,21 @@
         and close().
         """
 
+
 class filebundlestore(object):
     """bundle store in filesystem
 
     meant for storing bundles somewhere on disk and on network filesystems
     """
+
     def __init__(self, ui, repo):
         self.ui = ui
         self.repo = repo
         self.storepath = ui.configpath('scratchbranch', 'storepath')
         if not self.storepath:
-            self.storepath = self.repo.vfs.join("scratchbranches",
-                                                "filebundlestore")
+            self.storepath = self.repo.vfs.join(
+                "scratchbranches", "filebundlestore"
+            )
         if not os.path.exists(self.storepath):
             os.makedirs(self.storepath)
 
@@ -99,6 +104,7 @@
         except IOError:
             return None
 
+
 class externalbundlestore(abstractbundlestore):
     def __init__(self, put_binary, put_args, get_binary, get_args):
         """
@@ -120,8 +126,10 @@
     def _call_binary(self, args):
         p = subprocess.Popen(
             pycompat.rapply(procutil.tonativestr, args),
-            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-            close_fds=True)
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+        )
         stdout, stderr = p.communicate()
         returncode = p.returncode
         return returncode, stdout, stderr
@@ -135,20 +143,24 @@
             temp.write(data)
             temp.flush()
             temp.seek(0)
-            formatted_args = [arg.format(filename=temp.name)
-                              for arg in self.put_args]
+            formatted_args = [
+                arg.format(filename=temp.name) for arg in self.put_args
+            ]
             returncode, stdout, stderr = self._call_binary(
-                [self.put_binary] + formatted_args)
+                [self.put_binary] + formatted_args
+            )
 
             if returncode != 0:
                 raise BundleWriteException(
-                    'Failed to upload to external store: %s' % stderr)
+                    'Failed to upload to external store: %s' % stderr
+                )
             stdout_lines = stdout.splitlines()
             if len(stdout_lines) == 1:
                 return stdout_lines[0]
             else:
                 raise BundleWriteException(
-                    'Bad output from %s: %s' % (self.put_binary, stdout))
+                    'Bad output from %s: %s' % (self.put_binary, stdout)
+                )
 
     def read(self, handle):
         # Won't work on windows because you can't open file second time without
@@ -156,12 +168,16 @@
         # TODO: rewrite without str.format() and replace NamedTemporaryFile()
         # with pycompat.namedtempfile()
         with NamedTemporaryFile() as temp:
-            formatted_args = [arg.format(filename=temp.name, handle=handle)
-                              for arg in self.get_args]
+            formatted_args = [
+                arg.format(filename=temp.name, handle=handle)
+                for arg in self.get_args
+            ]
             returncode, stdout, stderr = self._call_binary(
-                [self.get_binary] + formatted_args)
+                [self.get_binary] + formatted_args
+            )
 
             if returncode != 0:
                 raise BundleReadException(
-                    'Failed to download from external store: %s' % stderr)
+                    'Failed to download from external store: %s' % stderr
+                )
             return temp.read()
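
store.py adds two smaller normalizations: a parenthesized import of a single name is folded onto one line, and a blank line is inserted between a class docstring and the first statement of the class body. A stdlib-only sketch (the class name is invented):

import abc


class bundlestore_sketch(object):
    """Interface sketch; note the blank line separating this docstring
    from the body, as in the abstractbundlestore hunk above."""

    __metaclass__ = abc.ABCMeta  # the blank line above is the reformat's doing
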
--- a/hgext/journal.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/journal.py	Sun Oct 06 09:45:02 2019 -0400
@@ -68,10 +68,12 @@
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
     extensions.wrapfunction(bookmarks.bmstore, '_write', recordbookmarks)
     extensions.wrapfilecache(
-        localrepo.localrepository, 'dirstate', wrapdirstate)
+        localrepo.localrepository, 'dirstate', wrapdirstate
+    )
     extensions.wrapfunction(hg, 'postshare', wrappostshare)
     extensions.wrapfunction(hg, 'copystore', unsharejournal)
 
+
 def reposetup(ui, repo):
     if repo.local():
         repo.journal = journalstorage(repo)
@@ -84,15 +86,18 @@
             # wrapped by own wrapdirstate()
             _setupdirstate(repo, dirstate)
 
+
 def runcommand(orig, lui, repo, cmd, fullargs, *args):
     """Track the command line options for recording in the journal"""
     journalstorage.recordcommand(*fullargs)
     return orig(lui, repo, cmd, fullargs, *args)
 
+
 def _setupdirstate(repo, dirstate):
     dirstate.journalstorage = repo.journal
     dirstate.addparentchangecallback('journal', recorddirstateparents)
 
+
 # hooks to record dirstate changes
 def wrapdirstate(orig, repo):
     """Make journal storage available to the dirstate object"""
@@ -101,6 +106,7 @@
         _setupdirstate(repo, dirstate)
     return dirstate
 
+
 def recorddirstateparents(dirstate, old, new):
     """Records all dirstate parent changes in the journal."""
     old = list(old)
@@ -110,7 +116,9 @@
         oldhashes = old[:1] if old[1] == node.nullid else old
         newhashes = new[:1] if new[1] == node.nullid else new
         dirstate.journalstorage.record(
-            wdirparenttype, '.', oldhashes, newhashes)
+            wdirparenttype, '.', oldhashes, newhashes
+        )
+
 
 # hooks to record bookmark changes (both local and remote)
 def recordbookmarks(orig, store, fp):
@@ -124,6 +132,7 @@
                 repo.journal.record(bookmarktype, mark, oldvalue, value)
     return orig(store, fp)
 
+
 # shared repository support
 def _readsharedfeatures(repo):
     """A set of shared features for this repository"""
@@ -134,6 +143,7 @@
             raise
         return set()
 
+
 def _mergeentriesiter(*iterables, **kwargs):
     """Given a set of sorted iterables, yield the next entry in merged order
 
@@ -162,6 +172,7 @@
             # this iterable is empty, remove it from consideration
             del iterable_map[key]
 
+
 def wrappostshare(orig, sourcerepo, destrepo, **kwargs):
     """Mark this shared working copy as sharing journal information"""
     with destrepo.wlock():
@@ -169,10 +180,14 @@
         with destrepo.vfs('shared', 'a') as fp:
             fp.write('journal\n')
 
+
 def unsharejournal(orig, ui, repo, repopath):
     """Copy shared journal entries into this repo when unsharing"""
-    if (repo.path == repopath and repo.shared() and
-            util.safehasattr(repo, 'journal')):
+    if (
+        repo.path == repopath
+        and repo.shared()
+        and util.safehasattr(repo, 'journal')
+    ):
         sharedrepo = hg.sharedreposource(repo)
         sharedfeatures = _readsharedfeatures(repo)
         if sharedrepo and sharedfeatures > {'journal'}:
@@ -184,18 +199,25 @@
                 util.rename(journalpath, journalpath + '.bak')
             storage = repo.journal
             local = storage._open(
-                repo.vfs, filename='namejournal.bak', _newestfirst=False)
+                repo.vfs, filename='namejournal.bak', _newestfirst=False
+            )
             shared = (
-                e for e in storage._open(sharedrepo.vfs, _newestfirst=False)
-                if sharednamespaces.get(e.namespace) in sharedfeatures)
+                e
+                for e in storage._open(sharedrepo.vfs, _newestfirst=False)
+                if sharednamespaces.get(e.namespace) in sharedfeatures
+            )
             for entry in _mergeentriesiter(local, shared, order=min):
                 storage._write(repo.vfs, entry)
 
     return orig(ui, repo, repopath)
 
-class journalentry(collections.namedtuple(
+
+class journalentry(
+    collections.namedtuple(
         r'journalentry',
-        r'timestamp user command namespace name oldhashes newhashes')):
+        r'timestamp user command namespace name oldhashes newhashes',
+    )
+):
     """Individual journal entry
 
     * timestamp: a mercurial (time, timezone) tuple
@@ -212,29 +234,52 @@
     timestamp and timezone are separated by a space.
 
     """
+
     @classmethod
     def fromstorage(cls, line):
-        (time, user, command, namespace, name,
-         oldhashes, newhashes) = line.split('\n')
+        (
+            time,
+            user,
+            command,
+            namespace,
+            name,
+            oldhashes,
+            newhashes,
+        ) = line.split('\n')
         timestamp, tz = time.split()
         timestamp, tz = float(timestamp), int(tz)
         oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(','))
         newhashes = tuple(node.bin(hash) for hash in newhashes.split(','))
         return cls(
-            (timestamp, tz), user, command, namespace, name,
-            oldhashes, newhashes)
+            (timestamp, tz),
+            user,
+            command,
+            namespace,
+            name,
+            oldhashes,
+            newhashes,
+        )
 
     def __bytes__(self):
         """bytes representation for storage"""
         time = ' '.join(map(pycompat.bytestr, self.timestamp))
         oldhashes = ','.join([node.hex(hash) for hash in self.oldhashes])
         newhashes = ','.join([node.hex(hash) for hash in self.newhashes])
-        return '\n'.join((
-            time, self.user, self.command, self.namespace, self.name,
-            oldhashes, newhashes))
+        return '\n'.join(
+            (
+                time,
+                self.user,
+                self.command,
+                self.namespace,
+                self.name,
+                oldhashes,
+                newhashes,
+            )
+        )
 
     __str__ = encoding.strmethod(__bytes__)
 
+
 class journalstorage(object):
     """Storage for journal entries
 
@@ -252,6 +297,7 @@
     the dirstate).
 
     """
+
     _currentcommand = ()
     _lockref = None
 
@@ -273,7 +319,8 @@
     @property
     def command(self):
         commandstr = ' '.join(
-            map(procutil.shellquote, journalstorage._currentcommand))
+            map(procutil.shellquote, journalstorage._currentcommand)
+        )
         if '\n' in commandstr:
             # truncate multi-line commands
             commandstr = commandstr.partition('\n')[0] + ' ...'
@@ -307,11 +354,15 @@
             l = lock.lock(vfs, 'namejournal.lock', 0, desc=desc)
         except error.LockHeld as inst:
             self.ui.warn(
-                _("waiting for lock on %s held by %r\n") % (desc, inst.locker))
+                _("waiting for lock on %s held by %r\n") % (desc, inst.locker)
+            )
             # default to 600 seconds timeout
             l = lock.lock(
-                vfs, 'namejournal.lock',
-                self.ui.configint("ui", "timeout"), desc=desc)
+                vfs,
+                'namejournal.lock',
+                self.ui.configint("ui", "timeout"),
+                desc=desc,
+            )
             self.ui.warn(_("got lock after %s seconds\n") % l.delay)
         self._lockref = weakref.ref(l)
         return l
@@ -334,8 +385,14 @@
             newhashes = [newhashes]
 
         entry = journalentry(
-            dateutil.makedate(), self.user, self.command, namespace, name,
-            oldhashes, newhashes)
+            dateutil.makedate(),
+            self.user,
+            self.command,
+            namespace,
+            name,
+            oldhashes,
+            newhashes,
+        )
 
         vfs = self.vfs
         if self.sharedvfs is not None:
@@ -360,7 +417,8 @@
                     # the file is corrupt. In future, perhaps rotate the file
                     # instead?
                     self.ui.warn(
-                        _("unsupported journal file version '%s'\n") % version)
+                        _("unsupported journal file version '%s'\n") % version
+                    )
                     return
                 if not version:
                     # empty file, write version first
@@ -403,8 +461,10 @@
         # iterate over both local and shared entries, but only those
         # shared entries that are among the currently shared features
         shared = (
-            e for e in self._open(self.sharedvfs)
-            if sharednamespaces.get(e.namespace) in self.sharedfeatures)
+            e
+            for e in self._open(self.sharedvfs)
+            if sharednamespaces.get(e.namespace) in self.sharedfeatures
+        )
         return _mergeentriesiter(local, shared)
 
     def _open(self, vfs, filename='namejournal', _newestfirst=True):
@@ -431,16 +491,22 @@
                 continue
             yield journalentry.fromstorage(line)
 
+
 # journal reading
 # log options that don't make sense for journal
 _ignoreopts = ('no-merges', 'graph')
+
+
 @command(
-    'journal', [
+    'journal',
+    [
         ('', 'all', None, 'show history for all names'),
         ('c', 'commits', None, 'show commit metadata'),
-    ] + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
+    ]
+    + [opt for opt in cmdutil.logopts if opt[1] not in _ignoreopts],
     '[OPTION]... [BOOKMARKNAME]',
-    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def journal(ui, repo, *args, **opts):
     """show the previous position of bookmarks and the working copy
 
@@ -471,12 +537,14 @@
     if opts.get('all'):
         if args:
             raise error.Abort(
-                _("You can't combine --all and filtering on a name"))
+                _("You can't combine --all and filtering on a name")
+            )
         name = None
     if args:
         name = args[0]
 
     fm = ui.formatter('journal', opts)
+
     def formatnodes(nodes):
         return fm.formatlist(map(fm.hexfunc, nodes), name='node', sep=',')
 
@@ -495,16 +563,24 @@
             break
 
         fm.startitem()
-        fm.condwrite(ui.verbose, 'oldnodes', '%s -> ',
-                     formatnodes(entry.oldhashes))
+        fm.condwrite(
+            ui.verbose, 'oldnodes', '%s -> ', formatnodes(entry.oldhashes)
+        )
         fm.write('newnodes', '%s', formatnodes(entry.newhashes))
         fm.condwrite(ui.verbose, 'user', ' %-8s', entry.user)
         fm.condwrite(
             opts.get('all') or name.startswith('re:'),
-            'name', '  %-8s', entry.name)
+            'name',
+            '  %-8s',
+            entry.name,
+        )
 
-        fm.condwrite(ui.verbose, 'date', ' %s',
-                     fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2'))
+        fm.condwrite(
+            ui.verbose,
+            'date',
+            ' %s',
+            fm.formatdate(entry.timestamp, '%Y-%m-%d %H:%M %1%2'),
+        )
         fm.write('command', '  %s\n', entry.command)
 
         if opts.get("commits"):
@@ -512,7 +588,8 @@
                 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
             else:
                 displayer = logcmdutil.changesetformatter(
-                    ui, repo, fm.nested('changesets'), diffopts=opts)
+                    ui, repo, fm.nested('changesets'), diffopts=opts
+                )
             for hash in entry.newhashes:
                 try:
                     ctx = repo[hash]
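
The journal.py hunks show how long assignments and wrapped expressions are broken: unpacking targets go one name per line with a trailing comma, and a binary operator starts its continuation line rather than ending the previous one. A schematic sketch (with names this short the line would in reality be refolded, so treat the shape, not the width, as the point):

def fromstorage_sketch(line):
    # Mirrors journalentry.fromstorage above: exploded unpacking targets,
    # trailing comma included.
    (
        timestamp,
        user,
        command,
        namespace,
        name,
    ) = line.split('\n')
    return timestamp, user, command, namespace, name
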
--- a/hgext/keyword.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/keyword.py	Sun Oct 06 09:45:02 2019 -0400
@@ -125,16 +125,20 @@
 testedwith = 'ships-with-hg-core'
 
 # hg commands that do not act on keywords
-nokwcommands = ('add addremove annotate bundle export grep incoming init log'
-                ' outgoing push tip verify convert email glog')
+nokwcommands = (
+    'add addremove annotate bundle export grep incoming init log'
+    ' outgoing push tip verify convert email glog'
+)
 
 # webcommands that do not act on keywords
-nokwwebcommands = ('annotate changeset rev filediff diff comparison')
+nokwwebcommands = 'annotate changeset rev filediff diff comparison'
 
 # hg commands that trigger expansion only when writing to working dir,
 # not when reading filelog, and unexpand when reading from working dir
-restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
-              ' unshelve rebase graft backout histedit fetch')
+restricted = (
+    'merge kwexpand kwshrink record qrecord resolve transplant'
+    ' unshelve rebase graft backout histedit fetch'
+)
 
 # names of extensions using dorecord
 recordextensions = 'record'
@@ -144,7 +148,7 @@
     'kwfiles.deleted': 'cyan bold underline',
     'kwfiles.enabledunknown': 'green',
     'kwfiles.ignored': 'bold',
-    'kwfiles.ignoredunknown': 'none'
+    'kwfiles.ignoredunknown': 'none',
 }
 
 templatefilter = registrar.templatefilter()
@@ -152,8 +156,8 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('keywordset', 'svn',
-    default=False,
+configitem(
+    'keywordset', 'svn', default=False,
 )
 # date like in cvs' $Date
 @templatefilter('utcdate', intype=templateutil.date)
@@ -162,6 +166,8 @@
     '''
     dateformat = '%Y/%m/%d %H:%M:%S'
     return dateutil.datestr((date[0], 0), dateformat)
+
+
 # date like in svn's $Date
 @templatefilter('svnisodate', intype=templateutil.date)
 def svnisodate(date):
@@ -169,6 +175,8 @@
     +0200 (Tue, 18 Aug 2009)".
     '''
     return dateutil.datestr(date, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
+
+
 # date like in svn's $Id
 @templatefilter('svnutcdate', intype=templateutil.date)
 def svnutcdate(date):
@@ -178,38 +186,45 @@
     dateformat = '%Y-%m-%d %H:%M:%SZ'
     return dateutil.datestr((date[0], 0), dateformat)
 
+
 # make keyword tools accessible
 kwtools = {'hgcmd': ''}
 
+
 def _defaultkwmaps(ui):
     '''Returns default keywordmaps according to keywordset configuration.'''
     templates = {
         'Revision': '{node|short}',
         'Author': '{author|user}',
     }
-    kwsets = ({
-        'Date': '{date|utcdate}',
-        'RCSfile': '{file|basename},v',
-        'RCSFile': '{file|basename},v', # kept for backwards compatibility
-                                        # with hg-keyword
-        'Source': '{root}/{file},v',
-        'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
-        'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
-    }, {
-        'Date': '{date|svnisodate}',
-        'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
-        'LastChangedRevision': '{node|short}',
-        'LastChangedBy': '{author|user}',
-        'LastChangedDate': '{date|svnisodate}',
-    })
+    kwsets = (
+        {
+            'Date': '{date|utcdate}',
+            'RCSfile': '{file|basename},v',
+            'RCSFile': '{file|basename},v',  # kept for backwards compatibility
+            # with hg-keyword
+            'Source': '{root}/{file},v',
+            'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
+            'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
+        },
+        {
+            'Date': '{date|svnisodate}',
+            'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
+            'LastChangedRevision': '{node|short}',
+            'LastChangedBy': '{author|user}',
+            'LastChangedDate': '{date|svnisodate}',
+        },
+    )
     templates.update(kwsets[ui.configbool('keywordset', 'svn')])
     return templates
 
+
 def _shrinktext(text, subfunc):
     '''Helper for keyword expansion removal in text.
     Depending on subfunc also returns number of substitutions.'''
     return subfunc(br'$\1$', text)
 
+
 def _preselect(wstatus, changed):
     '''Retrieves modified and added files from a working directory state
     and returns the subset of each contained in given changed files
@@ -233,7 +248,7 @@
         self.postcommit = False
 
         kwmaps = self.ui.configitems('keywordmaps')
-        if kwmaps: # override default templates
+        if kwmaps:  # override default templates
             self.templates = dict(kwmaps)
         else:
             self.templates = _defaultkwmaps(self.ui)
@@ -259,14 +274,17 @@
 
     def substitute(self, data, path, ctx, subfunc):
         '''Replaces keywords in data with expanded template.'''
+
         def kwsub(mobj):
             kw = mobj.group(1)
-            ct = logcmdutil.maketemplater(self.ui, self.repo,
-                                          self.templates[kw])
+            ct = logcmdutil.maketemplater(
+                self.ui, self.repo, self.templates[kw]
+            )
             self.ui.pushbuffer()
             ct.show(ctx, root=self.repo.root, file=path)
             ekw = templatefilters.firstline(self.ui.popbuffer())
             return '$%s: %s $' % (kw, ekw)
+
         return subfunc(kwsub, data)
 
     def linkctx(self, path, fileid):
@@ -275,8 +293,11 @@
 
     def expand(self, path, node, data):
         '''Returns data with keywords expanded.'''
-        if (not self.restrict and self.match(path)
-            and not stringutil.binary(data)):
+        if (
+            not self.restrict
+            and self.match(path)
+            and not stringutil.binary(data)
+        ):
             ctx = self.linkctx(path, node)
             return self.substitute(data, path, ctx, self.rekw.sub)
         return data
@@ -288,11 +309,11 @@
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
         '''Overwrites selected files expanding/shrinking keywords.'''
-        if self.restrict or lookup or self.postcommit: # exclude kw_copy
+        if self.restrict or lookup or self.postcommit:  # exclude kw_copy
             candidates = self.iskwfile(candidates, ctx)
         if not candidates:
             return
-        kwcmd = self.restrict and lookup # kwexpand/kwshrink
+        kwcmd = self.restrict and lookup  # kwexpand/kwshrink
         if self.restrict or expand and lookup:
             mf = ctx.manifest()
         if self.restrict or rekw:
@@ -358,11 +379,13 @@
             return self.shrink(fname, data)
         return data
 
+
 class kwfilelog(filelog.filelog):
     '''
     Subclass of filelog to hook into its read, add, cmp methods.
     Keywords are "stored" unexpanded, and processed on reading.
     '''
+
     def __init__(self, opener, kwt, path):
         super(kwfilelog, self).__init__(opener, path)
         self.kwt = kwt
@@ -385,17 +408,22 @@
         text = self.kwt.shrink(self.path, text)
         return super(kwfilelog, self).cmp(node, text)
 
+
 def _status(ui, repo, wctx, kwt, *pats, **opts):
     '''Bails out if [keyword] configuration is not active.
     Returns status of working directory.'''
     if kwt:
         opts = pycompat.byteskwargs(opts)
-        return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
-                           unknown=opts.get('unknown') or opts.get('all'))
+        return repo.status(
+            match=scmutil.match(wctx, pats, opts),
+            clean=True,
+            unknown=opts.get('unknown') or opts.get('all'),
+        )
     if ui.configitems('keyword'):
         raise error.Abort(_('[keyword] patterns cannot match'))
     raise error.Abort(_('no [keyword] patterns configured'))
 
+
 def _kwfwrite(ui, repo, expand, *pats, **opts):
     '''Selects files and passes them to kwtemplater.overwrite.'''
     wctx = repo[None]
@@ -408,12 +436,16 @@
             raise error.Abort(_('outstanding uncommitted changes'))
         kwt.overwrite(wctx, status.clean, True, expand)
 
-@command('kwdemo',
-         [('d', 'default', None, _('show default keyword template maps')),
-          ('f', 'rcfile', '',
-           _('read maps from rcfile'), _('FILE'))],
-         _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
-         optionalrepo=True)
+
+@command(
+    'kwdemo',
+    [
+        ('d', 'default', None, _('show default keyword template maps')),
+        ('f', 'rcfile', '', _('read maps from rcfile'), _('FILE')),
+    ],
+    _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
+    optionalrepo=True,
+)
 def demo(ui, repo, *args, **opts):
     '''print [keywordmaps] configuration and an expansion example
 
@@ -427,6 +459,7 @@
 
     See :hg:`help templates` for information on templates and filters.
     '''
+
     def demoitems(section, items):
         ui.write('[%s]\n' % section)
         for k, v in sorted(items):
@@ -484,7 +517,7 @@
 
     uisetup(ui)
     reposetup(ui, repo)
-    ui.write(('[extensions]\nkeyword =\n'))
+    ui.write('[extensions]\nkeyword =\n')
     demoitems('keyword', ui.configitems('keyword'))
     demoitems('keywordset', ui.configitems('keywordset'))
     demoitems('keywordmaps', kwmaps.iteritems())
@@ -505,10 +538,13 @@
     ui.write(repo.wread(fn))
     repo.wvfs.rmtree(repo.root)
 
-@command('kwexpand',
+
+@command(
+    'kwexpand',
     cmdutil.walkopts,
     _('hg kwexpand [OPTION]... [FILE]...'),
-    inferrepo=True)
+    inferrepo=True,
+)
 def expand(ui, repo, *pats, **opts):
     '''expand keywords in the working directory
 
@@ -519,13 +555,18 @@
     # 3rd argument sets expansion to True
     _kwfwrite(ui, repo, True, *pats, **opts)
 
-@command('kwfiles',
-         [('A', 'all', None, _('show keyword status flags of all files')),
-          ('i', 'ignore', None, _('show files excluded from expansion')),
-          ('u', 'unknown', None, _('only show unknown (not tracked) files')),
-         ] + cmdutil.walkopts,
-         _('hg kwfiles [OPTION]... [FILE]...'),
-         inferrepo=True)
+
+@command(
+    'kwfiles',
+    [
+        ('A', 'all', None, _('show keyword status flags of all files')),
+        ('i', 'ignore', None, _('show files excluded from expansion')),
+        ('u', 'unknown', None, _('only show unknown (not tracked) files')),
+    ]
+    + cmdutil.walkopts,
+    _('hg kwfiles [OPTION]... [FILE]...'),
+    inferrepo=True,
+)
 def files(ui, repo, *pats, **opts):
     '''show files configured for keyword expansion
 
@@ -566,8 +607,10 @@
     else:
         showfiles = [], [], []
     if opts.get('all') or opts.get('ignore'):
-        showfiles += ([f for f in files if f not in kwfiles],
-                      [f for f in status.unknown if f not in kwunknown])
+        showfiles += (
+            [f for f in files if f not in kwfiles],
+            [f for f in status.unknown if f not in kwunknown],
+        )
     kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
     kwstates = zip(kwlabels, pycompat.bytestr('K!kIi'), showfiles)
     fm = ui.formatter('kwfiles', opts)
@@ -582,10 +625,13 @@
             fm.plain(fmt % (char, repo.pathto(f, cwd)), label=label)
     fm.end()
 
-@command('kwshrink',
+
+@command(
+    'kwshrink',
     cmdutil.walkopts,
     _('hg kwshrink [OPTION]... [FILE]...'),
-    inferrepo=True)
+    inferrepo=True,
+)
 def shrink(ui, repo, *pats, **opts):
     '''revert expanded keywords in the working directory
 
@@ -596,8 +642,10 @@
     # 3rd argument sets expansion to False
     _kwfwrite(ui, repo, False, *pats, **opts)
 
+
 # monkeypatches
 
+
 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
     '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
     rejects or conflicts due to expanded keywords in working dir.'''
@@ -607,6 +655,7 @@
         # shrink keywords read from working dir
         self.lines = kwt.shrinklines(self.fname, self.lines)
 
+
 def kwdiff(orig, repo, *args, **kwargs):
     '''Monkeypatch patch.diff to avoid expansion.'''
     kwt = getattr(repo, '_keywordkwt', None)
@@ -620,6 +669,7 @@
         if kwt:
             kwt.restrict = restrict
 
+
 def kwweb_skip(orig, web):
     '''Wraps webcommands.x turning off keyword expansion.'''
     kwt = getattr(web.repo, '_keywordkwt', None)
@@ -633,6 +683,7 @@
         if kwt:
             kwt.match = origmatch
 
+
 def kw_amend(orig, ui, repo, old, extra, pats, opts):
     '''Wraps cmdutil.amend expanding keywords after amend.'''
     kwt = getattr(repo, '_keywordkwt', None)
@@ -648,6 +699,7 @@
             kwt.restrict = False
         return newid
 
+
 def kw_copy(orig, ui, repo, pats, opts, rename=False):
     '''Wraps cmdutil.copy so that copy/rename destinations do not
     contain expanded keywords.
@@ -674,14 +726,19 @@
             expansion. '''
             source = repo.dirstate.copied(dest)
             if 'l' in wctx.flags(source):
-                source = pathutil.canonpath(repo.root, cwd,
-                                           os.path.realpath(source))
+                source = pathutil.canonpath(
+                    repo.root, cwd, os.path.realpath(source)
+                )
             return kwt.match(source)
 
-        candidates = [f for f in repo.dirstate.copies() if
-                      'l' not in wctx.flags(f) and haskwsource(f)]
+        candidates = [
+            f
+            for f in repo.dirstate.copies()
+            if 'l' not in wctx.flags(f) and haskwsource(f)
+        ]
         kwt.overwrite(wctx, candidates, False, False)
 
+
 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
     '''Wraps record.dorecord expanding keywords after recording.'''
     kwt = getattr(repo, '_keywordkwt', None)
@@ -703,6 +760,7 @@
             kwt.restrict = True
         return ret
 
+
 def kwfilectx_cmp(orig, self, fctx):
     if fctx._customcmp:
         return fctx.cmp(self)
@@ -711,14 +769,20 @@
         return orig(self, fctx)
     # keyword affects data size, comparing wdir and filelog size does
     # not make sense
-    if (fctx._filenode is None and
-        (self._repo._encodefilterpats or
-         kwt.match(fctx.path()) and 'l' not in fctx.flags() or
-         self.size() - 4 == fctx.size()) or
-        self.size() == fctx.size()):
+    if (
+        fctx._filenode is None
+        and (
+            self._repo._encodefilterpats
+            or kwt.match(fctx.path())
+            and 'l' not in fctx.flags()
+            or self.size() - 4 == fctx.size()
+        )
+        or self.size() == fctx.size()
+    ):
         return self._filelog.cmp(self._filenode, fctx.data())
     return True
 
+
 def uisetup(ui):
     ''' Monkeypatches dispatch._parse to retrieve user command.
     Overrides file method to return kwfilelog instead of filelog
@@ -744,13 +808,17 @@
     for c in nokwwebcommands.split():
         extensions.wrapfunction(webcommands, c, kwweb_skip)
 
+
 def reposetup(ui, repo):
     '''Sets up repo as kwrepo for keyword substitution.'''
 
     try:
-        if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
+        if (
+            not repo.local()
+            or kwtools['hgcmd'] in nokwcommands.split()
             or '.hg' in util.splitpath(repo.root)
-            or repo._url.startswith('bundle:')):
+            or repo._url.startswith('bundle:')
+        ):
             return
     except AttributeError:
         pass
@@ -791,8 +859,9 @@
             if not kwt.postcommit:
                 restrict = kwt.restrict
                 kwt.restrict = True
-                kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
-                              False, True)
+                kwt.overwrite(
+                    self[n], sorted(ctx.added() + ctx.modified()), False, True
+                )
                 kwt.restrict = restrict
             return n
 
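keyword.py rounds out the cosmetic rules: inline comments are normalized to two spaces before the hash, and redundant parentheses around a lone string argument are dropped (ui.write(('...')) becomes ui.write('...')). A tiny sketch with a hypothetical write stub:

def write(msg):
    print(msg)  # two spaces before an inline comment, one space after the hash


write('[extensions]\nkeyword =\n')  # no redundant parentheses around the argument
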
--- a/hgext/largefiles/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -135,14 +135,14 @@
 eh.merge(overrides.eh)
 eh.merge(proto.eh)
 
-eh.configitem('largefiles', 'minsize',
-    default=eh.configitem.dynamicdefault,
+eh.configitem(
+    'largefiles', 'minsize', default=eh.configitem.dynamicdefault,
 )
-eh.configitem('largefiles', 'patterns',
-    default=list,
+eh.configitem(
+    'largefiles', 'patterns', default=list,
 )
-eh.configitem('largefiles', 'usercache',
-    default=None,
+eh.configitem(
+    'largefiles', 'usercache', default=None,
 )
 
 cmdtable = eh.cmdtable
@@ -151,10 +151,12 @@
 reposetup = reposetup.reposetup
 uisetup = eh.finaluisetup
 
+
 def featuresetup(ui, supported):
     # don't die on seeing a repo with the largefiles requirement
     supported |= {'largefiles'}
 
+
 @eh.uisetup
 def _uisetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
@@ -165,16 +167,21 @@
 
     # create the new wireproto commands ...
     wireprotov1server.wireprotocommand('putlfile', 'sha', permission='push')(
-        proto.putlfile)
+        proto.putlfile
+    )
     wireprotov1server.wireprotocommand('getlfile', 'sha', permission='pull')(
-        proto.getlfile)
+        proto.getlfile
+    )
     wireprotov1server.wireprotocommand('statlfile', 'sha', permission='pull')(
-        proto.statlfile)
+        proto.statlfile
+    )
     wireprotov1server.wireprotocommand('lheads', '', permission='pull')(
-        wireprotov1server.heads)
+        wireprotov1server.heads
+    )
 
-    extensions.wrapfunction(wireprotov1server.commands['heads'], 'func',
-                            proto.heads)
+    extensions.wrapfunction(
+        wireprotov1server.commands['heads'], 'func', proto.heads
+    )
     # TODO also wrap wireproto.commandsv2 once heads is implemented there.
 
     # can't do this in reposetup because it needs to have happened before
@@ -188,7 +195,7 @@
     for name, module in extensions.extensions():
         if name == 'rebase':
             # TODO: teach exthelper to handle this
-            extensions.wrapfunction(module, 'rebase',
-                                    overrides.overriderebase)
+            extensions.wrapfunction(module, 'rebase', overrides.overriderebase)
+
 
 revsetpredicate = eh.revsetpredicate
--- a/hgext/largefiles/basestore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/basestore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,11 @@
 
 from . import lfutil
 
+
 class StoreError(Exception):
     '''Raised when there is a problem getting files from or putting
     files to a central store.'''
+
     def __init__(self, filename, hash, url, detail):
         self.filename = filename
         self.hash = hash
@@ -25,13 +27,17 @@
         self.detail = detail
 
     def longmessage(self):
-        return (_("error getting id %s from url %s for file %s: %s\n") %
-                 (self.hash, util.hidepassword(self.url), self.filename,
-                  self.detail))
+        return _("error getting id %s from url %s for file %s: %s\n") % (
+            self.hash,
+            util.hidepassword(self.url),
+            self.filename,
+            self.detail,
+        )
 
     def __str__(self):
         return "%s: %s" % (util.hidepassword(self.url), self.detail)
 
+
 class basestore(object):
     def __init__(self, ui, repo, url):
         self.ui = ui
@@ -62,16 +68,19 @@
 
         at = 0
         available = self.exists(set(hash for (_filename, hash) in files))
-        with ui.makeprogress(_('getting largefiles'), unit=_('files'),
-                             total=len(files)) as progress:
+        with ui.makeprogress(
+            _('getting largefiles'), unit=_('files'), total=len(files)
+        ) as progress:
             for filename, hash in files:
                 progress.update(at)
                 at += 1
                 ui.note(_('getting %s:%s\n') % (filename, hash))
 
                 if not available.get(hash):
-                    ui.warn(_('%s: largefile %s not available from %s\n')
-                            % (filename, hash, util.hidepassword(self.url)))
+                    ui.warn(
+                        _('%s: largefile %s not available from %s\n')
+                        % (filename, hash, util.hidepassword(self.url))
+                    )
                     missing.append(filename)
                     continue
 
@@ -91,8 +100,9 @@
         storefilename = lfutil.storepath(self.repo, hash)
 
         tmpname = storefilename + '.tmp'
-        with util.atomictempfile(tmpname,
-                createmode=self.repo.store.createmode) as tmpfile:
+        with util.atomictempfile(
+            tmpname, createmode=self.repo.store.createmode
+        ) as tmpfile:
             try:
                 gothash = self._getfile(tmpfile, filename, hash)
             except StoreError as err:
@@ -101,8 +111,10 @@
 
         if gothash != hash:
             if gothash != "":
-                self.ui.warn(_('%s: data corruption (expected %s, got %s)\n')
-                             % (filename, hash, gothash))
+                self.ui.warn(
+                    _('%s: data corruption (expected %s, got %s)\n')
+                    % (filename, hash, gothash)
+                )
             util.unlink(tmpname)
             return False
 
@@ -115,10 +127,11 @@
         file revision referenced by every changeset in revs.
         Return 0 if all is well, non-zero on any errors.'''
 
-        self.ui.status(_('searching %d changesets for largefiles\n') %
-                       len(revs))
-        verified = set()                # set of (filename, filenode) tuples
-        filestocheck = []               # list of (cset, filename, expectedhash)
+        self.ui.status(
+            _('searching %d changesets for largefiles\n') % len(revs)
+        )
+        verified = set()  # set of (filename, filenode) tuples
+        filestocheck = []  # list of (cset, filename, expectedhash)
         for rev in revs:
             cctx = self.repo[rev]
             cset = "%d:%s" % (cctx.rev(), node.short(cctx.node()))
@@ -140,11 +153,13 @@
         if contents:
             self.ui.status(
                 _('verified contents of %d revisions of %d largefiles\n')
-                % (numrevs, numlfiles))
+                % (numrevs, numlfiles)
+            )
         else:
             self.ui.status(
                 _('verified existence of %d revisions of %d largefiles\n')
-                % (numrevs, numlfiles))
+                % (numrevs, numlfiles)
+            )
         return int(failed)
 
     def _getfile(self, tmpfile, filename, hash):
--- a/hgext/largefiles/lfcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/lfcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -35,10 +35,7 @@
     filemap,
 )
 
-from . import (
-    lfutil,
-    storefactory
-)
+from . import lfutil, storefactory
 
 release = lock.release
 
@@ -46,15 +43,28 @@
 
 eh = exthelper.exthelper()
 
-@eh.command('lfconvert',
-    [('s', 'size', '',
-      _('minimum size (MB) for files to be converted as largefiles'), 'SIZE'),
-    ('', 'to-normal', False,
-     _('convert from a largefiles repo to a normal repo')),
+
+@eh.command(
+    'lfconvert',
+    [
+        (
+            's',
+            'size',
+            '',
+            _('minimum size (MB) for files to be converted as largefiles'),
+            'SIZE',
+        ),
+        (
+            '',
+            'to-normal',
+            False,
+            _('convert from a largefiles repo to a normal repo'),
+        ),
     ],
     _('hg lfconvert SOURCE DEST [FILE ...]'),
     norepo=True,
-    inferrepo=True)
+    inferrepo=True,
+)
 def lfconvert(ui, src, dest, *pats, **opts):
     '''convert a normal repository to a largefiles repository
 
@@ -97,8 +107,10 @@
         # is to simply walk the changelog, using changelog.nodesbetween().
         # Take a look at mercurial/revlog.py:639 for more details.
         # Use a generator instead of a list to decrease memory usage
-        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
-            rsrc.heads())[0])
+        ctxs = (
+            rsrc[ctx]
+            for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
+        )
         revmap = {node.nullid: node.nullid}
         if tolfile:
             # Lock destination to prevent modification while it is converted to.
@@ -117,13 +129,24 @@
                 matcher = None
 
             lfiletohash = {}
-            with ui.makeprogress(_('converting revisions'),
-                                 unit=_('revisions'),
-                                 total=rsrc['tip'].rev()) as progress:
+            with ui.makeprogress(
+                _('converting revisions'),
+                unit=_('revisions'),
+                total=rsrc['tip'].rev(),
+            ) as progress:
                 for ctx in ctxs:
                     progress.update(ctx.rev())
-                    _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
-                        lfiles, normalfiles, matcher, size, lfiletohash)
+                    _lfconvert_addchangeset(
+                        rsrc,
+                        rdst,
+                        ctx,
+                        revmap,
+                        lfiles,
+                        normalfiles,
+                        matcher,
+                        size,
+                        lfiletohash,
+                    )
 
             if rdst.wvfs.exists(lfutil.shortname):
                 rdst.wvfs.rmtree(lfutil.shortname)
@@ -142,6 +165,7 @@
                 rdst.requirements.add('largefiles')
                 rdst._writerequirements()
         else:
+
             class lfsource(filemap.filemap_source):
                 def __init__(self, ui, source):
                     super(lfsource, self).__init__(ui, source, None)
@@ -151,8 +175,10 @@
                     realname, realrev = rev
                     f = super(lfsource, self).getfile(name, rev)
 
-                    if (not realname.startswith(lfutil.shortnameslash)
-                            or f[0] is None):
+                    if (
+                        not realname.startswith(lfutil.shortnameslash)
+                        or f[0] is None
+                    ):
                         return f
 
                     # Substitute in the largefile data for the hash
@@ -160,16 +186,19 @@
                     path = lfutil.findfile(rsrc, hash)
 
                     if path is None:
-                        raise error.Abort(_("missing largefile for '%s' in %s")
-                                          % (realname, realrev))
+                        raise error.Abort(
+                            _("missing largefile for '%s' in %s")
+                            % (realname, realrev)
+                        )
                     return util.readfile(path), f[1]
 
             class converter(convcmd.converter):
                 def __init__(self, ui, source, dest, revmapfile, opts):
                     src = lfsource(ui, source)
 
-                    super(converter, self).__init__(ui, src, dest, revmapfile,
-                                                    opts)
+                    super(converter, self).__init__(
+                        ui, src, dest, revmapfile, opts
+                    )
 
             found, missing = downloadlfiles(ui, rsrc)
             if missing != 0:
@@ -191,8 +220,10 @@
             # we failed, remove the new directory
             shutil.rmtree(rdst.root)
 
-def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
-        matcher, size, lfiletohash):
+
+def _lfconvert_addchangeset(
+    rsrc, rdst, ctx, revmap, lfiles, normalfiles, matcher, size, lfiletohash
+):
     # Convert src parents to dst parents
     parents = _convertparents(ctx, revmap)
 
@@ -217,8 +248,8 @@
                 if 'l' in fctx.flags():
                     if renamedlfile:
                         raise error.Abort(
-                            _('renamed/copied largefile %s becomes symlink')
-                            % f)
+                            _('renamed/copied largefile %s becomes symlink') % f
+                        )
                     islfile = False
             if islfile:
                 lfiles.add(f)
@@ -243,8 +274,7 @@
                 if f not in lfiletohash or lfiletohash[f] != hash:
                     rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                     executable = 'x' in ctx[f].flags()
-                    lfutil.writestandin(rdst, fstandin, hash,
-                        executable)
+                    lfutil.writestandin(rdst, fstandin, hash, executable)
                     lfiletohash[f] = hash
         else:
             # normal file
@@ -265,24 +295,39 @@
                 # doesn't change after rename or copy
                 renamed = lfutil.standin(renamed)
 
-            return context.memfilectx(repo, memctx, f,
-                                      lfiletohash[srcfname] + '\n',
-                                      'l' in fctx.flags(), 'x' in fctx.flags(),
-                                      renamed)
+            return context.memfilectx(
+                repo,
+                memctx,
+                f,
+                lfiletohash[srcfname] + '\n',
+                'l' in fctx.flags(),
+                'x' in fctx.flags(),
+                renamed,
+            )
         else:
             return _getnormalcontext(repo, ctx, f, revmap)
 
     # Commit
     _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap)
 
+
 def _commitcontext(rdst, parents, ctx, dstfiles, getfilectx, revmap):
-    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
-                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
+    mctx = context.memctx(
+        rdst,
+        parents,
+        ctx.description(),
+        dstfiles,
+        getfilectx,
+        ctx.user(),
+        ctx.date(),
+        ctx.extra(),
+    )
     ret = rdst.commitctx(mctx)
     lfutil.copyalltostore(rdst, ret)
     rdst.setparents(ret)
     revmap[ctx.node()] = rdst.changelog.tip()
 
+
 # Generate list of changed files
 def _getchangedfiles(ctx, parents):
     files = set(ctx.files())
@@ -293,6 +338,7 @@
                 files.add(fn)
     return files
 
+
 # Convert src parents to dst parents
 def _convertparents(ctx, revmap):
     parents = []
@@ -302,6 +348,7 @@
         parents.append(node.nullid)
     return parents
 
+
 # Get memfilectx for a normal file
 def _getnormalcontext(repo, ctx, f, revmap):
     try:
@@ -312,9 +359,11 @@
 
     data = fctx.data()
     if f == '.hgtags':
-        data = _converttags (repo.ui, revmap, data)
-    return context.memfilectx(repo, ctx, f, data, 'l' in fctx.flags(),
-                              'x' in fctx.flags(), renamed)
+        data = _converttags(repo.ui, revmap, data)
+    return context.memfilectx(
+        repo, ctx, f, data, 'l' in fctx.flags(), 'x' in fctx.flags(), renamed
+    )
+
 
 # Remap tag data using a revision map
 def _converttags(ui, revmap, data):
@@ -323,23 +372,21 @@
         try:
             id, name = line.split(' ', 1)
         except ValueError:
-            ui.warn(_('skipping incorrectly formatted tag %s\n')
-                % line)
+            ui.warn(_('skipping incorrectly formatted tag %s\n') % line)
             continue
         try:
             newid = node.bin(id)
         except TypeError:
-            ui.warn(_('skipping incorrectly formatted id %s\n')
-                % id)
+            ui.warn(_('skipping incorrectly formatted id %s\n') % id)
             continue
         try:
-            newdata.append('%s %s\n' % (node.hex(revmap[newid]),
-                name))
+            newdata.append('%s %s\n' % (node.hex(revmap[newid]), name))
         except KeyError:
             ui.warn(_('no mapping for id %s\n') % id)
             continue
     return ''.join(newdata)
 
+
 def _islfile(file, ctx, matcher, size):
     '''Return true if file should be considered a largefile, i.e.
     matcher matches it or it is larger than size.'''
@@ -353,6 +400,7 @@
     except error.LookupError:
         return False
 
+
 def uploadlfiles(ui, rsrc, rdst, files):
     '''upload largefiles to the central store'''
 
@@ -367,18 +415,25 @@
     files = [h for h in files if not retval[h]]
     ui.debug("%d largefiles need to be uploaded\n" % len(files))
 
-    with ui.makeprogress(_('uploading largefiles'), unit=_('files'),
-                         total=len(files)) as progress:
+    with ui.makeprogress(
+        _('uploading largefiles'), unit=_('files'), total=len(files)
+    ) as progress:
         for hash in files:
             progress.update(at)
             source = lfutil.findfile(rsrc, hash)
             if not source:
-                raise error.Abort(_('largefile %s missing from store'
-                                   ' (needs to be uploaded)') % hash)
+                raise error.Abort(
+                    _(
+                        'largefile %s missing from store'
+                        ' (needs to be uploaded)'
+                    )
+                    % hash
+                )
             # XXX check for errors here
             store.put(source, hash)
             at += 1
 
+
 def verifylfiles(ui, repo, all=False, contents=False):
     '''Verify that every largefile revision in the current changeset
     exists in the central store.  With --contents, also verify that
@@ -393,6 +448,7 @@
     store = storefactory.openstore(repo)
     return store.verify(revs, contents=contents)
 
+
 def cachelfiles(ui, repo, node, filelist=None):
     '''cachelfiles ensures that all largefiles needed by the specified revision
     are present in the repository's largefile cache.
@@ -411,7 +467,7 @@
             expectedhash = lfutil.readasstandin(ctx[lfutil.standin(lfile)])
         except IOError as err:
             if err.errno == errno.ENOENT:
-                continue # node must be None and standin wasn't found in wctx
+                continue  # node must be None and standin wasn't found in wctx
             raise
         if not lfutil.findfile(repo, expectedhash):
             toget.append((lfile, expectedhash))
@@ -423,15 +479,17 @@
 
     return ([], [])
 
+
 def downloadlfiles(ui, repo, rev=None):
     match = scmutil.match(repo[None], [repo.wjoin(lfutil.shortname)], {})
+
     def prepare(ctx, fns):
         pass
+
     totalsuccess = 0
     totalmissing = 0
-    if rev != []: # walkchangerevs on empty list would return all revs
-        for ctx in cmdutil.walkchangerevs(repo, match, {'rev' : rev},
-                                          prepare):
+    if rev != []:  # walkchangerevs on empty list would return all revs
+        for ctx in cmdutil.walkchangerevs(repo, match, {'rev': rev}, prepare):
             success, missing = cachelfiles(ui, repo, ctx.node())
             totalsuccess += len(success)
             totalmissing += len(missing)
@@ -440,8 +498,10 @@
         ui.status(_("%d largefiles failed to download\n") % totalmissing)
     return totalsuccess, totalmissing
 
-def updatelfiles(ui, repo, filelist=None, printmessage=None,
-                 normallookup=False):
+
+def updatelfiles(
+    ui, repo, filelist=None, printmessage=None, normallookup=False
+):
     '''Update largefiles according to standins in the working directory
 
     If ``printmessage`` is other than ``None``, it means "print (or
@@ -463,21 +523,19 @@
         wctx = repo[None]
         for lfile in lfiles:
             lfileorig = os.path.relpath(
-                scmutil.backuppath(ui, repo, lfile),
-                start=repo.root)
+                scmutil.backuppath(ui, repo, lfile), start=repo.root
+            )
             standin = lfutil.standin(lfile)
             standinorig = os.path.relpath(
-                scmutil.backuppath(ui, repo, standin),
-                start=repo.root)
+                scmutil.backuppath(ui, repo, standin), start=repo.root
+            )
             if wvfs.exists(standin):
-                if (wvfs.exists(standinorig) and
-                    wvfs.exists(lfile)):
-                    shutil.copyfile(wvfs.join(lfile),
-                                    wvfs.join(lfileorig))
+                if wvfs.exists(standinorig) and wvfs.exists(lfile):
+                    shutil.copyfile(wvfs.join(lfile), wvfs.join(lfileorig))
                     wvfs.unlinkpath(standinorig)
                 expecthash = lfutil.readasstandin(wctx[standin])
                 if expecthash != '':
-                    if lfile not in wctx: # not switched to normal file
+                    if lfile not in wctx:  # not switched to normal file
                         if repo.dirstate[standin] != '?':
                             wvfs.unlinkpath(lfile, ignoremissing=True)
                         else:
@@ -493,8 +551,10 @@
                 # lfile is added to the repository again. This happens when a
                 # largefile is converted back to a normal file: the standin
                 # disappears, but a new (normal) file appears as the lfile.
-                if (wvfs.exists(lfile) and
-                    repo.dirstate.normalize(lfile) not in wctx):
+                if (
+                    wvfs.exists(lfile)
+                    and repo.dirstate.normalize(lfile) not in wctx
+                ):
                     wvfs.unlinkpath(lfile)
                     removed += 1
 
@@ -549,13 +609,17 @@
 
         lfdirstate.write()
         if lfiles:
-            statuswriter(_('%d largefiles updated, %d removed\n') % (updated,
-                removed))
+            statuswriter(
+                _('%d largefiles updated, %d removed\n') % (updated, removed)
+            )
+
 
-@eh.command('lfpull',
-    [('r', 'rev', [], _('pull largefiles for these revisions'))
-    ] + cmdutil.remoteopts,
-    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'))
+@eh.command(
+    'lfpull',
+    [('r', 'rev', [], _('pull largefiles for these revisions'))]
+    + cmdutil.remoteopts,
+    _('-r REV... [-e CMD] [--remotecmd CMD] [SOURCE]'),
+)
 def lfpull(ui, repo, source="default", **opts):
     """pull largefiles for the specified revisions from the specified source
 
@@ -591,9 +655,8 @@
         numcached += len(cached)
     ui.status(_("%d largefiles cached\n") % numcached)
 
-@eh.command('debuglfput',
-    [] + cmdutil.remoteopts,
-    _('FILE'))
+
+@eh.command('debuglfput', [] + cmdutil.remoteopts, _('FILE'))
 def debuglfput(ui, repo, filepath, **kwargs):
     hash = lfutil.hashfile(filepath)
     storefactory.openstore(repo).put(filepath, hash)
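
The hunks above are purely mechanical: black splits any call that overflows the line limit, first onto a single continuation line and, failing that, one argument per line with a trailing comma. A minimal sketch of reproducing that style, assuming black's Python API (black.format_str plus black.FileMode; string_normalization=False stands in for the -S flag from the commit message, and line_length=80 is an assumption matching the wrapping visible in these hunks):

    import black

    # Source text only; the names (lfutil, repo, ...) are never executed,
    # black merely reparses and reprints them.
    SRC = (
        "def refresh():\n"
        "    for f in lfnames:\n"
        "        lfutil.writestandin(repo, standinname, hash='',\n"
        "            executable=lfutil.getexecutable(repo.wjoin(f)))\n"
    )

    mode = black.FileMode(string_normalization=False, line_length=80)
    print(black.format_str(SRC, mode=mode))
    # The call no longer fits even with every argument on one continuation
    # line, so each argument gets its own line plus a trailing comma,
    # the same shape as the writestandin hunk above.
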
--- a/hgext/largefiles/lfutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/lfutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,6 +37,7 @@
 
 # -- Private worker functions ------------------------------------------
 
+
 def getminsize(ui, assumelfiles, opt, default=10):
     lfsize = opt
     if not lfsize and assumelfiles:
@@ -45,12 +46,14 @@
         try:
             lfsize = float(lfsize)
         except ValueError:
-            raise error.Abort(_('largefiles: size must be number (not %s)\n')
-                             % lfsize)
+            raise error.Abort(
+                _('largefiles: size must be number (not %s)\n') % lfsize
+            )
     if lfsize is None:
         raise error.Abort(_('minimum size for largefiles must be specified'))
     return lfsize
 
+
 def link(src, dest):
     """Try to create hardlink - if that fails, efficiently make a copy."""
     util.makedirs(os.path.dirname(dest))
@@ -63,6 +66,7 @@
                 dstf.write(chunk)
         os.chmod(dest, os.stat(src).st_mode)
 
+
 def usercachepath(ui, hash):
     '''Return the correct location in the "global" largefiles cache for a file
     with the given hash.
@@ -70,14 +74,16 @@
     to preserve download bandwidth and storage space.'''
     return os.path.join(_usercachedir(ui), hash)
 
+
 def _usercachedir(ui, name=longname):
     '''Return the location of the "global" largefiles cache.'''
     path = ui.configpath(name, 'usercache')
     if path:
         return path
     if pycompat.iswindows:
-        appdata = encoding.environ.get('LOCALAPPDATA',
-                                       encoding.environ.get('APPDATA'))
+        appdata = encoding.environ.get(
+            'LOCALAPPDATA', encoding.environ.get('APPDATA')
+        )
         if appdata:
             return os.path.join(appdata, name)
     elif pycompat.isdarwin:
@@ -92,14 +98,15 @@
         if home:
             return os.path.join(home, '.cache', name)
     else:
-        raise error.Abort(_('unknown operating system: %s\n')
-                          % pycompat.osname)
+        raise error.Abort(_('unknown operating system: %s\n') % pycompat.osname)
     raise error.Abort(_('unknown %s usercache location') % name)
 
+
 def inusercache(ui, hash):
     path = usercachepath(ui, hash)
     return os.path.exists(path)
 
+
 def findfile(repo, hash):
     '''Return store path of the largefile with the specified hash.
     As a side effect, the file might be linked from user cache.
@@ -115,29 +122,39 @@
         return path
     return None
 
+
 class largefilesdirstate(dirstate.dirstate):
     def __getitem__(self, key):
         return super(largefilesdirstate, self).__getitem__(unixpath(key))
+
     def normal(self, f):
         return super(largefilesdirstate, self).normal(unixpath(f))
+
     def remove(self, f):
         return super(largefilesdirstate, self).remove(unixpath(f))
+
     def add(self, f):
         return super(largefilesdirstate, self).add(unixpath(f))
+
     def drop(self, f):
         return super(largefilesdirstate, self).drop(unixpath(f))
+
     def forget(self, f):
         return super(largefilesdirstate, self).forget(unixpath(f))
+
     def normallookup(self, f):
         return super(largefilesdirstate, self).normallookup(unixpath(f))
+
     def _ignore(self, f):
         return False
+
     def write(self, tr=False):
         # (1) disable PENDING mode always
         #     (lfdirstate isn't yet managed as a part of the transaction)
         # (2) avoid develwarn 'use dirstate.write with ....'
         super(largefilesdirstate, self).write(None)
 
+
 def openlfdirstate(ui, repo, create=True):
     '''
     Return a dirstate object that tracks largefiles: i.e. its root is
@@ -146,17 +163,22 @@
     vfs = repo.vfs
     lfstoredir = longname
     opener = vfsmod.vfs(vfs.join(lfstoredir))
-    lfdirstate = largefilesdirstate(opener, ui, repo.root,
-                                    repo.dirstate._validate,
-                                    lambda: sparse.matcher(repo))
+    lfdirstate = largefilesdirstate(
+        opener,
+        ui,
+        repo.root,
+        repo.dirstate._validate,
+        lambda: sparse.matcher(repo),
+    )
 
     # If the largefiles dirstate does not exist, populate and create
     # it. This ensures that we create it on the first meaningful
     # largefiles operation in a new clone.
     if create and not vfs.exists(vfs.join(lfstoredir, 'dirstate')):
         matcher = getstandinmatcher(repo)
-        standins = repo.dirstate.walk(matcher, subrepos=[], unknown=False,
-                                      ignored=False)
+        standins = repo.dirstate.walk(
+            matcher, subrepos=[], unknown=False, ignored=False
+        )
 
         if len(standins) > 0:
             vfs.makedirs(lfstoredir)
@@ -166,11 +188,13 @@
             lfdirstate.normallookup(lfile)
     return lfdirstate
 
+
 def lfdirstatestatus(lfdirstate, repo):
     pctx = repo['.']
     match = matchmod.always()
-    unsure, s = lfdirstate.status(match, subrepos=[], ignored=False,
-                                  clean=False, unknown=False)
+    unsure, s = lfdirstate.status(
+        match, subrepos=[], ignored=False, clean=False, unknown=False
+    )
     modified, clean = s.modified, s.clean
     for lfile in unsure:
         try:
@@ -184,6 +208,7 @@
             lfdirstate.normal(lfile)
     return s
 
+
 def listlfiles(repo, rev=None, matcher=None):
     '''return a list of largefiles in the working copy or the
     specified changeset'''
@@ -192,14 +217,18 @@
         matcher = getstandinmatcher(repo)
 
     # ignore unknown files in working directory
-    return [splitstandin(f)
-            for f in repo[rev].walk(matcher)
-            if rev is not None or repo.dirstate[f] != '?']
+    return [
+        splitstandin(f)
+        for f in repo[rev].walk(matcher)
+        if rev is not None or repo.dirstate[f] != '?'
+    ]
+
 
 def instore(repo, hash, forcelocal=False):
     '''Return true if a largefile with the given hash exists in the store'''
     return os.path.exists(storepath(repo, hash, forcelocal))
 
+
 def storepath(repo, hash, forcelocal=False):
     '''Return the correct location in the repository largefiles store for a
     file with the given hash.'''
@@ -207,6 +236,7 @@
         return repo.vfs.reljoin(repo.sharedpath, longname, hash)
     return repo.vfs.join(longname, hash)
 
+
 def findstorepath(repo, hash):
     '''Search through the local store path(s) to find the file for the given
     hash.  If the file is not found, its path in the primary store is returned.
@@ -224,6 +254,7 @@
 
     return (path, False)
 
+
 def copyfromcache(repo, hash, filename):
     '''Copy the specified largefile from the repo or system cache to
     filename in the repository. Return true on success or false if the
@@ -238,15 +269,17 @@
     # The write may fail before the file is fully written, but we
     # don't use atomic writes in the working copy.
     with open(path, 'rb') as srcfd, wvfs(filename, 'wb') as destfd:
-        gothash = copyandhash(
-            util.filechunkiter(srcfd), destfd)
+        gothash = copyandhash(util.filechunkiter(srcfd), destfd)
     if gothash != hash:
-        repo.ui.warn(_('%s: data corruption in %s with hash %s\n')
-                     % (filename, path, gothash))
+        repo.ui.warn(
+            _('%s: data corruption in %s with hash %s\n')
+            % (filename, path, gothash)
+        )
         wvfs.unlink(filename)
         return False
     return True
 
+
 def copytostore(repo, ctx, file, fstandin):
     wvfs = repo.wvfs
     hash = readasstandin(ctx[fstandin])
@@ -255,8 +288,11 @@
     if wvfs.exists(file):
         copytostoreabsolute(repo, wvfs.join(file), hash)
     else:
-        repo.ui.warn(_("%s: largefile %s not available from local store\n") %
-                     (file, hash))
+        repo.ui.warn(
+            _("%s: largefile %s not available from local store\n")
+            % (file, hash)
+        )
+
 
 def copyalltostore(repo, node):
     '''Copy all largefiles in a given revision to the store'''
@@ -267,24 +303,28 @@
         if realfile is not None and filename in ctx.manifest():
             copytostore(repo, ctx, realfile, filename)
 
+
 def copytostoreabsolute(repo, file, hash):
     if inusercache(repo.ui, hash):
         link(usercachepath(repo.ui, hash), storepath(repo, hash))
     else:
         util.makedirs(os.path.dirname(storepath(repo, hash)))
         with open(file, 'rb') as srcf:
-            with util.atomictempfile(storepath(repo, hash),
-                                     createmode=repo.store.createmode) as dstf:
+            with util.atomictempfile(
+                storepath(repo, hash), createmode=repo.store.createmode
+            ) as dstf:
                 for chunk in util.filechunkiter(srcf):
                     dstf.write(chunk)
         linktousercache(repo, hash)
 
+
 def linktousercache(repo, hash):
     '''Link / copy the largefile with the specified hash from the store
     to the cache.'''
     path = usercachepath(repo.ui, hash)
     link(storepath(repo, hash), path)
 
+
 def getstandinmatcher(repo, rmatcher=None):
     '''Return a match object that applies rmatcher to the standin directory'''
     wvfs = repo.wvfs
@@ -303,18 +343,22 @@
         match = scmutil.match(repo[None], [wvfs.join(standindir)], badfn=badfn)
     return match
 
+
 def composestandinmatcher(repo, rmatcher):
     '''Return a matcher that accepts standins corresponding to the
     files accepted by rmatcher. Pass the list of files in the matcher
     as the paths specified by the user.'''
     smatcher = getstandinmatcher(repo, rmatcher)
     isstandin = smatcher.matchfn
+
     def composedmatchfn(f):
         return isstandin(f) and rmatcher.matchfn(splitstandin(f))
+
     smatcher.matchfn = composedmatchfn
 
     return smatcher
 
+
 def standin(filename):
     '''Return the repo-relative path to the standin for the specified big
     file.'''
@@ -327,11 +371,13 @@
     #    passed filenames from an external source (like the command line).
     return shortnameslash + util.pconvert(filename)
 
+
 def isstandin(filename):
     '''Return true if filename is a big file standin. filename must be
     in Mercurial's internal form (slash-separated).'''
     return filename.startswith(shortnameslash)
 
+
 def splitstandin(filename):
     # Split on / because that's what dirstate always uses, even on Windows.
     # Change local separator to / first just in case we are passed filenames
@@ -342,6 +388,7 @@
     else:
         return None
 
+
 def updatestandin(repo, lfile, standin):
     """Re-calculate hash value of lfile and write it into standin
 
@@ -355,16 +402,19 @@
     else:
         raise error.Abort(_('%s: file not found!') % lfile)
 
+
 def readasstandin(fctx):
     '''read hex hash from given filectx of standin file
 
     This encapsulates how "standin" data is stored into storage layer.'''
     return fctx.data().strip()
 
+
 def writestandin(repo, standin, hash, executable):
     '''write hash to <repo.root>/<standin>'''
     repo.wwrite(standin, hash + '\n', executable and 'x' or '')
 
+
 def copyandhash(instream, outfile):
     '''Read bytes from instream (iterable) and write them to outfile,
     computing the SHA-1 hash of the data along the way. Return the hash.'''
@@ -374,17 +424,22 @@
         outfile.write(data)
     return hex(hasher.digest())
 
+
 def hashfile(file):
     if not os.path.exists(file):
         return ''
     with open(file, 'rb') as fd:
         return hexsha1(fd)
 
+
 def getexecutable(filename):
     mode = os.stat(filename).st_mode
-    return ((mode & stat.S_IXUSR) and
-            (mode & stat.S_IXGRP) and
-            (mode & stat.S_IXOTH))
+    return (
+        (mode & stat.S_IXUSR)
+        and (mode & stat.S_IXGRP)
+        and (mode & stat.S_IXOTH)
+    )
+
 
 def urljoin(first, second, *arg):
     def join(left, right):
@@ -399,6 +454,7 @@
         url = join(url, a)
     return url
 
+
 def hexsha1(fileobj):
     """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
     object data"""
@@ -407,31 +463,38 @@
         h.update(chunk)
     return hex(h.digest())
 
+
 def httpsendfile(ui, filename):
     return httpconnection.httpsendfile(ui, filename, 'rb')
 
+
 def unixpath(path):
     '''Return a version of path normalized for use with the lfdirstate.'''
     return util.pconvert(os.path.normpath(path))
 
+
 def islfilesrepo(repo):
     '''Return true if the repo is a largefile repo.'''
-    if ('largefiles' in repo.requirements and
-            any(shortnameslash in f[0] for f in repo.store.datafiles())):
+    if 'largefiles' in repo.requirements and any(
+        shortnameslash in f[0] for f in repo.store.datafiles()
+    ):
         return True
 
     return any(openlfdirstate(repo.ui, repo, False))
 
+
 class storeprotonotcapable(Exception):
     def __init__(self, storetypes):
         self.storetypes = storetypes
 
+
 def getstandinsstate(repo):
     standins = []
     matcher = getstandinmatcher(repo)
     wctx = repo[None]
-    for standin in repo.dirstate.walk(matcher, subrepos=[], unknown=False,
-                                      ignored=False):
+    for standin in repo.dirstate.walk(
+        matcher, subrepos=[], unknown=False, ignored=False
+    ):
         lfile = splitstandin(standin)
         try:
             hash = readasstandin(wctx[standin])
@@ -440,6 +503,7 @@
         standins.append((lfile, hash))
     return standins
 
+
 def synclfdirstate(repo, lfdirstate, lfile, normallookup):
     lfstandin = standin(lfile)
     if lfstandin in repo.dirstate:
@@ -448,8 +512,7 @@
     else:
         state, mtime = '?', -1
     if state == 'n':
-        if (normallookup or mtime < 0 or
-            not repo.wvfs.exists(lfile)):
+        if normallookup or mtime < 0 or not repo.wvfs.exists(lfile):
             # state 'n' doesn't ensure 'clean' in this case
             lfdirstate.normallookup(lfile)
         else:
@@ -463,6 +526,7 @@
     elif state == '?':
         lfdirstate.drop(lfile)
 
+
 def markcommitted(orig, ctx, node):
     repo = ctx.repo()
 
@@ -492,6 +556,7 @@
     # at merging.
     copyalltostore(repo, node)
 
+
 def getlfilestoupdate(oldstandins, newstandins):
     changedstandins = set(oldstandins).symmetric_difference(set(newstandins))
     filelist = []
@@ -500,10 +565,14 @@
             filelist.append(f[0])
     return filelist
 
+
 def getlfilestoupload(repo, missing, addfunc):
     makeprogress = repo.ui.makeprogress
-    with makeprogress(_('finding outgoing largefiles'),
-                      unit=_('revisions'), total=len(missing)) as progress:
+    with makeprogress(
+        _('finding outgoing largefiles'),
+        unit=_('revisions'),
+        total=len(missing),
+    ) as progress:
         for i, n in enumerate(missing):
             progress.update(i)
             parents = [p for p in repo[n].parents() if p != node.nullid]
@@ -533,6 +602,7 @@
                 if isstandin(fn) and fn in ctx:
                     addfunc(fn, readasstandin(ctx[fn]))
 
+
 def updatestandinsbymatch(repo, match):
     '''Update standins in the working directory according to specified match
 
@@ -553,8 +623,9 @@
         # large.
         lfdirstate = openlfdirstate(ui, repo)
         dirtymatch = matchmod.always()
-        unsure, s = lfdirstate.status(dirtymatch, subrepos=[], ignored=False,
-                                      clean=False, unknown=False)
+        unsure, s = lfdirstate.status(
+            dirtymatch, subrepos=[], ignored=False, clean=False, unknown=False
+        )
         modifiedfiles = unsure + s.modified + s.added + s.removed
         lfiles = listlfiles(repo)
         # this only loops through largefiles that exist (not
@@ -577,8 +648,9 @@
     # Case 2: user calls commit with specified patterns: refresh
     # any matching big files.
     smatcher = composestandinmatcher(repo, match)
-    standins = repo.dirstate.walk(smatcher, subrepos=[], unknown=False,
-                                  ignored=False)
+    standins = repo.dirstate.walk(
+        smatcher, subrepos=[], unknown=False, ignored=False
+    )
 
     # No matching big files: get out of the way and pass control to
     # the usual commit() method.
@@ -636,6 +708,7 @@
 
     return match
 
+
 class automatedcommithook(object):
     '''Stateful hook to update standins at the 1st commit of resuming
 
@@ -647,16 +720,18 @@
     --continue``) should update them, because largefiles may be
     modified manually.
     '''
+
     def __init__(self, resuming):
         self.resuming = resuming
 
     def __call__(self, repo, match):
         if self.resuming:
-            self.resuming = False # avoids updating at subsequent commits
+            self.resuming = False  # avoids updating at subsequent commits
             return updatestandinsbymatch(repo, match)
         else:
             return match
 
+
 def getstatuswriter(ui, repo, forcibly=None):
     '''Return the function to write largefiles specific status out
 
@@ -670,6 +745,6 @@
         return repo._lfstatuswriters[-1]
     else:
         if forcibly:
-            return ui.status # forcibly WRITE OUT
+            return ui.status  # forcibly WRITE OUT
         else:
-            return lambda *msg, **opts: None # forcibly IGNORE
+            return lambda *msg, **opts: None  # forcibly IGNORE
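
Nearly every helper in lfutil.py above goes through the standin mapping: a largefile's content hash lives in a small tracked file under the '.hglf/' prefix, and standin()/splitstandin() convert between the two path spaces. A self-contained sketch of the idea (re-implemented here for illustration; the real helpers additionally normalize Windows separators via util.pconvert):

    SHORTNAME = '.hglf'
    SHORTNAMESLASH = SHORTNAME + '/'

    def standin(filename):
        # repo-relative path of the tracked stand-in for a big file;
        # dirstate paths are always slash-separated, even on Windows
        return SHORTNAMESLASH + filename.replace('\\', '/')

    def splitstandin(filename):
        # inverse of standin(): strip the prefix, or None for normal files
        if filename.startswith(SHORTNAMESLASH):
            return filename[len(SHORTNAMESLASH):]
        return None

    assert standin('data/big.bin') == '.hglf/data/big.bin'
    assert splitstandin('.hglf/data/big.bin') == 'data/big.bin'
    assert splitstandin('src/normal.py') is None
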
--- a/hgext/largefiles/localstore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/localstore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,6 +17,7 @@
     lfutil,
 )
 
+
 class localstore(basestore.basestore):
     '''localstore first attempts to grab files out of the store in the remote
     Mercurial repository.  Failing that, it attempts to grab the files from
@@ -40,11 +41,11 @@
     def _getfile(self, tmpfile, filename, hash):
         path = lfutil.findfile(self.remote, hash)
         if not path:
-            raise basestore.StoreError(filename, hash, self.url,
-                _("can't get file locally"))
+            raise basestore.StoreError(
+                filename, hash, self.url, _("can't get file locally")
+            )
         with open(path, 'rb') as fd:
-            return lfutil.copyandhash(
-                util.filechunkiter(fd), tmpfile)
+            return lfutil.copyandhash(util.filechunkiter(fd), tmpfile)
 
     def _verifyfiles(self, contents, filestocheck):
         failed = False
@@ -52,17 +53,20 @@
             storepath, exists = lfutil.findstorepath(self.repo, expectedhash)
             if not exists:
                 storepath, exists = lfutil.findstorepath(
-                    self.remote, expectedhash)
+                    self.remote, expectedhash
+                )
             if not exists:
                 self.ui.warn(
                     _('changeset %s: %s references missing %s\n')
-                    % (cset, filename, storepath))
+                    % (cset, filename, storepath)
+                )
                 failed = True
             elif contents:
                 actualhash = lfutil.hashfile(storepath)
                 if actualhash != expectedhash:
                     self.ui.warn(
                         _('changeset %s: %s references corrupted %s\n')
-                        % (cset, filename, storepath))
+                        % (cset, filename, storepath)
+                    )
                     failed = True
         return failed
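
localstore._getfile above delegates to lfutil.copyandhash: the file is streamed in chunks, and a SHA-1 hash is computed while copying, so the content can be verified against the hash that names it without a second read. A minimal sketch of that pattern (hashlib only; the chunking helper is a hypothetical stand-in for util.filechunkiter):

    import hashlib
    import io

    def copyandhash(instream, outfile):
        # feed every chunk to the hasher as it is written out
        hasher = hashlib.sha1()
        for chunk in instream:
            hasher.update(chunk)
            outfile.write(chunk)
        return hasher.hexdigest()

    def filechunks(fileobj, size=128 * 1024):
        # hypothetical stand-in for util.filechunkiter
        while True:
            chunk = fileobj.read(size)
            if not chunk:
                break
            yield chunk

    src, dst = io.BytesIO(b'example largefile payload'), io.BytesIO()
    digest = copyandhash(filechunks(src), dst)
    assert dst.getvalue() == b'example largefile payload'
    assert len(digest) == 40  # hex-encoded SHA-1
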
--- a/hgext/largefiles/overrides.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/overrides.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,9 +14,7 @@
 
 from mercurial.i18n import _
 
-from mercurial.hgweb import (
-    webcommands,
-)
+from mercurial.hgweb import webcommands
 
 from mercurial import (
     archival,
@@ -51,6 +49,7 @@
 
 # -- Utility functions: commonly/repeatedly needed functionality ---------------
 
+
 def composelargefilematcher(match, manifest):
     '''create a matcher that matches only the largefiles in the original
     matcher'''
@@ -63,14 +62,16 @@
     m.matchfn = lambda f: lfile(f) and origmatchfn(f)
     return m
 
+
 def composenormalfilematcher(match, manifest, exclude=None):
     excluded = set()
     if exclude is not None:
         excluded.update(exclude)
 
     m = copy.copy(match)
-    notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
-            manifest or f in excluded)
+    notlfile = lambda f: not (
+        lfutil.isstandin(f) or lfutil.standin(f) in manifest or f in excluded
+    )
     m._files = [lf for lf in m._files if notlfile(lf)]
     m._fileset = set(m._files)
     m.always = lambda: False
@@ -78,10 +79,12 @@
     m.matchfn = lambda f: notlfile(f) and origmatchfn(f)
     return m
 
+
 def addlargefiles(ui, repo, isaddremove, matcher, uipathfn, **opts):
     large = opts.get(r'large')
     lfsize = lfutil.getminsize(
-        ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize'))
+        ui, lfutil.islfilesrepo(repo), opts.get(r'lfsize')
+    )
 
     lfmatcher = None
     if lfutil.islfilesrepo(repo):
@@ -112,8 +115,9 @@
             if not repo.wvfs.exists(f):
                 continue
 
-            abovemin = (lfsize and
-                        repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024)
+            abovemin = (
+                lfsize and repo.wvfs.lstat(f).st_size >= lfsize * 1024 * 1024
+            )
             if large or abovemin or (lfmatcher and lfmatcher(f)):
                 lfnames.append(f)
                 if ui.verbose or not exact:
@@ -129,21 +133,28 @@
             lfdirstate = lfutil.openlfdirstate(ui, repo)
             for f in lfnames:
                 standinname = lfutil.standin(f)
-                lfutil.writestandin(repo, standinname, hash='',
-                    executable=lfutil.getexecutable(repo.wjoin(f)))
+                lfutil.writestandin(
+                    repo,
+                    standinname,
+                    hash='',
+                    executable=lfutil.getexecutable(repo.wjoin(f)),
+                )
                 standins.append(standinname)
                 if lfdirstate[f] == 'r':
                     lfdirstate.normallookup(f)
                 else:
                     lfdirstate.add(f)
             lfdirstate.write()
-            bad += [lfutil.splitstandin(f)
-                    for f in repo[None].add(standins)
-                    if f in m.files()]
+            bad += [
+                lfutil.splitstandin(f)
+                for f in repo[None].add(standins)
+                if f in m.files()
+            ]
 
         added = [f for f in lfnames if f not in bad]
     return added, bad
 
+
 def removelargefiles(ui, repo, isaddremove, matcher, uipathfn, dryrun, **opts):
     after = opts.get(r'after')
     m = composelargefilematcher(matcher, repo[None].manifest())
@@ -153,10 +164,10 @@
     finally:
         repo.lfstatus = False
     manifest = repo[None].manifest()
-    modified, added, deleted, clean = [[f for f in list
-                                        if lfutil.standin(f) in manifest]
-                                       for list in (s.modified, s.added,
-                                                    s.deleted, s.clean)]
+    modified, added, deleted, clean = [
+        [f for f in list if lfutil.standin(f) in manifest]
+        for list in (s.modified, s.added, s.deleted, s.clean)
+    ]
 
     def warn(files, msg):
         for f in files:
@@ -165,14 +176,28 @@
 
     if after:
         remove = deleted
-        result = warn(modified + added + clean,
-                      _('not removing %s: file still exists\n'))
+        result = warn(
+            modified + added + clean, _('not removing %s: file still exists\n')
+        )
     else:
         remove = deleted + clean
-        result = warn(modified, _('not removing %s: file is modified (use -f'
-                                  ' to force removal)\n'))
-        result = warn(added, _('not removing %s: file has been marked for add'
-                               ' (use forget to undo)\n')) or result
+        result = warn(
+            modified,
+            _(
+                'not removing %s: file is modified (use -f'
+                ' to force removal)\n'
+            ),
+        )
+        result = (
+            warn(
+                added,
+                _(
+                    'not removing %s: file has been marked for add'
+                    ' (use forget to undo)\n'
+                ),
+            )
+            or result
+        )
 
     # Need to lock because standin files are deleted then removed from the
     # repository and we could race in-between.
@@ -198,31 +223,47 @@
         repo[None].forget(remove)
 
         for f in remove:
-            lfutil.synclfdirstate(repo, lfdirstate, lfutil.splitstandin(f),
-                                  False)
+            lfutil.synclfdirstate(
+                repo, lfdirstate, lfutil.splitstandin(f), False
+            )
 
         lfdirstate.write()
 
     return result
 
+
 # For overriding mercurial.hgweb.webcommands so that largefiles will
 # appear at their right place in the manifests.
 @eh.wrapfunction(webcommands, 'decodepath')
 def decodepath(orig, path):
     return lfutil.splitstandin(path) or path
 
+
 # -- Wrappers: modify existing commands --------------------------------
 
-@eh.wrapcommand('add',
-    opts=[('', 'large', None, _('add as largefile')),
-          ('', 'normal', None, _('add as normal file')),
-          ('', 'lfsize', '', _('add all files above this size (in megabytes) '
-                               'as largefiles (default: 10)'))])
+
+@eh.wrapcommand(
+    'add',
+    opts=[
+        ('', 'large', None, _('add as largefile')),
+        ('', 'normal', None, _('add as normal file')),
+        (
+            '',
+            'lfsize',
+            '',
+            _(
+                'add all files above this size (in megabytes) '
+                'as largefiles (default: 10)'
+            ),
+        ),
+    ],
+)
 def overrideadd(orig, ui, repo, *pats, **opts):
     if opts.get(r'normal') and opts.get(r'large'):
         raise error.Abort(_('--normal cannot be used with --large'))
     return orig(ui, repo, *pats, **opts)
 
+
 @eh.wrapfunction(cmdutil, 'add')
 def cmdutiladd(orig, ui, repo, matcher, prefix, uipathfn, explicitonly, **opts):
     # The --normal flag short circuits this override
@@ -230,21 +271,38 @@
         return orig(ui, repo, matcher, prefix, uipathfn, explicitonly, **opts)
 
     ladded, lbad = addlargefiles(ui, repo, False, matcher, uipathfn, **opts)
-    normalmatcher = composenormalfilematcher(matcher, repo[None].manifest(),
-                                             ladded)
+    normalmatcher = composenormalfilematcher(
+        matcher, repo[None].manifest(), ladded
+    )
     bad = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly, **opts)
 
     bad.extend(f for f in lbad)
     return bad
 
+
 @eh.wrapfunction(cmdutil, 'remove')
-def cmdutilremove(orig, ui, repo, matcher, prefix, uipathfn, after, force,
-                  subrepos, dryrun):
+def cmdutilremove(
+    orig, ui, repo, matcher, prefix, uipathfn, after, force, subrepos, dryrun
+):
     normalmatcher = composenormalfilematcher(matcher, repo[None].manifest())
-    result = orig(ui, repo, normalmatcher, prefix, uipathfn, after, force,
-                  subrepos, dryrun)
-    return removelargefiles(ui, repo, False, matcher, uipathfn, dryrun,
-                            after=after, force=force) or result
+    result = orig(
+        ui,
+        repo,
+        normalmatcher,
+        prefix,
+        uipathfn,
+        after,
+        force,
+        subrepos,
+        dryrun,
+    )
+    return (
+        removelargefiles(
+            ui, repo, False, matcher, uipathfn, dryrun, after=after, force=force
+        )
+        or result
+    )
+
 
 @eh.wrapfunction(subrepo.hgsubrepo, 'status')
 def overridestatusfn(orig, repo, rev2, **opts):
@@ -254,6 +312,7 @@
     finally:
         repo._repo.lfstatus = False
 
+
 @eh.wrapcommand('status')
 def overridestatus(orig, ui, repo, *pats, **opts):
     try:
@@ -262,6 +321,7 @@
     finally:
         repo.lfstatus = False
 
+
 @eh.wrapfunction(subrepo.hgsubrepo, 'dirty')
 def overridedirty(orig, repo, ignoreupdate=False, missing=False):
     try:
@@ -270,10 +330,18 @@
     finally:
         repo._repo.lfstatus = False
 
+
 @eh.wrapcommand('log')
 def overridelog(orig, ui, repo, *pats, **opts):
-    def overridematchandpats(orig, ctx, pats=(), opts=None, globbed=False,
-            default='relpath', badfn=None):
+    def overridematchandpats(
+        orig,
+        ctx,
+        pats=(),
+        opts=None,
+        globbed=False,
+        default='relpath',
+        badfn=None,
+    ):
         """Matcher that merges root directory with .hglf, suitable for log.
         It is still possible to match .hglf directly.
         For any listed files run log on the standin too.
@@ -304,13 +372,13 @@
         cwd = repo.getcwd()
         if cwd:
             hglf = lfutil.shortname
-            back = util.pconvert(repo.pathto(hglf)[:-len(hglf)])
+            back = util.pconvert(repo.pathto(hglf)[: -len(hglf)])
 
             def tostandin(f):
                 # The file may already be a standin, so truncate the back
                 # prefix and test before mangling it.  This avoids turning
                 # 'glob:../.hglf/foo*' into 'glob:../.hglf/../.hglf/foo*'.
-                if f.startswith(back) and lfutil.splitstandin(f[len(back):]):
+                if f.startswith(back) and lfutil.splitstandin(f[len(back) :]):
                     return f
 
                 # An absolute path is from outside the repo, so truncate the
@@ -318,15 +386,18 @@
                 # is somewhere in the repo, relative to root, and needs to be
                 # prepended before building the standin.
                 if os.path.isabs(cwd):
-                    f = f[len(back):]
+                    f = f[len(back) :]
                 else:
                     f = cwd + '/' + f
                 return back + lfutil.standin(f)
+
         else:
+
             def tostandin(f):
                 if lfutil.isstandin(f):
                     return f
                 return lfutil.standin(f)
+
         pats.update(fixpats(f, tostandin) for f in p)
 
         for i in range(0, len(m._files)):
@@ -346,12 +417,14 @@
         m._fileset = set(m._files)
         m.always = lambda: False
         origmatchfn = m.matchfn
+
         def lfmatchfn(f):
             lf = lfutil.splitstandin(f)
             if lf is not None and origmatchfn(lf):
                 return True
             r = origmatchfn(f)
             return r
+
         m.matchfn = lfmatchfn
 
         ui.debug('updated patterns: %s\n' % ', '.join(sorted(pats)))
@@ -363,25 +436,45 @@
     # The magic matchandpats override should be used for case (1) but not for
     # case (2).
     oldmatchandpats = scmutil.matchandpats
+
     def overridemakefilematcher(orig, repo, pats, opts, badfn=None):
         wctx = repo[None]
         match, pats = oldmatchandpats(wctx, pats, opts, badfn=badfn)
         return lambda ctx: match
 
-    wrappedmatchandpats = extensions.wrappedfunction(scmutil, 'matchandpats',
-                                                     overridematchandpats)
+    wrappedmatchandpats = extensions.wrappedfunction(
+        scmutil, 'matchandpats', overridematchandpats
+    )
     wrappedmakefilematcher = extensions.wrappedfunction(
-        logcmdutil, '_makenofollowfilematcher', overridemakefilematcher)
+        logcmdutil, '_makenofollowfilematcher', overridemakefilematcher
+    )
     with wrappedmatchandpats, wrappedmakefilematcher:
         return orig(ui, repo, *pats, **opts)
 
-@eh.wrapcommand('verify',
-    opts=[('', 'large', None,
-                _('verify that all largefiles in current revision exists')),
-          ('', 'lfa', None,
-                _('verify largefiles in all revisions, not just current')),
-          ('', 'lfc', None,
-                _('verify local largefile contents, not just existence'))])
+
+@eh.wrapcommand(
+    'verify',
+    opts=[
+        (
+            '',
+            'large',
+            None,
+            _('verify that all largefiles in current revision exists'),
+        ),
+        (
+            '',
+            'lfa',
+            None,
+            _('verify largefiles in all revisions, not just current'),
+        ),
+        (
+            '',
+            'lfc',
+            None,
+            _('verify local largefile contents, not just existence'),
+        ),
+    ],
+)
 def overrideverify(orig, ui, repo, *pats, **opts):
     large = opts.pop(r'large', False)
     all = opts.pop(r'lfa', False)
@@ -392,17 +485,22 @@
         result = result or lfcommands.verifylfiles(ui, repo, all, contents)
     return result
 
-@eh.wrapcommand('debugstate',
-    opts=[('', 'large', None, _('display largefiles dirstate'))])
+
+@eh.wrapcommand(
+    'debugstate', opts=[('', 'large', None, _('display largefiles dirstate'))]
+)
 def overridedebugstate(orig, ui, repo, *pats, **opts):
     large = opts.pop(r'large', False)
     if large:
+
         class fakerepo(object):
             dirstate = lfutil.openlfdirstate(ui, repo)
+
         orig(ui, fakerepo, *pats, **opts)
     else:
         orig(ui, repo, *pats, **opts)
 
+
 # Before starting the manifest merge, merge.updates will call
 # _checkunknownfile to check if there are any files in the merged-in
 # changeset that collide with unknown files in the working copy.
@@ -419,6 +517,7 @@
         return False
     return origfn(repo, wctx, mctx, f, f2)
 
+
 # The manifest merge handles conflicts on the manifest level. We want
 # to handle changes in largefile-ness of files at this level too.
 #
@@ -446,11 +545,13 @@
 # writing the files into the working copy and lfcommands.updatelfiles
 # will update the largefiles.
 @eh.wrapfunction(merge, 'calculateupdates')
-def overridecalculateupdates(origfn, repo, p1, p2, pas, branchmerge, force,
-                             acceptremote, *args, **kwargs):
+def overridecalculateupdates(
+    origfn, repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
+):
     overwrite = force and not branchmerge
     actions, diverge, renamedelete = origfn(
-        repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs)
+        repo, p1, p2, pas, branchmerge, force, acceptremote, *args, **kwargs
+    )
 
     if overwrite:
         return actions, diverge, renamedelete
@@ -474,13 +575,18 @@
                 sargs = (p2[f2].flags(), False)
             # Case 1: normal file in the working copy, largefile in
             # the second parent
-            usermsg = _('remote turned local normal file %s into a largefile\n'
-                        'use (l)argefile or keep (n)ormal file?'
-                        '$$ &Largefile $$ &Normal file') % lfile
-            if repo.ui.promptchoice(usermsg, 0) == 0: # pick remote largefile
+            usermsg = (
+                _(
+                    'remote turned local normal file %s into a largefile\n'
+                    'use (l)argefile or keep (n)ormal file?'
+                    '$$ &Largefile $$ &Normal file'
+                )
+                % lfile
+            )
+            if repo.ui.promptchoice(usermsg, 0) == 0:  # pick remote largefile
                 actions[lfile] = ('r', None, 'replaced by standin')
                 actions[standin] = ('g', sargs, 'replaces standin')
-            else: # keep local normal file
+            else:  # keep local normal file
                 actions[lfile] = ('k', None, 'replaces standin')
                 if branchmerge:
                     actions[standin] = ('k', None, 'replaced by non-standin')
@@ -492,10 +598,15 @@
                 largs = (p2[f2].flags(), False)
             # Case 2: largefile in the working copy, normal file in
             # the second parent
-            usermsg = _('remote turned local largefile %s into a normal file\n'
+            usermsg = (
+                _(
+                    'remote turned local largefile %s into a normal file\n'
                     'keep (l)argefile or use (n)ormal file?'
-                    '$$ &Largefile $$ &Normal file') % lfile
-            if repo.ui.promptchoice(usermsg, 0) == 0: # keep local largefile
+                    '$$ &Largefile $$ &Normal file'
+                )
+                % lfile
+            )
+            if repo.ui.promptchoice(usermsg, 0) == 0:  # keep local largefile
                 if branchmerge:
                     # largefile can be restored from standin safely
                     actions[lfile] = ('k', None, 'replaced by standin')
@@ -503,17 +614,21 @@
                 else:
                     # "lfile" should be marked as "removed" without
                     # removal of itself
-                    actions[lfile] = ('lfmr', None,
-                                      'forget non-standin largefile')
+                    actions[lfile] = (
+                        'lfmr',
+                        None,
+                        'forget non-standin largefile',
+                    )
 
                     # linear-merge should treat this largefile as 're-added'
                     actions[standin] = ('a', None, 'keep standin')
-            else: # pick remote normal file
+            else:  # pick remote normal file
                 actions[lfile] = ('g', largs, 'replaces standin')
                 actions[standin] = ('r', None, 'replaced by non-standin')
 
     return actions, diverge, renamedelete
 
+
 @eh.wrapfunction(merge, 'recordupdates')
 def mergerecordupdates(orig, repo, actions, branchmerge, getfiledata):
     if 'lfmr' in actions:
@@ -528,31 +643,43 @@
 
     return orig(repo, actions, branchmerge, getfiledata)
 
+
 # Override filemerge to prompt the user about how they wish to merge
 # largefiles. This will handle identical edits without prompting the user.
 @eh.wrapfunction(filemerge, '_filemerge')
-def overridefilemerge(origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca,
-                      labels=None):
+def overridefilemerge(
+    origfn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
+):
     if not lfutil.isstandin(orig) or fcd.isabsent() or fco.isabsent():
-        return origfn(premerge, repo, wctx, mynode, orig, fcd, fco, fca,
-                      labels=labels)
+        return origfn(
+            premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
+        )
 
     ahash = lfutil.readasstandin(fca).lower()
     dhash = lfutil.readasstandin(fcd).lower()
     ohash = lfutil.readasstandin(fco).lower()
-    if (ohash != ahash and
-        ohash != dhash and
-        (dhash == ahash or
-         repo.ui.promptchoice(
-             _('largefile %s has a merge conflict\nancestor was %s\n'
-               'you can keep (l)ocal %s or take (o)ther %s.\n'
-               'what do you want to do?'
-               '$$ &Local $$ &Other') %
-               (lfutil.splitstandin(orig), ahash, dhash, ohash),
-             0) == 1)):
+    if (
+        ohash != ahash
+        and ohash != dhash
+        and (
+            dhash == ahash
+            or repo.ui.promptchoice(
+                _(
+                    'largefile %s has a merge conflict\nancestor was %s\n'
+                    'you can keep (l)ocal %s or take (o)ther %s.\n'
+                    'what do you want to do?'
+                    '$$ &Local $$ &Other'
+                )
+                % (lfutil.splitstandin(orig), ahash, dhash, ohash),
+                0,
+            )
+            == 1
+        )
+    ):
         repo.wwrite(fcd.path(), fco.data(), fco.flags())
     return True, 0, False
 
+
 @eh.wrapfunction(copiesmod, 'pathcopies')
 def copiespathcopies(orig, ctx1, ctx2, match=None):
     copies = orig(ctx1, ctx2, match=match)
@@ -563,6 +690,7 @@
 
     return updated
 
+
 # Copy first changes the matchers to match standins instead of
 # largefiles.  Then it overrides util.copyfile in that function it
 # checks if the destination largefile already exists. It also keeps a
@@ -582,12 +710,21 @@
     nonormalfiles = False
     nolfiles = False
     manifest = repo[None].manifest()
-    def normalfilesmatchfn(orig, ctx, pats=(), opts=None, globbed=False,
-        default='relpath', badfn=None):
+
+    def normalfilesmatchfn(
+        orig,
+        ctx,
+        pats=(),
+        opts=None,
+        globbed=False,
+        default='relpath',
+        badfn=None,
+    ):
         if opts is None:
             opts = {}
         match = orig(ctx, pats, opts, globbed, default, badfn=badfn)
         return composenormalfilematcher(match, manifest)
+
     with extensions.wrappedfunction(scmutil, 'match', normalfilesmatchfn):
         try:
             result = orig(ui, repo, pats, opts, rename)
@@ -622,8 +759,16 @@
         wlock = repo.wlock()
 
         manifest = repo[None].manifest()
-        def overridematch(orig, ctx, pats=(), opts=None, globbed=False,
-                default='relpath', badfn=None):
+
+        def overridematch(
+            orig,
+            ctx,
+            pats=(),
+            opts=None,
+            globbed=False,
+            default='relpath',
+            badfn=None,
+        ):
             if opts is None:
                 opts = {}
             newpats = []
@@ -640,14 +785,19 @@
             m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
             m._fileset = set(m._files)
             origmatchfn = m.matchfn
+
             def matchfn(f):
                 lfile = lfutil.splitstandin(f)
-                return (lfile is not None and
-                        (f in manifest) and
-                        origmatchfn(lfile) or
-                        None)
+                return (
+                    lfile is not None
+                    and (f in manifest)
+                    and origmatchfn(lfile)
+                    or None
+                )
+
             m.matchfn = matchfn
             return m
+
         listpats = []
         for pat in pats:
             if matchmod.patkind(pat) is not None:
@@ -656,23 +806,26 @@
                 listpats.append(makestandin(pat))
 
         copiedfiles = []
+
         def overridecopyfile(orig, src, dest, *args, **kwargs):
-            if (lfutil.shortname in src and
-                dest.startswith(repo.wjoin(lfutil.shortname))):
+            if lfutil.shortname in src and dest.startswith(
+                repo.wjoin(lfutil.shortname)
+            ):
                 destlfile = dest.replace(lfutil.shortname, '')
                 if not opts['force'] and os.path.exists(destlfile):
-                    raise IOError('',
-                                  _('destination largefile already exists'))
+                    raise IOError('', _('destination largefile already exists'))
             copiedfiles.append((src, dest))
             orig(src, dest, *args, **kwargs)
+
         with extensions.wrappedfunction(util, 'copyfile', overridecopyfile):
             with extensions.wrappedfunction(scmutil, 'match', overridematch):
                 result += orig(ui, repo, listpats, opts, rename)
 
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         for (src, dest) in copiedfiles:
-            if (lfutil.shortname in src and
-                dest.startswith(repo.wjoin(lfutil.shortname))):
+            if lfutil.shortname in src and dest.startswith(
+                repo.wjoin(lfutil.shortname)
+            ):
                 srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
                 destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
                 destlfiledir = repo.wvfs.dirname(repo.wjoin(destlfile)) or '.'
@@ -686,8 +839,7 @@
                     repo.wvfs.unlinkpath(srclfile, ignoremissing=True)
                     lfdirstate.remove(srclfile)
                 else:
-                    util.copyfile(repo.wjoin(srclfile),
-                                  repo.wjoin(destlfile))
+                    util.copyfile(repo.wjoin(srclfile), repo.wjoin(destlfile))
 
                 lfdirstate.add(destlfile)
         lfdirstate.write()
@@ -704,6 +856,7 @@
 
     return result
 
+
 # When the user calls revert, we have to be careful to not revert any
 # changes to other largefiles accidentally. This means we have to keep
 # track of the largefiles that are being reverted so we only pull down
@@ -726,13 +879,20 @@
             lfutil.updatestandin(repo, lfile, lfutil.standin(lfile))
         for lfile in s.deleted:
             fstandin = lfutil.standin(lfile)
-            if (repo.wvfs.exists(fstandin)):
+            if repo.wvfs.exists(fstandin):
                 repo.wvfs.unlink(fstandin)
 
         oldstandins = lfutil.getstandinsstate(repo)
 
-        def overridematch(orig, mctx, pats=(), opts=None, globbed=False,
-                default='relpath', badfn=None):
+        def overridematch(
+            orig,
+            mctx,
+            pats=(),
+            opts=None,
+            globbed=False,
+            default='relpath',
+            badfn=None,
+        ):
             if opts is None:
                 opts = {}
             match = orig(mctx, pats, opts, globbed, default, badfn=badfn)
@@ -742,8 +902,9 @@
             # currently doesn't work correctly in that case, this match is
             # called, so the lfdirstate above may not be the correct one for
             # this invocation of match.
-            lfdirstate = lfutil.openlfdirstate(mctx.repo().ui, mctx.repo(),
-                                               False)
+            lfdirstate = lfutil.openlfdirstate(
+                mctx.repo().ui, mctx.repo(), False
+            )
 
             wctx = repo[None]
             matchfiles = []
@@ -758,14 +919,16 @@
             m._files = matchfiles
             m._fileset = set(m._files)
             origmatchfn = m.matchfn
+
             def matchfn(f):
                 lfile = lfutil.splitstandin(f)
                 if lfile is not None:
-                    return (origmatchfn(lfile) and
-                            (f in ctx or f in mctx))
+                    return origmatchfn(lfile) and (f in ctx or f in mctx)
                 return origmatchfn(f)
+
             m.matchfn = matchfn
             return m
+
         with extensions.wrappedfunction(scmutil, 'match', overridematch):
             orig(ui, repo, ctx, parents, *pats, **opts)
 
@@ -776,16 +939,31 @@
         # when target revision is explicitly specified: in such case,
         # 'n' and valid timestamp in dirstate doesn't ensure 'clean'
         # of target (standin) file.
-        lfcommands.updatelfiles(ui, repo, filelist, printmessage=False,
-                                normallookup=True)
+        lfcommands.updatelfiles(
+            ui, repo, filelist, printmessage=False, normallookup=True
+        )
+
 
 # after pulling changesets, we need to take some extra care to get
 # largefiles updated remotely
-@eh.wrapcommand('pull',
-    opts=[('', 'all-largefiles', None,
-                _('download all pulled versions of largefiles (DEPRECATED)')),
-          ('', 'lfrev', [],
-                _('download largefiles for these revisions'), _('REV'))])
+@eh.wrapcommand(
+    'pull',
+    opts=[
+        (
+            '',
+            'all-largefiles',
+            None,
+            _('download all pulled versions of largefiles (DEPRECATED)'),
+        ),
+        (
+            '',
+            'lfrev',
+            [],
+            _('download largefiles for these revisions'),
+            _('REV'),
+        ),
+    ],
+)
 def overridepull(orig, ui, repo, source=None, **opts):
     revsprepull = len(repo)
     if not source:
@@ -798,7 +976,7 @@
         lfrevs.append('pulled()')
     if lfrevs and revspostpull > revsprepull:
         numcached = 0
-        repo.firstpulled = revsprepull # for pulled() revset expression
+        repo.firstpulled = revsprepull  # for pulled() revset expression
         try:
             for rev in scmutil.revrange(repo, lfrevs):
                 ui.note(_('pulling largefiles for revision %d\n') % rev)
@@ -809,9 +987,13 @@
         ui.status(_("%d largefiles cached\n") % numcached)
     return result
 
-@eh.wrapcommand('push',
-    opts=[('', 'lfrev', [],
-               _('upload largefiles for these revisions'), _('REV'))])
+
+@eh.wrapcommand(
+    'push',
+    opts=[
+        ('', 'lfrev', [], _('upload largefiles for these revisions'), _('REV'))
+    ],
+)
 def overridepush(orig, ui, repo, *args, **kwargs):
     """Override push command and store --lfrev parameters in opargs"""
     lfrevs = kwargs.pop(r'lfrev', None)
@@ -820,6 +1002,7 @@
         opargs['lfrevs'] = scmutil.revrange(repo, lfrevs)
     return orig(ui, repo, *args, **kwargs)
 
+
 @eh.wrapfunction(exchange, 'pushoperation')
 def exchangepushoperation(orig, *args, **kwargs):
     """Override pushoperation constructor and store lfrevs parameter"""
@@ -828,6 +1011,7 @@
     pushop.lfrevs = lfrevs
     return pushop
 
+
 @eh.revsetpredicate('pulled()')
 def pulledrevsetsymbol(repo, subset, x):
     """Changesets that just has been pulled.
@@ -854,20 +1038,31 @@
         raise error.Abort(_("pulled() only available in --lfrev"))
     return smartset.baseset([r for r in subset if r >= firstpulled])
 
-@eh.wrapcommand('clone',
-    opts=[('', 'all-largefiles', None,
-               _('download all versions of all largefiles'))])
+
+@eh.wrapcommand(
+    'clone',
+    opts=[
+        (
+            '',
+            'all-largefiles',
+            None,
+            _('download all versions of all largefiles'),
+        )
+    ],
+)
 def overrideclone(orig, ui, source, dest=None, **opts):
     d = dest
     if d is None:
         d = hg.defaultdest(source)
     if opts.get(r'all_largefiles') and not hg.islocal(d):
-            raise error.Abort(_(
-            '--all-largefiles is incompatible with non-local destination %s') %
-            d)
+        raise error.Abort(
+            _('--all-largefiles is incompatible with non-local destination %s')
+            % d
+        )
 
     return orig(ui, source, dest, **opts)
 
+
 @eh.wrapfunction(hg, 'clone')
 def hgclone(orig, ui, opts, *args, **kwargs):
     result = orig(ui, opts, *args, **kwargs)
@@ -893,6 +1088,7 @@
 
     return result
 
+
 @eh.wrapcommand('rebase', extension='rebase')
 def overriderebase(orig, ui, repo, **opts):
     if not util.safehasattr(repo, '_largefilesenabled'):
@@ -907,6 +1103,7 @@
         repo._lfstatuswriters.pop()
         repo._lfcommithooks.pop()
 
+
 @eh.wrapcommand('archive')
 def overridearchivecmd(orig, ui, repo, dest, **opts):
     repo.unfiltered().lfstatus = True
@@ -916,6 +1113,7 @@
     finally:
         repo.unfiltered().lfstatus = False
 
+
 @eh.wrapfunction(webcommands, 'archive')
 def hgwebarchive(orig, web):
     web.repo.lfstatus = True
@@ -925,14 +1123,26 @@
     finally:
         web.repo.lfstatus = False
 
+
 @eh.wrapfunction(archival, 'archive')
-def overridearchive(orig, repo, dest, node, kind, decode=True, match=None,
-            prefix='', mtime=None, subrepos=None):
+def overridearchive(
+    orig,
+    repo,
+    dest,
+    node,
+    kind,
+    decode=True,
+    match=None,
+    prefix='',
+    mtime=None,
+    subrepos=None,
+):
     # For some reason setting repo.lfstatus in hgwebarchive only changes the
     # unfiltered repo's attr, so check that as well.
     if not repo.lfstatus and not repo.unfiltered().lfstatus:
-        return orig(repo, dest, node, kind, decode, match, prefix, mtime,
-                    subrepos)
+        return orig(
+            repo, dest, node, kind, decode, match, prefix, mtime, subrepos
+        )
 
     # No need to lock because we are only reading history and
     # largefile caches, neither of which are modified.
@@ -946,8 +1156,7 @@
 
     if kind == 'files':
         if prefix:
-            raise error.Abort(
-                _('cannot give prefix when archiving to files'))
+            raise error.Abort(_('cannot give prefix when archiving to files'))
     else:
         prefix = archival.tidyprefix(dest, kind, prefix)
 
@@ -962,8 +1171,12 @@
     archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
 
     if repo.ui.configbool("ui", "archivemeta"):
-        write('.hg_archival.txt', 0o644, False,
-              lambda: archival.buildmetadata(ctx))
+        write(
+            '.hg_archival.txt',
+            0o644,
+            False,
+            lambda: archival.buildmetadata(ctx),
+        )
 
     for f in ctx:
         ff = ctx.flags(f)
@@ -975,8 +1188,11 @@
 
                 if path is None:
                     raise error.Abort(
-                       _('largefile %s not found in repo store or system cache')
-                       % lfile)
+                        _(
+                            'largefile %s not found in repo store or system cache'
+                        )
+                        % lfile
+                    )
             else:
                 path = lfile
 
@@ -995,6 +1211,7 @@
 
     archiver.done()
 
+
 @eh.wrapfunction(subrepo.hgsubrepo, 'archive')
 def hgsubrepoarchive(orig, repo, archiver, prefix, match=None, decode=True):
     lfenabled = util.safehasattr(repo._repo, '_largefilesenabled')
@@ -1029,8 +1246,11 @@
 
                 if path is None:
                     raise error.Abort(
-                       _('largefile %s not found in repo store or system cache')
-                       % lfile)
+                        _(
+                            'largefile %s not found in repo store or system cache'
+                        )
+                        % lfile
+                    )
             else:
                 path = lfile
 
@@ -1047,6 +1267,7 @@
         sub._repo.lfstatus = True
         sub.archive(archiver, subprefix, submatch, decode)
 
+
 # If a largefile is modified, the change is not reflected in its
 # standin until a commit. cmdutil.bailifchanged() raises an exception
 # if the repo has uncommitted changes. Wrap it to also check if
@@ -1060,6 +1281,7 @@
     if s.modified or s.added or s.removed or s.deleted:
         raise error.Abort(_('uncommitted changes'))
 
+
 @eh.wrapfunction(cmdutil, 'postcommitstatus')
 def postcommitstatus(orig, repo, *args, **kwargs):
     repo.lfstatus = True
@@ -1068,12 +1290,22 @@
     finally:
         repo.lfstatus = False
 
+
 @eh.wrapfunction(cmdutil, 'forget')
-def cmdutilforget(orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun,
-                  interactive):
+def cmdutilforget(
+    orig, ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
+):
     normalmatcher = composenormalfilematcher(match, repo[None].manifest())
-    bad, forgot = orig(ui, repo, normalmatcher, prefix, uipathfn, explicitonly,
-                       dryrun, interactive)
+    bad, forgot = orig(
+        ui,
+        repo,
+        normalmatcher,
+        prefix,
+        uipathfn,
+        explicitonly,
+        dryrun,
+        interactive,
+    )
     m = composelargefilematcher(match, repo[None].manifest())
 
     try:
@@ -1088,8 +1320,9 @@
     for f in forget:
         fstandin = lfutil.standin(f)
         if fstandin not in repo.dirstate and not repo.wvfs.isdir(fstandin):
-            ui.warn(_('not removing %s: file is already untracked\n')
-                    % uipathfn(f))
+            ui.warn(
+                _('not removing %s: file is already untracked\n') % uipathfn(f)
+            )
             bad.append(f)
 
     for f in forget:
@@ -1115,6 +1348,7 @@
     forgot.extend(f for f in forget if f not in rejected)
     return bad, forgot
 
+
 def _getoutgoings(repo, other, missing, addfunc):
     """get pairs of filename and largefile hash in outgoing revisions
     in 'missing'.
@@ -1126,62 +1360,75 @@
     """
     knowns = set()
     lfhashes = set()
+
     def dedup(fn, lfhash):
         k = (fn, lfhash)
         if k not in knowns:
             knowns.add(k)
             lfhashes.add(lfhash)
+
     lfutil.getlfilestoupload(repo, missing, dedup)
     if lfhashes:
         lfexists = storefactory.openstore(repo, other).exists(lfhashes)
         for fn, lfhash in knowns:
-            if not lfexists[lfhash]: # lfhash doesn't exist on "other"
+            if not lfexists[lfhash]:  # lfhash doesn't exist on "other"
                 addfunc(fn, lfhash)
 
+
 def outgoinghook(ui, repo, other, opts, missing):
     if opts.pop('large', None):
         lfhashes = set()
         if ui.debugflag:
             toupload = {}
+
             def addfunc(fn, lfhash):
                 if fn not in toupload:
                     toupload[fn] = []
                 toupload[fn].append(lfhash)
                 lfhashes.add(lfhash)
+
             def showhashes(fn):
                 for lfhash in sorted(toupload[fn]):
-                    ui.debug('    %s\n' % (lfhash))
+                    ui.debug('    %s\n' % lfhash)
+
         else:
             toupload = set()
+
             def addfunc(fn, lfhash):
                 toupload.add(fn)
                 lfhashes.add(lfhash)
+
             def showhashes(fn):
                 pass
+
         _getoutgoings(repo, other, missing, addfunc)
 
         if not toupload:
             ui.status(_('largefiles: no files to upload\n'))
         else:
-            ui.status(_('largefiles to upload (%d entities):\n')
-                      % (len(lfhashes)))
+            ui.status(
+                _('largefiles to upload (%d entities):\n') % (len(lfhashes))
+            )
             for file in sorted(toupload):
                 ui.status(lfutil.splitstandin(file) + '\n')
                 showhashes(file)
             ui.status('\n')
 
-@eh.wrapcommand('outgoing',
-    opts=[('', 'large', None, _('display outgoing largefiles'))])
+
+@eh.wrapcommand(
+    'outgoing', opts=[('', 'large', None, _('display outgoing largefiles'))]
+)
 def _outgoingcmd(orig, *args, **kwargs):
     # Nothing to do here other than add the extra help option - the hook
     # above processes it.
     return orig(*args, **kwargs)
 
+
 def summaryremotehook(ui, repo, opts, changes):
     largeopt = opts.get('large', False)
     if changes is None:
         if largeopt:
-            return (False, True) # only outgoing check is needed
+            return (False, True)  # only outgoing check is needed
         else:
             return (False, False)
     elif largeopt:
@@ -1193,9 +1440,11 @@
 
         toupload = set()
         lfhashes = set()
+
         def addfunc(fn, lfhash):
             toupload.add(fn)
             lfhashes.add(lfhash)
+
         _getoutgoings(repo, peer, outgoing.missing, addfunc)
 
         if not toupload:
@@ -1203,11 +1452,15 @@
             ui.status(_('largefiles: (no files to upload)\n'))
         else:
             # i18n: column positioning for "hg summary"
-            ui.status(_('largefiles: %d entities for %d files to upload\n')
-                      % (len(lfhashes), len(toupload)))
+            ui.status(
+                _('largefiles: %d entities for %d files to upload\n')
+                % (len(lfhashes), len(toupload))
+            )
 
-@eh.wrapcommand('summary',
-    opts=[('', 'large', None, _('display outgoing largefiles'))])
+
+@eh.wrapcommand(
+    'summary', opts=[('', 'large', None, _('display outgoing largefiles'))]
+)
 def overridesummary(orig, ui, repo, *pats, **opts):
     try:
         repo.lfstatus = True
@@ -1215,6 +1468,7 @@
     finally:
         repo.lfstatus = False
 
+
 @eh.wrapfunction(scmutil, 'addremove')
 def scmutiladdremove(orig, repo, matcher, prefix, uipathfn, opts=None):
     if opts is None:
@@ -1223,8 +1477,13 @@
         return orig(repo, matcher, prefix, uipathfn, opts)
     # Get the list of missing largefiles so we can remove them
     lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-    unsure, s = lfdirstate.status(matchmod.always(), subrepos=[],
-                                  ignored=False, clean=False, unknown=False)
+    unsure, s = lfdirstate.status(
+        matchmod.always(),
+        subrepos=[],
+        ignored=False,
+        clean=False,
+        unknown=False,
+    )
 
     # Call into the normal remove code, but have the removal of the standins
     # handled by the original addremove.  Monkey patching here makes sure
@@ -1240,18 +1499,27 @@
         matchfn = m.matchfn
         m.matchfn = lambda f: f in s.deleted and matchfn(f)
 
-        removelargefiles(repo.ui, repo, True, m, uipathfn, opts.get('dry_run'),
-                         **pycompat.strkwargs(opts))
+        removelargefiles(
+            repo.ui,
+            repo,
+            True,
+            m,
+            uipathfn,
+            opts.get('dry_run'),
+            **pycompat.strkwargs(opts)
+        )
     # Call into the normal add code, and any files that *should* be added as
     # largefiles will be
-    added, bad = addlargefiles(repo.ui, repo, True, matcher, uipathfn,
-                               **pycompat.strkwargs(opts))
+    added, bad = addlargefiles(
+        repo.ui, repo, True, matcher, uipathfn, **pycompat.strkwargs(opts)
+    )
     # Now that we've handled largefiles, hand off to the original addremove
     # function to take care of the rest.  Make sure it doesn't do anything with
     # largefiles by passing a matcher that will ignore them.
     matcher = composenormalfilematcher(matcher, repo[None].manifest(), added)
     return orig(repo, matcher, prefix, uipathfn, opts)
 
+
 # Calling purge with --all will cause the largefiles to be deleted.
 # Override repo.status to prevent this from happening.
 @eh.wrapcommand('purge', extension='purge')
@@ -1267,29 +1535,44 @@
     # cleaner instead.
     repo = repo.unfiltered()
     oldstatus = repo.status
-    def overridestatus(node1='.', node2=None, match=None, ignored=False,
-                        clean=False, unknown=False, listsubrepos=False):
-        r = oldstatus(node1, node2, match, ignored, clean, unknown,
-                      listsubrepos)
+
+    def overridestatus(
+        node1='.',
+        node2=None,
+        match=None,
+        ignored=False,
+        clean=False,
+        unknown=False,
+        listsubrepos=False,
+    ):
+        r = oldstatus(
+            node1, node2, match, ignored, clean, unknown, listsubrepos
+        )
         lfdirstate = lfutil.openlfdirstate(ui, repo)
         unknown = [f for f in r.unknown if lfdirstate[f] == '?']
         ignored = [f for f in r.ignored if lfdirstate[f] == '?']
-        return scmutil.status(r.modified, r.added, r.removed, r.deleted,
-                              unknown, ignored, r.clean)
+        return scmutil.status(
+            r.modified, r.added, r.removed, r.deleted, unknown, ignored, r.clean
+        )
+
     repo.status = overridestatus
     orig(ui, repo, *dirs, **opts)
     repo.status = oldstatus
 
+
 @eh.wrapcommand('rollback')
 def overriderollback(orig, ui, repo, **opts):
     with repo.wlock():
         before = repo.dirstate.parents()
-        orphans = set(f for f in repo.dirstate
-                      if lfutil.isstandin(f) and repo.dirstate[f] != 'r')
+        orphans = set(
+            f
+            for f in repo.dirstate
+            if lfutil.isstandin(f) and repo.dirstate[f] != 'r'
+        )
         result = orig(ui, repo, **opts)
         after = repo.dirstate.parents()
         if before == after:
-            return result # no need to restore standins
+            return result  # no need to restore standins
 
         pctx = repo['.']
         for f in repo.dirstate:
@@ -1318,6 +1601,7 @@
         lfdirstate.write()
     return result
 
+
 @eh.wrapcommand('transplant', extension='transplant')
 def overridetransplant(orig, ui, repo, *revs, **opts):
     resuming = opts.get(r'continue')
@@ -1330,6 +1614,7 @@
         repo._lfcommithooks.pop()
     return result
 
+
 @eh.wrapcommand('cat')
 def overridecat(orig, ui, repo, file1, *pats, **opts):
     opts = pycompat.byteskwargs(opts)
@@ -1338,6 +1623,7 @@
     notbad = set()
     m = scmutil.match(ctx, (file1,) + pats, opts)
     origmatchfn = m.matchfn
+
     def lfmatchfn(f):
         if origmatchfn(f):
             return True
@@ -1346,14 +1632,18 @@
             return False
         notbad.add(lf)
         return origmatchfn(lf)
+
     m.matchfn = lfmatchfn
     origbadfn = m.bad
+
     def lfbadfn(f, msg):
         if not f in notbad:
             origbadfn(f, msg)
+
     m.bad = lfbadfn
 
     origvisitdirfn = m.visitdir
+
     def lfvisitdirfn(dir):
         if dir == lfutil.shortname:
             return True
@@ -1364,6 +1654,7 @@
         if lf is None:
             return False
         return origvisitdirfn(lf)
+
     m.visitdir = lfvisitdirfn
 
     for f in ctx.walk(m):
@@ -1382,8 +1673,12 @@
                     success, missing = store.get([(lf, hash)])
                     if len(success) != 1:
                         raise error.Abort(
-                            _('largefile %s is not in cache and could not be '
-                              'downloaded')  % lf)
+                            _(
+                                'largefile %s is not in cache and could not be '
+                                'downloaded'
+                            )
+                            % lf
+                        )
                 path = lfutil.usercachepath(repo.ui, hash)
                 with open(path, "rb") as fpin:
                     for chunk in util.filechunkiter(fpin):
@@ -1391,9 +1686,9 @@
         err = 0
     return err
 
+
 @eh.wrapfunction(merge, 'update')
-def mergeupdate(orig, repo, node, branchmerge, force,
-                *args, **kwargs):
+def mergeupdate(orig, repo, node, branchmerge, force, *args, **kwargs):
     matcher = kwargs.get(r'matcher', None)
     # note if this is a partial update
     partial = matcher and not matcher.always()
@@ -1414,8 +1709,13 @@
         # (*1) deprecated, but used internally (e.g: "rebase --collapse")
 
         lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
-        unsure, s = lfdirstate.status(matchmod.always(), subrepos=[],
-                                      ignored=False, clean=True, unknown=False)
+        unsure, s = lfdirstate.status(
+            matchmod.always(),
+            subrepos=[],
+            ignored=False,
+            clean=True,
+            unknown=False,
+        )
         oldclean = set(s.clean)
         pctx = repo['.']
         dctx = repo[node]
@@ -1425,10 +1725,12 @@
                 continue
             lfhash = lfutil.hashfile(lfileabs)
             standin = lfutil.standin(lfile)
-            lfutil.writestandin(repo, standin, lfhash,
-                                lfutil.getexecutable(lfileabs))
-            if (standin in pctx and
-                lfhash == lfutil.readasstandin(pctx[standin])):
+            lfutil.writestandin(
+                repo, standin, lfhash, lfutil.getexecutable(lfileabs)
+            )
+            if standin in pctx and lfhash == lfutil.readasstandin(
+                pctx[standin]
+            ):
                 oldclean.add(lfile)
         for lfile in s.added:
             fstandin = lfutil.standin(lfile)
@@ -1462,11 +1764,13 @@
         if branchmerge or force or partial:
             filelist.extend(s.deleted + s.removed)
 
-        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
-                                normallookup=partial)
+        lfcommands.updatelfiles(
+            repo.ui, repo, filelist=filelist, normallookup=partial
+        )
 
         return result
 
+
 @eh.wrapfunction(scmutil, 'marktouched')
 def scmutilmarktouched(orig, repo, files, *args, **kwargs):
     result = orig(repo, files, *args, **kwargs)
@@ -1477,11 +1781,17 @@
         if lf is not None:
             filelist.append(lf)
     if filelist:
-        lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
-                                printmessage=False, normallookup=True)
+        lfcommands.updatelfiles(
+            repo.ui,
+            repo,
+            filelist=filelist,
+            printmessage=False,
+            normallookup=True,
+        )
 
     return result
 
+
 @eh.wrapfunction(upgrade, 'preservedrequirements')
 @eh.wrapfunction(upgrade, 'supporteddestrequirements')
 def upgraderequirements(orig, repo):
@@ -1490,15 +1800,17 @@
         reqs.add('largefiles')
     return reqs
 
+
 _lfscheme = 'largefile://'
 
+
 @eh.wrapfunction(urlmod, 'open')
 def openlargefile(orig, ui, url_, data=None):
     if url_.startswith(_lfscheme):
         if data:
             msg = "cannot use data on a 'largefile://' url"
             raise error.ProgrammingError(msg)
-        lfid = url_[len(_lfscheme):]
+        lfid = url_[len(_lfscheme) :]
         return storefactory.getlfile(ui, lfid)
     else:
         return orig(ui, url_, data=data)
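
A note on the final hunk above: PEP 8 (and black) treats the slice colon
like a binary operator, so when a bound is a compound expression such as
len(_lfscheme) it gets a space on each side. A toy sketch of the rule, with
illustrative names only:

    _scheme = 'largefile://'

    def strip_scheme(url):
        # compound bound -> spaced colon, exactly as black rewrites it above
        return url[len(_scheme) :]

    assert strip_scheme('largefile://deadbeef') == 'deadbeef'
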
--- a/hgext/largefiles/proto.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/proto.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,16 +19,16 @@
     wireprotov1server,
 )
 
-from . import (
-    lfutil,
-)
+from . import lfutil
 
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-LARGEFILES_REQUIRED_MSG = ('\nThis repository uses the largefiles extension.'
-                           '\n\nPlease enable it in your Mercurial config '
-                           'file.\n')
+LARGEFILES_REQUIRED_MSG = (
+    '\nThis repository uses the largefiles extension.'
+    '\n\nPlease enable it in your Mercurial config '
+    'file.\n'
+)
 
 eh = exthelper.exthelper()
 
@@ -36,6 +36,7 @@
 ssholdcallstream = None
 httpoldcallstream = None
 
+
 def putlfile(repo, proto, sha):
     '''Server command for putting a largefile into a repository's local store
     and into the user cache.'''
@@ -53,22 +54,27 @@
             tmpfp.close()
             lfutil.linktousercache(repo, sha)
         except IOError as e:
-            repo.ui.warn(_('largefiles: failed to put %s into store: %s\n') %
-                         (sha, e.strerror))
+            repo.ui.warn(
+                _('largefiles: failed to put %s into store: %s\n')
+                % (sha, e.strerror)
+            )
             return wireprototypes.pushres(
-                1, output.getvalue() if output else '')
+                1, output.getvalue() if output else ''
+            )
         finally:
             tmpfp.discard()
 
     return wireprototypes.pushres(0, output.getvalue() if output else '')
 
+
 def getlfile(repo, proto, sha):
     '''Server command for retrieving a largefile from the repository-local
     cache or user cache.'''
     filename = lfutil.findfile(repo, sha)
     if not filename:
-        raise error.Abort(_('requested largefile %s not present in cache')
-                          % sha)
+        raise error.Abort(
+            _('requested largefile %s not present in cache') % sha
+        )
     f = open(filename, 'rb')
     length = os.fstat(f.fileno())[6]
 
@@ -81,8 +87,10 @@
         yield '%d\n' % length
         for chunk in util.filechunkiter(f):
             yield chunk
+
     return wireprototypes.streamreslegacy(gen=generator())
 
+
 def statlfile(repo, proto, sha):
     '''Server command for checking if a largefile is present - returns '2\n' if
     the largefile is missing, '0\n' if it seems to be in good condition.
@@ -95,6 +103,7 @@
         return wireprototypes.bytesresponse('2\n')
     return wireprototypes.bytesresponse('0\n')
 
+
 def wirereposetup(ui, repo):
     class lfileswirerepository(repo.__class__):
         def putlfile(self, sha, fd):
@@ -102,12 +111,16 @@
             # input file-like into a bundle before sending it, so we can't use
             # it ...
             if issubclass(self.__class__, httppeer.httppeer):
-                res = self._call('putlfile', data=fd, sha=sha,
-                    headers={r'content-type': r'application/mercurial-0.1'})
+                res = self._call(
+                    'putlfile',
+                    data=fd,
+                    sha=sha,
+                    headers={r'content-type': r'application/mercurial-0.1'},
+                )
                 try:
                     d, output = res.split('\n', 1)
                     for l in output.splitlines(True):
-                        self.ui.warn(_('remote: '), l) # assume l ends with \n
+                        self.ui.warn(_('remote: '), l)  # assume l ends with \n
                     return int(d)
                 except ValueError:
                     self.ui.warn(_('unexpected putlfile response: %r\n') % res)
@@ -119,14 +132,14 @@
                 try:
                     ret, output = self._callpush("putlfile", fd, sha=sha)
                     if ret == "":
-                        raise error.ResponseError(_('putlfile failed:'),
-                                output)
+                        raise error.ResponseError(_('putlfile failed:'), output)
                     return int(ret)
                 except IOError:
                     return 1
                 except ValueError:
                     raise error.ResponseError(
-                        _('putlfile failed (unexpected response):'), ret)
+                        _('putlfile failed (unexpected response):'), ret
+                    )
 
         def getlfile(self, sha):
             """returns an iterable with the chunks of the file with sha sha"""
@@ -135,8 +148,9 @@
             try:
                 length = int(length)
             except ValueError:
-                self._abort(error.ResponseError(_("unexpected response:"),
-                                                length))
+                self._abort(
+                    error.ResponseError(_("unexpected response:"), length)
+                )
 
             # SSH streams will block if reading more than length
             for chunk in util.filechunkiter(stream, limit=length):
@@ -146,8 +160,9 @@
             if issubclass(self.__class__, httppeer.httppeer):
                 chunk = stream.read(1)
                 if chunk:
-                    self._abort(error.ResponseError(_("unexpected response:"),
-                                                    chunk))
+                    self._abort(
+                        error.ResponseError(_("unexpected response:"), chunk)
+                    )
 
         @wireprotov1peer.batchable
         def statlfile(self, sha):
@@ -165,6 +180,7 @@
 
     repo.__class__ = lfileswirerepository
 
+
 # advertise the largefiles=serve capability
 @eh.wrapfunction(wireprotov1server, '_capabilities')
 def _capabilities(orig, repo, proto):
@@ -173,6 +189,7 @@
     caps.append('largefiles=serve')
     return caps
 
+
 def heads(orig, repo, proto):
     '''Wrap server command - largefile-capable clients will know to call
     lheads instead'''
@@ -181,6 +198,7 @@
 
     return orig(repo, proto)
 
+
 def sshrepocallstream(self, cmd, **args):
     if cmd == 'heads' and self.capable('largefiles'):
         cmd = 'lheads'
@@ -188,8 +206,10 @@
         args[r'cmds'] = args[r'cmds'].replace('heads ', 'lheads ')
     return ssholdcallstream(self, cmd, **args)
 
+
 headsre = re.compile(br'(^|;)heads\b')
 
+
 def httprepocallstream(self, cmd, **args):
     if cmd == 'heads' and self.capable('largefiles'):
         cmd = 'lheads'
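
Two patterns dominate the proto.py hunks above: black inserts the two blank
lines PEP 8 wants between top-level definitions, and it rewraps the long
LARGEFILES_REQUIRED_MSG by moving the adjacent string literals inside
parentheses, one per line. Adjacent literals still concatenate at compile
time, so the value is unchanged; a toy sketch (short enough that black
itself would join it, shown wrapped only for illustration):

    MSG = (
        'line one. '
        'line two.'
    )
    assert MSG == 'line one. line two.'
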
--- a/hgext/largefiles/remotestore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/remotestore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,9 +14,7 @@
     util,
 )
 
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 from . import (
     basestore,
@@ -27,8 +25,10 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 class remotestore(basestore.basestore):
     '''a largefile store accessed over a network'''
+
     def __init__(self, ui, repo, url):
         super(remotestore, self).__init__(ui, repo, url)
         self._lstore = None
@@ -39,14 +39,18 @@
         if self.sendfile(source, hash):
             raise error.Abort(
                 _('remotestore: could not put %s to remote store %s')
-                % (source, util.hidepassword(self.url)))
+                % (source, util.hidepassword(self.url))
+            )
         self.ui.debug(
             _('remotestore: put %s to remote store %s\n')
-            % (source, util.hidepassword(self.url)))
+            % (source, util.hidepassword(self.url))
+        )
 
     def exists(self, hashes):
-        return dict((h, s == 0) for (h, s) in # dict-from-generator
-                    self._stat(hashes).iteritems())
+        return dict(
+            (h, s == 0)
+            for (h, s) in self._stat(hashes).iteritems()  # dict-from-generator
+        )
 
     def sendfile(self, filename, hash):
         self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
@@ -56,7 +60,8 @@
         except IOError as e:
             raise error.Abort(
                 _('remotestore: could not open file %s: %s')
-                % (filename, stringutil.forcebytestr(e)))
+                % (filename, stringutil.forcebytestr(e))
+            )
 
     def _getfile(self, tmpfile, filename, hash):
         try:
@@ -64,17 +69,20 @@
         except urlerr.httperror as e:
             # 401s get converted to error.Aborts; everything else is fine being
             # turned into a StoreError
-            raise basestore.StoreError(filename, hash, self.url,
-                                       stringutil.forcebytestr(e))
+            raise basestore.StoreError(
+                filename, hash, self.url, stringutil.forcebytestr(e)
+            )
         except urlerr.urlerror as e:
             # This usually indicates a connection problem, so don't
             # keep trying with the other files... they will probably
             # all fail too.
-            raise error.Abort('%s: %s' %
-                             (util.hidepassword(self.url), e.reason))
+            raise error.Abort(
+                '%s: %s' % (util.hidepassword(self.url), e.reason)
+            )
         except IOError as e:
-            raise basestore.StoreError(filename, hash, self.url,
-                                       stringutil.forcebytestr(e))
+            raise basestore.StoreError(
+                filename, hash, self.url, stringutil.forcebytestr(e)
+            )
 
         return lfutil.copyandhash(chunks, tmpfile)
 
@@ -85,17 +93,24 @@
 
     def _verifyfiles(self, contents, filestocheck):
         failed = False
-        expectedhashes = [expectedhash
-                          for cset, filename, expectedhash in filestocheck]
+        expectedhashes = [
+            expectedhash for cset, filename, expectedhash in filestocheck
+        ]
         localhashes = self._hashesavailablelocally(expectedhashes)
-        stats = self._stat([expectedhash for expectedhash in expectedhashes
-                            if expectedhash not in localhashes])
+        stats = self._stat(
+            [
+                expectedhash
+                for expectedhash in expectedhashes
+                if expectedhash not in localhashes
+            ]
+        )
 
         for cset, filename, expectedhash in filestocheck:
             if expectedhash in localhashes:
                 filetocheck = (cset, filename, expectedhash)
-                verifyresult = self._lstore._verifyfiles(contents,
-                                                         [filetocheck])
+                verifyresult = self._lstore._verifyfiles(
+                    contents, [filetocheck]
+                )
                 if verifyresult:
                     failed = True
             else:
@@ -104,16 +119,19 @@
                     if stat == 1:
                         self.ui.warn(
                             _('changeset %s: %s: contents differ\n')
-                            % (cset, filename))
+                            % (cset, filename)
+                        )
                         failed = True
                     elif stat == 2:
                         self.ui.warn(
-                            _('changeset %s: %s missing\n')
-                            % (cset, filename))
+                            _('changeset %s: %s missing\n') % (cset, filename)
+                        )
                         failed = True
                     else:
-                        raise RuntimeError('verify failed: unexpected response '
-                                           'from statlfile (%r)' % stat)
+                        raise RuntimeError(
+                            'verify failed: unexpected response '
+                            'from statlfile (%r)' % stat
+                        )
         return failed
 
     def _put(self, hash, fd):
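
The exists() rewrap above keeps the old dict((k, v) for ...) spelling and
only relocates the dict-from-generator comment; on Python 2.7+ a dict
comprehension is equivalent. A sketch with toy data (py3 spelling, since
the real code uses iteritems()):

    stats = {'aa': 0, 'bb': 2}
    exists_gen = dict((h, s == 0) for (h, s) in stats.items())
    exists_comp = {h: s == 0 for h, s in stats.items()}
    assert exists_gen == exists_comp == {'aa': True, 'bb': False}
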
--- a/hgext/largefiles/reposetup.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/reposetup.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,6 +25,7 @@
     lfutil,
 )
 
+
 def reposetup(ui, repo):
     # wire repositories should be given new wireproto functions
     # by "proto.wirereposetup()" via "hg.wirepeersetupfuncs"
@@ -36,6 +37,7 @@
         _largefilesenabled = True
 
         lfstatus = False
+
         def status_nolfiles(self, *args, **kwargs):
             return super(lfilesrepo, self).status(*args, **kwargs)
 
@@ -46,19 +48,25 @@
         def __getitem__(self, changeid):
             ctx = super(lfilesrepo, self).__getitem__(changeid)
             if self.lfstatus:
+
                 class lfilesctx(ctx.__class__):
                     def files(self):
                         filenames = super(lfilesctx, self).files()
                         return [lfutil.splitstandin(f) or f for f in filenames]
+
                     def manifest(self):
                         man1 = super(lfilesctx, self).manifest()
+
                         class lfilesmanifest(man1.__class__):
                             def __contains__(self, filename):
                                 orig = super(lfilesmanifest, self).__contains__
-                                return (orig(filename) or
-                                        orig(lfutil.standin(filename)))
+                                return orig(filename) or orig(
+                                    lfutil.standin(filename)
+                                )
+
                         man1.__class__ = lfilesmanifest
                         return man1
+
                     def filectx(self, path, fileid=None, filelog=None):
                         orig = super(lfilesctx, self).filectx
                         try:
@@ -70,13 +78,15 @@
                             # Adding a null character will cause Mercurial to
                             # identify this as a binary file.
                             if filelog is not None:
-                                result = orig(lfutil.standin(path), fileid,
-                                              filelog)
+                                result = orig(
+                                    lfutil.standin(path), fileid, filelog
+                                )
                             else:
                                 result = orig(lfutil.standin(path), fileid)
                             olddata = result.data
                             result.data = lambda: olddata() + '\0'
                         return result
+
                 ctx.__class__ = lfilesctx
             return ctx
 
@@ -87,13 +97,28 @@
         # XXX large file status is buggy when used on repo proxy.
         # XXX this needs to be investigated.
         @localrepo.unfilteredmethod
-        def status(self, node1='.', node2=None, match=None, ignored=False,
-                clean=False, unknown=False, listsubrepos=False):
+        def status(
+            self,
+            node1='.',
+            node2=None,
+            match=None,
+            ignored=False,
+            clean=False,
+            unknown=False,
+            listsubrepos=False,
+        ):
             listignored, listclean, listunknown = ignored, clean, unknown
             orig = super(lfilesrepo, self).status
             if not self.lfstatus:
-                return orig(node1, node2, match, listignored, listclean,
-                            listunknown, listsubrepos)
+                return orig(
+                    node1,
+                    node2,
+                    match,
+                    listignored,
+                    listclean,
+                    listunknown,
+                    listsubrepos,
+                )
 
             # some calls in this function rely on the old version of status
             self.lfstatus = False
@@ -124,8 +149,15 @@
                         if match(f):
                             break
                     else:
-                        return orig(node1, node2, match, listignored, listclean,
-                                    listunknown, listsubrepos)
+                        return orig(
+                            node1,
+                            node2,
+                            match,
+                            listignored,
+                            listclean,
+                            listunknown,
+                            listsubrepos,
+                        )
 
                 # Create a copy of match that matches standins instead
                 # of largefiles.
@@ -149,8 +181,9 @@
                 m = copy.copy(match)
                 m._files = tostandins(m._files)
 
-                result = orig(node1, node2, m, ignored, clean, unknown,
-                              listsubrepos)
+                result = orig(
+                    node1, node2, m, ignored, clean, unknown, listsubrepos
+                )
                 if working:
 
                     def sfindirstate(f):
@@ -158,24 +191,32 @@
                         dirstate = self.dirstate
                         return sf in dirstate or dirstate.hasdir(sf)
 
-                    match._files = [f for f in match._files
-                                    if sfindirstate(f)]
+                    match._files = [f for f in match._files if sfindirstate(f)]
                     # Don't waste time getting the ignored and unknown
                     # files from lfdirstate
-                    unsure, s = lfdirstate.status(match, subrepos=[],
-                                                  ignored=False,
-                                                  clean=listclean,
-                                                  unknown=False)
+                    unsure, s = lfdirstate.status(
+                        match,
+                        subrepos=[],
+                        ignored=False,
+                        clean=listclean,
+                        unknown=False,
+                    )
                     (modified, added, removed, deleted, clean) = (
-                        s.modified, s.added, s.removed, s.deleted, s.clean)
+                        s.modified,
+                        s.added,
+                        s.removed,
+                        s.deleted,
+                        s.clean,
+                    )
                     if parentworking:
                         for lfile in unsure:
                             standin = lfutil.standin(lfile)
                             if standin not in ctx1:
                                 # from second parent
                                 modified.append(lfile)
-                            elif (lfutil.readasstandin(ctx1[standin])
-                                  != lfutil.hashfile(self.wjoin(lfile))):
+                            elif lfutil.readasstandin(
+                                ctx1[standin]
+                            ) != lfutil.hashfile(self.wjoin(lfile)):
                                 modified.append(lfile)
                             else:
                                 if listclean:
@@ -190,11 +231,14 @@
                             standin = lfutil.standin(lfile)
                             if standin in ctx1:
                                 abslfile = self.wjoin(lfile)
-                                if ((lfutil.readasstandin(ctx1[standin]) !=
-                                     lfutil.hashfile(abslfile)) or
-                                    (checkexec and
-                                     ('x' in ctx1.flags(standin)) !=
-                                     bool(lfutil.getexecutable(abslfile)))):
+                                if (
+                                    lfutil.readasstandin(ctx1[standin])
+                                    != lfutil.hashfile(abslfile)
+                                ) or (
+                                    checkexec
+                                    and ('x' in ctx1.flags(standin))
+                                    != bool(lfutil.getexecutable(abslfile))
+                                ):
                                     modified.append(lfile)
                                 elif listclean:
                                     clean.append(lfile)
@@ -205,8 +249,11 @@
                         # marked as 'R' in the working context.
                         # then, largefiles not managed also in the target
                         # context should be excluded from 'removed'.
-                        removed = [lfile for lfile in removed
-                                   if lfutil.standin(lfile) in ctx1]
+                        removed = [
+                            lfile
+                            for lfile in removed
+                            if lfutil.standin(lfile) in ctx1
+                        ]
 
                     # Standins no longer found in lfdirstate have been deleted
                     for standin in ctx1.walk(lfutil.getstandinmatcher(self)):
@@ -229,10 +276,8 @@
                     # files are not really removed if they are still in
                     # lfdirstate. This happens in merges where files
                     # change type.
-                    removed = [f for f in removed
-                               if f not in self.dirstate]
-                    result[2] = [f for f in result[2]
-                                 if f not in lfdirstate]
+                    removed = [f for f in removed if f not in self.dirstate]
+                    result[2] = [f for f in result[2] if f not in lfdirstate]
 
                     lfiles = set(lfdirstate)
                     # Unknown files
@@ -240,16 +285,28 @@
                     # Ignored files
                     result[5] = set(result[5]).difference(lfiles)
                     # combine normal files and largefiles
-                    normals = [[fn for fn in filelist
-                                if not lfutil.isstandin(fn)]
-                               for filelist in result]
-                    lfstatus = (modified, added, removed, deleted, [], [],
-                                clean)
-                    result = [sorted(list1 + list2)
-                              for (list1, list2) in zip(normals, lfstatus)]
-                else: # not against working directory
-                    result = [[lfutil.splitstandin(f) or f for f in items]
-                              for items in result]
+                    normals = [
+                        [fn for fn in filelist if not lfutil.isstandin(fn)]
+                        for filelist in result
+                    ]
+                    lfstatus = (
+                        modified,
+                        added,
+                        removed,
+                        deleted,
+                        [],
+                        [],
+                        clean,
+                    )
+                    result = [
+                        sorted(list1 + list2)
+                        for (list1, list2) in zip(normals, lfstatus)
+                    ]
+                else:  # not against working directory
+                    result = [
+                        [lfutil.splitstandin(f) or f for f in items]
+                        for items in result
+                    ]
 
                 if wlock:
                     lfdirstate.write()
@@ -263,18 +320,28 @@
 
         def commitctx(self, ctx, *args, **kwargs):
             node = super(lfilesrepo, self).commitctx(ctx, *args, **kwargs)
+
             class lfilesctx(ctx.__class__):
                 def markcommitted(self, node):
                     orig = super(lfilesctx, self).markcommitted
                     return lfutil.markcommitted(orig, self, node)
+
             ctx.__class__ = lfilesctx
             return node
 
         # Before commit, largefile standins have not had their
         # contents updated to reflect the hash of their largefile.
         # Do that here.
-        def commit(self, text="", user=None, date=None, match=None,
-                force=False, editor=False, extra=None):
+        def commit(
+            self,
+            text="",
+            user=None,
+            date=None,
+            match=None,
+            force=False,
+            editor=False,
+            extra=None,
+        ):
             if extra is None:
                 extra = {}
             orig = super(lfilesrepo, self).commit
@@ -282,20 +349,30 @@
             with self.wlock():
                 lfcommithook = self._lfcommithooks[-1]
                 match = lfcommithook(self, match)
-                result = orig(text=text, user=user, date=date, match=match,
-                                force=force, editor=editor, extra=extra)
+                result = orig(
+                    text=text,
+                    user=user,
+                    date=date,
+                    match=match,
+                    force=force,
+                    editor=editor,
+                    extra=extra,
+                )
                 return result
 
         def push(self, remote, force=False, revs=None, newbranch=False):
             if remote.local():
                 missing = set(self.requirements) - remote.local().supported
                 if missing:
-                    msg = _("required features are not"
-                            " supported in the destination:"
-                            " %s") % (', '.join(sorted(missing)))
+                    msg = _(
+                        "required features are not"
+                        " supported in the destination:"
+                        " %s"
+                    ) % (', '.join(sorted(missing)))
                     raise error.Abort(msg)
-            return super(lfilesrepo, self).push(remote, force=force, revs=revs,
-                newbranch=newbranch)
+            return super(lfilesrepo, self).push(
+                remote, force=force, revs=revs, newbranch=newbranch
+            )
 
         # TODO: _subdirlfs should be moved into "lfutil.py", because
         # it is referred only from "lfutil.updatestandinsbymatch"
@@ -319,7 +396,8 @@
                 if lfutil.isstandin(f + '/'):
                     raise error.Abort(
                         _('file "%s" is a largefile standin') % f,
-                        hint=('commit the largefile itself instead'))
+                        hint='commit the largefile itself instead',
+                    )
                 # Scan directories
                 if self.wvfs.isdir(f):
                     dirs.append(f)
@@ -377,17 +455,19 @@
         if lfrevs:
             toupload = set()
             addfunc = lambda fn, lfhash: toupload.add(lfhash)
-            lfutil.getlfilestoupload(pushop.repo, lfrevs,
-                                     addfunc)
+            lfutil.getlfilestoupload(pushop.repo, lfrevs, addfunc)
             lfcommands.uploadlfiles(ui, pushop.repo, pushop.remote, toupload)
+
     repo.prepushoutgoinghooks.add("largefiles", prepushoutgoinghook)
 
     def checkrequireslfiles(ui, repo, **kwargs):
         if 'largefiles' not in repo.requirements and any(
-                lfutil.shortname+'/' in f[0] for f in repo.store.datafiles()):
+            lfutil.shortname + '/' in f[0] for f in repo.store.datafiles()
+        ):
             repo.requirements.add('largefiles')
             repo._writerequirements()
 
-    ui.setconfig('hooks', 'changegroup.lfiles', checkrequireslfiles,
-                 'largefiles')
+    ui.setconfig(
+        'hooks', 'changegroup.lfiles', checkrequireslfiles, 'largefiles'
+    )
     ui.setconfig('hooks', 'commit.lfiles', checkrequireslfiles, 'largefiles')
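
The status() hunks above show black's core wrapping move: a bracketed group
that does not fit on one line is exploded to one element per line, with a
trailing comma appended where the grammar allows, so adding an element
later is a one-line diff. A runnable toy of the exploded tuple assignment
(Status here is a stand-in, not the Mercurial class):

    class Status(object):
        def __init__(self):
            self.modified, self.added, self.removed = [], [], []
            self.deleted, self.clean = [], []

    s = Status()
    (modified, added, removed, deleted, clean) = (
        s.modified,
        s.added,
        s.removed,
        s.deleted,
        s.clean,
    )
    assert modified == [] and clean == []
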
--- a/hgext/largefiles/storefactory.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/storefactory.py	Sun Oct 06 09:45:02 2019 -0400
@@ -53,7 +53,7 @@
     path = util.safehasattr(remote, 'url') and remote.url() or remote.path
 
     match = _scheme_re.match(path)
-    if not match:                       # regular filesystem path
+    if not match:  # regular filesystem path
         scheme = 'file'
     else:
         scheme = match.group(1)
@@ -69,17 +69,21 @@
         except lfutil.storeprotonotcapable:
             pass
 
-    raise error.Abort(_('%s does not appear to be a largefile store') %
-                     util.hidepassword(path))
+    raise error.Abort(
+        _('%s does not appear to be a largefile store')
+        % util.hidepassword(path)
+    )
+
 
 _storeprovider = {
-    'file':  [localstore.localstore],
-    'http':  [wirestore.wirestore],
+    'file': [localstore.localstore],
+    'http': [wirestore.wirestore],
     'https': [wirestore.wirestore],
     'ssh': [wirestore.wirestore],
-    }
+}
 
 _scheme_re = re.compile(br'^([a-zA-Z0-9+-.]+)://')
 
+
 def getlfile(ui, hash):
     return util.chunkbuffer(openstore(ui=ui)._get(hash))
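
In _storeprovider above black also drops hand-aligned values: one space
after each colon, entries at a uniform indent, closing brace back at the
statement's indent. The mapping itself is untouched; a toy version:

    _providers = {
        'file': ['localstore'],
        'http': ['wirestore'],
        'https': ['wirestore'],
        'ssh': ['wirestore'],
    }
    assert _providers['ssh'] == ['wirestore']
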
--- a/hgext/largefiles/wirestore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/largefiles/wirestore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,6 +11,7 @@
     remotestore,
 )
 
+
 class wirestore(remotestore.remotestore):
     def __init__(self, ui, repo, remote):
         cap = remote.capable('largefiles')
@@ -36,8 +37,6 @@
         with self.remote.commandexecutor() as e:
             fs = []
             for hash in hashes:
-                fs.append((hash, e.callcommand('statlfile', {
-                    'sha': hash,
-                })))
+                fs.append((hash, e.callcommand('statlfile', {'sha': hash,})))
 
             return {hash: f.result() for hash, f in fs}
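
The collapsed statlfile call above ends up as {'sha': hash,} - the trailing
comma from the old multi-line literal survives the one-lining. This looks
like a rough edge of the black version used here, which did not honor a
trailing comma inside nested brackets; later releases treat such a "magic"
trailing comma consistently and keep the literal exploded. Either way the
runtime value is identical:

    sha = 'deadbeef'
    assert {'sha': sha,} == {'sha': sha}
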
--- a/hgext/lfs/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/lfs/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -147,9 +147,7 @@
     util,
 )
 
-from mercurial.interfaces import (
-    repository,
-)
+from mercurial.interfaces import repository
 
 from . import (
     blobstore,
@@ -175,34 +173,34 @@
 reposetup = eh.finalreposetup
 templatekeyword = eh.templatekeyword
 
-eh.configitem('experimental', 'lfs.serve',
-    default=True,
+eh.configitem(
+    'experimental', 'lfs.serve', default=True,
 )
-eh.configitem('experimental', 'lfs.user-agent',
-    default=None,
+eh.configitem(
+    'experimental', 'lfs.user-agent', default=None,
 )
-eh.configitem('experimental', 'lfs.disableusercache',
-    default=False,
+eh.configitem(
+    'experimental', 'lfs.disableusercache', default=False,
 )
-eh.configitem('experimental', 'lfs.worker-enable',
-    default=False,
+eh.configitem(
+    'experimental', 'lfs.worker-enable', default=False,
 )
 
-eh.configitem('lfs', 'url',
-    default=None,
+eh.configitem(
+    'lfs', 'url', default=None,
 )
-eh.configitem('lfs', 'usercache',
-    default=None,
+eh.configitem(
+    'lfs', 'usercache', default=None,
 )
 # Deprecated
-eh.configitem('lfs', 'threshold',
-    default=None,
+eh.configitem(
+    'lfs', 'threshold', default=None,
 )
-eh.configitem('lfs', 'track',
-    default='none()',
+eh.configitem(
+    'lfs', 'track', default='none()',
 )
-eh.configitem('lfs', 'retry',
-    default=5,
+eh.configitem(
+    'lfs', 'retry', default=5,
 )
 
 lfsprocessor = (
@@ -211,14 +209,17 @@
     wrapper.bypasscheckhash,
 )
 
+
 def featuresetup(ui, supported):
     # don't die on seeing a repo with the lfs requirement
     supported |= {'lfs'}
 
+
 @eh.uisetup
 def _uisetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
 
+
 @eh.reposetup
 def _reposetup(ui, repo):
     # Nothing to do with a remote repo
@@ -237,6 +238,7 @@
     repo.__class__ = lfsrepo
 
     if 'lfs' not in repo.requirements:
+
         def checkrequireslfs(ui, repo, **kwargs):
             if 'lfs' in repo.requirements:
                 return 0
@@ -250,8 +252,9 @@
             match = repo._storenarrowmatch
             for ctx in s:
                 # TODO: is there a way to just walk the files in the commit?
-                if any(ctx[f].islfs() for f in ctx.files()
-                       if f in ctx and match(f)):
+                if any(
+                    ctx[f].islfs() for f in ctx.files() if f in ctx and match(f)
+                ):
                     repo.requirements.add('lfs')
                     repo.features.add(repository.REPO_FEATURE_LFS)
                     repo._writerequirements()
@@ -263,6 +266,7 @@
     else:
         repo.prepushoutgoinghooks.add('lfs', wrapper.prepush)
 
+
 def _trackedmatcher(repo):
     """Return a function (path, size) -> bool indicating whether or not to
     track a given file with lfs."""
@@ -288,8 +292,10 @@
     cfg.parse('.hglfs', data)
 
     try:
-        rules = [(minifileset.compile(pattern), minifileset.compile(rule))
-                 for pattern, rule in cfg.items('track')]
+        rules = [
+            (minifileset.compile(pattern), minifileset.compile(rule))
+            for pattern, rule in cfg.items('track')
+        ]
     except error.ParseError as e:
         # The original exception gives no indicator that the error is in the
         # .hglfs file, so add that.
@@ -306,6 +312,7 @@
 
     return _match
 
+
 # Called by remotefilelog
 def wrapfilelog(filelog):
     wrapfunction = extensions.wrapfunction
@@ -314,14 +321,17 @@
     wrapfunction(filelog, 'renamed', wrapper.filelogrenamed)
     wrapfunction(filelog, 'size', wrapper.filelogsize)
 
+
 @eh.wrapfunction(localrepo, 'resolverevlogstorevfsoptions')
 def _resolverevlogstorevfsoptions(orig, ui, requirements, features):
     opts = orig(ui, requirements, features)
     for name, module in extensions.extensions(ui):
         if module is sys.modules[__name__]:
             if revlog.REVIDX_EXTSTORED in opts[b'flagprocessors']:
-                msg = (_(b"cannot register multiple processors on flag '%#x'.")
-                       % revlog.REVIDX_EXTSTORED)
+                msg = (
+                    _(b"cannot register multiple processors on flag '%#x'.")
+                    % revlog.REVIDX_EXTSTORED
+                )
                 raise error.Abort(msg)
 
             opts[b'flagprocessors'][revlog.REVIDX_EXTSTORED] = lfsprocessor
@@ -329,6 +339,7 @@
 
     return opts
 
+
 @eh.extsetup
 def _extsetup(ui):
     wrapfilelog(filelog.filelog)
@@ -342,23 +353,27 @@
     # "packed1". Using "packed1" with lfs will likely cause trouble.
     exchange._bundlespeccontentopts["v2"]["cg.version"] = "03"
 
+
 @eh.filesetpredicate('lfs()')
 def lfsfileset(mctx, x):
     """File that uses LFS storage."""
     # i18n: "lfs" is a keyword
     filesetlang.getargs(x, 0, 0, _("lfs takes no arguments"))
     ctx = mctx.ctx
+
     def lfsfilep(f):
         return wrapper.pointerfromctx(ctx, f, removed=True) is not None
+
     return mctx.predicate(lfsfilep, predrepr='<lfs>')
 
+
 @eh.templatekeyword('lfs_files', requires={'ctx'})
 def lfsfiles(context, mapping):
     """List of strings. All files modified, added, or removed by this
     changeset."""
     ctx = context.resource(mapping, 'ctx')
 
-    pointers = wrapper.pointersfromctx(ctx, removed=True) # {path: pointer}
+    pointers = wrapper.pointersfromctx(ctx, removed=True)  # {path: pointer}
     files = sorted(pointers.keys())
 
     def pointer(v):
@@ -377,8 +392,11 @@
     f = templateutil._showcompatlist(context, mapping, 'lfs_file', files)
     return templateutil.hybrid(f, files, makemap, pycompat.identity)
 
-@eh.command('debuglfsupload',
-            [('r', 'rev', [], _('upload large files introduced by REV'))])
+
+@eh.command(
+    'debuglfsupload',
+    [('r', 'rev', [], _('upload large files introduced by REV'))],
+)
 def debuglfsupload(ui, repo, **opts):
     """upload lfs blobs added by the working copy parent or given revisions"""
     revs = opts.get(r'rev', [])
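
Every configitem hunk above follows the same shape: a hand-wrapped call is rewritten so the arguments share one indented line, trailing comma intact. For experimenting with the transformation, black exposes format_str(); this is a minimal sketch assuming a black release whose FileMode accepts string_normalization=False (which preserves the single-quoted strings left untouched throughout these hunks), and the exact output can vary by black version:

import black

SRC = """eh.configitem('experimental', 'lfs.serve',
    default=True,
)
"""

# string_normalization=False leaves the existing quote style alone.
mode = black.FileMode(string_normalization=False)
print(black.format_str(SRC, mode=mode))
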
--- a/hgext/lfs/blobstore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/lfs/blobstore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,15 +29,14 @@
     worker,
 )
 
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 from ..largefiles import lfutil
 
 # 64 bytes for SHA256
 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
 
+
 class lfsvfs(vfsmod.vfs):
     def join(self, path):
         """split the path at first two characters, like: XX/XXXXX..."""
@@ -56,18 +55,20 @@
         prefixlen = len(pathutil.normasprefix(root))
         oids = []
 
-        for dirpath, dirs, files in os.walk(self.reljoin(self.base, path
-                                                         or b''),
-                                            onerror=onerror):
+        for dirpath, dirs, files in os.walk(
+            self.reljoin(self.base, path or b''), onerror=onerror
+        ):
             dirpath = dirpath[prefixlen:]
 
             # Silently skip unexpected files and directories
             if len(dirpath) == 2:
-                oids.extend([dirpath + f for f in files
-                             if _lfsre.match(dirpath + f)])
+                oids.extend(
+                    [dirpath + f for f in files if _lfsre.match(dirpath + f)]
+                )
 
         yield ('', [], oids)
 
+
 class nullvfs(lfsvfs):
     def __init__(self):
         pass
@@ -80,8 +81,10 @@
         # self.vfs.  Raise the same error as a normal vfs when asked to read a
         # file that doesn't exist.  The only difference is the full file path
         # isn't available in the error.
-        raise IOError(errno.ENOENT,
-                      pycompat.sysstr(b'%s: No such file or directory' % oid))
+        raise IOError(
+            errno.ENOENT,
+            pycompat.sysstr(b'%s: No such file or directory' % oid),
+        )
 
     def walk(self, path=None, onerror=None):
         return (b'', [], [])
@@ -89,6 +92,7 @@
     def write(self, oid, data):
         pass
 
+
 class filewithprogress(object):
     """a file-like object that supports __len__ and read.
 
@@ -97,7 +101,7 @@
 
     def __init__(self, fp, callback):
         self._fp = fp
-        self._callback = callback # func(readsize)
+        self._callback = callback  # func(readsize)
         fp.seek(0, os.SEEK_END)
         self._len = fp.tell()
         fp.seek(0)
@@ -117,6 +121,7 @@
             self._fp = None
         return data
 
+
 class local(object):
     """Local blobstore for large file contents.
 
@@ -161,8 +166,9 @@
 
             realoid = node.hex(sha256.digest())
             if realoid != oid:
-                raise LfsCorruptionError(_(b'corrupt remote lfs object: %s')
-                                         % oid)
+                raise LfsCorruptionError(
+                    _(b'corrupt remote lfs object: %s') % oid
+                )
 
         self._linktousercache(oid)
 
@@ -186,16 +192,16 @@
         blob, but that doesn't happen when the server tells the client that it
         already has the blob.
         """
-        if (not isinstance(self.cachevfs, nullvfs)
-            and not self.vfs.exists(oid)):
+        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
             self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
             lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
 
     def _linktousercache(self, oid):
         # XXX: should we verify the content of the cache, and hardlink back to
         # the local store on success, but truncate, write and link on failure?
-        if (not self.cachevfs.exists(oid)
-            and not isinstance(self.cachevfs, nullvfs)):
+        if not self.cachevfs.exists(oid) and not isinstance(
+            self.cachevfs, nullvfs
+        ):
             self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
             lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
 
@@ -240,6 +246,7 @@
         False otherwise."""
         return self.cachevfs.exists(oid) or self.vfs.exists(oid)
 
+
 def _urlerrorreason(urlerror):
     '''Create a friendly message for the given URLError to be used in an
     LfsRemoteError message.
@@ -250,7 +257,7 @@
         inst = urlerror.reason
 
     if util.safehasattr(inst, 'reason'):
-        try: # usually it is in the form (errno, strerror)
+        try:  # usually it is in the form (errno, strerror)
             reason = inst.reason.args[1]
         except (AttributeError, IndexError):
             # it might be anything, for example a string
@@ -264,6 +271,7 @@
     else:
         return stringutil.forcebytestr(urlerror)
 
+
 class lfsauthhandler(util.urlreq.basehandler):
     handler_order = 480  # Before HTTPDigestAuthHandler (== 490)
 
@@ -277,13 +285,17 @@
 
             if scheme.lower() != r'basic':
                 msg = _(b'the server must support Basic Authentication')
-                raise util.urlerr.httperror(req.get_full_url(), code,
-                                            encoding.strfromlocal(msg), headers,
-                                            fp)
+                raise util.urlerr.httperror(
+                    req.get_full_url(),
+                    code,
+                    encoding.strfromlocal(msg),
+                    headers,
+                    fp,
+                )
         return None
 
+
 class _gitlfsremote(object):
-
     def __init__(self, repo, url):
         ui = repo.ui
         self.ui = ui
@@ -310,12 +322,15 @@
         Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
         See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
         """
-        objects = [{r'oid': pycompat.strurl(p.oid()),
-                    r'size': p.size()} for p in pointers]
-        requestdata = pycompat.bytesurl(json.dumps({
-            r'objects': objects,
-            r'operation': pycompat.strurl(action),
-        }))
+        objects = [
+            {r'oid': pycompat.strurl(p.oid()), r'size': p.size()}
+            for p in pointers
+        ]
+        requestdata = pycompat.bytesurl(
+            json.dumps(
+                {r'objects': objects, r'operation': pycompat.strurl(action),}
+            )
+        )
         url = b'%s/objects/batch' % self.baseurl
         batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
         batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json')
@@ -325,41 +340,56 @@
                 rawjson = rsp.read()
         except util.urlerr.httperror as ex:
             hints = {
-                400: _(b'check that lfs serving is enabled on %s and "%s" is '
-                       b'supported') % (self.baseurl, action),
+                400: _(
+                    b'check that lfs serving is enabled on %s and "%s" is '
+                    b'supported'
+                )
+                % (self.baseurl, action),
                 404: _(b'the "lfs.url" config may be used to override %s')
-                       % self.baseurl,
+                % self.baseurl,
             }
             hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
             raise LfsRemoteError(
                 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
-                hint=hint)
+                hint=hint,
+            )
         except util.urlerr.urlerror as ex:
-            hint = (_(b'the "lfs.url" config may be used to override %s')
-                    % self.baseurl)
-            raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
-                                 hint=hint)
+            hint = (
+                _(b'the "lfs.url" config may be used to override %s')
+                % self.baseurl
+            )
+            raise LfsRemoteError(
+                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
+            )
         try:
             response = json.loads(rawjson)
         except ValueError:
-            raise LfsRemoteError(_(b'LFS server returns invalid JSON: %s')
-                                 % rawjson.encode("utf-8"))
+            raise LfsRemoteError(
+                _(b'LFS server returns invalid JSON: %s')
+                % rawjson.encode("utf-8")
+            )
 
         if self.ui.debugflag:
             self.ui.debug(b'Status: %d\n' % rsp.status)
             # lfs-test-server and hg serve return headers in different order
             headers = pycompat.bytestr(rsp.info()).strip()
-            self.ui.debug(b'%s\n'
-                          % b'\n'.join(sorted(headers.splitlines())))
+            self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
 
             if r'objects' in response:
-                response[r'objects'] = sorted(response[r'objects'],
-                                              key=lambda p: p[r'oid'])
-            self.ui.debug(b'%s\n'
-                          % pycompat.bytesurl(
-                              json.dumps(response, indent=2,
-                                         separators=(r'', r': '),
-                                         sort_keys=True)))
+                response[r'objects'] = sorted(
+                    response[r'objects'], key=lambda p: p[r'oid']
+                )
+            self.ui.debug(
+                b'%s\n'
+                % pycompat.bytesurl(
+                    json.dumps(
+                        response,
+                        indent=2,
+                        separators=(r'', r': '),
+                        sort_keys=True,
+                    )
+                )
+            )
 
         def encodestr(x):
             if isinstance(x, pycompat.unicode):
@@ -378,8 +408,9 @@
             # but just removes "download" from "actions". Treat that case
             # as the same as 404 error.
             if b'error' not in response:
-                if (action == b'download'
-                    and action not in response.get(b'actions', [])):
+                if action == b'download' and action not in response.get(
+                    b'actions', []
+                ):
                     code = 404
                 else:
                     continue
@@ -399,12 +430,14 @@
                     500: b'Internal server error',
                 }
                 msg = errors.get(code, b'status code %d' % code)
-                raise LfsRemoteError(_(b'LFS server error for "%s": %s')
-                                     % (filename, msg))
+                raise LfsRemoteError(
+                    _(b'LFS server error for "%s": %s') % (filename, msg)
+                )
             else:
                 raise LfsRemoteError(
                     _(b'LFS server error. Unsolicited response for oid %s')
-                    % response[b'oid'])
+                    % response[b'oid']
+                )
 
     def _extractobjects(self, response, pointers, action):
         """extract objects from response of the batch API
@@ -419,8 +452,9 @@
 
         # Filter objects with given action. Practically, this skips uploading
         # objects which exist in the server.
-        filteredobjects = [o for o in objects
-                           if action in o.get(b'actions', [])]
+        filteredobjects = [
+            o for o in objects if action in o.get(b'actions', [])
+        ]
 
         return filteredobjects
 
@@ -442,8 +476,10 @@
         if action == b'upload':
             # If uploading blobs, read data from local blobstore.
             if not localstore.verify(oid):
-                raise error.Abort(_(b'detected corrupt lfs object: %s') % oid,
-                                  hint=_(b'run hg verify'))
+                raise error.Abort(
+                    _(b'detected corrupt lfs object: %s') % oid,
+                    hint=_(b'run hg verify'),
+                )
             request.data = filewithprogress(localstore.open(oid), None)
             request.get_method = lambda: r'PUT'
             request.add_header(r'Content-Type', r'application/octet-stream')
@@ -461,8 +497,7 @@
                     # lfs-test-server and hg serve return headers in different
                     # order
                     headers = pycompat.bytestr(req.info()).strip()
-                    ui.debug(b'%s\n'
-                             % b'\n'.join(sorted(headers.splitlines())))
+                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
 
                 if action == b'download':
                     # If downloading blobs, store downloaded data to local
@@ -478,14 +513,20 @@
                         ui.debug(b'lfs %s response: %s' % (action, response))
         except util.urlerr.httperror as ex:
             if self.ui.debugflag:
-                self.ui.debug(b'%s: %s\n' % (oid, ex.read())) # XXX: also bytes?
-            raise LfsRemoteError(_(b'LFS HTTP error: %s (oid=%s, action=%s)')
-                                 % (stringutil.forcebytestr(ex), oid, action))
+                self.ui.debug(
+                    b'%s: %s\n' % (oid, ex.read())
+                )  # XXX: also bytes?
+            raise LfsRemoteError(
+                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
+                % (stringutil.forcebytestr(ex), oid, action)
+            )
         except util.urlerr.urlerror as ex:
-            hint = (_(b'attempted connection to %s')
-                    % pycompat.bytesurl(util.urllibcompat.getfullurl(request)))
-            raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
-                                 hint=hint)
+            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
+                util.urllibcompat.getfullurl(request)
+            )
+            raise LfsRemoteError(
+                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
+            )
 
     def _batch(self, pointers, localstore, action):
         if action not in [b'upload', b'download']:
@@ -497,11 +538,15 @@
         sizes = {}
         for obj in objects:
             sizes[obj.get(b'oid')] = obj.get(b'size', 0)
-        topic = {b'upload': _(b'lfs uploading'),
-                 b'download': _(b'lfs downloading')}[action]
+        topic = {
+            b'upload': _(b'lfs uploading'),
+            b'download': _(b'lfs downloading'),
+        }[action]
         if len(objects) > 1:
-            self.ui.note(_(b'lfs: need to transfer %d objects (%s)\n')
-                         % (len(objects), util.bytecount(total)))
+            self.ui.note(
+                _(b'lfs: need to transfer %d objects (%s)\n')
+                % (len(objects), util.bytecount(total))
+            )
 
         def transfer(chunk):
             for obj in chunk:
@@ -511,8 +556,9 @@
                         msg = _(b'lfs: downloading %s (%s)\n')
                     elif action == b'upload':
                         msg = _(b'lfs: uploading %s (%s)\n')
-                    self.ui.note(msg % (obj.get(b'oid'),
-                                 util.bytecount(objsize)))
+                    self.ui.note(
+                        msg % (obj.get(b'oid'), util.bytecount(objsize))
+                    )
                 retry = self.retry
                 while True:
                     try:
@@ -523,15 +569,21 @@
                         if retry > 0:
                             self.ui.note(
                                 _(b'lfs: failed: %r (remaining retry %d)\n')
-                                % (stringutil.forcebytestr(ex), retry))
+                                % (stringutil.forcebytestr(ex), retry)
+                            )
                             retry -= 1
                             continue
                         raise
 
         # Until https multiplexing gets sorted out
         if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
-            oids = worker.worker(self.ui, 0.1, transfer, (),
-                                 sorted(objects, key=lambda o: o.get(b'oid')))
+            oids = worker.worker(
+                self.ui,
+                0.1,
+                transfer,
+                (),
+                sorted(objects, key=lambda o: o.get(b'oid')),
+            )
         else:
             oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
 
@@ -547,11 +599,15 @@
 
         if blobs > 0:
             if action == b'upload':
-                self.ui.status(_(b'lfs: uploaded %d files (%s)\n')
-                               % (blobs, util.bytecount(processed)))
+                self.ui.status(
+                    _(b'lfs: uploaded %d files (%s)\n')
+                    % (blobs, util.bytecount(processed))
+                )
             elif action == b'download':
-                self.ui.status(_(b'lfs: downloaded %d files (%s)\n')
-                               % (blobs, util.bytecount(processed)))
+                self.ui.status(
+                    _(b'lfs: downloaded %d files (%s)\n')
+                    % (blobs, util.bytecount(processed))
+                )
 
     def __del__(self):
         # copied from mercurial/httppeer.py
@@ -559,7 +615,8 @@
         if urlopener:
             for h in urlopener.handlers:
                 h.close()
-                getattr(h, "close_all", lambda : None)()
+                getattr(h, "close_all", lambda: None)()
+
 
 class _dummyremote(object):
     """Dummy store storing blobs to temp directory."""
@@ -579,6 +636,7 @@
             with self.vfs(p.oid(), b'rb') as fp:
                 tostore.download(p.oid(), fp)
 
+
 class _nullremote(object):
     """Null store storing blobs to /dev/null."""
 
@@ -591,6 +649,7 @@
     def readbatch(self, pointers, tostore):
         pass
 
+
 class _promptremote(object):
     """Prompt user to set lfs.url when accessed."""
 
@@ -606,6 +665,7 @@
     def _prompt(self):
         raise error.Abort(_(b'lfs.url needs to be configured'))
 
+
 _storemap = {
     b'https': _gitlfsremote,
     b'http': _gitlfsremote,
@@ -614,6 +674,7 @@
     None: _promptremote,
 }
 
+
 def _deduplicate(pointers):
     """Remove any duplicate oids that exist in the list"""
     reduced = util.sortdict()
@@ -621,11 +682,15 @@
         reduced[p.oid()] = p
     return reduced.values()
 
+
 def _verify(oid, content):
     realoid = node.hex(hashlib.sha256(content).digest())
     if realoid != oid:
-        raise LfsCorruptionError(_(b'detected corrupt lfs object: %s') % oid,
-                                 hint=_(b'run hg verify'))
+        raise LfsCorruptionError(
+            _(b'detected corrupt lfs object: %s') % oid,
+            hint=_(b'run hg verify'),
+        )
+
 
 def remote(repo, remote=None):
     """remotestore factory. return a store in _storemap depending on config
@@ -669,9 +734,11 @@
         raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
     return _storemap[scheme](repo, url)
 
+
 class LfsRemoteError(error.StorageError):
     pass
 
+
 class LfsCorruptionError(error.Abort):
     """Raised when a corrupt blob is detected, aborting an operation
 
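
The LfsRemoteError and hint rewrites above all move long % expressions to operator-first continuation lines held together by parentheses. A self-contained miniature of that style, with invented values:

filename = b'a.bin'
msg = b'Not Found'

# The binary operator leads the continuation line; the surrounding
# parentheses keep the expression together without backslashes.
error = (
    b'LFS server error for "%s": %s'
    % (filename, msg)
)
print(error)
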
--- a/hgext/lfs/pointer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/lfs/pointer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,13 +15,13 @@
     error,
     pycompat,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
+
 
 class InvalidPointer(error.StorageError):
     pass
 
+
 class gitlfspointer(dict):
     VERSION = 'https://git-lfs.github.com/spec/v1'
 
@@ -34,9 +34,10 @@
     def deserialize(cls, text):
         try:
             return cls(l.split(' ', 1) for l in text.splitlines()).validate()
-        except ValueError: # l.split returns 1 item instead of 2
-            raise InvalidPointer(_('cannot parse git-lfs text: %s')
-                                 % stringutil.pprint(text))
+        except ValueError:  # l.split returns 1 item instead of 2
+            raise InvalidPointer(
+                _('cannot parse git-lfs text: %s') % stringutil.pprint(text)
+            )
 
     def serialize(self):
         sortkeyfunc = lambda x: (x[0] != 'version', x)
@@ -67,17 +68,22 @@
                 if not self._requiredre[k].match(v):
                     raise InvalidPointer(
                         _('unexpected lfs pointer value: %s=%s')
-                        % (k, stringutil.pprint(v)))
+                        % (k, stringutil.pprint(v))
+                    )
                 requiredcount += 1
             elif not self._keyre.match(k):
                 raise InvalidPointer(_('unexpected lfs pointer key: %s') % k)
             if not self._valuere.match(v):
-                raise InvalidPointer(_('unexpected lfs pointer value: %s=%s')
-                                     % (k, stringutil.pprint(v)))
+                raise InvalidPointer(
+                    _('unexpected lfs pointer value: %s=%s')
+                    % (k, stringutil.pprint(v))
+                )
         if len(self._requiredre) != requiredcount:
             miss = sorted(set(self._requiredre.keys()).difference(self.keys()))
-            raise InvalidPointer(_('missing lfs pointer keys: %s')
-                                 % ', '.join(miss))
+            raise InvalidPointer(
+                _('missing lfs pointer keys: %s') % ', '.join(miss)
+            )
         return self
 
+
 deserialize = gitlfspointer.deserialize
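
Most of the bare '+' lines in this file restore PEP 8's two blank lines between top-level definitions. In miniature, with invented names:

SPEC_URL = 'https://git-lfs.github.com/spec/v1'


class InvalidPointerExample(Exception):
    pass


def parse_example(text):
    # top-level definitions are separated by exactly two blank lines
    return text.splitlines()
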
--- a/hgext/lfs/wireprotolfsserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/lfs/wireprotolfsserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,9 +12,7 @@
 import json
 import traceback
 
-from mercurial.hgweb import (
-    common as hgwebcommon,
-)
+from mercurial.hgweb import common as hgwebcommon
 
 from mercurial import (
     exthelper,
@@ -35,6 +33,7 @@
 
 eh = exthelper.exthelper()
 
+
 @eh.wrapfunction(wireprotoserver, 'handlewsgirequest')
 def handlewsgirequest(orig, rctx, req, res, checkperm):
     """Wrap wireprotoserver.handlewsgirequest() to possibly process an LFS
@@ -59,9 +58,9 @@
         # TODO: reserve and use a path in the proposed http wireprotocol /api/
         #       namespace?
         elif req.dispatchpath.startswith(b'.hg/lfs/objects'):
-            return _processbasictransfer(rctx.repo, req, res,
-                                         lambda perm:
-                                                checkperm(rctx, req, perm))
+            return _processbasictransfer(
+                rctx.repo, req, res, lambda perm: checkperm(rctx, req, perm)
+            )
         return False
     except hgwebcommon.ErrorResponse as e:
         # XXX: copied from the handler surrounding wireprotoserver._callhttp()
@@ -73,11 +72,13 @@
         res.setbodybytes(b'0\n%s\n' % pycompat.bytestr(e))
         return True
 
+
 def _sethttperror(res, code, message=None):
     res.status = hgwebcommon.statusmessage(code, message=message)
     res.headers[b'Content-Type'] = b'text/plain; charset=utf-8'
     res.setbodybytes(b'')
 
+
 def _logexception(req):
     """Write information about the current exception to wsgi.errors."""
     tb = pycompat.sysbytes(traceback.format_exc())
@@ -88,8 +89,10 @@
         uri += req.apppath
     uri += b'/' + req.dispatchpath
 
-    errorlog.write(b"Exception happened while processing request '%s':\n%s" %
-                   (uri, tb))
+    errorlog.write(
+        b"Exception happened while processing request '%s':\n%s" % (uri, tb)
+    )
+
 
 def _processbatchrequest(repo, req, res):
     """Handle a request for the Batch API, which is the gateway to granting file
@@ -134,22 +137,32 @@
 
     # If no transfer handlers are explicitly requested, 'basic' is assumed.
     if r'basic' not in lfsreq.get(r'transfers', [r'basic']):
-        _sethttperror(res, HTTP_BAD_REQUEST,
-                      b'Only the basic LFS transfer handler is supported')
+        _sethttperror(
+            res,
+            HTTP_BAD_REQUEST,
+            b'Only the basic LFS transfer handler is supported',
+        )
         return True
 
     operation = lfsreq.get(r'operation')
     operation = pycompat.bytestr(operation)
 
     if operation not in (b'upload', b'download'):
-        _sethttperror(res, HTTP_BAD_REQUEST,
-                      b'Unsupported LFS transfer operation: %s' % operation)
+        _sethttperror(
+            res,
+            HTTP_BAD_REQUEST,
+            b'Unsupported LFS transfer operation: %s' % operation,
+        )
         return True
 
     localstore = repo.svfs.lfslocalblobstore
 
-    objects = [p for p in _batchresponseobjects(req, lfsreq.get(r'objects', []),
-                                                operation, localstore)]
+    objects = [
+        p
+        for p in _batchresponseobjects(
+            req, lfsreq.get(r'objects', []), operation, localstore
+        )
+    ]
 
     rsp = {
         r'transfer': r'basic',
@@ -162,6 +175,7 @@
 
     return True
 
+
 def _batchresponseobjects(req, objects, action, store):
     """Yield one dictionary of attributes for the Batch API response for each
     object in the list.
@@ -197,7 +211,7 @@
         rsp = {
             r'oid': soid,
             r'size': obj.get(r'size'),  # XXX: should this check the local size?
-            #r'authenticated': True,
+            # r'authenticated': True,
         }
 
         exists = True
@@ -222,7 +236,7 @@
 
                 rsp[r'error'] = {
                     r'code': 500,
-                    r'message': inst.strerror or r'Internal Server Error'
+                    r'message': inst.strerror or r'Internal Server Error',
                 }
                 yield rsp
                 continue
@@ -235,15 +249,15 @@
             if not exists:
                 rsp[r'error'] = {
                     r'code': 404,
-                    r'message': r"The object does not exist"
+                    r'message': r"The object does not exist",
                 }
                 yield rsp
                 continue
 
             elif not verifies:
                 rsp[r'error'] = {
-                    r'code': 422,   # XXX: is this the right code?
-                    r'message': r"The object is corrupt"
+                    r'code': 422,  # XXX: is this the right code?
+                    r'message': r"The object is corrupt",
                 }
                 yield rsp
                 continue
@@ -258,9 +272,7 @@
             # The spec doesn't mention the Accept header here, but avoid
             # a gratuitous deviation from lfs-test-server in the test
             # output.
-            hdr = {
-                r'Accept': r'application/vnd.git-lfs'
-            }
+            hdr = {r'Accept': r'application/vnd.git-lfs'}
 
             auth = req.headers.get(b'Authorization', b'')
             if auth.startswith(b'Basic '):
@@ -269,9 +281,11 @@
             return hdr
 
         rsp[r'actions'] = {
-            r'%s' % pycompat.strurl(action): {
-                r'href': pycompat.strurl(b'%s%s/.hg/lfs/objects/%s'
-                    % (req.baseurl, req.apppath, oid)),
+            r'%s'
+            % pycompat.strurl(action): {
+                r'href': pycompat.strurl(
+                    b'%s%s/.hg/lfs/objects/%s' % (req.baseurl, req.apppath, oid)
+                ),
                 # datetime.isoformat() doesn't include the 'Z' suffix
                 r"expires_at": expiresat.strftime(r'%Y-%m-%dT%H:%M:%SZ'),
                 r'header': _buildheader(),
@@ -280,6 +294,7 @@
 
         yield rsp
 
+
 def _processbasictransfer(repo, req, res, checkperm):
     """Handle a single file upload (PUT) or download (GET) action for the Basic
     Transfer Adapter.
@@ -347,6 +362,9 @@
 
         return True
     else:
-        _sethttperror(res, HTTP_METHOD_NOT_ALLOWED,
-                      message=b'Unsupported LFS transfer method: %s' % method)
+        _sethttperror(
+            res,
+            HTTP_METHOD_NOT_ALLOWED,
+            message=b'Unsupported LFS transfer method: %s' % method,
+        )
         return True
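
The rsp[r'error'] hunks show the two dict layouts black settles on: a mapping that fits within the line length is collapsed, and an exploded mapping gains a trailing comma after its last entry. A runnable miniature with invented values:

# Fits on one line: collapsed, no trailing comma.
hdr = {'Accept': 'application/vnd.git-lfs'}

# Exploded: one entry per line, trailing comma after the last.
err = {
    'code': 404,
    'message': "The object does not exist",
}
print(hdr, err)
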
--- a/hgext/lfs/wrapper.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/lfs/wrapper.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,9 +29,7 @@
     wireprotov1server,
 )
 
-from mercurial.interfaces import (
-    repository,
-)
+from mercurial.interfaces import repository
 
 from mercurial.utils import (
     storageutil,
@@ -47,6 +45,7 @@
 
 eh = exthelper.exthelper()
 
+
 @eh.wrapfunction(localrepo, 'makefilestorage')
 def localrepomakefilestorage(orig, requirements, features, **kwargs):
     if b'lfs' in requirements:
@@ -54,12 +53,14 @@
 
     return orig(requirements=requirements, features=features, **kwargs)
 
+
 @eh.wrapfunction(changegroup, 'allsupportedversions')
 def allsupportedversions(orig, ui):
     versions = orig(ui)
     versions.add('03')
     return versions
 
+
 @eh.wrapfunction(wireprotov1server, '_capabilities')
 def _capabilities(orig, repo, proto):
     '''Wrap server command to announce lfs server capability'''
@@ -75,9 +76,11 @@
         caps.append('lfs')
     return caps
 
+
 def bypasscheckhash(self, text):
     return False
 
+
 def readfromstore(self, text):
     """Read filelog content from local blobstore transform for flagprocessor.
 
@@ -99,13 +102,14 @@
     hgmeta = {}
     for k in p.keys():
         if k.startswith('x-hg-'):
-            name = k[len('x-hg-'):]
+            name = k[len('x-hg-') :]
             hgmeta[name] = p[k]
     if hgmeta or text.startswith('\1\n'):
         text = storageutil.packmeta(hgmeta, text)
 
     return (text, True, {})
 
+
 def writetostore(self, text, sidedata):
     # hg filelog metadata (includes rename, etc)
     hgmeta, offset = storageutil.parsemeta(text)
@@ -136,6 +140,7 @@
     rawtext = metadata.serialize()
     return (rawtext, False)
 
+
 def _islfs(rlog, node=None, rev=None):
     if rev is None:
         if node is None:
@@ -149,10 +154,21 @@
     flags = rlog._revlog.flags(rev)
     return bool(flags & revlog.REVIDX_EXTSTORED)
 
+
 # Wrapping may also be applied by remotefilelog
-def filelogaddrevision(orig, self, text, transaction, link, p1, p2,
-                       cachedelta=None, node=None,
-                       flags=revlog.REVIDX_DEFAULT_FLAGS, **kwds):
+def filelogaddrevision(
+    orig,
+    self,
+    text,
+    transaction,
+    link,
+    p1,
+    p2,
+    cachedelta=None,
+    node=None,
+    flags=revlog.REVIDX_DEFAULT_FLAGS,
+    **kwds
+):
     # The matcher isn't available if reposetup() wasn't called.
     lfstrack = self._revlog.opener.options.get('lfstrack')
 
@@ -166,8 +182,19 @@
         if lfstrack(self._revlog.filename, textlen):
             flags |= revlog.REVIDX_EXTSTORED
 
-    return orig(self, text, transaction, link, p1, p2, cachedelta=cachedelta,
-                node=node, flags=flags, **kwds)
+    return orig(
+        self,
+        text,
+        transaction,
+        link,
+        p1,
+        p2,
+        cachedelta=cachedelta,
+        node=node,
+        flags=flags,
+        **kwds
+    )
+
 
 # Wrapping may also be applied by remotefilelog
 def filelogrenamed(orig, self, node):
@@ -182,6 +209,7 @@
             return False
     return orig(self, node)
 
+
 # Wrapping may also be applied by remotefilelog
 def filelogsize(orig, self, rev):
     if _islfs(self, rev=rev):
@@ -191,6 +219,7 @@
         return int(metadata['size'])
     return orig(self, rev)
 
+
 @eh.wrapfunction(context.basefilectx, 'cmp')
 def filectxcmp(orig, self, fctx):
     """returns True if text is different than fctx"""
@@ -202,6 +231,7 @@
         return p1.oid() != p2.oid()
     return orig(self, fctx)
 
+
 @eh.wrapfunction(context.basefilectx, 'isbinary')
 def filectxisbinary(orig, self):
     if self.islfs():
@@ -211,24 +241,45 @@
         return bool(int(metadata.get('x-is-binary', 1)))
     return orig(self)
 
+
 def filectxislfs(self):
     return _islfs(self.filelog(), self.filenode())
 
+
 @eh.wrapfunction(cmdutil, '_updatecatformatter')
 def _updatecatformatter(orig, fm, ctx, matcher, path, decode):
     orig(fm, ctx, matcher, path, decode)
     fm.data(rawdata=ctx[path].rawdata())
 
+
 @eh.wrapfunction(scmutil, 'wrapconvertsink')
 def convertsink(orig, sink):
     sink = orig(sink)
     if sink.repotype == 'hg':
+
         class lfssink(sink.__class__):
-            def putcommit(self, files, copies, parents, commit, source, revmap,
-                          full, cleanp2):
+            def putcommit(
+                self,
+                files,
+                copies,
+                parents,
+                commit,
+                source,
+                revmap,
+                full,
+                cleanp2,
+            ):
                 pc = super(lfssink, self).putcommit
-                node = pc(files, copies, parents, commit, source, revmap, full,
-                          cleanp2)
+                node = pc(
+                    files,
+                    copies,
+                    parents,
+                    commit,
+                    source,
+                    revmap,
+                    full,
+                    cleanp2,
+                )
 
                 if 'lfs' not in self.repo.requirements:
                     ctx = self.repo[node]
@@ -245,6 +296,7 @@
 
     return sink
 
+
 # bundlerepo uses "vfsmod.readonlyvfs(othervfs)", we need to make sure lfs
 # options and blob stores are passed from othervfs to the new readonlyvfs.
 @eh.wrapfunction(vfsmod.readonlyvfs, '__init__')
@@ -260,6 +312,7 @@
         if util.safehasattr(othervfs, name):
             setattr(self, name, getattr(othervfs, name))
 
+
 def _prefetchfiles(repo, revs, match):
     """Ensure that required LFS blobs are present, fetching them as a group if
     needed."""
@@ -284,6 +337,7 @@
         # on the repo by a clone command to be used for the update.
         blobstore.remote(repo).readbatch(pointers, localstore)
 
+
 def _canskipupload(repo):
     # Skip if this hasn't been passed to reposetup()
     if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
@@ -292,6 +346,7 @@
     # if remotestore is a null store, upload is a no-op and can be skipped
     return isinstance(repo.svfs.lfsremoteblobstore, blobstore._nullremote)
 
+
 def candownload(repo):
     # Skip if this hasn't been passed to reposetup()
     if not util.safehasattr(repo.svfs, 'lfsremoteblobstore'):
@@ -300,6 +355,7 @@
     # if remotestore is a null store, downloads will lead to nothing
     return not isinstance(repo.svfs.lfsremoteblobstore, blobstore._nullremote)
 
+
 def uploadblobsfromrevs(repo, revs):
     '''upload lfs blobs introduced by revs
 
@@ -310,6 +366,7 @@
     pointers = extractpointers(repo, revs)
     uploadblobs(repo, pointers)
 
+
 def prepush(pushop):
     """Prepush hook.
 
@@ -319,6 +376,7 @@
     """
     return uploadblobsfromrevs(pushop.repo, pushop.outgoing.missing)
 
+
 @eh.wrapfunction(exchange, 'push')
 def push(orig, repo, remote, *args, **kwargs):
     """bail on push if the extension isn't enabled on remote when needed, and
@@ -331,8 +389,9 @@
             # This is a copy of the message in exchange.push() when requirements
             # are missing between local repos.
             m = _("required features are not supported in the destination: %s")
-            raise error.Abort(m % 'lfs',
-                              hint=_('enable the lfs extension on the server'))
+            raise error.Abort(
+                m % 'lfs', hint=_('enable the lfs extension on the server')
+            )
 
         # Repositories where this extension is disabled won't have the field.
         # But if there's a requirement, then the extension must be loaded AND
@@ -346,14 +405,18 @@
     else:
         return orig(repo, remote, *args, **kwargs)
 
+
 # when writing a bundle via "hg bundle" command, upload related LFS blobs
 @eh.wrapfunction(bundle2, 'writenewbundle')
-def writenewbundle(orig, ui, repo, source, filename, bundletype, outgoing,
-                   *args, **kwargs):
+def writenewbundle(
+    orig, ui, repo, source, filename, bundletype, outgoing, *args, **kwargs
+):
     """upload LFS blobs added by outgoing revisions on 'hg bundle'"""
     uploadblobsfromrevs(repo, outgoing.missing)
-    return orig(ui, repo, source, filename, bundletype, outgoing, *args,
-                **kwargs)
+    return orig(
+        ui, repo, source, filename, bundletype, outgoing, *args, **kwargs
+    )
+
 
 def extractpointers(repo, revs):
     """return a list of lfs pointers added by given revs"""
@@ -369,6 +432,7 @@
             progress.increment()
         return sorted(pointers.values(), key=lambda p: p.oid())
 
+
 def pointerfromctx(ctx, f, removed=False):
     """return a pointer for the named file from the given changectx, or None if
     the file isn't LFS.
@@ -396,8 +460,11 @@
             return p
         return {}
     except pointer.InvalidPointer as ex:
-        raise error.Abort(_('lfs: corrupted pointer (%s@%s): %s\n')
-                          % (f, short(_ctx.node()), ex))
+        raise error.Abort(
+            _('lfs: corrupted pointer (%s@%s): %s\n')
+            % (f, short(_ctx.node()), ex)
+        )
+
 
 def pointersfromctx(ctx, removed=False):
     """return a dict {path: pointer} for given single changectx.
@@ -417,6 +484,7 @@
             result[f] = p
     return result
 
+
 def uploadblobs(repo, pointers):
     """upload given pointers from local blobstore"""
     if not pointers:
@@ -425,13 +493,15 @@
     remoteblob = repo.svfs.lfsremoteblobstore
     remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore)
 
+
 @eh.wrapfunction(upgrade, '_finishdatamigration')
 def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements):
     orig(ui, srcrepo, dstrepo, requirements)
 
     # Skip if this hasn't been passed to reposetup()
-    if (util.safehasattr(srcrepo.svfs, 'lfslocalblobstore') and
-        util.safehasattr(dstrepo.svfs, 'lfslocalblobstore')):
+    if util.safehasattr(srcrepo.svfs, 'lfslocalblobstore') and util.safehasattr(
+        dstrepo.svfs, 'lfslocalblobstore'
+    ):
         srclfsvfs = srcrepo.svfs.lfslocalblobstore.vfs
         dstlfsvfs = dstrepo.svfs.lfslocalblobstore.vfs
 
@@ -440,6 +510,7 @@
                 ui.write(_('copying lfs blob %s\n') % oid)
                 lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid))
 
+
 @eh.wrapfunction(upgrade, 'preservedrequirements')
 @eh.wrapfunction(upgrade, 'supporteddestrequirements')
 def upgraderequirements(orig, repo):
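
Two patterns from this file deserve a note. First, PEP 8 slice spacing: in k[len('x-hg-') :] the lower bound is an expression rather than a simple name, so the colon gets a space on each side. Second, long def signatures are exploded one parameter per line, with no trailing comma after **kwds, since Python 2 (still supported by this code at the time) rejects one there. A runnable miniature with invented names:

prefix = 'x-hg-'
key = 'x-hg-filename'
name = key[len(prefix) :]  # complex bound => spaces around the colon
print(name)


def add_revision_example(
    text,
    link,
    p1,
    p2,
    cachedelta=None,
    node=None,
    **kwds
):
    # no trailing comma after **kwds: "def f(**kw,):" is a syntax
    # error on Python 2
    return (text, link, p1, p2, cachedelta, node, kwds)
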
--- a/hgext/logtoprocess.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/logtoprocess.py	Sun Oct 06 09:45:02 2019 -0400
@@ -36,9 +36,7 @@
 
 import os
 
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -46,6 +44,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 class processlogger(object):
     """Map log events to external commands
 
@@ -66,10 +65,12 @@
             b'MSG1': msg,
         }
         # keyword arguments get prefixed with OPT_ and uppercased
-        env.update((b'OPT_%s' % key.upper(), value)
-                   for key, value in opts.items())
+        env.update(
+            (b'OPT_%s' % key.upper(), value) for key, value in opts.items()
+        )
         fullenv = procutil.shellenviron(env)
         procutil.runbgcommand(script, fullenv, shell=True)
 
+
 def uipopulate(ui):
     ui.setlogger(b'logtoprocess', processlogger(ui))
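
The env.update() hunk shows how black wraps a call whose sole argument is a generator expression: the call's parentheses move to their own lines while the genexp itself gains no extra parentheses. A runnable miniature:

opts = {'foo': 'bar', 'baz': 'quux'}

env = {}
env.update(
    ('OPT_%s' % key.upper(), value) for key, value in opts.items()
)
print(env)  # {'OPT_FOO': 'bar', 'OPT_BAZ': 'quux'}
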
--- a/hgext/mq.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/mq.py	Sun Oct 06 09:45:02 2019 -0400
@@ -117,17 +117,17 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('mq', 'git',
-    default='auto',
+configitem(
+    'mq', 'git', default='auto',
 )
-configitem('mq', 'keepchanges',
-    default=False,
+configitem(
+    'mq', 'keepchanges', default=False,
 )
-configitem('mq', 'plain',
-    default=False,
+configitem(
+    'mq', 'plain', default=False,
 )
-configitem('mq', 'secret',
-    default=False,
+configitem(
+    'mq', 'secret', default=False,
 )
 
 # force load strip extension formerly included in mq and import some utility
@@ -139,12 +139,15 @@
     class dummyui(object):
         def debug(self, msg):
             pass
+
         def log(self, event, msgfmt, *msgargs, **opts):
             pass
+
     stripext = extensions.load(dummyui(), 'strip', '')
 
 strip = stripext.strip
 
+
 def checksubstate(repo, baserev=None):
     '''return list of subrepos at a different revision than substate.
     Abort if any subrepos have uncommitted changes.'''
@@ -160,10 +163,12 @@
             inclsubs.append(s)
     return inclsubs
 
+
 # Patch names looks like unix-file names.
 # They must be joinable with queue directory and result in the patch path.
 normname = util.normpath
 
+
 class statusentry(object):
     def __init__(self, node, name):
         self.node, self.name = node, name
@@ -174,22 +179,24 @@
     __str__ = encoding.strmethod(__bytes__)
     __repr__ = encoding.strmethod(__bytes__)
 
+
 # The order of the headers in 'hg export' HG patches:
 HGHEADERS = [
-#   '# HG changeset patch',
+    #   '# HG changeset patch',
     '# User ',
     '# Date ',
     '#      ',
     '# Branch ',
     '# Node ID ',
-    '# Parent  ', # can occur twice for merges - but that is not relevant for mq
-    ]
+    '# Parent  ',  # can occur twice for merges - but that is not relevant for mq
+]
 # The order of headers in plain 'mail style' patches:
 PLAINHEADERS = {
     'from': 0,
     'date': 1,
     'subject': 2,
-    }
+}
+
 
 def inserthgheader(lines, header, value):
     """Assuming lines contains a HG patch header, add a header line with value.
@@ -229,10 +236,11 @@
                     return lines
                 if lineindex > newindex:
                     bestpos = min(bestpos, i)
-                break # next line
+                break  # next line
     lines.insert(bestpos, header + value)
     return lines
 
+
 def insertplainheader(lines, header, value):
     """For lines containing a plain patch header, add a header line with value.
     >>> insertplainheader([], b'Date', b'z')
@@ -270,17 +278,21 @@
     lines.insert(bestpos, '%s: %s' % (header, value))
     return lines
 
+
 class patchheader(object):
     def __init__(self, pf, plainmode=False):
         def eatdiff(lines):
             while lines:
                 l = lines[-1]
-                if (l.startswith("diff -") or
-                    l.startswith("Index:") or
-                    l.startswith("===========")):
+                if (
+                    l.startswith("diff -")
+                    or l.startswith("Index:")
+                    or l.startswith("===========")
+                ):
                     del lines[-1]
                 else:
                     break
+
         def eatempty(lines):
             while lines:
                 if not lines[-1].strip():
@@ -301,11 +313,12 @@
 
         for line in open(pf, 'rb'):
             line = line.rstrip()
-            if (line.startswith('diff --git')
-                or (diffstart and line.startswith('+++ '))):
+            if line.startswith('diff --git') or (
+                diffstart and line.startswith('+++ ')
+            ):
                 diffstart = 2
                 break
-            diffstart = 0 # reset
+            diffstart = 0  # reset
             if line.startswith("--- "):
                 diffstart = 1
                 continue
@@ -316,7 +329,7 @@
                 elif line.startswith("# Date "):
                     date = line[7:]
                 elif line.startswith("# Parent "):
-                    parent = line[9:].lstrip() # handle double trailing space
+                    parent = line[9:].lstrip()  # handle double trailing space
                 elif line.startswith("# Branch "):
                     branch = line[9:]
                 elif line.startswith("# Node ID "):
@@ -327,16 +340,19 @@
             elif line == '# HG changeset patch':
                 message = []
                 format = "hgpatch"
-            elif (format != "tagdone" and (line.startswith("Subject: ") or
-                                           line.startswith("subject: "))):
+            elif format != "tagdone" and (
+                line.startswith("Subject: ") or line.startswith("subject: ")
+            ):
                 subject = line[9:]
                 format = "tag"
-            elif (format != "tagdone" and (line.startswith("From: ") or
-                                           line.startswith("from: "))):
+            elif format != "tagdone" and (
+                line.startswith("From: ") or line.startswith("from: ")
+            ):
                 user = line[6:]
                 format = "tag"
-            elif (format != "tagdone" and (line.startswith("Date: ") or
-                                           line.startswith("date: "))):
+            elif format != "tagdone" and (
+                line.startswith("Date: ") or line.startswith("date: ")
+            ):
                 date = line[6:]
                 format = "tag"
             elif format == "tag" and line == "":
@@ -368,11 +384,14 @@
         self.nodeid = nodeid
         self.branch = branch
         self.haspatch = diffstart > 1
-        self.plainmode = (plainmode or
-                          '# HG changeset patch' not in self.comments and
-                          any(c.startswith('Date: ') or
-                                   c.startswith('From: ')
-                                   for c in self.comments))
+        self.plainmode = (
+            plainmode
+            or '# HG changeset patch' not in self.comments
+            and any(
+                c.startswith('Date: ') or c.startswith('From: ')
+                for c in self.comments
+            )
+        )
 
     def setuser(self, user):
         try:
@@ -439,6 +458,7 @@
                 ci += 1
             del self.comments[ci]
 
+
 def newcommit(repo, phase, *args, **kwargs):
     """helper dedicated to ensure a commit respect mq.secret setting
 
@@ -456,9 +476,11 @@
         repo.ui.setconfig('ui', 'allowemptycommit', True)
         return repo.commit(*args, **kwargs)
 
+
 class AbortNoCleanup(error.Abort):
     pass
 
+
 class queue(object):
     def __init__(self, ui, baseui, path, patchdir=None):
         self.basepath = path
@@ -506,9 +528,12 @@
                     n, name = entry
                     yield statusentry(bin(n), name)
                 elif l.strip():
-                    self.ui.warn(_('malformed mq status line: %s\n') %
-                                 stringutil.pprint(entry))
+                    self.ui.warn(
+                        _('malformed mq status line: %s\n')
+                        % stringutil.pprint(entry)
+                    )
                 # else we ignore empty lines
+
         try:
             lines = self.opener.read(self.statuspath).splitlines()
             return list(parselines(lines))
@@ -548,8 +573,13 @@
     def diffopts(self, opts=None, patchfn=None, plain=False):
         """Return diff options tweaked for this mq use, possibly upgrading to
         git format, and possibly plain and without lossy options."""
-        diffopts = patchmod.difffeatureopts(self.ui, opts,
-            git=True, whitespace=not plain, formatchanging=not plain)
+        diffopts = patchmod.difffeatureopts(
+            self.ui,
+            opts,
+            git=True,
+            whitespace=not plain,
+            formatchanging=not plain,
+        )
         if self.gitmode == 'auto':
             diffopts.upgrade = True
         elif self.gitmode == 'keep':
@@ -557,8 +587,10 @@
         elif self.gitmode in ('yes', 'no'):
             diffopts.git = self.gitmode == 'yes'
         else:
-            raise error.Abort(_('mq.git option can be auto/keep/yes/no'
-                               ' got %s') % self.gitmode)
+            raise error.Abort(
+                _('mq.git option can be auto/keep/yes/no' ' got %s')
+                % self.gitmode
+            )
         if patchfn:
             diffopts = self.patchopts(diffopts, patchfn)
         return diffopts
@@ -572,8 +604,9 @@
             for patchfn in patches:
                 patchf = self.opener(patchfn, 'r')
                 # if the patch was a git patch, refresh it as a git patch
-                diffopts.git = any(line.startswith('diff --git')
-                                   for line in patchf)
+                diffopts.git = any(
+                    line.startswith('diff --git') for line in patchf
+                )
                 patchf.close()
         return diffopts
 
@@ -584,6 +617,7 @@
         def matchpatch(l):
             l = l.split('#', 1)[0]
             return l.strip() == patch
+
         for index, l in enumerate(self.fullseries):
             if matchpatch(l):
                 return index
@@ -607,8 +641,10 @@
             patch = patch.strip()
             if patch:
                 if patch in self.series:
-                    raise error.Abort(_('%s appears more than once in %s') %
-                                     (patch, self.join(self.seriespath)))
+                    raise error.Abort(
+                        _('%s appears more than once in %s')
+                        % (patch, self.join(self.seriespath))
+                    )
                 self.series.append(patch)
                 self.seriesguards.append(self.guard_re.findall(comment))
 
@@ -618,8 +654,10 @@
         bad_chars = '# \t\r\n\f'
         first = guard[0]
         if first in '-+':
-            return (_('guard %r starts with invalid character: %r') %
-                      (guard, first))
+            return _('guard %r starts with invalid character: %r') % (
+                guard,
+                first,
+            )
         for c in bad_chars:
             if c in guard:
                 return _('invalid character in guard %r: %r') % (guard, c)
@@ -646,8 +684,9 @@
             for i, guard in enumerate(guards):
                 bad = self.checkguard(guard)
                 if bad:
-                    self.ui.warn('%s:%d: %s\n' %
-                                 (self.join(self.guardspath), i + 1, bad))
+                    self.ui.warn(
+                        '%s:%d: %s\n' % (self.join(self.guardspath), i + 1, bad)
+                    )
                 else:
                     self.activeguards.append(guard)
         return self.activeguards
@@ -673,8 +712,9 @@
         if not patchguards:
             return True, None
         guards = self.active()
-        exactneg = [g for g in patchguards
-                    if g.startswith('-') and g[1:] in guards]
+        exactneg = [
+            g for g in patchguards if g.startswith('-') and g[1:] in guards
+        ]
         if exactneg:
             return False, stringutil.pprint(exactneg[0])
         pos = [g for g in patchguards if g.startswith('+')]
@@ -697,22 +737,32 @@
             pushable, why = self.pushable(idx)
             if all_patches and pushable:
                 if why is None:
-                    write(_('allowing %s - no guards in effect\n') %
-                          self.series[idx])
+                    write(
+                        _('allowing %s - no guards in effect\n')
+                        % self.series[idx]
+                    )
                 else:
                     if not why:
-                        write(_('allowing %s - no matching negative guards\n') %
-                              self.series[idx])
+                        write(
+                            _('allowing %s - no matching negative guards\n')
+                            % self.series[idx]
+                        )
                     else:
-                        write(_('allowing %s - guarded by %s\n') %
-                              (self.series[idx], why))
+                        write(
+                            _('allowing %s - guarded by %s\n')
+                            % (self.series[idx], why)
+                        )
             if not pushable:
                 if why:
-                    write(_('skipping %s - guarded by %s\n') %
-                          (self.series[idx], why))
+                    write(
+                        _('skipping %s - guarded by %s\n')
+                        % (self.series[idx], why)
+                    )
                 else:
-                    write(_('skipping %s - no matching guards\n') %
-                          self.series[idx])
+                    write(
+                        _('skipping %s - no matching guards\n')
+                        % self.series[idx]
+                    )
 
     def savedirty(self):
         def writelist(items, path):
@@ -720,6 +770,7 @@
             for i in items:
                 fp.write("%s\n" % i)
             fp.close()
+
         if self.applieddirty:
             writelist(map(bytes, self.applied), self.statuspath)
             self.applieddirty = False
@@ -742,8 +793,9 @@
         try:
             os.unlink(undo)
         except OSError as inst:
-            self.ui.warn(_('error removing undo: %s\n') %
-                         stringutil.forcebytestr(inst))
+            self.ui.warn(
+                _('error removing undo: %s\n') % stringutil.forcebytestr(inst)
+            )
 
     def backup(self, repo, files, copy=False):
         # backup local changes in --force case
@@ -751,27 +803,40 @@
             absf = repo.wjoin(f)
             if os.path.lexists(absf):
                 absorig = scmutil.backuppath(self.ui, repo, f)
-                self.ui.note(_('saving current version of %s as %s\n') %
-                             (f, os.path.relpath(absorig)))
+                self.ui.note(
+                    _('saving current version of %s as %s\n')
+                    % (f, os.path.relpath(absorig))
+                )
 
                 if copy:
                     util.copyfile(absf, absorig)
                 else:
                     util.rename(absf, absorig)
 
-    def printdiff(self, repo, diffopts, node1, node2=None, files=None,
-                  fp=None, changes=None, opts=None):
+    def printdiff(
+        self,
+        repo,
+        diffopts,
+        node1,
+        node2=None,
+        files=None,
+        fp=None,
+        changes=None,
+        opts=None,
+    ):
         if opts is None:
             opts = {}
         stat = opts.get('stat')
         m = scmutil.match(repo[node1], files, opts)
-        logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
-                                  changes, stat, fp)
+        logcmdutil.diffordiffstat(
+            self.ui, repo, diffopts, node1, node2, m, changes, stat, fp
+        )
 
     def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
         # first try just applying the patch
-        (err, n) = self.apply(repo, [patch], update_status=False,
-                              strict=True, merge=rev)
+        (err, n) = self.apply(
+            repo, [patch], update_status=False, strict=True, merge=rev
+        )
 
         if err == 0:
             return (err, n)
@@ -872,8 +937,9 @@
         patchfile: name of patch file'''
         files = set()
         try:
-            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
-                                  files=files, eolmode=None)
+            fuzz = patchmod.patch(
+                self.ui, repo, patchfile, strip=1, files=files, eolmode=None
+            )
             return (True, list(files), fuzz)
         except Exception as inst:
             self.ui.note(stringutil.forcebytestr(inst) + '\n')
@@ -882,18 +948,37 @@
             self.ui.traceback()
             return (False, list(files), False)
 
-    def apply(self, repo, series, list=False, update_status=True,
-              strict=False, patchdir=None, merge=None, all_files=None,
-              tobackup=None, keepchanges=False):
+    def apply(
+        self,
+        repo,
+        series,
+        list=False,
+        update_status=True,
+        strict=False,
+        patchdir=None,
+        merge=None,
+        all_files=None,
+        tobackup=None,
+        keepchanges=False,
+    ):
         wlock = lock = tr = None
         try:
             wlock = repo.wlock()
             lock = repo.lock()
             tr = repo.transaction("qpush")
             try:
-                ret = self._apply(repo, series, list, update_status,
-                                  strict, patchdir, merge, all_files=all_files,
-                                  tobackup=tobackup, keepchanges=keepchanges)
+                ret = self._apply(
+                    repo,
+                    series,
+                    list,
+                    update_status,
+                    strict,
+                    patchdir,
+                    merge,
+                    all_files=all_files,
+                    tobackup=tobackup,
+                    keepchanges=keepchanges,
+                )
                 tr.close()
                 self.savedirty()
                 return ret
@@ -901,7 +986,7 @@
                 tr.close()
                 self.savedirty()
                 raise
-            except: # re-raises
+            except:  # re-raises
                 try:
                     tr.abort()
                 finally:
@@ -911,9 +996,19 @@
             release(tr, lock, wlock)
             self.removeundo(repo)
 
-    def _apply(self, repo, series, list=False, update_status=True,
-               strict=False, patchdir=None, merge=None, all_files=None,
-               tobackup=None, keepchanges=False):
+    def _apply(
+        self,
+        repo,
+        series,
+        list=False,
+        update_status=True,
+        strict=False,
+        patchdir=None,
+        merge=None,
+        all_files=None,
+        tobackup=None,
+        keepchanges=False,
+    ):
         """returns (error, hash)
 
         error = 1 for unable to read, 2 for patch failed, 3 for patch
@@ -957,7 +1052,8 @@
                     if touched and keepchanges:
                         raise AbortNoCleanup(
                             _("conflicting local changes found"),
-                            hint=_("did you forget to qrefresh?"))
+                            hint=_("did you forget to qrefresh?"),
+                        )
                     self.backup(repo, touched, copy=True)
                     tobackup = tobackup - touched
                 (patcherr, files, fuzz) = self.patch(repo, pf)
@@ -989,14 +1085,16 @@
                 wctx = repo[None]
                 pctx = repo['.']
                 overwrite = False
-                mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
-                                                      overwrite)
+                mergedsubstate = subrepoutil.submerge(
+                    repo, pctx, wctx, wctx, overwrite
+                )
                 files += mergedsubstate.keys()
 
             match = scmutil.matchfiles(repo, files or [])
             oldtip = repo.changelog.tip()
-            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
-                          force=True)
+            n = newcommit(
+                repo, None, message, ph.user, ph.date, match=match, force=True
+            )
             if repo.changelog.tip() == oldtip:
                 raise error.Abort(_("qpush exactly duplicates child changeset"))
             if n is None:
@@ -1006,8 +1104,9 @@
                 self.applied.append(statusentry(n, patchname))
 
             if patcherr:
-                self.ui.warn(_("patch failed, rejects left in working "
-                               "directory\n"))
+                self.ui.warn(
+                    _("patch failed, rejects left in working " "directory\n")
+                )
                 err = 2
                 break
 
@@ -1054,7 +1153,7 @@
 
         if unknown:
             if numrevs:
-                rev  = dict((entry.name, entry.node) for entry in qfinished)
+                rev = dict((entry.name, entry.node) for entry in qfinished)
                 for p in unknown:
                     msg = _('revision %s refers to unknown patches: %s\n')
                     self.ui.warn(msg % (short(rev[p]), p))
@@ -1106,8 +1205,9 @@
 
     def delete(self, repo, patches, opts):
         if not patches and not opts.get('rev'):
-            raise error.Abort(_('qdelete requires at least one revision or '
-                               'patch name'))
+            raise error.Abort(
+                _('qdelete requires at least one revision or ' 'patch name')
+            )
 
         realpatches = []
         for patch in patches:
@@ -1145,13 +1245,13 @@
     def putsubstate2changes(self, substatestate, changes):
         for files in changes[:3]:
             if '.hgsubstate' in files:
-                return # already listed up
+                return  # already listed up
         # not yet listed up
         if substatestate in 'a?':
             changes[1].append('.hgsubstate')
         elif substatestate in 'r':
             changes[2].append('.hgsubstate')
-        else: # modified
+        else:  # modified
             changes[0].append('.hgsubstate')
 
     def checklocalchanges(self, repo, force=False, refresh=True):
@@ -1166,39 +1266,46 @@
         if not force:
             cmdutil.checkunfinished(repo)
             if s.modified or s.added or s.removed or s.deleted:
-                _("local changes found") # i18n tool detection
+                _("local changes found")  # i18n tool detection
                 raise error.Abort(_("local changes found" + excsuffix))
             if checksubstate(repo):
-                _("local changed subrepos found") # i18n tool detection
+                _("local changed subrepos found")  # i18n tool detection
                 raise error.Abort(_("local changed subrepos found" + excsuffix))
         else:
             cmdutil.checkunfinished(repo, skipmerge=True)
         return s
 
     _reserved = ('series', 'status', 'guards', '.', '..')
+
     def checkreservedname(self, name):
         if name in self._reserved:
-            raise error.Abort(_('"%s" cannot be used as the name of a patch')
-                             % name)
+            raise error.Abort(
+                _('"%s" cannot be used as the name of a patch') % name
+            )
         if name != name.strip():
             # whitespace is stripped by parseseries()
-            raise error.Abort(_('patch name cannot begin or end with '
-                                'whitespace'))
+            raise error.Abort(
+                _('patch name cannot begin or end with ' 'whitespace')
+            )
         for prefix in ('.hg', '.mq'):
             if name.startswith(prefix):
-                raise error.Abort(_('patch name cannot begin with "%s"')
-                                 % prefix)
+                raise error.Abort(
+                    _('patch name cannot begin with "%s"') % prefix
+                )
         for c in ('#', ':', '\r', '\n'):
             if c in name:
-                raise error.Abort(_('%r cannot be used in the name of a patch')
-                                 % pycompat.bytestr(c))
+                raise error.Abort(
+                    _('%r cannot be used in the name of a patch')
+                    % pycompat.bytestr(c)
+                )
 
     def checkpatchname(self, name, force=False):
         self.checkreservedname(name)
         if not force and os.path.exists(self.join(name)):
             if os.path.isdir(self.join(name)):
-                raise error.Abort(_('"%s" already exists as a directory')
-                                 % name)
+                raise error.Abort(
+                    _('"%s" already exists as a directory') % name
+                )
             else:
                 raise error.Abort(_('patch "%s" already exists') % name)
 
@@ -1206,7 +1313,7 @@
         """Return a suitable filename for title, adding a suffix to make
         it unique in the existing list"""
         namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_')
-        namebase = namebase[:75] # avoid too long name (issue5117)
+        namebase = namebase[:75]  # avoid too long name (issue5117)
         if namebase:
             try:
                 self.checkreservedname(namebase)
@@ -1252,8 +1359,9 @@
         if opts.get('include') or opts.get('exclude') or pats:
             # detect missing files in pats
             def badfn(f, msg):
-                if f != '.hgsubstate': # .hgsubstate is auto-created
+                if f != '.hgsubstate':  # .hgsubstate is auto-created
                     raise error.Abort('%s: %s' % (f, msg))
+
             match = scmutil.match(repo[None], pats, opts, badfn=badfn)
             changes = repo.status(match=match)
         else:
@@ -1271,28 +1379,42 @@
                 # if patch file write fails, abort early
                 p = self.opener(patchfn, "w")
             except IOError as e:
-                raise error.Abort(_('cannot write patch "%s": %s')
-                                 % (patchfn, encoding.strtolocal(e.strerror)))
+                raise error.Abort(
+                    _('cannot write patch "%s": %s')
+                    % (patchfn, encoding.strtolocal(e.strerror))
+                )
             try:
                 defaultmsg = "[mq]: %s" % patchfn
                 editor = cmdutil.getcommiteditor(editform=editform)
                 if edit:
+
                     def finishdesc(desc):
                         if desc.rstrip():
                             return desc
                         else:
                             return defaultmsg
+
                     # i18n: this message is shown in editor with "HG: " prefix
                     extramsg = _('Leave message empty to use default message.')
-                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
-                                                     extramsg=extramsg,
-                                                     editform=editform)
+                    editor = cmdutil.getcommiteditor(
+                        finishdesc=finishdesc,
+                        extramsg=extramsg,
+                        editform=editform,
+                    )
                     commitmsg = msg
                 else:
                     commitmsg = msg or defaultmsg
 
-                n = newcommit(repo, None, commitmsg, user, date, match=match,
-                              force=True, editor=editor)
+                n = newcommit(
+                    repo,
+                    None,
+                    commitmsg,
+                    user,
+                    date,
+                    match=match,
+                    force=True,
+                    editor=editor,
+                )
                 if n is None:
                     raise error.Abort(_("repo commit failed"))
                 try:
@@ -1317,15 +1439,20 @@
                         parent = self.qparents(repo, n)
                         if inclsubs:
                             self.putsubstate2changes(substatestate, changes)
-                        chunks = patchmod.diff(repo, node1=parent, node2=n,
-                                               changes=changes, opts=diffopts)
+                        chunks = patchmod.diff(
+                            repo,
+                            node1=parent,
+                            node2=n,
+                            changes=changes,
+                            opts=diffopts,
+                        )
                         for chunk in chunks:
                             p.write(chunk)
                     p.close()
                     r = self.qrepo()
                     if r:
                         r[None].add([patchfn])
-                except: # re-raises
+                except:  # re-raises
                     repo.rollback()
                     raise
             except Exception:
@@ -1391,7 +1518,7 @@
                     if res:
                         i = self.series.index(res)
                         try:
-                            off = int(patch[minus + 1:] or 1)
+                            off = int(patch[minus + 1 :] or 1)
                         except (ValueError, OverflowError):
                             pass
                         else:
@@ -1403,7 +1530,7 @@
                     if res:
                         i = self.series.index(res)
                         try:
-                            off = int(patch[plus + 1:] or 1)
+                            off = int(patch[plus + 1 :] or 1)
                         except (ValueError, OverflowError):
                             pass
                         else:
@@ -1411,9 +1538,19 @@
                                 return self.series[i + off]
         raise error.Abort(_("patch %s not in series") % patch)
 
-    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
-             all=False, move=False, exact=False, nobackup=False,
-             keepchanges=False):
+    def push(
+        self,
+        repo,
+        patch=None,
+        force=False,
+        list=False,
+        mergeq=None,
+        all=False,
+        move=False,
+        exact=False,
+        nobackup=False,
+        keepchanges=False,
+    ):
         self.checkkeepchanges(keepchanges, force)
         diffopts = self.diffopts()
         with repo.wlock():
@@ -1437,15 +1574,15 @@
                 patch = self.lookup(patch)
                 info = self.isapplied(patch)
                 if info and info[0] >= len(self.applied) - 1:
-                    self.ui.warn(
-                        _('qpush: %s is already at the top\n') % patch)
+                    self.ui.warn(_('qpush: %s is already at the top\n') % patch)
                     return 0
 
                 pushable, reason = self.pushable(patch)
                 if pushable:
                     if self.series.index(patch) < self.seriesend():
                         raise error.Abort(
-                            _("cannot push to a previous patch: %s") % patch)
+                            _("cannot push to a previous patch: %s") % patch
+                        )
                 else:
                     if reason:
                         reason = _('guarded by %s') % reason
@@ -1474,18 +1611,22 @@
             if exact:
                 if keepchanges:
                     raise error.Abort(
-                        _("cannot use --exact and --keep-changes together"))
+                        _("cannot use --exact and --keep-changes together")
+                    )
                 if move:
-                    raise error.Abort(_('cannot use --exact and --move '
-                                       'together'))
+                    raise error.Abort(
+                        _('cannot use --exact and --move ' 'together')
+                    )
                 if self.applied:
-                    raise error.Abort(_('cannot push --exact with applied '
-                                       'patches'))
+                    raise error.Abort(
+                        _('cannot push --exact with applied ' 'patches')
+                    )
                 root = self.series[start]
                 target = patchheader(self.join(root), self.plainmode).parent
                 if not target:
                     raise error.Abort(
-                        _("%s does not have a parent recorded") % root)
+                        _("%s does not have a parent recorded") % root
+                    )
                 if not repo[target] == repo['.']:
                     hg.update(repo, target)
 
@@ -1521,8 +1662,12 @@
             if (not nobackup and force) or keepchanges:
                 status = self.checklocalchanges(repo, force=True)
                 if keepchanges:
-                    tobackup.update(status.modified + status.added +
-                                    status.removed + status.deleted)
+                    tobackup.update(
+                        status.modified
+                        + status.added
+                        + status.removed
+                        + status.deleted
+                    )
                 else:
                     tobackup.update(status.modified + status.added)
 
@@ -1532,14 +1677,25 @@
                 if mergeq:
                     ret = self.mergepatch(repo, mergeq, s, diffopts)
                 else:
-                    ret = self.apply(repo, s, list, all_files=all_files,
-                                     tobackup=tobackup, keepchanges=keepchanges)
+                    ret = self.apply(
+                        repo,
+                        s,
+                        list,
+                        all_files=all_files,
+                        tobackup=tobackup,
+                        keepchanges=keepchanges,
+                    )
             except AbortNoCleanup:
                 raise
-            except: # re-raises
+            except:  # re-raises
                 self.ui.warn(_('cleaning up working directory...\n'))
-                cmdutil.revert(self.ui, repo, repo['.'],
-                               repo.dirstate.parents(), no_backup=True)
+                cmdutil.revert(
+                    self.ui,
+                    repo,
+                    repo['.'],
+                    repo.dirstate.parents(),
+                    no_backup=True,
+                )
                 # only remove unknown files that we know we touched or
                 # created while patching
                 for f in all_files:
@@ -1558,8 +1714,16 @@
                 self.ui.write(_("now at: %s\n") % top)
             return ret[0]
 
-    def pop(self, repo, patch=None, force=False, update=True, all=False,
-            nobackup=False, keepchanges=False):
+    def pop(
+        self,
+        repo,
+        patch=None,
+        force=False,
+        update=True,
+        all=False,
+        nobackup=False,
+        keepchanges=False,
+    ):
         self.checkkeepchanges(keepchanges, force)
         with repo.wlock():
             if patch:
@@ -1597,8 +1761,9 @@
                         update = True
             else:
                 parents = [p.node() for p in repo[None].parents()]
-                update = any(entry.node in parents
-                             for entry in self.applied[start:])
+                update = any(
+                    entry.node in parents for entry in self.applied[start:]
+                )
 
             tobackup = set()
             if update:
@@ -1607,8 +1772,9 @@
                     if not nobackup:
                         tobackup.update(s.modified + s.added)
                 elif keepchanges:
-                    tobackup.update(s.modified + s.added +
-                                    s.removed + s.deleted)
+                    tobackup.update(
+                        s.modified + s.added + s.removed + s.deleted
+                    )
 
             self.applieddirty = True
             end = len(self.applied)
@@ -1621,12 +1787,17 @@
                 raise error.Abort(_('trying to pop unknown node %s') % node)
 
             if heads != [self.applied[-1].node]:
-                raise error.Abort(_("popping would remove a revision not "
-                                   "managed by this patch queue"))
+                raise error.Abort(
+                    _(
+                        "popping would remove a revision not "
+                        "managed by this patch queue"
+                    )
+                )
             if not repo[self.applied[-1].node].mutable():
                 raise error.Abort(
                     _("popping would remove a public revision"),
-                    hint=_("see 'hg help phases' for details"))
+                    hint=_("see 'hg help phases' for details"),
+                )
 
             # we know there are no local changes, so we can make a simplified
             # form of hg.update.
@@ -1694,8 +1865,10 @@
             if repo.changelog.heads(top) != [top]:
                 raise error.Abort(_("cannot qrefresh a revision with children"))
             if not repo[top].mutable():
-                raise error.Abort(_("cannot qrefresh public revision"),
-                                 hint=_("see 'hg help phases' for details"))
+                raise error.Abort(
+                    _("cannot qrefresh public revision"),
+                    hint=_("see 'hg help phases' for details"),
+                )
 
             cparents = repo.changelog.parents(top)
             patchparent = self.qparents(repo, top)
@@ -1705,8 +1878,9 @@
                 substatestate = repo.dirstate['.hgsubstate']
 
             ph = patchheader(self.join(patchfn), self.plainmode)
-            diffopts = self.diffopts({'git': opts.get('git')}, patchfn,
-                                     plain=True)
+            diffopts = self.diffopts(
+                {'git': opts.get('git')}, patchfn, plain=True
+            )
             if newuser:
                 ph.setuser(newuser)
             if newdate:
@@ -1804,7 +1978,8 @@
                         src = ctx[dst].copysource()
                         if src:
                             copies.setdefault(src, []).extend(
-                                copies.get(dst, []))
+                                copies.get(dst, [])
+                            )
                             if dst in a:
                                 copies[src].append(dst)
                         # we can't copy a file created by the patch itself
@@ -1854,16 +2029,20 @@
                 defaultmsg = "[mq]: %s" % patchfn
                 editor = cmdutil.getcommiteditor(editform=editform)
                 if edit:
+
                     def finishdesc(desc):
                         if desc.rstrip():
                             ph.setmessage(desc)
                             return desc
                         return defaultmsg
+
                     # i18n: this message is shown in editor with "HG: " prefix
                     extramsg = _('Leave message empty to use default message.')
-                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
-                                                     extramsg=extramsg,
-                                                     editform=editform)
+                    editor = cmdutil.getcommiteditor(
+                        finishdesc=finishdesc,
+                        extramsg=extramsg,
+                        editform=editform,
+                    )
                     message = msg or "\n".join(ph.message)
                 elif not msg:
                     if not ph.message:
@@ -1880,14 +2059,23 @@
                 try:
                     lock = repo.lock()
                     tr = repo.transaction('mq')
-                    n = newcommit(repo, oldphase, message, user, ph.date,
-                              match=match, force=True, editor=editor)
+                    n = newcommit(
+                        repo,
+                        oldphase,
+                        message,
+                        user,
+                        ph.date,
+                        match=match,
+                        force=True,
+                        editor=editor,
+                    )
                     # only write patch after a successful commit
                     c = [list(x) for x in refreshchanges]
                     if inclsubs:
                         self.putsubstate2changes(substatestate, c)
-                    chunks = patchmod.diff(repo, patchparent,
-                                           changes=c, opts=diffopts)
+                    chunks = patchmod.diff(
+                        repo, patchparent, changes=c, opts=diffopts
+                    )
                     comments = bytes(ph)
                     if comments:
                         patchf.write(comments)
@@ -1902,12 +2090,16 @@
                     self.applied.append(statusentry(n, patchfn))
                 finally:
                     lockmod.release(tr, lock)
-            except: # re-raises
+            except:  # re-raises
                 ctx = repo[cparents[0]]
                 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                 self.savedirty()
-                self.ui.warn(_('qrefresh interrupted while patch was popped! '
-                               '(revert --all, qpush to recover)\n'))
+                self.ui.warn(
+                    _(
+                        'qrefresh interrupted while patch was popped! '
+                        '(revert --all, qpush to recover)\n'
+                    )
+                )
                 raise
         finally:
             wlock.release()
@@ -1939,8 +2131,15 @@
             self.explainpushable(i)
         return unapplied
 
-    def qseries(self, repo, missing=None, start=0, length=None, status=None,
-                summary=False):
+    def qseries(
+        self,
+        repo,
+        missing=None,
+        start=0,
+        length=None,
+        status=None,
+        summary=False,
+    ):
         def displayname(pfx, patchname, state):
             if pfx:
                 self.ui.write(pfx)
@@ -1987,16 +2186,22 @@
         else:
             msng_list = []
             for root, dirs, files in os.walk(self.path):
-                d = root[len(self.path) + 1:]
+                d = root[len(self.path) + 1 :]
                 for f in files:
                     fl = os.path.join(d, f)
-                    if (fl not in self.series and
-                        fl not in (self.statuspath, self.seriespath,
-                                   self.guardspath)
-                        and not fl.startswith('.')):
+                    if (
+                        fl not in self.series
+                        and fl
+                        not in (
+                            self.statuspath,
+                            self.seriespath,
+                            self.guardspath,
+                        )
+                        and not fl.startswith('.')
+                    ):
                         msng_list.append(fl)
             for x in sorted(msng_list):
-                pfx = self.ui.verbose and ('D ') or ''
+                pfx = self.ui.verbose and 'D ' or ''
                 displayname(pfx, x, 'missing')
 
     def issaveline(self, l):
@@ -2009,10 +2214,15 @@
         if self.ui.pageractive and not ui.pageractive:
             ui.pageractive = self.ui.pageractive
             # internal config: ui.formatted
-            ui.setconfig('ui', 'formatted',
-                         self.ui.config('ui', 'formatted'), 'mqpager')
-            ui.setconfig('ui', 'interactive',
-                         self.ui.config('ui', 'interactive'), 'mqpager')
+            ui.setconfig(
+                'ui', 'formatted', self.ui.config('ui', 'formatted'), 'mqpager'
+            )
+            ui.setconfig(
+                'ui',
+                'interactive',
+                self.ui.config('ui', 'interactive'),
+                'mqpager',
+            )
         if create or os.path.isdir(self.join(".hg")):
             return hg.repository(ui, path=self.path, create=create)
 
@@ -2060,8 +2270,10 @@
                     update = False
                 strip(self.ui, repo, [rev], update=update, backup=False)
         if qpp:
-            self.ui.warn(_("saved queue repository parents: %s %s\n") %
-                         (short(qpp[0]), short(qpp[1])))
+            self.ui.warn(
+                _("saved queue repository parents: %s %s\n")
+                % (short(qpp[0]), short(qpp[1]))
+            )
             if qupdate:
                 self.ui.status(_("updating queue directory\n"))
                 r = self.qrepo()
@@ -2112,6 +2324,7 @@
         index of the first patch past the last applied one.
         """
         end = 0
+
         def nextpatch(start):
             if all_patches or start >= len(self.series):
                 return start
@@ -2121,6 +2334,7 @@
                     return i
                 self.explainpushable(i)
             return len(self.series)
+
         if self.applied:
             p = self.applied[-1].name
             try:
@@ -2138,24 +2352,35 @@
             p = ("%d" % self.series.index(pname)) + " " + pname
         return p
 
-    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
-                force=None, git=False):
+    def qimport(
+        self,
+        repo,
+        files,
+        patchname=None,
+        rev=None,
+        existing=None,
+        force=None,
+        git=False,
+    ):
         def checkseries(patchname):
             if patchname in self.series:
-                raise error.Abort(_('patch %s is already in the series file')
-                                 % patchname)
+                raise error.Abort(
+                    _('patch %s is already in the series file') % patchname
+                )
 
         if rev:
             if files:
-                raise error.Abort(_('option "-r" not valid when importing '
-                                   'files'))
+                raise error.Abort(
+                    _('option "-r" not valid when importing ' 'files')
+                )
             rev = scmutil.revrange(repo, rev)
             rev.sort(reverse=True)
         elif not files:
             raise error.Abort(_('no files or revisions specified'))
         if (len(files) > 1 or len(rev) > 1) and patchname:
-            raise error.Abort(_('option "-n" not valid when importing multiple '
-                               'patches'))
+            raise error.Abort(
+                _('option "-n" not valid when importing multiple ' 'patches')
+            )
         imported = []
         if rev:
             # If mq patches are applied, we can only import revisions
@@ -2163,46 +2388,56 @@
             # Otherwise, they should form a linear path to a head.
             heads = repo.changelog.heads(repo.changelog.node(rev.first()))
             if len(heads) > 1:
-                raise error.Abort(_('revision %d is the root of more than one '
-                                   'branch') % rev.last())
+                raise error.Abort(
+                    _('revision %d is the root of more than one ' 'branch')
+                    % rev.last()
+                )
             if self.applied:
                 base = repo.changelog.node(rev.first())
                 if base in [n.node for n in self.applied]:
-                    raise error.Abort(_('revision %d is already managed')
-                                     % rev.first())
+                    raise error.Abort(
+                        _('revision %d is already managed') % rev.first()
+                    )
                 if heads != [self.applied[-1].node]:
-                    raise error.Abort(_('revision %d is not the parent of '
-                                       'the queue') % rev.first())
+                    raise error.Abort(
+                        _('revision %d is not the parent of ' 'the queue')
+                        % rev.first()
+                    )
                 base = repo.changelog.rev(self.applied[0].node)
                 lastparent = repo.changelog.parentrevs(base)[0]
             else:
                 if heads != [repo.changelog.node(rev.first())]:
-                    raise error.Abort(_('revision %d has unmanaged children')
-                                     % rev.first())
+                    raise error.Abort(
+                        _('revision %d has unmanaged children') % rev.first()
+                    )
                 lastparent = None
 
             diffopts = self.diffopts({'git': git})
             with repo.transaction('qimport') as tr:
                 for r in rev:
                     if not repo[r].mutable():
-                        raise error.Abort(_('revision %d is not mutable') % r,
-                                         hint=_("see 'hg help phases' "
-                                                'for details'))
+                        raise error.Abort(
+                            _('revision %d is not mutable') % r,
+                            hint=_("see 'hg help phases' " 'for details'),
+                        )
                     p1, p2 = repo.changelog.parentrevs(r)
                     n = repo.changelog.node(r)
                     if p2 != nullrev:
-                        raise error.Abort(_('cannot import merge revision %d')
-                                         % r)
+                        raise error.Abort(
+                            _('cannot import merge revision %d') % r
+                        )
                     if lastparent and lastparent != r:
-                        raise error.Abort(_('revision %d is not the parent of '
-                                           '%d')
-                                         % (r, lastparent))
+                        raise error.Abort(
+                            _('revision %d is not the parent of ' '%d')
+                            % (r, lastparent)
+                        )
                     lastparent = p1
 
                     if not patchname:
                         patchname = self.makepatchname(
                             repo[r].description().split('\n', 1)[0],
-                            '%d.diff' % r)
+                            '%d.diff' % r,
+                        )
                     checkseries(patchname)
                     self.checkpatchname(patchname, force)
                     self.fullseries.insert(0, patchname)
@@ -2226,21 +2461,24 @@
         for i, filename in enumerate(files):
             if existing:
                 if filename == '-':
-                    raise error.Abort(_('-e is incompatible with import from -')
-                                     )
+                    raise error.Abort(
+                        _('-e is incompatible with import from -')
+                    )
                 filename = normname(filename)
                 self.checkreservedname(filename)
                 if util.url(filename).islocal():
                     originpath = self.join(filename)
                     if not os.path.isfile(originpath):
                         raise error.Abort(
-                            _("patch %s does not exist") % filename)
+                            _("patch %s does not exist") % filename
+                        )
 
                 if patchname:
                     self.checkpatchname(patchname, force)
 
-                    self.ui.write(_('renaming %s to %s\n')
-                                        % (filename, patchname))
+                    self.ui.write(
+                        _('renaming %s to %s\n') % (filename, patchname)
+                    )
                     util.rename(originpath, self.join(patchname))
                 else:
                     patchname = filename
@@ -2278,20 +2516,28 @@
         self.removeundo(repo)
         return imported
 
+
 def fixkeepchangesopts(ui, opts):
-    if (not ui.configbool('mq', 'keepchanges') or opts.get('force')
-        or opts.get('exact')):
+    if (
+        not ui.configbool('mq', 'keepchanges')
+        or opts.get('force')
+        or opts.get('exact')
+    ):
         return opts
     opts = dict(opts)
     opts['keep_changes'] = True
     return opts
 
-@command("qdelete|qremove|qrm",
-         [('k', 'keep', None, _('keep patch file')),
-          ('r', 'rev', [],
-           _('stop managing a revision (DEPRECATED)'), _('REV'))],
-         _('hg qdelete [-k] [PATCH]...'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qdelete|qremove|qrm",
+    [
+        ('k', 'keep', None, _('keep patch file')),
+        ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'), _('REV')),
+    ],
+    _('hg qdelete [-k] [PATCH]...'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def delete(ui, repo, *patches, **opts):
     """remove patches from queue
 
@@ -2306,11 +2552,14 @@
     q.savedirty()
     return 0
 
-@command("qapplied",
-         [('1', 'last', None, _('show only the preceding applied patch'))
-          ] + seriesopts,
-         _('hg qapplied [-1] [-s] [PATCH]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qapplied",
+    [('1', 'last', None, _('show only the preceding applied patch'))]
+    + seriesopts,
+    _('hg qapplied [-1] [-s] [PATCH]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def applied(ui, repo, patch=None, **opts):
     """print the patches already applied
 
@@ -2338,14 +2587,17 @@
     else:
         start = 0
 
-    q.qseries(repo, length=end, start=start, status='A',
-              summary=opts.get('summary'))
-
-
-@command("qunapplied",
-         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
-         _('hg qunapplied [-1] [-s] [PATCH]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    q.qseries(
+        repo, length=end, start=start, status='A', summary=opts.get('summary')
+    )
+
+
+@command(
+    "qunapplied",
+    [('1', 'first', None, _('show only the first patch'))] + seriesopts,
+    _('hg qunapplied [-1] [-s] [PATCH]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def unapplied(ui, repo, patch=None, **opts):
     """print the patches not yet applied
 
@@ -2368,20 +2620,34 @@
         length = 1
     else:
         length = None
-    q.qseries(repo, start=start, length=length, status='U',
-              summary=opts.get('summary'))
-
-@command("qimport",
-         [('e', 'existing', None, _('import file in patch directory')),
-          ('n', 'name', '',
-           _('name of patch file'), _('NAME')),
-          ('f', 'force', None, _('overwrite existing files')),
-          ('r', 'rev', [],
-           _('place existing revisions under mq control'), _('REV')),
-          ('g', 'git', None, _('use git extended diff format')),
-          ('P', 'push', None, _('qpush after importing'))],
-         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
-         helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    q.qseries(
+        repo,
+        start=start,
+        length=length,
+        status='U',
+        summary=opts.get('summary'),
+    )
+
+
+@command(
+    "qimport",
+    [
+        ('e', 'existing', None, _('import file in patch directory')),
+        ('n', 'name', '', _('name of patch file'), _('NAME')),
+        ('f', 'force', None, _('overwrite existing files')),
+        (
+            'r',
+            'rev',
+            [],
+            _('place existing revisions under mq control'),
+            _('REV'),
+        ),
+        ('g', 'git', None, _('use git extended diff format')),
+        ('P', 'push', None, _('qpush after importing')),
+    ],
+    _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def qimport(ui, repo, *filename, **opts):
     """import a patch or existing changeset
 
@@ -2417,13 +2683,18 @@
     Returns 0 if import succeeded.
     """
     opts = pycompat.byteskwargs(opts)
-    with repo.lock(): # cause this may move phase
+    with repo.lock():  # cause this may move phase
         q = repo.mq
         try:
             imported = q.qimport(
-                repo, filename, patchname=opts.get('name'),
-                existing=opts.get('existing'), force=opts.get('force'),
-                rev=opts.get('rev'), git=opts.get('git'))
+                repo,
+                filename,
+                patchname=opts.get('name'),
+                existing=opts.get('existing'),
+                force=opts.get('force'),
+                rev=opts.get('rev'),
+                git=opts.get('git'),
+            )
         finally:
             q.savedirty()
 
@@ -2431,6 +2702,7 @@
         return q.push(repo, imported[-1])
     return 0
 
+
 def qinit(ui, repo, create):
     """initialize a new queue repository
 
@@ -2457,11 +2729,14 @@
         commands.add(ui, r)
     return 0
 
-@command("qinit",
-         [('c', 'create-repo', None, _('create queue repository'))],
-         _('hg qinit [-c]'),
-         helpcategory=command.CATEGORY_REPO_CREATION,
-         helpbasic=True)
+
+@command(
+    "qinit",
+    [('c', 'create-repo', None, _('create queue repository'))],
+    _('hg qinit [-c]'),
+    helpcategory=command.CATEGORY_REPO_CREATION,
+    helpbasic=True,
+)
 def init(ui, repo, **opts):
     """init a new queue repository (DEPRECATED)
 
@@ -2475,18 +2750,31 @@
     commands. With -c, use :hg:`init --mq` instead."""
     return qinit(ui, repo, create=opts.get(r'create_repo'))
 
-@command("qclone",
-         [('', 'pull', None, _('use pull protocol to copy metadata')),
-          ('U', 'noupdate', None,
-           _('do not update the new working directories')),
-          ('', 'uncompressed', None,
-           _('use uncompressed transfer (fast over LAN)')),
-          ('p', 'patches', '',
-           _('location of source patch repository'), _('REPO')),
-         ] + cmdutil.remoteopts,
-         _('hg qclone [OPTION]... SOURCE [DEST]'),
-         helpcategory=command.CATEGORY_REPO_CREATION,
-         norepo=True)
+
+@command(
+    "qclone",
+    [
+        ('', 'pull', None, _('use pull protocol to copy metadata')),
+        ('U', 'noupdate', None, _('do not update the new working directories')),
+        (
+            '',
+            'uncompressed',
+            None,
+            _('use uncompressed transfer (fast over LAN)'),
+        ),
+        (
+            'p',
+            'patches',
+            '',
+            _('location of source patch repository'),
+            _('REPO'),
+        ),
+    ]
+    + cmdutil.remoteopts,
+    _('hg qclone [OPTION]... SOURCE [DEST]'),
+    helpcategory=command.CATEGORY_REPO_CREATION,
+    norepo=True,
+)
 def clone(ui, source, dest=None, **opts):
     '''clone main and patch repository at same time
 
@@ -2505,6 +2793,7 @@
     Return 0 on success.
     '''
     opts = pycompat.byteskwargs(opts)
+
     def patchdir(repo):
         """compute a patch repo url from a repo object"""
         url = repo.url()
@@ -2525,8 +2814,9 @@
     try:
         hg.peer(ui, opts, patchespath)
     except error.RepoError:
-        raise error.Abort(_('versioned patch repository not found'
-                           ' (see init --mq)'))
+        raise error.Abort(
+            _('versioned patch repository not found' ' (see init --mq)')
+        )
     qbase, destrev = None, None
     if sr.local():
         repo = sr.local()
@@ -2543,32 +2833,47 @@
             pass
 
     ui.note(_('cloning main repository\n'))
-    sr, dr = hg.clone(ui, opts, sr.url(), dest,
-                      pull=opts.get('pull'),
-                      revs=destrev,
-                      update=False,
-                      stream=opts.get('uncompressed'))
+    sr, dr = hg.clone(
+        ui,
+        opts,
+        sr.url(),
+        dest,
+        pull=opts.get('pull'),
+        revs=destrev,
+        update=False,
+        stream=opts.get('uncompressed'),
+    )
 
     ui.note(_('cloning patch repository\n'))
-    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
-             pull=opts.get('pull'), update=not opts.get('noupdate'),
-             stream=opts.get('uncompressed'))
+    hg.clone(
+        ui,
+        opts,
+        opts.get('patches') or patchdir(sr),
+        patchdir(dr),
+        pull=opts.get('pull'),
+        update=not opts.get('noupdate'),
+        stream=opts.get('uncompressed'),
+    )
 
     if dr.local():
         repo = dr.local()
         if qbase:
-            ui.note(_('stripping applied patches from destination '
-                      'repository\n'))
+            ui.note(
+                _('stripping applied patches from destination ' 'repository\n')
+            )
             strip(ui, repo, [qbase], update=False, backup=None)
         if not opts.get('noupdate'):
             ui.note(_('updating destination repository\n'))
             hg.update(repo, repo.changelog.tip())
 
-@command("qcommit|qci",
-         commands.table["commit|ci"][1],
-         _('hg qcommit [OPTION]... [FILE]...'),
-         helpcategory=command.CATEGORY_COMMITTING,
-         inferrepo=True)
+
+@command(
+    "qcommit|qci",
+    commands.table["commit|ci"][1],
+    _('hg qcommit [OPTION]... [FILE]...'),
+    helpcategory=command.CATEGORY_COMMITTING,
+    inferrepo=True,
+)
 def commit(ui, repo, *pats, **opts):
     """commit changes in the queue repository (DEPRECATED)
 
@@ -2579,21 +2884,29 @@
         raise error.Abort('no queue repository')
     commands.commit(r.ui, r, *pats, **opts)
 
-@command("qseries",
-         [('m', 'missing', None, _('print patches not in series')),
-         ] + seriesopts,
-          _('hg qseries [-ms]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qseries",
+    [('m', 'missing', None, _('print patches not in series')),] + seriesopts,
+    _('hg qseries [-ms]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def series(ui, repo, **opts):
     """print the entire series file
 
     Returns 0 on success."""
-    repo.mq.qseries(repo, missing=opts.get(r'missing'),
-                    summary=opts.get(r'summary'))
+    repo.mq.qseries(
+        repo, missing=opts.get(r'missing'), summary=opts.get(r'summary')
+    )
     return 0
 
-@command("qtop", seriesopts, _('hg qtop [-s]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qtop",
+    seriesopts,
+    _('hg qtop [-s]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def top(ui, repo, **opts):
     """print the name of the current patch
 
@@ -2605,14 +2918,24 @@
         t = 0
 
     if t:
-        q.qseries(repo, start=t - 1, length=1, status='A',
-                  summary=opts.get(r'summary'))
+        q.qseries(
+            repo,
+            start=t - 1,
+            length=1,
+            status='A',
+            summary=opts.get(r'summary'),
+        )
     else:
         ui.write(_("no patches applied\n"))
         return 1
 
-@command("qnext", seriesopts, _('hg qnext [-s]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qnext",
+    seriesopts,
+    _('hg qnext [-s]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def next(ui, repo, **opts):
     """print the name of the next pushable patch
 
@@ -2624,8 +2947,13 @@
         return 1
     q.qseries(repo, start=end, length=1, summary=opts.get(r'summary'))
 
-@command("qprev", seriesopts, _('hg qprev [-s]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qprev",
+    seriesopts,
+    _('hg qprev [-s]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def prev(ui, repo, **opts):
     """print the name of the preceding applied patch
 
@@ -2639,8 +2967,10 @@
         ui.write(_("no patches applied\n"))
         return 1
     idx = q.series.index(q.applied[-2].name)
-    q.qseries(repo, start=idx, length=1, status='A',
-              summary=opts.get(r'summary'))
+    q.qseries(
+        repo, start=idx, length=1, status='A', summary=opts.get(r'summary')
+    )
+
 
 def setupheaderopts(ui, opts):
     if not opts.get('user') and opts.get('currentuser'):
@@ -2648,20 +2978,25 @@
     if not opts.get('date') and opts.get('currentdate'):
         opts['date'] = "%d %d" % dateutil.makedate()
 
-@command("qnew",
-         [('e', 'edit', None, _('invoke editor on commit messages')),
-          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
-          ('g', 'git', None, _('use git extended diff format')),
-          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
-          ('u', 'user', '',
-           _('add "From: <USER>" to patch'), _('USER')),
-          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
-          ('d', 'date', '',
-           _('add "Date: <DATE>" to patch'), _('DATE'))
-          ] + cmdutil.walkopts + cmdutil.commitopts,
-         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
-         helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
-         inferrepo=True)
+
+@command(
+    "qnew",
+    [
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
+        ('g', 'git', None, _('use git extended diff format')),
+        ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
+        ('u', 'user', '', _('add "From: <USER>" to patch'), _('USER')),
+        ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
+        ('d', 'date', '', _('add "Date: <DATE>" to patch'), _('DATE')),
+    ]
+    + cmdutil.walkopts
+    + cmdutil.commitopts,
+    _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
+    helpcategory=command.CATEGORY_COMMITTING,
+    helpbasic=True,
+    inferrepo=True,
+)
 def new(ui, repo, patch, *args, **opts):
     """create a new patch
 
@@ -2696,23 +3031,52 @@
     q.savedirty()
     return 0
 
-@command("qrefresh",
-         [('e', 'edit', None, _('invoke editor on commit messages')),
-          ('g', 'git', None, _('use git extended diff format')),
-          ('s', 'short', None,
-           _('refresh only files already in the patch and specified files')),
-          ('U', 'currentuser', None,
-           _('add/update author field in patch with current user')),
-          ('u', 'user', '',
-           _('add/update author field in patch with given user'), _('USER')),
-          ('D', 'currentdate', None,
-           _('add/update date field in patch with current date')),
-          ('d', 'date', '',
-           _('add/update date field in patch with given date'), _('DATE'))
-          ] + cmdutil.walkopts + cmdutil.commitopts,
-         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
-         helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
-         inferrepo=True)
+
+@command(
+    "qrefresh",
+    [
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        ('g', 'git', None, _('use git extended diff format')),
+        (
+            's',
+            'short',
+            None,
+            _('refresh only files already in the patch and specified files'),
+        ),
+        (
+            'U',
+            'currentuser',
+            None,
+            _('add/update author field in patch with current user'),
+        ),
+        (
+            'u',
+            'user',
+            '',
+            _('add/update author field in patch with given user'),
+            _('USER'),
+        ),
+        (
+            'D',
+            'currentdate',
+            None,
+            _('add/update date field in patch with current date'),
+        ),
+        (
+            'd',
+            'date',
+            '',
+            _('add/update date field in patch with given date'),
+            _('DATE'),
+        ),
+    ]
+    + cmdutil.walkopts
+    + cmdutil.commitopts,
+    _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
+    helpcategory=command.CATEGORY_COMMITTING,
+    helpbasic=True,
+    inferrepo=True,
+)
 def refresh(ui, repo, *pats, **opts):
     """update the current patch
 
@@ -2743,11 +3107,15 @@
         q.savedirty()
         return ret
 
-@command("qdiff",
-         cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
-         _('hg qdiff [OPTION]... [FILE]...'),
-         helpcategory=command.CATEGORY_FILE_CONTENTS, helpbasic=True,
-         inferrepo=True)
+
+@command(
+    "qdiff",
+    cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
+    _('hg qdiff [OPTION]... [FILE]...'),
+    helpcategory=command.CATEGORY_FILE_CONTENTS,
+    helpbasic=True,
+    inferrepo=True,
+)
 def diff(ui, repo, *pats, **opts):
     """diff of the current patch and subsequent modifications
 
@@ -2767,12 +3135,17 @@
     repo.mq.diff(repo, pats, pycompat.byteskwargs(opts))
     return 0
 
-@command('qfold',
-         [('e', 'edit', None, _('invoke editor on commit messages')),
-          ('k', 'keep', None, _('keep folded patch files')),
-         ] + cmdutil.commitopts,
-         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
-         helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+
+@command(
+    'qfold',
+    [
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        ('k', 'keep', None, _('keep folded patch files')),
+    ]
+    + cmdutil.commitopts,
+    _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def fold(ui, repo, *files, **opts):
     """fold the named patches into the current patch
 
@@ -2805,8 +3178,9 @@
         if p in patches or p == parent:
             ui.warn(_('skipping already folded patch %s\n') % p)
         if q.isapplied(p):
-            raise error.Abort(_('qfold cannot fold already applied patch %s')
-                             % p)
+            raise error.Abort(
+                _('qfold cannot fold already applied patch %s') % p
+            )
         patches.append(p)
 
     for p in patches:
@@ -2831,18 +3205,27 @@
 
     diffopts = q.patchopts(q.diffopts(), *patches)
     with repo.wlock():
-        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
-                  editform='mq.qfold')
+        q.refresh(
+            repo,
+            msg=message,
+            git=diffopts.git,
+            edit=opts.get('edit'),
+            editform='mq.qfold',
+        )
         q.delete(repo, patches, opts)
         q.savedirty()
 
-@command("qgoto",
-         [('', 'keep-changes', None,
-           _('tolerate non-conflicting local changes')),
-          ('f', 'force', None, _('overwrite any local changes')),
-          ('', 'no-backup', None, _('do not save backup copies of files'))],
-         _('hg qgoto [OPTION]... PATCH'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qgoto",
+    [
+        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
+        ('f', 'force', None, _('overwrite any local changes')),
+        ('', 'no-backup', None, _('do not save backup copies of files')),
+    ],
+    _('hg qgoto [OPTION]... PATCH'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def goto(ui, repo, patch, **opts):
     '''push or pop patches until named patch is at top of stack
 
@@ -2854,19 +3237,34 @@
     nobackup = opts.get('no_backup')
     keepchanges = opts.get('keep_changes')
     if q.isapplied(patch):
-        ret = q.pop(repo, patch, force=opts.get('force'), nobackup=nobackup,
-                    keepchanges=keepchanges)
+        ret = q.pop(
+            repo,
+            patch,
+            force=opts.get('force'),
+            nobackup=nobackup,
+            keepchanges=keepchanges,
+        )
     else:
-        ret = q.push(repo, patch, force=opts.get('force'), nobackup=nobackup,
-                     keepchanges=keepchanges)
+        ret = q.push(
+            repo,
+            patch,
+            force=opts.get('force'),
+            nobackup=nobackup,
+            keepchanges=keepchanges,
+        )
     q.savedirty()
     return ret
 
-@command("qguard",
-         [('l', 'list', None, _('list all patches and guards')),
-          ('n', 'none', None, _('drop all guards'))],
-         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qguard",
+    [
+        ('l', 'list', None, _('list all patches and guards')),
+        ('n', 'none', None, _('drop all guards')),
+    ],
+    _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def guard(ui, repo, *args, **opts):
     '''set or print guards for a patch
 
@@ -2889,6 +3287,7 @@
 
     Returns 0 on success.
     '''
+
     def status(idx):
         guards = q.seriesguards[idx] or ['unguarded']
         if q.series[idx] in applied:
@@ -2910,14 +3309,16 @@
             if i != len(guards) - 1:
                 ui.write(' ')
         ui.write('\n')
+
     q = repo.mq
     applied = set(p.name for p in q.applied)
     patch = None
     args = list(args)
     if opts.get(r'list'):
         if args or opts.get(r'none'):
-            raise error.Abort(_('cannot mix -l/--list with options or '
-                               'arguments'))
+            raise error.Abort(
+                _('cannot mix -l/--list with options or arguments')
+            )
         for i in pycompat.xrange(len(q.series)):
             status(i)
         return
@@ -2938,8 +3339,13 @@
     else:
         status(q.series.index(q.lookup(patch)))
 
-@command("qheader", [], _('hg qheader [PATCH]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qheader",
+    [],
+    _('hg qheader [PATCH]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def header(ui, repo, patch=None):
     """print the header of the topmost or specified patch
 
@@ -2957,6 +3363,7 @@
 
     ui.write('\n'.join(ph.message) + '\n')
 
+
 def lastsavename(path):
     (directory, base) = os.path.split(path)
     names = os.listdir(directory)
@@ -2974,6 +3381,7 @@
         return (os.path.join(directory, maxname), maxindex)
     return (None, None)
 
+
 def savename(path):
     (last, index) = lastsavename(path)
     if last is None:
@@ -2981,23 +3389,29 @@
     newpath = path + ".%d" % (index + 1)
     return newpath
 
-@command("qpush",
-         [('', 'keep-changes', None,
-           _('tolerate non-conflicting local changes')),
-          ('f', 'force', None, _('apply on top of local changes')),
-          ('e', 'exact', None,
-           _('apply the target patch to its recorded parent')),
-          ('l', 'list', None, _('list patch name in commit text')),
-          ('a', 'all', None, _('apply all patches')),
-          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
-          ('n', 'name', '',
-           _('merge queue name (DEPRECATED)'), _('NAME')),
-          ('', 'move', None,
-           _('reorder patch series and apply only the patch')),
-          ('', 'no-backup', None, _('do not save backup copies of files'))],
-         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
-         helpbasic=True)
+
+@command(
+    "qpush",
+    [
+        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
+        ('f', 'force', None, _('apply on top of local changes')),
+        (
+            'e',
+            'exact',
+            None,
+            _('apply the target patch to its recorded parent'),
+        ),
+        ('l', 'list', None, _('list patch name in commit text')),
+        ('a', 'all', None, _('apply all patches')),
+        ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
+        ('n', 'name', '', _('merge queue name (DEPRECATED)'), _('NAME')),
+        ('', 'move', None, _('reorder patch series and apply only the patch')),
+        ('', 'no-backup', None, _('do not save backup copies of files')),
+    ],
+    _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+    helpbasic=True,
+)
 def push(ui, repo, patch=None, **opts):
     """push the next patch onto the stack
 
@@ -3023,23 +3437,34 @@
             return 1
         mergeq = queue(ui, repo.baseui, repo.path, newpath)
         ui.warn(_("merging with queue at: %s\n") % mergeq.path)
-    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
-                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
-                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
-                 keepchanges=opts.get('keep_changes'))
+    ret = q.push(
+        repo,
+        patch,
+        force=opts.get('force'),
+        list=opts.get('list'),
+        mergeq=mergeq,
+        all=opts.get('all'),
+        move=opts.get('move'),
+        exact=opts.get('exact'),
+        nobackup=opts.get('no_backup'),
+        keepchanges=opts.get('keep_changes'),
+    )
     return ret
 
-@command("qpop",
-         [('a', 'all', None, _('pop all patches')),
-          ('n', 'name', '',
-           _('queue name to pop (DEPRECATED)'), _('NAME')),
-          ('', 'keep-changes', None,
-           _('tolerate non-conflicting local changes')),
-          ('f', 'force', None, _('forget any local changes to patched files')),
-          ('', 'no-backup', None, _('do not save backup copies of files'))],
-         _('hg qpop [-a] [-f] [PATCH | INDEX]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
-         helpbasic=True)
+
+@command(
+    "qpop",
+    [
+        ('a', 'all', None, _('pop all patches')),
+        ('n', 'name', '', _('queue name to pop (DEPRECATED)'), _('NAME')),
+        ('', 'keep-changes', None, _('tolerate non-conflicting local changes')),
+        ('f', 'force', None, _('forget any local changes to patched files')),
+        ('', 'no-backup', None, _('do not save backup copies of files')),
+    ],
+    _('hg qpop [-a] [-f] [PATCH | INDEX]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+    helpbasic=True,
+)
 def pop(ui, repo, patch=None, **opts):
     """pop the current patch off the stack
 
@@ -3063,14 +3488,25 @@
         localupdate = False
     else:
         q = repo.mq
-    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
-                all=opts.get('all'), nobackup=opts.get('no_backup'),
-                keepchanges=opts.get('keep_changes'))
+    ret = q.pop(
+        repo,
+        patch,
+        force=opts.get('force'),
+        update=localupdate,
+        all=opts.get('all'),
+        nobackup=opts.get('no_backup'),
+        keepchanges=opts.get('keep_changes'),
+    )
     q.savedirty()
     return ret
 
-@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qrename|qmv",
+    [],
+    _('hg qrename PATCH1 [PATCH2]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def rename(ui, repo, patch, name=None, **opts):
     """rename a patch
 
@@ -3125,30 +3561,41 @@
 
     q.savedirty()
 
-@command("qrestore",
-         [('d', 'delete', None, _('delete save entry')),
-          ('u', 'update', None, _('update queue working directory'))],
-         _('hg qrestore [-d] [-u] REV'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qrestore",
+    [
+        ('d', 'delete', None, _('delete save entry')),
+        ('u', 'update', None, _('update queue working directory')),
+    ],
+    _('hg qrestore [-d] [-u] REV'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def restore(ui, repo, rev, **opts):
     """restore the queue state saved by a revision (DEPRECATED)
 
     This command is deprecated, use :hg:`rebase` instead."""
     rev = repo.lookup(rev)
     q = repo.mq
-    q.restore(repo, rev, delete=opts.get(r'delete'),
-              qupdate=opts.get(r'update'))
+    q.restore(
+        repo, rev, delete=opts.get(r'delete'), qupdate=opts.get(r'update')
+    )
     q.savedirty()
     return 0
 
-@command("qsave",
-         [('c', 'copy', None, _('copy patch directory')),
-          ('n', 'name', '',
-           _('copy directory name'), _('NAME')),
-          ('e', 'empty', None, _('clear queue status file')),
-          ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
-         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qsave",
+    [
+        ('c', 'copy', None, _('copy patch directory')),
+        ('n', 'name', '', _('copy directory name'), _('NAME')),
+        ('e', 'empty', None, _('clear queue status file')),
+        ('f', 'force', None, _('force copy')),
+    ]
+    + cmdutil.commitopts,
+    _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def save(ui, repo, **opts):
     """save current queue state (DEPRECATED)
 
@@ -3159,18 +3606,21 @@
     ret = q.save(repo, msg=message)
     if ret:
         return ret
-    q.savedirty() # save to .hg/patches before copying
+    q.savedirty()  # save to .hg/patches before copying
     if opts.get('copy'):
         path = q.path
         if opts.get('name'):
             newpath = os.path.join(q.basepath, opts.get('name'))
             if os.path.exists(newpath):
                 if not os.path.isdir(newpath):
-                    raise error.Abort(_('destination %s exists and is not '
-                                       'a directory') % newpath)
+                    raise error.Abort(
+                        _('destination %s exists and is not a directory')
+                        % newpath
+                    )
                 if not opts.get('force'):
-                    raise error.Abort(_('destination %s exists, '
-                                       'use -f to force') % newpath)
+                    raise error.Abort(
+                        _('destination %s exists, use -f to force') % newpath
+                    )
         else:
             newpath = savename(path)
         ui.warn(_("copy %s to %s\n") % (path, newpath))
@@ -3182,13 +3632,17 @@
     return 0
 
 
-@command("qselect",
-         [('n', 'none', None, _('disable all guards')),
-          ('s', 'series', None, _('list all guards in series file')),
-          ('', 'pop', None, _('pop to before first guarded applied patch')),
-          ('', 'reapply', None, _('pop, then reapply patches'))],
-         _('hg qselect [OPTION]... [GUARD]...'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+@command(
+    "qselect",
+    [
+        ('n', 'none', None, _('disable all guards')),
+        ('s', 'series', None, _('list all guards in series file')),
+        ('', 'pop', None, _('pop to before first guarded applied patch')),
+        ('', 'reapply', None, _('pop, then reapply patches')),
+    ],
+    _('hg qselect [OPTION]... [GUARD]...'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def select(ui, repo, *args, **opts):
     '''set or print guarded patches to push
 
@@ -3230,24 +3684,34 @@
     pushable = lambda i: q.pushable(q.applied[i].name)[0]
     if args or opts.get('none'):
         old_unapplied = q.unapplied(repo)
-        old_guarded = [i for i in pycompat.xrange(len(q.applied))
-                       if not pushable(i)]
+        old_guarded = [
+            i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
+        ]
         q.setactive(args)
         q.savedirty()
         if not args:
             ui.status(_('guards deactivated\n'))
         if not opts.get('pop') and not opts.get('reapply'):
             unapplied = q.unapplied(repo)
-            guarded = [i for i in pycompat.xrange(len(q.applied))
-                       if not pushable(i)]
+            guarded = [
+                i for i in pycompat.xrange(len(q.applied)) if not pushable(i)
+            ]
             if len(unapplied) != len(old_unapplied):
-                ui.status(_('number of unguarded, unapplied patches has '
-                            'changed from %d to %d\n') %
-                          (len(old_unapplied), len(unapplied)))
+                ui.status(
+                    _(
+                        'number of unguarded, unapplied patches has '
+                        'changed from %d to %d\n'
+                    )
+                    % (len(old_unapplied), len(unapplied))
+                )
             if len(guarded) != len(old_guarded):
-                ui.status(_('number of guarded, applied patches has changed '
-                            'from %d to %d\n') %
-                          (len(old_guarded), len(guarded)))
+                ui.status(
+                    _(
+                        'number of guarded, applied patches has changed '
+                        'from %d to %d\n'
+                    )
+                    % (len(old_guarded), len(guarded))
+                )
     elif opts.get('series'):
         guards = {}
         noguards = 0
@@ -3295,10 +3759,13 @@
         finally:
             q.savedirty()
 
-@command("qfinish",
-         [('a', 'applied', None, _('finish all applied changesets'))],
-         _('hg qfinish [-a] [REV]...'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qfinish",
+    [('a', 'applied', None, _('finish all applied changesets'))],
+    _('hg qfinish [-a] [REV]...'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def finish(ui, repo, *revrange, **opts):
     """move applied patches into repository history
 
@@ -3338,16 +3805,20 @@
         q.savedirty()
     return 0
 
-@command("qqueue",
-         [('l', 'list', False, _('list all available queues')),
-          ('', 'active', False, _('print name of active queue')),
-          ('c', 'create', False, _('create new queue')),
-          ('', 'rename', False, _('rename active queue')),
-          ('', 'delete', False, _('delete reference to queue')),
-          ('', 'purge', False, _('delete queue, and remove patch dir')),
-         ],
-         _('[OPTION] [QUEUE]'),
-         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+
+@command(
+    "qqueue",
+    [
+        ('l', 'list', False, _('list all available queues')),
+        ('', 'active', False, _('print name of active queue')),
+        ('c', 'create', False, _('create new queue')),
+        ('', 'rename', False, _('rename active queue')),
+        ('', 'delete', False, _('delete reference to queue')),
+        ('', 'purge', False, _('delete queue, and remove patch dir')),
+    ],
+    _('[OPTION] [QUEUE]'),
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def qqueue(ui, repo, name=None, **opts):
     '''manage multiple patch queues
 
@@ -3405,8 +3876,12 @@
 
     def _setactive(name):
         if q.applied:
-            raise error.Abort(_('new queue created, but cannot make active '
-                               'as patches are applied'))
+            raise error.Abort(
+                _(
+                    'new queue created, but cannot make active '
+                    'as patches are applied'
+                )
+            )
         _setactivenocheck(name)
 
     def _setactivenocheck(name):
@@ -3465,7 +3940,8 @@
 
     if not _validname(name):
         raise error.Abort(
-                _('invalid queue name, may not contain the characters ":\\/."'))
+            _('invalid queue name, may not contain the characters ":\\/."')
+        )
 
     with repo.wlock():
         existing = _getqueues()
@@ -3480,8 +3956,9 @@
         elif opts.get('rename'):
             current = _getcurrent()
             if name == current:
-                raise error.Abort(_('can\'t rename "%s" to its current name')
-                                  % name)
+                raise error.Abort(
+                    _('can\'t rename "%s" to its current name') % name
+                )
             if name in existing:
                 raise error.Abort(_('queue "%s" already exists') % name)
 
@@ -3489,8 +3966,9 @@
             newdir = _queuedir(name)
 
             if os.path.exists(newdir):
-                raise error.Abort(_('non-queue directory "%s" already exists') %
-                        newdir)
+                raise error.Abort(
+                    _('non-queue directory "%s" already exists') % newdir
+                )
 
             fh = repo.vfs('patches.queues.new', 'w')
             for queue in existing:
@@ -3516,6 +3994,7 @@
                 raise error.Abort(_('use --create to create a new queue'))
             _setactive(name)
 
+
 def mqphasedefaults(repo, roots):
     """callback used to set mq changeset as secret when no phase data exists"""
     if repo.mq.applied:
@@ -3527,6 +4006,7 @@
         roots[mqphase].add(qbase.node())
     return roots
 
+
 def reposetup(ui, repo):
     class mqrepo(repo.__class__):
         @localrepo.unfilteredpropertycache
@@ -3546,16 +4026,25 @@
                 if any(p in patches for p in parents):
                     raise error.Abort(errmsg)
 
-        def commit(self, text="", user=None, date=None, match=None,
-                   force=False, editor=False, extra=None):
+        def commit(
+            self,
+            text="",
+            user=None,
+            date=None,
+            match=None,
+            force=False,
+            editor=False,
+            extra=None,
+        ):
             if extra is None:
                 extra = {}
             self.abortifwdirpatched(
-                _('cannot commit over an applied mq patch'),
-                force)
-
-            return super(mqrepo, self).commit(text, user, date, match, force,
-                                              editor, extra)
+                _('cannot commit over an applied mq patch'), force
+            )
+
+            return super(mqrepo, self).commit(
+                text, user, date, match, force, editor, extra
+            )
 
         def checkpush(self, pushop):
             if self.mq.applied and self.mq.checkapplied and not pushop.force:
@@ -3591,8 +4080,10 @@
                 # for now ignore filtering business
                 self.unfiltered().changelog.rev(mqtags[-1][0])
             except error.LookupError:
-                self.ui.warn(_('mq status file refers to unknown node %s\n')
-                             % short(mqtags[-1][0]))
+                self.ui.warn(
+                    _('mq status file refers to unknown node %s\n')
+                    % short(mqtags[-1][0])
+                )
                 return result
 
             # do not add fake tags for filtered revisions
@@ -3607,8 +4098,10 @@
             tags = result[0]
             for patch in mqtags:
                 if patch[1] in tags:
-                    self.ui.warn(_('tag %s overrides mq patch of the same '
-                                   'name\n') % patch[1])
+                    self.ui.warn(
+                        _('tag %s overrides mq patch of the same name\n')
+                        % patch[1]
+                    )
                 else:
                     tags[patch[1]] = patch[0]
 
@@ -3619,13 +4112,17 @@
 
         repo._phasedefaults.append(mqphasedefaults)
 
+
 def mqimport(orig, ui, repo, *args, **kwargs):
-    if (util.safehasattr(repo, 'abortifwdirpatched')
-        and not kwargs.get(r'no_commit', False)):
-        repo.abortifwdirpatched(_('cannot import over an applied patch'),
-                                   kwargs.get(r'force'))
+    if util.safehasattr(repo, 'abortifwdirpatched') and not kwargs.get(
+        r'no_commit', False
+    ):
+        repo.abortifwdirpatched(
+            _('cannot import over an applied patch'), kwargs.get(r'force')
+        )
     return orig(ui, repo, *args, **kwargs)
 
+
 def mqinit(orig, ui, *args, **kwargs):
     mq = kwargs.pop(r'mq', None)
 
@@ -3635,16 +4132,19 @@
     if args:
         repopath = args[0]
         if not hg.islocal(repopath):
-            raise error.Abort(_('only a local queue repository '
-                               'may be initialized'))
+            raise error.Abort(
+                _('only a local queue repository may be initialized')
+            )
     else:
         repopath = cmdutil.findrepo(encoding.getcwd())
         if not repopath:
-            raise error.Abort(_('there is no Mercurial repository here '
-                               '(.hg not found)'))
+            raise error.Abort(
+                _('there is no Mercurial repository here (.hg not found)')
+            )
     repo = hg.repository(ui, repopath)
     return qinit(ui, repo, True)
 
+
 def mqcommand(orig, ui, repo, *args, **kwargs):
     """Add --mq option to operate on patch repository instead of main"""
 
@@ -3660,6 +4160,7 @@
         raise error.Abort(_('no queue repository'))
     return orig(r.ui, r, *args, **kwargs)
 
+
 def summaryhook(ui, repo):
     q = repo.mq
     m = []
@@ -3675,8 +4176,10 @@
         # i18n: column positioning for "hg summary"
         ui.note(_("mq:     (empty queue)\n"))
 
+
 revsetpredicate = registrar.revsetpredicate()
 
+
 @revsetpredicate('mq()')
 def revsetmq(repo, subset, x):
     """Changesets managed by MQ.
@@ -3685,9 +4188,11 @@
     applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = [revsetmq]
 
+
 def extsetup(ui):
     # Ensure mq wrappers are called first, regardless of extension load order by
     # NOT wrapping in uisetup() and instead deferring to init stage two here.
@@ -3714,10 +4219,13 @@
         if extmodule.__file__ != __file__:
             dotable(getattr(extmodule, 'cmdtable', {}))
 
-colortable = {'qguard.negative': 'red',
-              'qguard.positive': 'yellow',
-              'qguard.unguarded': 'green',
-              'qseries.applied': 'blue bold underline',
-              'qseries.guarded': 'black bold',
-              'qseries.missing': 'red bold',
-              'qseries.unapplied': 'black bold'}
+
+colortable = {
+    'qguard.negative': 'red',
+    'qguard.positive': 'yellow',
+    'qguard.unguarded': 'green',
+    'qseries.applied': 'blue bold underline',
+    'qseries.guarded': 'black bold',
+    'qseries.missing': 'red bold',
+    'qseries.unapplied': 'black bold',
+}
--- a/hgext/narrow/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,9 +19,7 @@
     registrar,
 )
 
-from mercurial.interfaces import (
-    repository,
-)
+from mercurial.interfaces import repository
 
 from . import (
     narrowbundle2,
@@ -42,17 +40,21 @@
 # of this writing in late 2017, all repositories large enough for
 # ellipsis nodes to be a hard requirement also enforce strictly linear
 # history for other scaling reasons.
-configitem('experimental', 'narrowservebrokenellipses',
-           default=False,
-           alias=[('narrow', 'serveellipses')],
+configitem(
+    'experimental',
+    'narrowservebrokenellipses',
+    default=False,
+    alias=[('narrow', 'serveellipses')],
 )
 
 # Export the commands table for Mercurial to see.
 cmdtable = narrowcommands.table
 
+
 def featuresetup(ui, features):
     features.add(repository.NARROW_REQUIREMENT)
 
+
 def uisetup(ui):
     """Wraps user-facing mercurial commands with narrow-aware versions."""
     localrepo.featuresetupfuncs.add(featuresetup)
@@ -60,6 +62,7 @@
     narrowcommands.setup()
     narrowwirepeer.uisetup()
 
+
 def reposetup(ui, repo):
     """Wraps local repositories with narrow repo support."""
     if not repo.local():
@@ -70,5 +73,6 @@
         narrowrepo.wraprepo(repo)
         narrowwirepeer.reposetup(repo)
 
+
 templatekeyword = narrowtemplates.templatekeyword
 revsetpredicate = narrowtemplates.revsetpredicate
--- a/hgext/narrow/narrowbundle2.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowbundle2.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,12 +26,8 @@
     util,
     wireprototypes,
 )
-from mercurial.interfaces import (
-    repository,
-)
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.interfaces import repository
+from mercurial.utils import stringutil
 
 _NARROWACL_SECTION = 'narrowacl'
 _CHANGESPECPART = 'narrow:changespec'
@@ -41,20 +37,30 @@
 _SPECPART_EXCLUDE = 'exclude'
 _KILLNODESIGNAL = 'KILL'
 _DONESIGNAL = 'DONE'
-_ELIDEDCSHEADER = '>20s20s20sl' # cset id, p1, p2, len(text)
-_ELIDEDMFHEADER = '>20s20s20s20sl' # manifest id, p1, p2, link id, len(text)
+_ELIDEDCSHEADER = '>20s20s20sl'  # cset id, p1, p2, len(text)
+_ELIDEDMFHEADER = '>20s20s20s20sl'  # manifest id, p1, p2, link id, len(text)
 _CSHEADERSIZE = struct.calcsize(_ELIDEDCSHEADER)
 _MFHEADERSIZE = struct.calcsize(_ELIDEDMFHEADER)
 
 # Serve a changegroup for a client with a narrow clone.
-def getbundlechangegrouppart_narrow(bundler, repo, source,
-                                    bundlecaps=None, b2caps=None, heads=None,
-                                    common=None, **kwargs):
+def getbundlechangegrouppart_narrow(
+    bundler,
+    repo,
+    source,
+    bundlecaps=None,
+    b2caps=None,
+    heads=None,
+    common=None,
+    **kwargs
+):
     assert repo.ui.configbool('experimental', 'narrowservebrokenellipses')
 
     cgversions = b2caps.get('changegroup')
-    cgversions = [v for v in cgversions
-                  if v in changegroup.supportedoutgoingversions(repo)]
+    cgversions = [
+        v
+        for v in cgversions
+        if v in changegroup.supportedoutgoingversions(repo)
+    ]
     if not cgversions:
         raise ValueError(_('no common changegroup version'))
     version = max(cgversions)
@@ -64,14 +70,37 @@
     newinclude = sorted(filter(bool, kwargs.get(r'includepats', [])))
     newexclude = sorted(filter(bool, kwargs.get(r'excludepats', [])))
     known = {bin(n) for n in kwargs.get(r'known', [])}
-    generateellipsesbundle2(bundler, repo, oldinclude, oldexclude, newinclude,
-                            newexclude, version, common, heads, known,
-                            kwargs.get(r'depth', None))
+    generateellipsesbundle2(
+        bundler,
+        repo,
+        oldinclude,
+        oldexclude,
+        newinclude,
+        newexclude,
+        version,
+        common,
+        heads,
+        known,
+        kwargs.get(r'depth', None),
+    )
+
 
-def generateellipsesbundle2(bundler, repo, oldinclude, oldexclude, newinclude,
-                            newexclude, version, common, heads, known, depth):
-    newmatch = narrowspec.match(repo.root, include=newinclude,
-                                exclude=newexclude)
+def generateellipsesbundle2(
+    bundler,
+    repo,
+    oldinclude,
+    oldexclude,
+    newinclude,
+    newexclude,
+    version,
+    common,
+    heads,
+    known,
+    depth,
+):
+    newmatch = narrowspec.match(
+        repo.root, include=newinclude, exclude=newexclude
+    )
     if depth is not None:
         depth = int(depth)
         if depth < 1:
@@ -104,21 +133,27 @@
         # what to strip, instead of us explicitly sending every
         # single node.
         deadrevs = known
+
         def genkills():
             for r in deadrevs:
                 yield _KILLNODESIGNAL
                 yield repo.changelog.node(r)
             yield _DONESIGNAL
+
         bundler.newpart(_CHANGESPECPART, data=genkills())
         newvisit, newfull, newellipsis = exchange._computeellipsis(
-            repo, set(), common, known, newmatch)
+            repo, set(), common, known, newmatch
+        )
         if newvisit:
-            packer = changegroup.getbundler(version, repo,
-                                            matcher=newmatch,
-                                            ellipses=True,
-                                            shallow=depth is not None,
-                                            ellipsisroots=newellipsis,
-                                            fullnodes=newfull)
+            packer = changegroup.getbundler(
+                version,
+                repo,
+                matcher=newmatch,
+                ellipses=True,
+                shallow=depth is not None,
+                ellipsisroots=newellipsis,
+                fullnodes=newfull,
+            )
             cgdata = packer.generate(common, newvisit, False, 'narrow_widen')
 
             part = bundler.newpart('changegroup', data=cgdata)
@@ -127,16 +162,20 @@
                 part.addparam('treemanifest', '1')
 
     visitnodes, relevant_nodes, ellipsisroots = exchange._computeellipsis(
-        repo, common, heads, set(), newmatch, depth=depth)
+        repo, common, heads, set(), newmatch, depth=depth
+    )
 
     repo.ui.debug('Found %d relevant revs\n' % len(relevant_nodes))
     if visitnodes:
-        packer = changegroup.getbundler(version, repo,
-                                        matcher=newmatch,
-                                        ellipses=True,
-                                        shallow=depth is not None,
-                                        ellipsisroots=ellipsisroots,
-                                        fullnodes=relevant_nodes)
+        packer = changegroup.getbundler(
+            version,
+            repo,
+            matcher=newmatch,
+            ellipses=True,
+            shallow=depth is not None,
+            ellipsisroots=ellipsisroots,
+            fullnodes=relevant_nodes,
+        )
         cgdata = packer.generate(common, visitnodes, False, 'narrow_widen')
 
         part = bundler.newpart('changegroup', data=cgdata)
@@ -144,6 +183,7 @@
         if 'treemanifest' in repo.requirements:
             part.addparam('treemanifest', '1')
 
+
 @bundle2.parthandler(_SPECPART, (_SPECPART_INCLUDE, _SPECPART_EXCLUDE))
 def _handlechangespec_2(op, inpart):
     # XXX: This bundle2 handling is buggy and should be removed after hg5.2 is
@@ -161,6 +201,7 @@
     op.repo.setnarrowpats(includepats, excludepats)
     narrowspec.copytoworkingcopy(op.repo)
 
+
 @bundle2.parthandler(_RESSPECS)
 def _handlenarrowspecs(op, inpart):
     data = inpart.read()
@@ -176,6 +217,7 @@
     op.repo.setnarrowpats(includepats, excludepats)
     narrowspec.copytoworkingcopy(op.repo)
 
+
 @bundle2.parthandler(_CHANGESPECPART)
 def _handlechangespec(op, inpart):
     repo = op.repo
@@ -199,18 +241,22 @@
                 clkills.add(ck)
         else:
             raise error.Abort(
-                _('unexpected changespec node chunk type: %s') % chunksignal)
+                _('unexpected changespec node chunk type: %s') % chunksignal
+            )
         chunksignal = changegroup.readexactly(inpart, 4)
 
     if clkills:
         # preserve bookmarks that repair.strip() would otherwise strip
         op._bookmarksbackup = repo._bookmarks
+
         class dummybmstore(dict):
             def applychanges(self, repo, tr, changes):
                 pass
+
         localrepo.localrepository._bookmarks.set(repo, dummybmstore())
-        chgrpfile = repair.strip(op.ui, repo, list(clkills), backup=True,
-                                 topic='widen')
+        chgrpfile = repair.strip(
+            op.ui, repo, list(clkills), backup=True, topic='widen'
+        )
         if chgrpfile:
             op._widen_uninterr = repo.ui.uninterruptible()
             op._widen_uninterr.__enter__()
@@ -223,6 +269,7 @@
     if util.safehasattr(repo, 'setnewnarrowpats'):
         repo.setnewnarrowpats()
 
+
 def handlechangegroup_widen(op, inpart):
     """Changegroup exchange handler which restores temporarily-stripped nodes"""
     # We saved a bundle with stripped node data we must now restore.
@@ -257,13 +304,16 @@
             undovfs.unlink(undofile)
         except OSError as e:
             if e.errno != errno.ENOENT:
-                ui.warn(_('error removing %s: %s\n') %
-                        (undovfs.join(undofile), stringutil.forcebytestr(e)))
+                ui.warn(
+                    _('error removing %s: %s\n')
+                    % (undovfs.join(undofile), stringutil.forcebytestr(e))
+                )
 
     # Remove partial backup only if there were no exceptions
     op._widen_uninterr.__exit__(None, None, None)
     vfs.unlink(chgrpfile)
 
+
 def setup():
     """Enable narrow repo support in bundle2-related extension points."""
     getbundleargs = wireprototypes.GETBUNDLE_ARGUMENTS
@@ -276,27 +326,32 @@
 
     # Extend changegroup serving to handle requests from narrow clients.
     origcgfn = exchange.getbundle2partsmapping['changegroup']
+
     def wrappedcgfn(*args, **kwargs):
         repo = args[1]
         if repo.ui.has_section(_NARROWACL_SECTION):
             kwargs = exchange.applynarrowacl(repo, kwargs)
 
-        if (kwargs.get(r'narrow', False) and
-            repo.ui.configbool('experimental', 'narrowservebrokenellipses')):
+        if kwargs.get(r'narrow', False) and repo.ui.configbool(
+            'experimental', 'narrowservebrokenellipses'
+        ):
             getbundlechangegrouppart_narrow(*args, **kwargs)
         else:
             origcgfn(*args, **kwargs)
+
     exchange.getbundle2partsmapping['changegroup'] = wrappedcgfn
 
     # Extend changegroup receiver so client can fixup after widen requests.
     origcghandler = bundle2.parthandlermapping['changegroup']
+
     def wrappedcghandler(op, inpart):
         origcghandler(op, inpart)
         if util.safehasattr(op, '_widen_bundle'):
             handlechangegroup_widen(op, inpart)
         if util.safehasattr(op, '_bookmarksbackup'):
-            localrepo.localrepository._bookmarks.set(op.repo,
-                                                     op._bookmarksbackup)
+            localrepo.localrepository._bookmarks.set(
+                op.repo, op._bookmarksbackup
+            )
             del op._bookmarksbackup
 
     wrappedcghandler.params = origcghandler.params
--- a/hgext/narrow/narrowcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,37 +30,45 @@
     util,
     wireprototypes,
 )
-from mercurial.interfaces import (
-    repository,
-)
+from mercurial.interfaces import repository
 
 table = {}
 command = registrar.command(table)
 
+
 def setup():
     """Wraps user-facing mercurial commands with narrow-aware versions."""
 
     entry = extensions.wrapcommand(commands.table, 'clone', clonenarrowcmd)
-    entry[1].append(('', 'narrow', None,
-                     _("create a narrow clone of select files")))
-    entry[1].append(('', 'depth', '',
-                     _("limit the history fetched by distance from heads")))
-    entry[1].append(('', 'narrowspec', '',
-                     _("read narrowspecs from file")))
+    entry[1].append(
+        ('', 'narrow', None, _("create a narrow clone of select files"))
+    )
+    entry[1].append(
+        ('', 'depth', '', _("limit the history fetched by distance from heads"))
+    )
+    entry[1].append(('', 'narrowspec', '', _("read narrowspecs from file")))
     # TODO(durin42): unify sparse/narrow --include/--exclude logic a bit
     if 'sparse' not in extensions.enabled():
-        entry[1].append(('', 'include', [],
-                         _("specifically fetch this file/directory")))
+        entry[1].append(
+            ('', 'include', [], _("specifically fetch this file/directory"))
+        )
         entry[1].append(
-            ('', 'exclude', [],
-             _("do not fetch this file/directory, even if included")))
+            (
+                '',
+                'exclude',
+                [],
+                _("do not fetch this file/directory, even if included"),
+            )
+        )
 
     entry = extensions.wrapcommand(commands.table, 'pull', pullnarrowcmd)
-    entry[1].append(('', 'depth', '',
-                     _("limit the history fetched by distance from heads")))
+    entry[1].append(
+        ('', 'depth', '', _("limit the history fetched by distance from heads"))
+    )
 
     extensions.wrapcommand(commands.table, 'archive', archivenarrowcmd)
 
+
 def clonenarrowcmd(orig, ui, repo, *args, **opts):
     """Wraps clone command, so 'hg clone' first wraps localrepo.clone()."""
     opts = pycompat.byteskwargs(opts)
@@ -73,13 +81,19 @@
         try:
             fdata = util.readfile(filepath)
         except IOError as inst:
-            raise error.Abort(_("cannot read narrowspecs from '%s': %s") %
-                              (filepath, encoding.strtolocal(inst.strerror)))
+            raise error.Abort(
+                _("cannot read narrowspecs from '%s': %s")
+                % (filepath, encoding.strtolocal(inst.strerror))
+            )
 
         includes, excludes, profiles = sparse.parseconfig(ui, fdata, 'narrow')
         if profiles:
-            raise error.Abort(_("cannot specify other files using '%include' in"
-                                " narrowspec"))
+            raise error.Abort(
+                _(
+                    "cannot specify other files using '%include' in"
+                    " narrowspec"
+                )
+            )
 
         narrowspec.validatepatterns(includes)
         narrowspec.validatepatterns(excludes)
@@ -90,17 +104,21 @@
         opts['exclude'].extend(excludes)
 
     if opts['narrow']:
+
         def pullbundle2extraprepare_widen(orig, pullop, kwargs):
             orig(pullop, kwargs)
 
             if opts.get('depth'):
                 kwargs['depth'] = opts['depth']
-        wrappedextraprepare = extensions.wrappedfunction(exchange,
-            '_pullbundle2extraprepare', pullbundle2extraprepare_widen)
+
+        wrappedextraprepare = extensions.wrappedfunction(
+            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+        )
 
     with wrappedextraprepare:
         return orig(ui, repo, *args, **pycompat.strkwargs(opts))
 
+
 def pullnarrowcmd(orig, ui, repo, *args, **opts):
     """Wraps pull command to allow modifying narrow spec."""
     wrappedextraprepare = util.nullcontextmanager()
@@ -110,12 +128,15 @@
             orig(pullop, kwargs)
             if opts.get(r'depth'):
                 kwargs['depth'] = opts[r'depth']
-        wrappedextraprepare = extensions.wrappedfunction(exchange,
-            '_pullbundle2extraprepare', pullbundle2extraprepare_widen)
+
+        wrappedextraprepare = extensions.wrappedfunction(
+            exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+        )
 
     with wrappedextraprepare:
         return orig(ui, repo, *args, **opts)
 
+
 def archivenarrowcmd(orig, ui, repo, *args, **opts):
     """Wraps archive command to narrow the default includes."""
     if repository.NARROW_REQUIREMENT in repo.requirements:
@@ -123,13 +144,15 @@
         includes = set(opts.get(r'include', []))
         excludes = set(opts.get(r'exclude', []))
         includes, excludes, unused_invalid = narrowspec.restrictpatterns(
-            includes, excludes, repo_includes, repo_excludes)
+            includes, excludes, repo_includes, repo_excludes
+        )
         if includes:
             opts[r'include'] = includes
         if excludes:
             opts[r'exclude'] = excludes
     return orig(ui, repo, *args, **opts)
 
+
 def pullbundle2extraprepare(orig, pullop, kwargs):
     repo = pullop.repo
     if repository.NARROW_REQUIREMENT not in repo.requirements:
@@ -149,20 +172,34 @@
     # calculate known nodes only in ellipses cases because in non-ellipses cases
     # we have all the nodes
     if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
-        kwargs['known'] = [node.hex(ctx.node()) for ctx in
-                           repo.set('::%ln', pullop.common)
-                           if ctx.node() != node.nullid]
+        kwargs['known'] = [
+            node.hex(ctx.node())
+            for ctx in repo.set('::%ln', pullop.common)
+            if ctx.node() != node.nullid
+        ]
         if not kwargs['known']:
             # Mercurial serializes an empty list as '' and deserializes it as
             # [''], so delete it instead to avoid handling the empty string on
             # the server.
             del kwargs['known']
 
-extensions.wrapfunction(exchange,'_pullbundle2extraprepare',
-                        pullbundle2extraprepare)
+
+extensions.wrapfunction(
+    exchange, '_pullbundle2extraprepare', pullbundle2extraprepare
+)
+
 
-def _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes,
-            newincludes, newexcludes, force):
+def _narrow(
+    ui,
+    repo,
+    remote,
+    commoninc,
+    oldincludes,
+    oldexcludes,
+    newincludes,
+    newexcludes,
+    force,
+):
     oldmatch = narrowspec.match(repo.root, oldincludes, oldexcludes)
     newmatch = narrowspec.match(repo.root, newincludes, newexcludes)
 
@@ -170,8 +207,7 @@
     # commits. We will then check that the local-only commits don't
     # have any changes to files that will be untracked.
     unfi = repo.unfiltered()
-    outgoing = discovery.findcommonoutgoing(unfi, remote,
-                                            commoninc=commoninc)
+    outgoing = discovery.findcommonoutgoing(unfi, remote, commoninc=commoninc)
     ui.status(_('looking for local changes to affected paths\n'))
     localnodes = []
     for n in itertools.chain(outgoing.missing, outgoing.excluded):
@@ -179,11 +215,16 @@
             localnodes.append(n)
     revstostrip = unfi.revs('descendants(%ln)', localnodes)
     hiddenrevs = repoview.filterrevs(repo, 'visible')
-    visibletostrip = list(repo.changelog.node(r)
-                          for r in (revstostrip - hiddenrevs))
+    visibletostrip = list(
+        repo.changelog.node(r) for r in (revstostrip - hiddenrevs)
+    )
     if visibletostrip:
-        ui.status(_('The following changeset(s) or their ancestors have '
-                    'local changes not on the remote:\n'))
+        ui.status(
+            _(
+                'The following changeset(s) or their ancestors have '
+                'local changes not on the remote:\n'
+            )
+        )
         maxnodes = 10
         if ui.verbose or len(visibletostrip) <= maxnodes:
             for n in visibletostrip:
@@ -191,20 +232,26 @@
         else:
             for n in visibletostrip[:maxnodes]:
                 ui.status('%s\n' % node.short(n))
-            ui.status(_('...and %d more, use --verbose to list all\n') %
-                      (len(visibletostrip) - maxnodes))
+            ui.status(
+                _('...and %d more, use --verbose to list all\n')
+                % (len(visibletostrip) - maxnodes)
+            )
         if not force:
-            raise error.Abort(_('local changes found'),
-                              hint=_('use --force-delete-local-changes to '
-                                     'ignore'))
+            raise error.Abort(
+                _('local changes found'),
+                hint=_('use --force-delete-local-changes to ignore'),
+            )
 
     with ui.uninterruptible():
         if revstostrip:
             tostrip = [unfi.changelog.node(r) for r in revstostrip]
             if repo['.'].node() in tostrip:
                 # stripping working copy, so move to a different commit first
-                urev = max(repo.revs('(::%n) - %ln + null',
-                                     repo['.'].node(), visibletostrip))
+                urev = max(
+                    repo.revs(
+                        '(::%n) - %ln + null', repo['.'].node(), visibletostrip
+                    )
+                )
                 hg.clean(repo, urev)
             overrides = {('devel', 'strip-obsmarkers'): False}
             with ui.configoverride(overrides, 'narrow'):
@@ -247,8 +294,17 @@
 
         repo.destroyed()
 
-def _widen(ui, repo, remote, commoninc, oldincludes, oldexcludes,
-           newincludes, newexcludes):
+
+def _widen(
+    ui,
+    repo,
+    remote,
+    commoninc,
+    oldincludes,
+    oldexcludes,
+    newincludes,
+    newexcludes,
+):
     # for now we assume that if a server has ellipses enabled, we will be
     # exchanging ellipses nodes. In future we should add ellipses as a client
     # side requirement (maybe) to distinguish a client is shallow or not and
@@ -256,26 +312,33 @@
     # Theoretically a non-ellipses repo should be able to use narrow
     # functionality from an ellipses enabled server
     remotecap = remote.capabilities()
-    ellipsesremote = any(cap in remotecap
-                         for cap in wireprototypes.SUPPORTED_ELLIPSESCAP)
+    ellipsesremote = any(
+        cap in remotecap for cap in wireprototypes.SUPPORTED_ELLIPSESCAP
+    )
 
     # check whether we are talking to a server which supports old version of
     # ellipses capabilities
-    isoldellipses = (ellipsesremote and wireprototypes.ELLIPSESCAP1 in
-                     remotecap and wireprototypes.ELLIPSESCAP not in remotecap)
+    isoldellipses = (
+        ellipsesremote
+        and wireprototypes.ELLIPSESCAP1 in remotecap
+        and wireprototypes.ELLIPSESCAP not in remotecap
+    )
 
     def pullbundle2extraprepare_widen(orig, pullop, kwargs):
         orig(pullop, kwargs)
         # The old{in,ex}cludepats have already been set by orig()
         kwargs['includepats'] = newincludes
         kwargs['excludepats'] = newexcludes
-    wrappedextraprepare = extensions.wrappedfunction(exchange,
-        '_pullbundle2extraprepare', pullbundle2extraprepare_widen)
+
+    wrappedextraprepare = extensions.wrappedfunction(
+        exchange, '_pullbundle2extraprepare', pullbundle2extraprepare_widen
+    )
 
     # define a function that narrowbundle2 can call after creating the
     # backup bundle, but before applying the bundle from the server
     def setnewnarrowpats():
         repo.setnarrowpats(newincludes, newexcludes)
+
     repo.setnewnarrowpats = setnewnarrowpats
     # silence the devel-warning of applying an empty changegroup
     overrides = {('devel', 'all-warnings'): False}
@@ -293,25 +356,31 @@
         else:
             known = []
             if ellipsesremote:
-                known = [ctx.node() for ctx in
-                         repo.set('::%ln', common)
-                         if ctx.node() != node.nullid]
+                known = [
+                    ctx.node()
+                    for ctx in repo.set('::%ln', common)
+                    if ctx.node() != node.nullid
+                ]
             with remote.commandexecutor() as e:
-                bundle = e.callcommand('narrow_widen', {
-                    'oldincludes': oldincludes,
-                    'oldexcludes': oldexcludes,
-                    'newincludes': newincludes,
-                    'newexcludes': newexcludes,
-                    'cgversion': '03',
-                    'commonheads': common,
-                    'known': known,
-                    'ellipses': ellipsesremote,
-                }).result()
+                bundle = e.callcommand(
+                    'narrow_widen',
+                    {
+                        'oldincludes': oldincludes,
+                        'oldexcludes': oldexcludes,
+                        'newincludes': newincludes,
+                        'newexcludes': newexcludes,
+                        'cgversion': '03',
+                        'commonheads': common,
+                        'known': known,
+                        'ellipses': ellipsesremote,
+                    },
+                ).result()
 
             trmanager = exchange.transactionmanager(repo, 'widen', remote.url())
             with trmanager, repo.ui.configoverride(overrides, 'widen'):
-                op = bundle2.bundleoperation(repo, trmanager.transaction,
-                                             source='widen')
+                op = bundle2.bundleoperation(
+                    repo, trmanager.transaction, source='widen'
+                )
                 # TODO: we should catch error.Abort here
                 bundle2.processbundle(repo, bundle, op=op)
 
@@ -324,23 +393,40 @@
             narrowspec.updateworkingcopy(repo)
             narrowspec.copytoworkingcopy(repo)
 
+
 # TODO(rdamazio): Make new matcher format and update description
-@command('tracked',
-    [('', 'addinclude', [], _('new paths to include')),
-     ('', 'removeinclude', [], _('old paths to no longer include')),
-     ('', 'auto-remove-includes', False,
-      _('automatically choose unused includes to remove')),
-     ('', 'addexclude', [], _('new paths to exclude')),
-     ('', 'import-rules', '', _('import narrowspecs from a file')),
-     ('', 'removeexclude', [], _('old paths to no longer exclude')),
-     ('', 'clear', False, _('whether to replace the existing narrowspec')),
-     ('', 'force-delete-local-changes', False,
-       _('forces deletion of local changes when narrowing')),
-     ('', 'update-working-copy', False,
-      _('update working copy when the store has changed')),
-    ] + commands.remoteopts,
+@command(
+    'tracked',
+    [
+        ('', 'addinclude', [], _('new paths to include')),
+        ('', 'removeinclude', [], _('old paths to no longer include')),
+        (
+            '',
+            'auto-remove-includes',
+            False,
+            _('automatically choose unused includes to remove'),
+        ),
+        ('', 'addexclude', [], _('new paths to exclude')),
+        ('', 'import-rules', '', _('import narrowspecs from a file')),
+        ('', 'removeexclude', [], _('old paths to no longer exclude')),
+        ('', 'clear', False, _('whether to replace the existing narrowspec')),
+        (
+            '',
+            'force-delete-local-changes',
+            False,
+            _('forces deletion of local changes when narrowing'),
+        ),
+        (
+            '',
+            'update-working-copy',
+            False,
+            _('update working copy when the store has changed'),
+        ),
+    ]
+    + commands.remoteopts,
     _('[OPTIONS]... [REMOTE]'),
-    inferrepo=True)
+    inferrepo=True,
+)
 def trackedcmd(ui, repo, remotepath=None, *pats, **opts):
     """show or change the current narrowspec
 
@@ -376,8 +462,12 @@
     """
     opts = pycompat.byteskwargs(opts)
     if repository.NARROW_REQUIREMENT not in repo.requirements:
-        raise error.Abort(_('the tracked command is only supported on '
-                            'repositories cloned with --narrow'))
+        raise error.Abort(
+            _(
+                'the tracked command is only supported on '
+                'repositories cloned with --narrow'
+            )
+        )
 
     # Before supporting, decide whether it "hg tracked --clear" should mean
     # tracking no paths or all paths.
@@ -391,13 +481,20 @@
             filepath = os.path.join(encoding.getcwd(), newrules)
             fdata = util.readfile(filepath)
         except IOError as inst:
-            raise error.Abort(_("cannot read narrowspecs from '%s': %s") %
-                              (filepath, encoding.strtolocal(inst.strerror)))
-        includepats, excludepats, profiles = sparse.parseconfig(ui, fdata,
-                                                                'narrow')
+            raise error.Abort(
+                _("cannot read narrowspecs from '%s': %s")
+                % (filepath, encoding.strtolocal(inst.strerror))
+            )
+        includepats, excludepats, profiles = sparse.parseconfig(
+            ui, fdata, 'narrow'
+        )
         if profiles:
-            raise error.Abort(_("including other spec files using '%include' "
-                                "is not supported in narrowspec"))
+            raise error.Abort(
+                _(
+                    "including other spec files using '%include' "
+                    "is not supported in narrowspec"
+                )
+            )
         opts['addinclude'].extend(includepats)
         opts['addexclude'].extend(excludepats)
 
@@ -408,9 +505,15 @@
     autoremoveincludes = opts['auto_remove_includes']
 
     update_working_copy = opts['update_working_copy']
-    only_show = not (addedincludes or removedincludes or addedexcludes or
-                     removedexcludes or newrules or autoremoveincludes or
-                     update_working_copy)
+    only_show = not (
+        addedincludes
+        or removedincludes
+        or addedexcludes
+        or removedexcludes
+        or newrules
+        or autoremoveincludes
+        or update_working_copy
+    )
 
     oldincludes, oldexcludes = repo.narrowpats
 
@@ -469,8 +572,9 @@
         commoninc = discovery.findcommonincoming(repo, remote)
 
         if autoremoveincludes:
-            outgoing = discovery.findcommonoutgoing(repo, remote,
-                                                    commoninc=commoninc)
+            outgoing = discovery.findcommonoutgoing(
+                repo, remote, commoninc=commoninc
+            )
             ui.status(_('looking for unused includes to remove\n'))
             localfiles = set()
             for n in itertools.chain(outgoing.missing, outgoing.excluded):
@@ -483,8 +587,12 @@
             if suggestedremovals:
                 for s in suggestedremovals:
                     ui.status('%s\n' % s)
-                if (ui.promptchoice(_('remove these unused includes (yn)?'
-                                      '$$ &Yes $$ &No')) == 0):
+                if (
+                    ui.promptchoice(
+                        _('remove these unused includes (yn)?$$ &Yes $$ &No')
+                    )
+                    == 0
+                ):
                     removedincludes.update(suggestedremovals)
                     narrowing = True
             else:
@@ -493,9 +601,17 @@
         if narrowing:
             newincludes = oldincludes - removedincludes
             newexcludes = oldexcludes | addedexcludes
-            _narrow(ui, repo, remote, commoninc, oldincludes, oldexcludes,
-                    newincludes, newexcludes,
-                    opts['force_delete_local_changes'])
+            _narrow(
+                ui,
+                repo,
+                remote,
+                commoninc,
+                oldincludes,
+                oldexcludes,
+                newincludes,
+                newexcludes,
+                opts['force_delete_local_changes'],
+            )
             # _narrow() updated the narrowspec and _widen() below needs to
             # use the updated values as its base (otherwise removed includes
             # and addedexcludes will be lost in the resulting narrowspec)
@@ -505,7 +621,15 @@
         if widening:
             newincludes = oldincludes | addedincludes
             newexcludes = oldexcludes - removedexcludes
-            _widen(ui, repo, remote, commoninc, oldincludes, oldexcludes,
-                    newincludes, newexcludes)
+            _widen(
+                ui,
+                repo,
+                remote,
+                commoninc,
+                oldincludes,
+                oldexcludes,
+                newincludes,
+                newexcludes,
+            )
 
     return 0
--- a/hgext/narrow/narrowdirstate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowdirstate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,9 +8,8 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
-from mercurial import (
-    error,
-)
+from mercurial import error
+
 
 def wrapdirstate(repo, dirstate):
     """Add narrow spec dirstate ignore, block changes outside narrow spec."""
@@ -20,9 +19,15 @@
             narrowmatch = repo.narrowmatch()
             for f in args:
                 if f is not None and not narrowmatch(f) and f not in self:
-                    raise error.Abort(_("cannot track '%s' - it is outside " +
-                        "the narrow clone") % f)
+                    raise error.Abort(
+                        _(
+                            "cannot track '%s' - it is outside "
+                            + "the narrow clone"
+                        )
+                        % f
+                    )
             return fn(self, *args, **kwargs)
+
         return _wrapper
 
     class narrowdirstate(dirstate.__class__):
--- a/hgext/narrow/narrowrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,19 +7,15 @@
 
 from __future__ import absolute_import
 
-from mercurial import (
-    wireprototypes,
-)
+from mercurial import wireprototypes
 
-from . import (
-    narrowdirstate,
-)
+from . import narrowdirstate
+
 
 def wraprepo(repo):
     """Enables narrow clone functionality on a single local repository."""
 
     class narrowrepository(repo.__class__):
-
         def _makedirstate(self):
             dirstate = super(narrowrepository, self)._makedirstate()
             return narrowdirstate.wrapdirstate(self, dirstate)
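
wraprepo() derives from whatever class the repository instance currently has and swaps it in place, so several extensions can stack their wrappers. The same pattern in isolation (illustrative names):

    class base(object):
        def describe(self):
            return 'base'

    def wrapobject(obj):
        class wrapped(obj.__class__):
            def describe(self):
                return 'wrapped ' + super(wrapped, self).describe()

        obj.__class__ = wrapped
        return obj

    assert wrapobject(base()).describe() == 'wrapped base'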
--- a/hgext/narrow/narrowtemplates.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowtemplates.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,11 +16,13 @@
 templatekeyword = registrar.templatekeyword(keywords)
 revsetpredicate = registrar.revsetpredicate()
 
+
 def _isellipsis(repo, rev):
     if repo.changelog.flags(rev) & revlog.REVIDX_ELLIPSIS:
         return True
     return False
 
+
 @templatekeyword('ellipsis', requires={'repo', 'ctx'})
 def ellipsis(context, mapping):
     """String. 'ellipsis' if the change is an ellipsis node, else ''."""
@@ -30,6 +32,7 @@
         return 'ellipsis'
     return ''
 
+
 @templatekeyword('outsidenarrow', requires={'repo', 'ctx'})
 def outsidenarrow(context, mapping):
     """String. 'outsidenarrow' if the change affects no tracked files,
@@ -42,6 +45,7 @@
             return 'outsidenarrow'
     return ''
 
+
 @revsetpredicate('ellipsis()')
 def ellipsisrevset(repo, subset, x):
     """Changesets that are ellipsis nodes."""
--- a/hgext/narrow/narrowwirepeer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/narrow/narrowwirepeer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,9 +21,11 @@
 
 from . import narrowbundle2
 
+
 def uisetup():
     wireprotov1peer.wirepeer.narrow_widen = peernarrowwiden
 
+
 def reposetup(repo):
     def wirereposetup(ui, peer):
         def wrapped(orig, cmd, *args, **kwargs):
@@ -34,16 +36,32 @@
                 kwargs[r"includepats"] = ','.join(include)
                 kwargs[r"excludepats"] = ','.join(exclude)
             return orig(cmd, *args, **kwargs)
+
         extensions.wrapfunction(peer, '_calltwowaystream', wrapped)
+
     hg.wirepeersetupfuncs.append(wirereposetup)
 
-@wireprotov1server.wireprotocommand('narrow_widen', 'oldincludes oldexcludes'
-                                                    ' newincludes newexcludes'
-                                                    ' commonheads cgversion'
-                                                    ' known ellipses',
-                                    permission='pull')
-def narrow_widen(repo, proto, oldincludes, oldexcludes, newincludes,
-                 newexcludes, commonheads, cgversion, known, ellipses):
+
+@wireprotov1server.wireprotocommand(
+    'narrow_widen',
+    'oldincludes oldexcludes'
+    ' newincludes newexcludes'
+    ' commonheads cgversion'
+    ' known ellipses',
+    permission='pull',
+)
+def narrow_widen(
+    repo,
+    proto,
+    oldincludes,
+    oldexcludes,
+    newincludes,
+    newexcludes,
+    commonheads,
+    cgversion,
+    known,
+    ellipses,
+):
     """wireprotocol command to send data when a narrow clone is widen. We will
     be sending a changegroup here.
 
@@ -60,9 +78,11 @@
 
     preferuncompressed = False
     try:
+
         def splitpaths(data):
             # work around ''.split(',') => ['']
             return data.split(b',') if data else []
+
         oldincludes = splitpaths(oldincludes)
         newincludes = splitpaths(newincludes)
         oldexcludes = splitpaths(oldexcludes)
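
The splitpaths() helper above exists because splitting an empty string never yields an empty list; the quirk its comment refers to, demonstrated:

    assert b''.split(b',') == [b'']       # the surprising case
    assert b'a,b'.split(b',') == [b'a', b'b']

    def splitpaths(data):
        # work around ''.split(',') => ['']
        return data.split(b',') if data else []

    assert splitpaths(b'') == []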
@@ -83,16 +103,36 @@
 
         bundler = bundle2.bundle20(repo.ui)
         if not ellipses:
-            newmatch = narrowspec.match(repo.root, include=newincludes,
-                                        exclude=newexcludes)
-            oldmatch = narrowspec.match(repo.root, include=oldincludes,
-                                        exclude=oldexcludes)
-            bundle2.widen_bundle(bundler, repo, oldmatch, newmatch, common,
-                                 known, cgversion, ellipses)
+            newmatch = narrowspec.match(
+                repo.root, include=newincludes, exclude=newexcludes
+            )
+            oldmatch = narrowspec.match(
+                repo.root, include=oldincludes, exclude=oldexcludes
+            )
+            bundle2.widen_bundle(
+                bundler,
+                repo,
+                oldmatch,
+                newmatch,
+                common,
+                known,
+                cgversion,
+                ellipses,
+            )
         else:
-            narrowbundle2.generateellipsesbundle2(bundler, repo, oldincludes,
-                    oldexcludes, newincludes, newexcludes, cgversion, common,
-                    list(common), known, None)
+            narrowbundle2.generateellipsesbundle2(
+                bundler,
+                repo,
+                oldincludes,
+                oldexcludes,
+                newincludes,
+                newexcludes,
+                cgversion,
+                common,
+                list(common),
+                known,
+                None,
+            )
     except error.Abort as exc:
         bundler = bundle2.bundle20(repo.ui)
         manargs = [('message', pycompat.bytestr(exc))]
@@ -103,8 +143,10 @@
         preferuncompressed = True
 
     chunks = bundler.getchunks()
-    return wireprototypes.streamres(gen=chunks,
-                                    prefer_uncompressed=preferuncompressed)
+    return wireprototypes.streamres(
+        gen=chunks, prefer_uncompressed=preferuncompressed
+    )
+
 
 def peernarrowwiden(remote, **kwargs):
     for ch in (r'commonheads', r'known'):
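
In the reformatted @wireprotocommand decorator the argument spec stays a run of adjacent string literals, which Python fuses at compile time, so the wrapped form is equivalent to the old single line:

    spec = (
        'oldincludes oldexcludes'
        ' newincludes newexcludes'
        ' commonheads cgversion'
        ' known ellipses'
    )
    assert spec.split() == [
        'oldincludes', 'oldexcludes', 'newincludes', 'newexcludes',
        'commonheads', 'cgversion', 'known', 'ellipses',
    ]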
--- a/hgext/notify.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/notify.py	Sun Oct 06 09:45:02 2019 -0400
@@ -178,62 +178,62 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('notify', 'changegroup',
-    default=None,
+configitem(
+    'notify', 'changegroup', default=None,
 )
-configitem('notify', 'config',
-    default=None,
+configitem(
+    'notify', 'config', default=None,
 )
-configitem('notify', 'diffstat',
-    default=True,
+configitem(
+    'notify', 'diffstat', default=True,
 )
-configitem('notify', 'domain',
-    default=None,
+configitem(
+    'notify', 'domain', default=None,
 )
-configitem('notify', 'messageidseed',
-    default=None,
+configitem(
+    'notify', 'messageidseed', default=None,
 )
-configitem('notify', 'fromauthor',
-    default=None,
+configitem(
+    'notify', 'fromauthor', default=None,
 )
-configitem('notify', 'incoming',
-    default=None,
+configitem(
+    'notify', 'incoming', default=None,
 )
-configitem('notify', 'maxdiff',
-    default=300,
+configitem(
+    'notify', 'maxdiff', default=300,
 )
-configitem('notify', 'maxdiffstat',
-    default=-1,
+configitem(
+    'notify', 'maxdiffstat', default=-1,
 )
-configitem('notify', 'maxsubject',
-    default=67,
+configitem(
+    'notify', 'maxsubject', default=67,
 )
-configitem('notify', 'mbox',
-    default=None,
+configitem(
+    'notify', 'mbox', default=None,
 )
-configitem('notify', 'merge',
-    default=True,
+configitem(
+    'notify', 'merge', default=True,
 )
-configitem('notify', 'outgoing',
-    default=None,
+configitem(
+    'notify', 'outgoing', default=None,
 )
-configitem('notify', 'sources',
-    default='serve',
+configitem(
+    'notify', 'sources', default='serve',
 )
-configitem('notify', 'showfunc',
-    default=None,
+configitem(
+    'notify', 'showfunc', default=None,
 )
-configitem('notify', 'strip',
-    default=0,
+configitem(
+    'notify', 'strip', default=0,
 )
-configitem('notify', 'style',
-    default=None,
+configitem(
+    'notify', 'style', default=None,
 )
-configitem('notify', 'template',
-    default=None,
+configitem(
+    'notify', 'template', default=None,
 )
-configitem('notify', 'test',
-    default=True,
+configitem(
+    'notify', 'test', default=True,
 )
 
 # template for single changeset can include email headers.
@@ -260,6 +260,7 @@
     'changegroup': multiple_template,
 }
 
+
 class notifier(object):
     '''email notification class.'''
 
@@ -283,8 +284,9 @@
             self.showfunc = self.ui.configbool('diff', 'showfunc')
 
         mapfile = None
-        template = (self.ui.config('notify', hooktype) or
-                    self.ui.config('notify', 'template'))
+        template = self.ui.config('notify', hooktype) or self.ui.config(
+            'notify', 'template'
+        )
         if not template:
             mapfile = self.ui.config('notify', 'style')
         if not mapfile and not template:
@@ -301,7 +303,7 @@
             c = path.find('/')
             if c == -1:
                 break
-            path = path[c + 1:]
+            path = path[c + 1 :]
             count -= 1
         return path
 
@@ -336,16 +338,23 @@
             if fnmatch.fnmatch(self.repo.root, pat):
                 for user in users.split(','):
                     subs.add((self.fixmail(user), revs))
-        return [(mail.addressencode(self.ui, s, self.charsets, self.test), r)
-                for s, r in sorted(subs)]
+        return [
+            (mail.addressencode(self.ui, s, self.charsets, self.test), r)
+            for s, r in sorted(subs)
+        ]
 
     def node(self, ctx, **props):
         '''format one changeset, unless it is a suppressed merge.'''
         if not self.merge and len(ctx.parents()) > 1:
             return False
-        self.t.show(ctx, changes=ctx.changeset(),
-                    baseurl=self.ui.config('web', 'baseurl'),
-                    root=self.repo.root, webroot=self.root, **props)
+        self.t.show(
+            ctx,
+            changes=ctx.changeset(),
+            baseurl=self.ui.config('web', 'baseurl'),
+            root=self.repo.root,
+            webroot=self.root,
+            **props
+        )
         return True
 
     def skipsource(self, source):
@@ -367,8 +376,9 @@
                 subs.add(sub)
                 continue
         if len(subs) == 0:
-            self.ui.debug('notify: no subscribers to selected repo '
-                          'and revset\n')
+            self.ui.debug(
+                'notify: no subscribers to selected repo ' 'and revset\n'
+            )
             return
 
         p = emailparser.Parser()
@@ -398,7 +408,8 @@
                 msg[k] = v
 
         msg[r'Date'] = encoding.strfromlocal(
-            dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2"))
+            dateutil.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
+        )
 
         # try to make subject line exist and be useful
         if not subject:
@@ -411,7 +422,8 @@
         if maxsubject:
             subject = stringutil.ellipsis(subject, maxsubject)
         msg[r'Subject'] = encoding.strfromlocal(
-            mail.headencode(self.ui, subject, self.charsets, self.test))
+            mail.headencode(self.ui, subject, self.charsets, self.test)
+        )
 
         # try to make message have proper sender
         if not sender:
@@ -419,7 +431,8 @@
         if '@' not in sender or '@localhost' in sender:
             sender = self.fixmail(sender)
         msg[r'From'] = encoding.strfromlocal(
-            mail.addressencode(self.ui, sender, self.charsets, self.test))
+            mail.addressencode(self.ui, sender, self.charsets, self.test)
+        )
 
         msg[r'X-Hg-Notification'] = r'changeset %s' % ctx
         if not msg[r'Message-Id']:
@@ -432,10 +445,17 @@
             if not msgtext.endswith('\n'):
                 self.ui.write('\n')
         else:
-            self.ui.status(_('notify: sending %d subscribers %d changes\n') %
-                           (len(subs), count))
-            mail.sendmail(self.ui, stringutil.email(msg[r'From']),
-                          subs, msgtext, mbox=self.mbox)
+            self.ui.status(
+                _('notify: sending %d subscribers %d changes\n')
+                % (len(subs), count)
+            )
+            mail.sendmail(
+                self.ui,
+                stringutil.email(msg[r'From']),
+                subs,
+                msgtext,
+                mbox=self.mbox,
+            )
 
     def diff(self, ctx, ref=None):
 
@@ -474,6 +494,7 @@
 
         self.ui.write("\n".join(difflines))
 
+
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
     '''send email notifications to interested subscribers.
 
@@ -502,16 +523,20 @@
                     author = repo[rev].user()
             else:
                 data += ui.popbuffer()
-                ui.note(_('notify: suppressing notification for merge %d:%s\n')
-                        % (rev, repo[rev].hex()[:12]))
+                ui.note(
+                    _('notify: suppressing notification for merge %d:%s\n')
+                    % (rev, repo[rev].hex()[:12])
+                )
                 ui.pushbuffer()
         if count:
             n.diff(ctx, repo['tip'])
     elif ctx.rev() in repo:
         if not n.node(ctx):
             ui.popbuffer()
-            ui.note(_('notify: suppressing notification for merge %d:%s\n') %
-                    (ctx.rev(), ctx.hex()[:12]))
+            ui.note(
+                _('notify: suppressing notification for merge %d:%s\n')
+                % (ctx.rev(), ctx.hex()[:12])
+            )
             return
         count += 1
         n.diff(ctx)
@@ -526,6 +551,7 @@
     if count:
         n.send(ctx, count, data)
 
+
 def messageid(ctx, domain, messageidseed):
     if domain and messageidseed:
         host = domain
@@ -535,6 +561,10 @@
         messagehash = hashlib.sha512(ctx.hex() + messageidseed)
         messageid = '<hg.%s@%s>' % (messagehash.hexdigest()[:64], host)
     else:
-        messageid = '<hg.%s.%d.%d@%s>' % (ctx, int(time.time()),
-                                          hash(ctx.repo().root), host)
+        messageid = '<hg.%s.%d.%d@%s>' % (
+            ctx,
+            int(time.time()),
+            hash(ctx.repo().root),
+            host,
+        )
     return encoding.strfromlocal(messageid)
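
messageid() has two branches: with notify.domain and notify.messageidseed both configured, it derives a stable id from the changeset hash; otherwise it falls back to the time-based format. A sketch of the deterministic branch (bytes in, native string out, domain value illustrative):

    import hashlib

    def messageid_sketch(ctxhex, domain, messageidseed):
        messagehash = hashlib.sha512(ctxhex + messageidseed)
        return '<hg.%s@%s>' % (messagehash.hexdigest()[:64], domain)

    # same inputs, same Message-Id, so re-sent notifications thread and dedupe:
    assert (messageid_sketch(b'c0ffee', 'example.com', b'seed')
            == messageid_sketch(b'c0ffee', 'example.com', b'seed'))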
--- a/hgext/pager.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/pager.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,7 +29,7 @@
     dispatch,
     extensions,
     registrar,
-    )
+)
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -40,12 +40,12 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('pager', 'attend',
-        default=lambda: attended,
+configitem(
+    'pager', 'attend', default=lambda: attended,
 )
 
+
 def uisetup(ui):
-
     def pagecmd(orig, ui, options, cmd, cmdfunc):
         auto = options['pager'] == 'auto'
         if auto and not ui.pageractive:
@@ -59,8 +59,7 @@
                 if ui.config('pager', var, None):
                     usepager = ui.configbool('pager', var, True)
                     break
-                if (cmd in attend or
-                     (cmd not in ignore and not attend)):
+                if cmd in attend or (cmd not in ignore and not attend):
                     usepager = True
                     break
 
@@ -78,4 +77,5 @@
 
     extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
 
+
 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
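
pagecmd() first honors a per-command attend-<command> override, then falls back to the attend/ignore lists; with an empty attend list, every command not ignored is paged. The list logic as a pure function (a sketch; the real code reads these values via ui.config and ui.configbool):

    def usepager_sketch(cmd, attend, ignore, override=None):
        if override is not None:  # the per-command knob wins outright
            return override
        return cmd in attend or (cmd not in ignore and not attend)

    assert usepager_sketch('log', ['log', 'diff'], [])
    assert not usepager_sketch('push', [], ['push'])
    assert usepager_sketch('status', [], [])  # empty attend list pages the rest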
--- a/hgext/patchbomb.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/patchbomb.py	Sun Oct 06 09:45:02 2019 -0400
@@ -100,6 +100,7 @@
     util,
 )
 from mercurial.utils import dateutil
+
 stringio = util.stringio
 
 cmdtable = {}
@@ -108,35 +109,35 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('patchbomb', 'bundletype',
-    default=None,
+configitem(
+    'patchbomb', 'bundletype', default=None,
 )
-configitem('patchbomb', 'bcc',
-    default=None,
+configitem(
+    'patchbomb', 'bcc', default=None,
 )
-configitem('patchbomb', 'cc',
-    default=None,
+configitem(
+    'patchbomb', 'cc', default=None,
 )
-configitem('patchbomb', 'confirm',
-    default=False,
+configitem(
+    'patchbomb', 'confirm', default=False,
 )
-configitem('patchbomb', 'flagtemplate',
-    default=None,
+configitem(
+    'patchbomb', 'flagtemplate', default=None,
 )
-configitem('patchbomb', 'from',
-    default=None,
+configitem(
+    'patchbomb', 'from', default=None,
 )
-configitem('patchbomb', 'intro',
-    default='auto',
+configitem(
+    'patchbomb', 'intro', default='auto',
 )
-configitem('patchbomb', 'publicurl',
-    default=None,
+configitem(
+    'patchbomb', 'publicurl', default=None,
 )
-configitem('patchbomb', 'reply-to',
-    default=None,
+configitem(
+    'patchbomb', 'reply-to', default=None,
 )
-configitem('patchbomb', 'to',
-    default=None,
+configitem(
+    'patchbomb', 'to', default=None,
 )
 
 if pycompat.ispy3:
@@ -150,6 +151,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def _addpullheader(seq, ctx):
     """Add a header pointing to a public URL where the changeset is available
     """
@@ -159,24 +161,31 @@
     # destination before patchbombing anything.
     publicurl = repo.ui.config('patchbomb', 'publicurl')
     if publicurl:
-        return ('Available At %s\n'
-                '#              hg pull %s -r %s' % (publicurl, publicurl, ctx))
+        return 'Available At %s\n' '#              hg pull %s -r %s' % (
+            publicurl,
+            publicurl,
+            ctx,
+        )
     return None
 
+
 def uisetup(ui):
     cmdutil.extraexport.append('pullurl')
     cmdutil.extraexportmap['pullurl'] = _addpullheader
 
+
 def reposetup(ui, repo):
     if not repo.local():
         return
     repo._wlockfreeprefix.add('last-email.txt')
 
+
 def prompt(ui, prompt, default=None, rest=':'):
     if default:
         prompt += ' [%s]' % default
     return ui.prompt(prompt + rest, default)
 
+
 def introwanted(ui, opts, number):
     '''is an introductory message apparently wanted?'''
     introconfig = ui.config('patchbomb', 'intro')
@@ -189,12 +198,14 @@
     elif introconfig == 'auto':
         intro = number > 1
     else:
-        ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
-                     % introconfig)
+        ui.write_err(
+            _('warning: invalid patchbomb.intro value "%s"\n') % introconfig
+        )
         ui.write_err(_('(should be one of always, never, auto)\n'))
         intro = number > 1
     return intro
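
Per the warning text, patchbomb.intro takes always, never, or auto, and anything unrecognized is warned about and treated like auto. introwanted() reduced to a pure function (sketch):

    def introwanted_sketch(introconfig, number):
        if introconfig == 'always':
            return True
        if introconfig == 'never':
            return False
        # 'auto' and unrecognized values: intro only for multi-patch series
        return number > 1

    assert introwanted_sketch('auto', 1) is False
    assert introwanted_sketch('auto', 3) is True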
 
+
 def _formatflags(ui, repo, rev, flags):
     """build flag string optionally by template"""
     tmpl = ui.config('patchbomb', 'flagtemplate')
@@ -208,6 +219,7 @@
         fm.write('flags', '%s', fm.formatlist(flags, name='flag'))
     return out.getvalue()
 
+
 def _formatprefix(ui, repo, rev, flags, idx, total, numbered):
     """build prefix to patch subject"""
     flag = _formatflags(ui, repo, rev, flags)
@@ -220,8 +232,19 @@
         tlen = len("%d" % total)
         return '[PATCH %0*d of %d%s]' % (tlen, idx, total, flag)
 
-def makepatch(ui, repo, rev, patchlines, opts, _charsets, idx, total, numbered,
-              patchname=None):
+
+def makepatch(
+    ui,
+    repo,
+    rev,
+    patchlines,
+    opts,
+    _charsets,
+    idx,
+    total,
+    numbered,
+    patchname=None,
+):
 
     desc = []
     node = None
@@ -240,8 +263,9 @@
         raise ValueError
 
     if opts.get('attach') and not opts.get('body'):
-        body = ('\n'.join(desc[1:]).strip() or
-                'Patch subject is complete summary.')
+        body = (
+            '\n'.join(desc[1:]).strip() or 'Patch subject is complete summary.'
+        )
         body += '\n\n\n'
 
     if opts.get('plain'):
@@ -264,31 +288,38 @@
         msg = emimemultipart.MIMEMultipart()
         if body:
             msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
-        p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
-                               opts.get('test'))
+        p = mail.mimetextpatch(
+            '\n'.join(patchlines), 'x-patch', opts.get('test')
+        )
         binnode = nodemod.bin(node)
         # if node is mq patch, it will have the patch file's name as a tag
         if not patchname:
-            patchtags = [t for t in repo.nodetags(binnode)
-                         if t.endswith('.patch') or t.endswith('.diff')]
+            patchtags = [
+                t
+                for t in repo.nodetags(binnode)
+                if t.endswith('.patch') or t.endswith('.diff')
+            ]
             if patchtags:
                 patchname = patchtags[0]
             elif total > 1:
-                patchname = cmdutil.makefilename(repo[node], '%b-%n.patch',
-                                                 seqno=idx, total=total)
+                patchname = cmdutil.makefilename(
+                    repo[node], '%b-%n.patch', seqno=idx, total=total
+                )
             else:
                 patchname = cmdutil.makefilename(repo[node], '%b.patch')
         disposition = r'inline'
         if opts.get('attach'):
             disposition = r'attachment'
         p[r'Content-Disposition'] = (
-            disposition + r'; filename=' + encoding.strfromlocal(patchname))
+            disposition + r'; filename=' + encoding.strfromlocal(patchname)
+        )
         msg.attach(p)
     else:
         msg = mail.mimetextpatch(body, display=opts.get('test'))
 
-    prefix = _formatprefix(ui, repo, rev, opts.get('flag'), idx, total,
-                           numbered)
+    prefix = _formatprefix(
+        ui, repo, rev, opts.get('flag'), idx, total, numbered
+    )
     subj = desc[0].strip().rstrip('. ')
     if not numbered:
         subj = ' '.join([prefix, opts.get('subject') or subj])
@@ -300,6 +331,7 @@
     msg['X-Mercurial-Series-Total'] = '%i' % total
     return msg, subj, ds
 
+
 def _getpatches(repo, revs, **opts):
     """return a list of patches for a list of revisions
 
@@ -309,12 +341,16 @@
     prev = repo['.'].rev()
     for r in revs:
         if r == prev and (repo[None].files() or repo[None].deleted()):
-            ui.warn(_('warning: working directory has '
-                      'uncommitted changes\n'))
+            ui.warn(
+                _('warning: working directory has ' 'uncommitted changes\n')
+            )
         output = stringio()
-        cmdutil.exportfile(repo, [r], output,
-                           opts=patch.difffeatureopts(ui, opts, git=True))
+        cmdutil.exportfile(
+            repo, [r], output, opts=patch.difffeatureopts(ui, opts, git=True)
+        )
         yield output.getvalue().split('\n')
+
+
 def _getbundle(repo, dest, **opts):
     """return a bundle containing changesets missing in "dest"
 
@@ -339,6 +375,7 @@
             pass
         os.rmdir(tmpdir)
 
+
 def _getdescription(repo, defaultbody, sender, **opts):
     """obtain the body of the introduction message and return it
 
@@ -351,16 +388,19 @@
     if opts.get(r'desc'):
         body = open(opts.get(r'desc')).read()
     else:
-        ui.write(_('\nWrite the introductory message for the '
-                   'patch series.\n\n'))
-        body = ui.edit(defaultbody, sender, repopath=repo.path,
-                       action='patchbombbody')
+        ui.write(
+            _('\nWrite the introductory message for the ' 'patch series.\n\n')
+        )
+        body = ui.edit(
+            defaultbody, sender, repopath=repo.path, action='patchbombbody'
+        )
         # Save series description in case sendmail fails
         msgfile = repo.vfs('last-email.txt', 'wb')
         msgfile.write(body)
         msgfile.close()
     return body
 
+
 def _getbundlemsgs(repo, sender, bundle, **opts):
     """Get the full email for sending a given bundle
 
@@ -369,8 +409,9 @@
     """
     ui = repo.ui
     _charsets = mail._charsets(ui)
-    subj = (opts.get(r'subject')
-            or prompt(ui, 'Subject:', 'A bundle for your repository'))
+    subj = opts.get(r'subject') or prompt(
+        ui, 'Subject:', 'A bundle for your repository'
+    )
 
     body = _getdescription(repo, '', sender, **opts)
     msg = emimemultipart.MIMEMultipart()
@@ -379,13 +420,17 @@
     datapart = emimebase.MIMEBase(r'application', r'x-mercurial-bundle')
     datapart.set_payload(bundle)
     bundlename = '%s.hg' % opts.get(r'bundlename', 'bundle')
-    datapart.add_header(r'Content-Disposition', r'attachment',
-                        filename=encoding.strfromlocal(bundlename))
+    datapart.add_header(
+        r'Content-Disposition',
+        r'attachment',
+        filename=encoding.strfromlocal(bundlename),
+    )
     emailencoders.encode_base64(datapart)
     msg.attach(datapart)
     msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
     return [(msg, subj, None)]
 
+
 def _makeintro(repo, sender, revs, patches, **opts):
     """make an introduction email, asking the user for content if needed
 
@@ -394,12 +439,14 @@
     _charsets = mail._charsets(ui)
 
     # use the last revision which is likely to be a bookmarked head
-    prefix = _formatprefix(ui, repo, revs.last(), opts.get(r'flag'),
-                           0, len(patches), numbered=True)
-    subj = (opts.get(r'subject') or
-            prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
+    prefix = _formatprefix(
+        ui, repo, revs.last(), opts.get(r'flag'), 0, len(patches), numbered=True
+    )
+    subj = opts.get(r'subject') or prompt(
+        ui, '(optional) Subject: ', rest=prefix, default=''
+    )
     if not subj:
-        return None         # skip intro if the user doesn't bother
+        return None  # skip intro if the user doesn't bother
 
     subj = prefix + ' ' + subj
 
@@ -413,10 +460,10 @@
 
     body = _getdescription(repo, body, sender, **opts)
     msg = mail.mimeencode(ui, body, _charsets, opts.get(r'test'))
-    msg['Subject'] = mail.headencode(ui, subj, _charsets,
-                                     opts.get(r'test'))
+    msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get(r'test'))
     return (msg, subj, diffstat)
 
+
 def _getpatchmsgs(repo, sender, revs, patchnames=None, **opts):
     """return a list of emails from a list of patches
 
@@ -430,8 +477,7 @@
     patches = list(_getpatches(repo, revs, **opts))
     msgs = []
 
-    ui.write(_('this patch series consists of %d patches.\n\n')
-             % len(patches))
+    ui.write(_('this patch series consists of %d patches.\n\n') % len(patches))
 
     # build the intro message, or skip it if the user declines
     if introwanted(ui, bytesopts, len(patches)):
@@ -448,12 +494,23 @@
     for i, (r, p) in enumerate(zip(revs, patches)):
         if patchnames:
             name = patchnames[i]
-        msg = makepatch(ui, repo, r, p, bytesopts, _charsets,
-                        i + 1, len(patches), numbered, name)
+        msg = makepatch(
+            ui,
+            repo,
+            r,
+            p,
+            bytesopts,
+            _charsets,
+            i + 1,
+            len(patches),
+            numbered,
+            name,
+        )
         msgs.append(msg)
 
     return msgs
 
+
 def _getoutgoing(repo, dest, revs):
     '''Return the revisions present locally but not in dest'''
     ui = repo.ui
@@ -469,54 +526,122 @@
         ui.status(_("no changes found\n"))
     return revs
 
+
 def _msgid(node, timestamp):
     hostname = encoding.strtolocal(socket.getfqdn())
     hostname = encoding.environ.get('HGHOSTNAME', hostname)
     return '<%s.%d@%s>' % (node, timestamp, hostname)
 
+
 emailopts = [
     ('', 'body', None, _('send patches as inline message text (default)')),
     ('a', 'attach', None, _('send patches as attachments')),
     ('i', 'inline', None, _('send patches as inline attachments')),
-    ('', 'bcc', [],
-     _('email addresses of blind carbon copy recipients'), _('EMAIL')),
+    (
+        '',
+        'bcc',
+        [],
+        _('email addresses of blind carbon copy recipients'),
+        _('EMAIL'),
+    ),
     ('c', 'cc', [], _('email addresses of copy recipients'), _('EMAIL')),
     ('', 'confirm', None, _('ask for confirmation before sending')),
     ('d', 'diffstat', None, _('add diffstat output to messages')),
     ('', 'date', '', _('use the given date as the sending date'), _('DATE')),
-    ('', 'desc', '',
-     _('use the given file as the series description'), _('FILE')),
+    (
+        '',
+        'desc',
+        '',
+        _('use the given file as the series description'),
+        _('FILE'),
+    ),
     ('f', 'from', '', _('email address of sender'), _('EMAIL')),
     ('n', 'test', None, _('print messages that would be sent')),
-    ('m', 'mbox', '',
-     _('write messages to mbox file instead of sending them'), _('FILE')),
-    ('', 'reply-to', [],
-     _('email addresses replies should be sent to'), _('EMAIL')),
-    ('s', 'subject', '',
-     _('subject of first message (intro or single patch)'), _('TEXT')),
+    (
+        'm',
+        'mbox',
+        '',
+        _('write messages to mbox file instead of sending them'),
+        _('FILE'),
+    ),
+    (
+        '',
+        'reply-to',
+        [],
+        _('email addresses replies should be sent to'),
+        _('EMAIL'),
+    ),
+    (
+        's',
+        'subject',
+        '',
+        _('subject of first message (intro or single patch)'),
+        _('TEXT'),
+    ),
     ('', 'in-reply-to', '', _('message identifier to reply to'), _('MSGID')),
     ('', 'flag', [], _('flags to add in subject prefixes'), _('FLAG')),
-    ('t', 'to', [], _('email addresses of recipients'), _('EMAIL'))]
+    ('t', 'to', [], _('email addresses of recipients'), _('EMAIL')),
+]
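
Each option black explodes in this list is a Mercurial fancyopts tuple: short flag, long flag, default, help text, value placeholder. One entry from above, reproduced without the _() wrapper:

    bcc_opt = (
        '',                                                 # no short flag
        'bcc',                                              # --bcc
        [],                                                 # repeatable, so a list default
        'email addresses of blind carbon copy recipients',  # help text
        'EMAIL',                                            # value placeholder in --help
    )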
+
 
-@command('email',
-    [('g', 'git', None, _('use git extended diff format')),
-    ('', 'plain', None, _('omit hg patch header')),
-    ('o', 'outgoing', None,
-     _('send changes not found in the target repository')),
-    ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
-    ('B', 'bookmark', '',
-     _('send changes only reachable by given bookmark'), _('BOOKMARK')),
-    ('', 'bundlename', 'bundle',
-     _('name of the bundle attachment file'), _('NAME')),
-    ('r', 'rev', [], _('a revision to send'), _('REV')),
-    ('', 'force', None, _('run even when remote repository is unrelated '
-       '(with -b/--bundle)')),
-    ('', 'base', [], _('a base changeset to specify instead of a destination '
-       '(with -b/--bundle)'), _('REV')),
-    ('', 'intro', None, _('send an introduction email for a single patch')),
-    ] + emailopts + cmdutil.remoteopts,
+@command(
+    'email',
+    [
+        ('g', 'git', None, _('use git extended diff format')),
+        ('', 'plain', None, _('omit hg patch header')),
+        (
+            'o',
+            'outgoing',
+            None,
+            _('send changes not found in the target repository'),
+        ),
+        (
+            'b',
+            'bundle',
+            None,
+            _('send changes not in target as a binary bundle'),
+        ),
+        (
+            'B',
+            'bookmark',
+            '',
+            _('send changes only reachable by given bookmark'),
+            _('BOOKMARK'),
+        ),
+        (
+            '',
+            'bundlename',
+            'bundle',
+            _('name of the bundle attachment file'),
+            _('NAME'),
+        ),
+        ('r', 'rev', [], _('a revision to send'), _('REV')),
+        (
+            '',
+            'force',
+            None,
+            _(
+                'run even when remote repository is unrelated '
+                '(with -b/--bundle)'
+            ),
+        ),
+        (
+            '',
+            'base',
+            [],
+            _(
+                'a base changeset to specify instead of a destination '
+                '(with -b/--bundle)'
+            ),
+            _('REV'),
+        ),
+        ('', 'intro', None, _('send an introduction email for a single patch')),
+    ]
+    + emailopts
+    + cmdutil.remoteopts,
     _('hg email [OPTION]... [DEST]...'),
-    helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def email(ui, repo, *revs, **opts):
     '''send changesets by email
 
@@ -621,8 +746,12 @@
         raise error.Abort(_('specify at least one changeset with -B, -r or -o'))
 
     if outgoing and bundle:
-        raise error.Abort(_("--outgoing mode always on with --bundle;"
-                           " do not re-specify --outgoing"))
+        raise error.Abort(
+            _(
+                "--outgoing mode always on with --bundle;"
+                " do not re-specify --outgoing"
+            )
+        )
     if rev and bookmark:
         raise error.Abort(_("-r and -B are mutually exclusive"))
 
@@ -657,8 +786,9 @@
         try:
             publicpeer = hg.peer(repo, {}, publicurl)
         except error.RepoError:
-            repo.ui.write_err(_('unable to access public repo: %s\n')
-                              % publicurl)
+            repo.ui.write_err(
+                _('unable to access public repo: %s\n') % publicurl
+            )
             raise
         if not publicpeer.capable('known'):
             repo.ui.debug('skipping existence checks: public repo too old\n')
@@ -677,8 +807,9 @@
                     msg = _('public url %s is missing %s')
                     msg %= (publicurl, missing[0])
                 missingrevs = [ctx.rev() for ctx in missing]
-                revhint = ' '.join('-r %s' % h
-                                   for h in repo.set('heads(%ld)', missingrevs))
+                revhint = ' '.join(
+                    '-r %s' % h for h in repo.set('heads(%ld)', missingrevs)
+                )
                 hint = _("use 'hg push %s %s'") % (publicurl, revhint)
                 raise error.Abort(msg, hint=hint)
 
@@ -692,9 +823,12 @@
         return _msgid(id[:20], int(start_time[0]))
 
     # deprecated config: patchbomb.from
-    sender = (opts.get('from') or ui.config('email', 'from') or
-              ui.config('patchbomb', 'from') or
-              prompt(ui, 'From', ui.username()))
+    sender = (
+        opts.get('from')
+        or ui.config('email', 'from')
+        or ui.config('patchbomb', 'from')
+        or prompt(ui, 'From', ui.username())
+    )
 
     if bundle:
         stropts = pycompat.strkwargs(opts)
@@ -716,11 +850,13 @@
             return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
 
         # not on the command line: fallback to config and then maybe ask
-        addr = (ui.config('email', configkey) or
-                ui.config('patchbomb', configkey))
+        addr = ui.config('email', configkey) or ui.config(
+            'patchbomb', configkey
+        )
         if not addr:
-            specified = (ui.hasconfig('email', configkey) or
-                         ui.hasconfig('patchbomb', configkey))
+            specified = ui.hasconfig('email', configkey) or ui.hasconfig(
+                'patchbomb', configkey
+            )
             if not specified and ask:
                 addr = prompt(ui, header, default=default)
         if addr:
@@ -728,7 +864,8 @@
             return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
         elif default:
             return mail.addrlistencode(
-                ui, [default], _charsets, opts.get('test'))
+                ui, [default], _charsets, opts.get('test')
+            )
         return []
 
     to = getaddrs('To', ask=True)
@@ -752,8 +889,9 @@
             if ds:
                 ui.write(ds, label='patchbomb.diffstats')
         ui.write('\n')
-        if ui.promptchoice(_('are you sure you want to send (yn)?'
-                             '$$ &Yes $$ &No')):
+        if ui.promptchoice(
+            _('are you sure you want to send (yn)?' '$$ &Yes $$ &No')
+        ):
             raise error.Abort(_('patchbomb canceled'))
 
     ui.write('\n')
@@ -792,7 +930,7 @@
         m['From'] = sender
         m['To'] = ', '.join(to)
         if cc:
-            m['Cc']  = ', '.join(cc)
+            m['Cc'] = ', '.join(cc)
         if bcc:
             m['Bcc'] = ', '.join(bcc)
         if replyto:
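
The sender and recipient lookups in the hunks above are plain or-chains: the command line wins, then the [email] section, then the deprecated [patchbomb] section, then a prompt or default. Reduced to its core (sketch):

    def pick_sender_sketch(cli_from, email_cfg, patchbomb_cfg, prompted):
        # first truthy source wins, mirroring the or-chain in email()
        return cli_from or email_cfg or patchbomb_cfg or prompted

    assert pick_sender_sketch(None, '', 'a@example.com', 'me') == 'a@example.com'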
--- a/hgext/phabricator.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/phabricator.py	Sun Oct 06 09:45:02 2019 -0400
@@ -89,24 +89,24 @@
 templatekeyword = eh.templatekeyword
 
 # developer config: phabricator.batchsize
-eh.configitem(b'phabricator', b'batchsize',
-    default=12,
+eh.configitem(
+    b'phabricator', b'batchsize', default=12,
 )
-eh.configitem(b'phabricator', b'callsign',
-    default=None,
+eh.configitem(
+    b'phabricator', b'callsign', default=None,
 )
-eh.configitem(b'phabricator', b'curlcmd',
-    default=None,
+eh.configitem(
+    b'phabricator', b'curlcmd', default=None,
 )
 # developer config: phabricator.repophid
-eh.configitem(b'phabricator', b'repophid',
-    default=None,
+eh.configitem(
+    b'phabricator', b'repophid', default=None,
 )
-eh.configitem(b'phabricator', b'url',
-    default=None,
+eh.configitem(
+    b'phabricator', b'url', default=None,
 )
-eh.configitem(b'phabsend', b'confirm',
-    default=False,
+eh.configitem(
+    b'phabsend', b'confirm', default=False,
 )
 
 colortable = {
@@ -119,15 +119,22 @@
 }
 
 _VCR_FLAGS = [
-    (b'', b'test-vcr', b'',
-     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
-       b', otherwise will mock all http requests using the specified vcr file.'
-       b' (ADVANCED)'
-     )),
+    (
+        b'',
+        b'test-vcr',
+        b'',
+        _(
+            b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
+            b', otherwise will mock all http requests using the specified vcr file.'
+            b' (ADVANCED)'
+        ),
+    ),
 ]
 
+
 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False):
     fullflags = flags + _VCR_FLAGS
+
     def hgmatcher(r1, r2):
         if r1.uri != r2.uri or r1.method != r2.method:
             return False
@@ -137,9 +144,7 @@
 
     def sanitiserequest(request):
         request.body = re.sub(
-            br'cli-[a-z0-9]+',
-            br'cli-hahayouwish',
-            request.body
+            br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body
         )
         return request
 
@@ -153,29 +158,46 @@
             cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None))
             if cassette:
                 import hgdemandimport
+
                 with hgdemandimport.deactivated():
                     import vcr as vcrmod
                     import vcr.stubs as stubs
+
                     vcr = vcrmod.VCR(
                         serializer=r'json',
                         before_record_request=sanitiserequest,
                         before_record_response=sanitiseresponse,
                         custom_patches=[
-                            (urlmod, r'httpconnection',
-                             stubs.VCRHTTPConnection),
-                            (urlmod, r'httpsconnection',
-                             stubs.VCRHTTPSConnection),
-                        ])
+                            (
+                                urlmod,
+                                r'httpconnection',
+                                stubs.VCRHTTPConnection,
+                            ),
+                            (
+                                urlmod,
+                                r'httpsconnection',
+                                stubs.VCRHTTPSConnection,
+                            ),
+                        ],
+                    )
                     vcr.register_matcher(r'hgmatcher', hgmatcher)
                     with vcr.use_cassette(cassette, match_on=[r'hgmatcher']):
                         return fn(*args, **kwargs)
             return fn(*args, **kwargs)
+
         inner.__name__ = fn.__name__
         inner.__doc__ = fn.__doc__
-        return command(name, fullflags, spec, helpcategory=helpcategory,
-                       optionalrepo=optionalrepo)(inner)
+        return command(
+            name,
+            fullflags,
+            spec,
+            helpcategory=helpcategory,
+            optionalrepo=optionalrepo,
+        )(inner)
+
     return decorate
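
vcrcommand() is a decorator factory: it appends the --test-vcr flag, pops the value back out of kwargs at call time, optionally enters a recording context, and copies __name__ and __doc__ onto the wrapper by hand. The same skeleton, using functools.wraps for the attribute copying (a sketch; the vcr wiring and command registration are elided):

    import functools

    def with_vcr_flag(fn):
        @functools.wraps(fn)  # copies __name__/__doc__, like the manual assignments
        def inner(*args, **kwargs):
            cassette = kwargs.pop('test_vcr', None)
            if cassette:
                pass  # real code records/replays HTTP via vcr.use_cassette(...)
            return fn(*args, **kwargs)
        return inner

    @with_vcr_flag
    def demo(x, **opts):
        """demo command"""
        return x

    assert demo(1, test_vcr='t.json') == 1 and demo.__doc__ == 'demo command'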
 
+
 def urlencodenested(params):
     """like urlencode, but works with nested parameters.
 
@@ -184,6 +206,7 @@
     urlencode. Note: the encoding is consistent with PHP's http_build_query.
     """
     flatparams = util.sortdict()
+
     def process(prefix, obj):
         if isinstance(obj, bool):
             obj = {True: b'true', False: b'false'}[obj]  # Python -> PHP form
@@ -197,9 +220,11 @@
                     process(b'%s[%s]' % (prefix, k), v)
                 else:
                     process(k, v)
+
     process(b'', params)
     return util.urlreq.urlencode(flatparams)
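
urlencodenested() flattens nested dicts and lists into PHP http_build_query-style bracket keys before urlencoding. A simplified str-based re-implementation showing the key shapes (assumes Python 3.7+ dict ordering):

    import urllib.parse

    def urlencodenested_sketch(params):
        flat = {}

        def process(prefix, obj):
            items = None
            if isinstance(obj, dict):
                items = obj.items()
            elif isinstance(obj, list):
                items = enumerate(obj)
            if items is None:
                flat[prefix] = obj
            else:
                for k, v in items:
                    process('%s[%s]' % (prefix, k) if prefix else str(k), v)

        process('', params)
        return urllib.parse.urlencode(flat)

    # {'a': {'b': ['x', 'y']}} -> a[b][0]=x&a[b][1]=y (percent-encoded)
    assert (urlencodenested_sketch({'a': {'b': ['x', 'y']}})
            == 'a%5Bb%5D%5B0%5D=x&a%5Bb%5D%5B1%5D=y')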
 
+
 def readurltoken(ui):
     """return conduit url, token and make sure they exist
 
@@ -208,8 +233,9 @@
     """
     url = ui.config(b'phabricator', b'url')
     if not url:
-        raise error.Abort(_(b'config %s.%s is required')
-                          % (b'phabricator', b'url'))
+        raise error.Abort(
+            _(b'config %s.%s is required') % (b'phabricator', b'url')
+        )
 
     res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
     token = None
@@ -222,11 +248,13 @@
         token = auth.get(b'phabtoken')
 
     if not token:
-        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
-                            % (url,))
+        raise error.Abort(
+            _(b'Can\'t find conduit token associated to %s') % (url,)
+        )
 
     return url, token
 
+
 def callconduit(ui, name, params):
     """call Conduit API, params is a dict. return json.loads result, or None"""
     host, token = readurltoken(ui)
@@ -237,8 +265,9 @@
     data = urlencodenested(params)
     curlcmd = ui.config(b'phabricator', b'curlcmd')
     if curlcmd:
-        sin, sout = procutil.popen2(b'%s -d @- %s'
-                                    % (curlcmd, procutil.shellquote(url)))
+        sin, sout = procutil.popen2(
+            b'%s -d @- %s' % (curlcmd, procutil.shellquote(url))
+        )
         sin.write(data)
         sin.close()
         body = sout.read()
@@ -249,17 +278,21 @@
             body = rsp.read()
     ui.debug(b'Conduit Response: %s\n' % body)
     parsed = pycompat.rapply(
-        lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode)
+        lambda x: encoding.unitolocal(x)
+        if isinstance(x, pycompat.unicode)
         else x,
         # json.loads only accepts bytes from py3.6+
-        json.loads(encoding.unifromlocal(body))
+        json.loads(encoding.unifromlocal(body)),
     )
     if parsed.get(b'error_code'):
-        msg = (_(b'Conduit Error (%s): %s')
-               % (parsed[b'error_code'], parsed[b'error_info']))
+        msg = _(b'Conduit Error (%s): %s') % (
+            parsed[b'error_code'],
+            parsed[b'error_info'],
+        )
         raise error.Abort(msg)
     return parsed[b'result']
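
callconduit() decodes the HTTP body so json.loads accepts it, then walks the parsed tree re-encoding every unicode string back to local bytes. The recursive-apply pattern it leans on (sketch):

    import json

    def rapply_sketch(f, obj):
        if isinstance(obj, dict):
            return {rapply_sketch(f, k): rapply_sketch(f, v)
                    for k, v in obj.items()}
        if isinstance(obj, list):
            return [rapply_sketch(f, x) for x in obj]
        return f(obj)

    tolocal = lambda x: x.encode('utf-8') if isinstance(x, str) else x
    assert (rapply_sketch(tolocal, json.loads('{"result": ["ok", 1]}'))
            == {b'result': [b'ok', 1]})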
 
+
 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True)
 def debugcallconduit(ui, repo, name):
     """call Conduit API
@@ -270,18 +303,21 @@
     # json.loads only accepts bytes from 3.6+
     rawparams = encoding.unifromlocal(ui.fin.read())
     # json.loads only returns unicode strings
-    params = pycompat.rapply(lambda x:
-        encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x,
-        json.loads(rawparams)
+    params = pycompat.rapply(
+        lambda x: encoding.unitolocal(x)
+        if isinstance(x, pycompat.unicode)
+        else x,
+        json.loads(rawparams),
     )
     # json.dumps only accepts unicode strings
-    result = pycompat.rapply(lambda x:
-        encoding.unifromlocal(x) if isinstance(x, bytes) else x,
-        callconduit(ui, name, params)
+    result = pycompat.rapply(
+        lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x,
+        callconduit(ui, name, params),
     )
     s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': '))
     ui.write(b'%s\n' % encoding.unitolocal(s))
 
+
 def getrepophid(repo):
     """given callsign, return repository PHID or None"""
     # developer config: phabricator.repophid
@@ -291,17 +327,23 @@
     callsign = repo.ui.config(b'phabricator', b'callsign')
     if not callsign:
         return None
-    query = callconduit(repo.ui, b'diffusion.repository.search',
-                        {b'constraints': {b'callsigns': [callsign]}})
+    query = callconduit(
+        repo.ui,
+        b'diffusion.repository.search',
+        {b'constraints': {b'callsigns': [callsign]}},
+    )
     if len(query[b'data']) == 0:
         return None
     repophid = query[b'data'][0][b'phid']
     repo.ui.setconfig(b'phabricator', b'repophid', repophid)
     return repophid
 
+
 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
 _differentialrevisiondescre = re.compile(
-    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
+    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
+)
+
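
_differentialrevisiondescre picks the trailer line out of a commit message, capturing both the full URL and the numeric revision id. For example (URL illustrative):

    import re

    descre = re.compile(
        br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M
    )

    msg = b'fix a bug\n\nDifferential Revision: https://phab.example.com/D123\n'
    m = descre.search(msg)
    assert m.group('id') == b'123'
    assert m.group('url') == b'https://phab.example.com/D123'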
 
 def getoldnodedrevmap(repo, nodelist):
     """find previous nodes that has been sent to Phabricator
@@ -324,8 +366,8 @@
     unfi = repo.unfiltered()
     nodemap = unfi.changelog.nodemap
 
-    result = {} # {node: (oldnode?, lastdiff?, drev)}
-    toconfirm = {} # {node: (force, {precnode}, drev)}
+    result = {}  # {node: (oldnode?, lastdiff?, drev)}
+    toconfirm = {}  # {node: (force, {precnode}, drev)}
     for node in nodelist:
         ctx = unfi[node]
         # For tags like "D123", put them into "toconfirm" to verify later
@@ -347,13 +389,14 @@
     # Phabricator, and expect precursors overlap with it.
     if toconfirm:
         drevs = [drev for force, precs, drev in toconfirm.values()]
-        alldiffs = callconduit(unfi.ui, b'differential.querydiffs',
-                               {b'revisionIDs': drevs})
-        getnode = lambda d: bin(
-            getdiffmeta(d).get(b'node', b'')) or None
+        alldiffs = callconduit(
+            unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs}
+        )
+        getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None
         for newnode, (force, precset, drev) in toconfirm.items():
-            diffs = [d for d in alldiffs.values()
-                     if int(d[b'revisionID']) == drev]
+            diffs = [
+                d for d in alldiffs.values() if int(d[b'revisionID']) == drev
+            ]
 
             # "precursors" as known by Phabricator
             phprecset = set(getnode(d) for d in diffs)
@@ -362,10 +405,22 @@
             # and force is not set (when commit message says nothing)
             if not force and not bool(phprecset & precset):
                 tagname = b'D%d' % drev
-                tags.tag(repo, tagname, nullid, message=None, user=None,
-                         date=None, local=True)
-                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
-                               b'Differential history\n') % drev)
+                tags.tag(
+                    repo,
+                    tagname,
+                    nullid,
+                    message=None,
+                    user=None,
+                    date=None,
+                    local=True,
+                )
+                unfi.ui.warn(
+                    _(
+                        b'D%s: local tag removed - does not match '
+                        b'Differential history\n'
+                    )
+                    % drev
+                )
                 continue
 
             # Find the last node using Phabricator metadata, and make sure it
@@ -381,14 +436,17 @@
 
     return result
 
+
 def getdiff(ctx, diffopts):
     """plain-text diff without header (user, commit message, etc)"""
     output = util.stringio()
-    for chunk, _label in patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
-                                      None, opts=diffopts):
+    for chunk, _label in patch.diffui(
+        ctx.repo(), ctx.p1().node(), ctx.node(), None, opts=diffopts
+    ):
         output.write(chunk)
     return output.getvalue()
 
+
 def creatediff(ctx):
     """create a Differential Diff"""
     repo = ctx.repo()
@@ -402,40 +460,52 @@
         raise error.Abort(_(b'cannot create diff for %s') % ctx)
     return diff
 
+
 def writediffproperties(ctx, diff):
     """write metadata to diff so patches could be applied losslessly"""
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'hg:meta',
-        b'data': templatefilters.json({
-            b'user': ctx.user(),
-            b'date': b'%d %d' % ctx.date(),
-            b'branch': ctx.branch(),
-            b'node': ctx.hex(),
-            b'parent': ctx.p1().hex(),
-        }),
+        b'data': templatefilters.json(
+            {
+                b'user': ctx.user(),
+                b'date': b'%d %d' % ctx.date(),
+                b'branch': ctx.branch(),
+                b'node': ctx.hex(),
+                b'parent': ctx.p1().hex(),
+            }
+        ),
     }
     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
 
     params = {
         b'diff_id': diff[b'id'],
         b'name': b'local:commits',
-        b'data': templatefilters.json({
-            ctx.hex(): {
-                b'author': stringutil.person(ctx.user()),
-                b'authorEmail': stringutil.email(ctx.user()),
-                b'time': int(ctx.date()[0]),
-                b'commit': ctx.hex(),
-                b'parents': [ctx.p1().hex()],
-                b'branch': ctx.branch(),
-            },
-        }),
+        b'data': templatefilters.json(
+            {
+                ctx.hex(): {
+                    b'author': stringutil.person(ctx.user()),
+                    b'authorEmail': stringutil.email(ctx.user()),
+                    b'time': int(ctx.date()[0]),
+                    b'commit': ctx.hex(),
+                    b'parents': [ctx.p1().hex()],
+                    b'branch': ctx.branch(),
+                },
+            }
+        ),
     }
     callconduit(ctx.repo().ui, b'differential.setdiffproperty', params)
 
-def createdifferentialrevision(ctx, revid=None, parentrevphid=None,
-                               oldnode=None, olddiff=None, actions=None,
-                               comment=None):
+
+def createdifferentialrevision(
+    ctx,
+    revid=None,
+    parentrevphid=None,
+    oldnode=None,
+    olddiff=None,
+    actions=None,
+    comment=None,
+):
     """create or update a Differential Revision
 
     If revid is None, create a new Differential Revision, otherwise update
@@ -450,7 +520,7 @@
     if oldnode:
         diffopts = mdiff.diffopts(git=True, context=32767)
         oldctx = repo.unfiltered()[oldnode]
-        neednewdiff = (getdiff(ctx, diffopts) != getdiff(oldctx, diffopts))
+        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
     else:
         neednewdiff = True
 
@@ -470,16 +540,18 @@
 
     # Set the parent Revision every time, so commit re-ordering is picked-up
     if parentrevphid:
-        transactions.append({b'type': b'parents.set',
-                             b'value': [parentrevphid]})
+        transactions.append(
+            {b'type': b'parents.set', b'value': [parentrevphid]}
+        )
 
     if actions:
         transactions += actions
 
     # Parse commit message and update related fields.
     desc = ctx.description()
-    info = callconduit(repo.ui, b'differential.parsecommitmessage',
-                       {b'corpus': desc})
+    info = callconduit(
+        repo.ui, b'differential.parsecommitmessage', {b'corpus': desc}
+    )
     for k, v in info[b'fields'].items():
         if k in [b'title', b'summary', b'testPlan']:
             transactions.append({b'type': k, b'value': v})
@@ -495,6 +567,7 @@
 
     return revision, diff
 
+
 def userphids(repo, names):
     """convert user names to PHIDs"""
     names = [name.lower() for name in names]
@@ -506,20 +579,30 @@
     resolved = set(entry[b'fields'][b'username'].lower() for entry in data)
     unresolved = set(names) - resolved
     if unresolved:
-        raise error.Abort(_(b'unknown username: %s')
-                          % b' '.join(sorted(unresolved)))
+        raise error.Abort(
+            _(b'unknown username: %s') % b' '.join(sorted(unresolved))
+        )
     return [entry[b'phid'] for entry in data]
 
-@vcrcommand(b'phabsend',
-         [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
-          (b'', b'amend', True, _(b'update commit messages')),
-          (b'', b'reviewer', [], _(b'specify reviewers')),
-          (b'', b'blocker', [], _(b'specify blocking reviewers')),
-          (b'm', b'comment', b'',
-           _(b'add a comment to Revisions with new/updated Diffs')),
-          (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
-         _(b'REV [OPTIONS]'),
-         helpcategory=command.CATEGORY_IMPORT_EXPORT)
+
+@vcrcommand(
+    b'phabsend',
+    [
+        (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
+        (b'', b'amend', True, _(b'update commit messages')),
+        (b'', b'reviewer', [], _(b'specify reviewers')),
+        (b'', b'blocker', [], _(b'specify blocking reviewers')),
+        (
+            b'm',
+            b'comment',
+            b'',
+            _(b'add a comment to Revisions with new/updated Diffs'),
+        ),
+        (b'', b'confirm', None, _(b'ask for confirmation before sending')),
+    ],
+    _(b'REV [OPTIONS]'),
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def phabsend(ui, repo, *revs, **opts):
     """upload changesets to Phabricator
 
@@ -573,14 +656,14 @@
     if reviewers:
         phids.extend(userphids(repo, reviewers))
     if blockers:
-        phids.extend(map(
-            lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers)
-        ))
+        phids.extend(
+            map(lambda phid: b'blocking(%s)' % phid, userphids(repo, blockers))
+        )
     if phids:
         actions.append({b'type': b'reviewers.add', b'value': phids})
 
-    drevids = [] # [int]
-    diffmap = {} # {newnode: diff}
+    drevids = []  # [int]
+    diffmap = {}  # {newnode: diff}
 
     # Send patches one by one so we know their Differential Revision PHIDs and
     # can provide dependency relationship
@@ -594,8 +677,14 @@
         if oldnode != ctx.node() or opts.get(b'amend'):
             # Create or update Differential Revision
             revision, diff = createdifferentialrevision(
-                ctx, revid, lastrevphid, oldnode, olddiff, actions,
-                opts.get(b'comment'))
+                ctx,
+                revid,
+                lastrevphid,
+                oldnode,
+                olddiff,
+                actions,
+                opts.get(b'comment'),
+            )
             diffmap[ctx.node()] = diff
             newrevid = int(revision[b'object'][b'id'])
             newrevphid = revision[b'object'][b'phid']
@@ -609,8 +698,15 @@
             m = _differentialrevisiondescre.search(ctx.description())
             if not m or int(m.group(r'id')) != newrevid:
                 tagname = b'D%d' % newrevid
-                tags.tag(repo, tagname, ctx.node(), message=None, user=None,
-                         date=None, local=True)
+                tags.tag(
+                    repo,
+                    tagname,
+                    ctx.node(),
+                    message=None,
+                    user=None,
+                    date=None,
+                    local=True,
+                )
         else:
             # Nothing changed. But still set "newrevphid" so the next revision
             # could depend on this one and "newrevid" for the summary line.
@@ -619,15 +715,19 @@
             action = b'skipped'
 
         actiondesc = ui.label(
-            {b'created': _(b'created'),
-             b'skipped': _(b'skipped'),
-             b'updated': _(b'updated')}[action],
-            b'phabricator.action.%s' % action)
+            {
+                b'created': _(b'created'),
+                b'skipped': _(b'skipped'),
+                b'updated': _(b'updated'),
+            }[action],
+            b'phabricator.action.%s' % action,
+        )
         drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
         nodedesc = ui.label(bytes(ctx), b'phabricator.node')
         desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
-        ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
-                                             desc))
+        ui.write(
+            _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc)
+        )
         drevids.append(newrevid)
         lastrevphid = newrevphid
 
@@ -637,7 +737,7 @@
         drevs = callconduit(ui, b'differential.query', {b'ids': drevids})
         with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
             wnode = unfi[b'.'].node()
-            mapping = {} # {oldnode: [newnode]}
+            mapping = {}  # {oldnode: [newnode]}
             for i, rev in enumerate(revs):
                 old = unfi[rev]
                 drevid = drevids[i]
@@ -646,16 +746,24 @@
                 # Make sure the commit message contains "Differential Revision"
                 if old.description() != newdesc:
                     if old.phase() == phases.public:
-                        ui.warn(_("warning: not updating public commit %s\n")
-                                % scmutil.formatchangeid(old))
+                        ui.warn(
+                            _("warning: not updating public commit %s\n")
+                            % scmutil.formatchangeid(old)
+                        )
                         continue
                     parents = [
                         mapping.get(old.p1().node(), (old.p1(),))[0],
                         mapping.get(old.p2().node(), (old.p2(),))[0],
                     ]
                     new = context.metadataonlyctx(
-                        repo, old, parents=parents, text=newdesc,
-                        user=old.user(), date=old.date(), extra=old.extra())
+                        repo,
+                        old,
+                        parents=parents,
+                        text=newdesc,
+                        user=old.user(),
+                        date=old.date(),
+                        extra=old.extra(),
+                    )
 
                     newnode = new.commit()
 
@@ -670,17 +778,32 @@
                 # Remove local tags since they're no longer necessary
                 tagname = b'D%d' % drevid
                 if tagname in repo.tags():
-                    tags.tag(repo, tagname, nullid, message=None, user=None,
-                             date=None, local=True)
+                    tags.tag(
+                        repo,
+                        tagname,
+                        nullid,
+                        message=None,
+                        user=None,
+                        date=None,
+                        local=True,
+                    )
             scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
             if wnode in mapping:
                 unfi.setparents(mapping[wnode][0])
 
+
 # Map from "hg:meta" keys to header understood by "hg import". The order is
 # consistent with "hg export" output.
-_metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'),
-                              (b'branch', b'Branch'), (b'node', b'Node ID'),
-                              (b'parent', b'Parent ')])
+_metanamemap = util.sortdict(
+    [
+        (b'user', b'User'),
+        (b'date', b'Date'),
+        (b'branch', b'Branch'),
+        (b'node', b'Node ID'),
+        (b'parent', b'Parent '),
+    ]
+)
+
 
 def _confirmbeforesend(repo, revs, oldmap):
     url, token = readurltoken(repo.ui)
@@ -694,62 +817,81 @@
         else:
             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
 
-        ui.write(_(b'%s - %s: %s\n')
-                 % (drevdesc,
-                    ui.label(bytes(ctx), b'phabricator.node'),
-                    ui.label(desc, b'phabricator.desc')))
+        ui.write(
+            _(b'%s - %s: %s\n')
+            % (
+                drevdesc,
+                ui.label(bytes(ctx), b'phabricator.node'),
+                ui.label(desc, b'phabricator.desc'),
+            )
+        )
 
-    if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
-                         b'$$ &Yes $$ &No') % url):
+    if ui.promptchoice(
+        _(b'Send the above changes to %s (yn)?' b'$$ &Yes $$ &No') % url
+    ):
         return False
 
     return True
 
-_knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
-                     b'abandoned'}
+
+_knownstatusnames = {
+    b'accepted',
+    b'needsreview',
+    b'needsrevision',
+    b'closed',
+    b'abandoned',
+}
+
 
 def _getstatusname(drev):
     """get normalized status name from a Differential Revision"""
     return drev[b'statusName'].replace(b' ', b'').lower()
 
+
 # Small language to specify differential revisions. Support symbols: (), :X,
 # +, and -.
 
 _elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
-    b'(':      (12, None, (b'group', 1, b')'), None, None),
-    b':':      (8, None, (b'ancestors', 8), None, None),
-    b'&':      (5,  None, None, (b'and_', 5), None),
-    b'+':      (4,  None, None, (b'add', 4), None),
-    b'-':      (4,  None, None, (b'sub', 4), None),
-    b')':      (0,  None, None, None, None),
+    b'(': (12, None, (b'group', 1, b')'), None, None),
+    b':': (8, None, (b'ancestors', 8), None, None),
+    b'&': (5, None, None, (b'and_', 5), None),
+    b'+': (4, None, None, (b'add', 4), None),
+    b'-': (4, None, None, (b'sub', 4), None),
+    b')': (0, None, None, None, None),
     b'symbol': (0, b'symbol', None, None, None),
-    b'end':    (0, None, None, None, None),
+    b'end': (0, None, None, None, None),
 }
 
+
 def _tokenize(text):
-    view = memoryview(text) # zero-copy slice
+    view = memoryview(text)  # zero-copy slice
     special = b'():+-& '
     pos = 0
     length = len(text)
     while pos < length:
-        symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
-                                              pycompat.iterbytestr(view[pos:])))
+        symbol = b''.join(
+            itertools.takewhile(
+                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
+            )
+        )
         if symbol:
             yield (b'symbol', symbol, pos)
             pos += len(symbol)
-        else: # special char, ignore space
+        else:  # special char, ignore space
             if text[pos] != b' ':
                 yield (text[pos], None, pos)
             pos += 1
     yield (b'end', None, pos)
 
+
 def _parse(text):
     tree, pos = parser.parser(_elements).parse(_tokenize(text))
     if pos != len(text):
         raise error.ParseError(b'invalid token', pos)
     return tree
 
+
 def _parsedrev(symbol):
     """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
     if symbol.startswith(b'D') and symbol[1:].isdigit():
@@ -757,6 +899,7 @@
     if symbol.isdigit():
         return int(symbol)
 
+
 def _prefetchdrevs(tree):
     """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
     drevs = set()
@@ -778,6 +921,7 @@
             ancestordrevs.update(a)
     return drevs, ancestordrevs
 
+
 def querydrev(repo, spec):
     """return a list of "Differential Revision" dicts
 
@@ -820,6 +964,7 @@
             "sourcePath": null
         }
     """
+
     def fetch(params):
         """params -> single drev or None"""
         key = (params.get(b'ids') or params.get(b'phids') or [None])[0]
@@ -831,8 +976,9 @@
             prefetched[drev[b'phid']] = drev
             prefetched[int(drev[b'id'])] = drev
         if key not in prefetched:
-            raise error.Abort(_(b'cannot get Differential Revision %r')
-                              % params)
+            raise error.Abort(
+                _(b'cannot get Differential Revision %r') % params
+            )
         return prefetched[key]
 
     def getstack(topdrevids):
@@ -855,7 +1001,7 @@
         return smartset.baseset(result)
 
     # Initialize prefetch cache
-    prefetched = {} # {id or phid: drev}
+    prefetched = {}  # {id or phid: drev}
 
     tree = _parse(spec)
     drevs, ancestordrevs = _prefetchdrevs(tree)
@@ -879,8 +1025,11 @@
             if drev:
                 return smartset.baseset([drev])
             elif tree[1] in _knownstatusnames:
-                drevs = [r for r in validids
-                         if _getstatusname(prefetched[r]) == tree[1]]
+                drevs = [
+                    r
+                    for r in validids
+                    if _getstatusname(prefetched[r]) == tree[1]
+                ]
                 return smartset.baseset(drevs)
             else:
                 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
@@ -896,6 +1045,7 @@
 
     return [prefetched[r] for r in walk(tree)]
 
+
 def getdescfromdrev(drev):
     """get description (commit message) from "Differential Revision"
 
@@ -910,6 +1060,7 @@
     uri = b'Differential Revision: %s' % drev[b'uri']
     return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
 
+
 def getdiffmeta(diff):
     """get commit metadata (date, node, user, p1) from a diff object
 
@@ -954,8 +1105,10 @@
             commit = sorted(props[b'local:commits'].values())[0]
             meta = {}
             if b'author' in commit and b'authorEmail' in commit:
-                meta[b'user'] = b'%s <%s>' % (commit[b'author'],
-                                              commit[b'authorEmail'])
+                meta[b'user'] = b'%s <%s>' % (
+                    commit[b'author'],
+                    commit[b'authorEmail'],
+                )
             if b'time' in commit:
                 meta[b'date'] = b'%d 0' % int(commit[b'time'])
             if b'branch' in commit:
@@ -975,6 +1128,7 @@
         meta[b'parent'] = diff[b'sourceControlBaseRevision']
     return meta
 
+
 def readpatch(repo, drevs, write):
     """generate plain-text patch readable by 'hg import'
 
@@ -990,8 +1144,9 @@
         repo.ui.note(_(b'reading D%s\n') % drev[b'id'])
 
         diffid = max(int(v) for v in drev[b'diffs'])
-        body = callconduit(repo.ui, b'differential.getrawdiff',
-                           {b'diffID': diffid})
+        body = callconduit(
+            repo.ui, b'differential.getrawdiff', {b'diffID': diffid}
+        )
         desc = getdescfromdrev(drev)
         header = b'# HG changeset patch\n'
 
@@ -1006,10 +1161,13 @@
         content = b'%s%s\n%s' % (header, desc, body)
         write(content)
 
-@vcrcommand(b'phabread',
-         [(b'', b'stack', False, _(b'read dependencies'))],
-         _(b'DREVSPEC [OPTIONS]'),
-         helpcategory=command.CATEGORY_IMPORT_EXPORT)
+
+@vcrcommand(
+    b'phabread',
+    [(b'', b'stack', False, _(b'read dependencies'))],
+    _(b'DREVSPEC [OPTIONS]'),
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def phabread(ui, repo, spec, **opts):
     """print patches from Phabricator suitable for importing
 
@@ -1035,14 +1193,19 @@
     drevs = querydrev(repo, spec)
     readpatch(repo, drevs, ui.write)
 
-@vcrcommand(b'phabupdate',
-         [(b'', b'accept', False, _(b'accept revisions')),
-          (b'', b'reject', False, _(b'reject revisions')),
-          (b'', b'abandon', False, _(b'abandon revisions')),
-          (b'', b'reclaim', False, _(b'reclaim revisions')),
-          (b'm', b'comment', b'', _(b'comment on the last revision')),
-          ], _(b'DREVSPEC [OPTIONS]'),
-          helpcategory=command.CATEGORY_IMPORT_EXPORT)
+
+@vcrcommand(
+    b'phabupdate',
+    [
+        (b'', b'accept', False, _(b'accept revisions')),
+        (b'', b'reject', False, _(b'reject revisions')),
+        (b'', b'abandon', False, _(b'abandon revisions')),
+        (b'', b'reclaim', False, _(b'reclaim revisions')),
+        (b'm', b'comment', b'', _(b'comment on the last revision')),
+    ],
+    _(b'DREVSPEC [OPTIONS]'),
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def phabupdate(ui, repo, spec, **opts):
     """update Differential Revision in batch
 
@@ -1062,10 +1225,13 @@
         if i + 1 == len(drevs) and opts.get(b'comment'):
             actions.append({b'type': b'comment', b'value': opts[b'comment']})
         if actions:
-            params = {b'objectIdentifier': drev[b'phid'],
-                      b'transactions': actions}
+            params = {
+                b'objectIdentifier': drev[b'phid'],
+                b'transactions': actions,
+            }
             callconduit(ui, b'differential.revision.edit', params)
 
+
 @eh.templatekeyword(b'phabreview', requires={b'ctx'})
 def template_review(context, mapping):
     """:phabreview: Object describing the review for this changeset.
@@ -1074,10 +1240,9 @@
     ctx = context.resource(mapping, b'ctx')
     m = _differentialrevisiondescre.search(ctx.description())
     if m:
-        return templateutil.hybriddict({
-            b'url': m.group(r'url'),
-            b'id': b"D%s" % m.group(r'id'),
-        })
+        return templateutil.hybriddict(
+            {b'url': m.group(r'url'), b'id': b"D%s" % m.group(r'id'),}
+        )
     else:
         tags = ctx.repo().nodetags(ctx.node())
         for t in tags:
@@ -1087,8 +1252,5 @@
                     url += b'/'
                 url += t
 
-                return templateutil.hybriddict({
-                    b'url': url,
-                    b'id': t,
-                })
+                return templateutil.hybriddict({b'url': url, b'id': t,})
     return None
--- a/hgext/purge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/purge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -42,17 +42,29 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('purge|clean',
-    [('a', 'abort-on-err', None, _('abort if an error occurs')),
-    ('',  'all', None, _('purge ignored files too')),
-    ('',  'dirs', None, _('purge empty directories')),
-    ('',  'files', None, _('purge files')),
-    ('p', 'print', None, _('print filenames instead of deleting them')),
-    ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
-                            ' (implies -p/--print)')),
-    ] + cmdutil.walkopts,
+
+@command(
+    'purge|clean',
+    [
+        ('a', 'abort-on-err', None, _('abort if an error occurs')),
+        ('', 'all', None, _('purge ignored files too')),
+        ('', 'dirs', None, _('purge empty directories')),
+        ('', 'files', None, _('purge files')),
+        ('p', 'print', None, _('print filenames instead of deleting them')),
+        (
+            '0',
+            'print0',
+            None,
+            _(
+                'end filenames with NUL, for use with xargs'
+                ' (implies -p/--print)'
+            ),
+        ),
+    ]
+    + cmdutil.walkopts,
     _('hg purge [OPTION]... [DIR]...'),
-    helpcategory=command.CATEGORY_MAINTENANCE)
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def purge(ui, repo, *dirs, **opts):
     '''removes files not tracked by Mercurial
 
@@ -89,7 +101,7 @@
     eol = '\n'
     if opts.get('print0'):
         eol = '\0'
-        act = False # --print0 implies --print
+        act = False  # --print0 implies --print
 
     removefiles = opts.get('files')
     removedirs = opts.get('dirs')
@@ -101,10 +113,14 @@
     match = scmutil.match(repo[None], dirs, opts)
 
     paths = mergemod.purge(
-        repo, match, ignored=opts.get('all', False),
-        removeemptydirs=removedirs, removefiles=removefiles,
+        repo,
+        match,
+        ignored=opts.get('all', False),
+        removeemptydirs=removedirs,
+        removefiles=removefiles,
         abortonerror=opts.get('abort_on_err'),
-        noop=not act)
+        noop=not act,
+    )
 
     for path in paths:
         if not act:
--- a/hgext/rebase.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/rebase.py	Sun Oct 06 09:45:02 2019 -0400
@@ -70,9 +70,11 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def _nothingtorebase():
     return 1
 
+
 def _savegraft(ctx, extra):
     s = ctx.extra().get('source', None)
     if s is not None:
@@ -81,18 +83,27 @@
     if s is not None:
         extra['intermediate-source'] = s
 
+
 def _savebranch(ctx, extra):
     extra['branch'] = ctx.branch()
 
+
 def _destrebase(repo, sourceset, destspace=None):
     """small wrapper around destmerge to pass the right extra args
 
     Please wrap destutil.destmerge instead."""
-    return destutil.destmerge(repo, action='rebase', sourceset=sourceset,
-                              onheadcheck=False, destspace=destspace)
+    return destutil.destmerge(
+        repo,
+        action='rebase',
+        sourceset=sourceset,
+        onheadcheck=False,
+        destspace=destspace,
+    )
+
 
 revsetpredicate = registrar.revsetpredicate()
 
+
 @revsetpredicate('_destrebase')
 def _revsetdestrebase(repo, subset, x):
     # ``_rebasedefaultdest()``
@@ -106,6 +117,7 @@
         sourceset = revset.getset(repo, smartset.fullreposet(repo), x)
     return subset & smartset.baseset([_destrebase(repo, sourceset)])
 
+
 @revsetpredicate('_destautoorphanrebase')
 def _revsetdestautoorphanrebase(repo, subset, x):
     # ``_destautoorphanrebase()``
@@ -122,15 +134,16 @@
     dests = destutil.orphanpossibledestination(repo, src)
     if len(dests) > 1:
         raise error.Abort(
-            _("ambiguous automatic rebase: %r could end up on any of %r") % (
-                src, dests))
+            _("ambiguous automatic rebase: %r could end up on any of %r")
+            % (src, dests)
+        )
     # We have zero or one destination, so we can just return here.
     return smartset.baseset(dests)
 
+
 def _ctxdesc(ctx):
     """short description for a context"""
-    desc = '%d:%s "%s"' % (ctx.rev(), ctx,
-                           ctx.description().split('\n', 1)[0])
+    desc = '%d:%s "%s"' % (ctx.rev(), ctx, ctx.description().split('\n', 1)[0])
     repo = ctx.repo()
     names = []
     for nsname, ns in repo.names.iteritems():
@@ -141,8 +154,10 @@
         desc += ' (%s)' % ' '.join(names)
     return desc
 
+
 class rebaseruntime(object):
     """This class is a container for rebase runtime state"""
+
     def __init__(self, repo, ui, inmemory=False, opts=None):
         if opts is None:
             opts = {}
@@ -174,7 +189,7 @@
         self.collapsemsg = cmdutil.logmessage(ui, opts)
         self.date = opts.get('date', None)
 
-        e = opts.get('extrafn') # internal, used by e.g. hgsubversion
+        e = opts.get('extrafn')  # internal, used by e.g. hgsubversion
         self.extrafns = [_savegraft]
         if e:
             self.extrafns = [e]
@@ -197,8 +212,12 @@
     def storestatus(self, tr=None):
         """Store the current status to allow recovery"""
         if tr:
-            tr.addfilegenerator('rebasestate', ('rebasestate',),
-                                self._writestatus, location='plain')
+            tr.addfilegenerator(
+                'rebasestate',
+                ('rebasestate',),
+                self._writestatus,
+                location='plain',
+            )
         else:
             with self.repo.vfs("rebasestate", "w") as f:
                 self._writestatus(f)
@@ -247,8 +266,14 @@
         self.prepared = True
         repo = self.repo
         assert repo.filtername is None
-        data = {'keepbranches': None, 'collapse': None, 'activebookmark': None,
-                'external': nullrev, 'keep': None, 'originalwd': None}
+        data = {
+            'keepbranches': None,
+            'collapse': None,
+            'activebookmark': None,
+            'external': nullrev,
+            'keep': None,
+            'originalwd': None,
+        }
         legacydest = None
         state = {}
         destmap = {}
@@ -307,8 +332,10 @@
                     skipped.add(old)
                 seen.add(new)
         data['skipped'] = skipped
-        repo.ui.debug('computed skipped revs: %s\n' %
-                        (' '.join('%d' % r for r in sorted(skipped)) or ''))
+        repo.ui.debug(
+            'computed skipped revs: %s\n'
+            % (' '.join('%d' % r for r in sorted(skipped)) or '')
+        )
 
         return data
 
@@ -322,10 +349,11 @@
         if not self.ui.configbool('experimental', 'rebaseskipobsolete'):
             return
         obsoleteset = set(obsoleterevs)
-        (self.obsoletenotrebased,
-         self.obsoletewithoutsuccessorindestination,
-         obsoleteextinctsuccessors) = _computeobsoletenotrebased(
-             self.repo, obsoleteset, destmap)
+        (
+            self.obsoletenotrebased,
+            self.obsoletewithoutsuccessorindestination,
+            obsoleteextinctsuccessors,
+        ) = _computeobsoletenotrebased(self.repo, obsoleteset, destmap)
         skippedset = set(self.obsoletenotrebased)
         skippedset.update(self.obsoletewithoutsuccessorindestination)
         skippedset.update(obsoleteextinctsuccessors)
@@ -339,8 +367,12 @@
             if isabort:
                 clearstatus(self.repo)
                 clearcollapsemsg(self.repo)
-                self.repo.ui.warn(_('rebase aborted (no revision is removed,'
-                                    ' only broken state is cleared)\n'))
+                self.repo.ui.warn(
+                    _(
+                        'rebase aborted (no revision is removed,'
+                        ' only broken state is cleared)\n'
+                    )
+                )
                 return 0
             else:
                 msg = _('cannot continue inconsistent rebase')
@@ -357,13 +389,16 @@
 
         rebaseset = destmap.keys()
         allowunstable = obsolete.isenabled(self.repo, obsolete.allowunstableopt)
-        if (not (self.keepf or allowunstable)
-              and self.repo.revs('first(children(%ld) - %ld)',
-                                 rebaseset, rebaseset)):
+        if not (self.keepf or allowunstable) and self.repo.revs(
+            'first(children(%ld) - %ld)', rebaseset, rebaseset
+        ):
             raise error.Abort(
-                _("can't remove original changesets with"
-                  " unrebased descendants"),
-                hint=_('use --keep to keep original changesets'))
+                _(
+                    "can't remove original changesets with"
+                    " unrebased descendants"
+                ),
+                hint=_('use --keep to keep original changesets'),
+            )
 
         result = buildstate(self.repo, destmap, self.collapsef)
 
@@ -374,19 +409,22 @@
 
         for root in self.repo.set('roots(%ld)', rebaseset):
             if not self.keepf and not root.mutable():
-                raise error.Abort(_("can't rebase public changeset %s")
-                                  % root,
-                                  hint=_("see 'hg help phases' for details"))
+                raise error.Abort(
+                    _("can't rebase public changeset %s") % root,
+                    hint=_("see 'hg help phases' for details"),
+                )
 
         (self.originalwd, self.destmap, self.state) = result
         if self.collapsef:
             dests = set(self.destmap.values())
             if len(dests) != 1:
                 raise error.Abort(
-                    _('--collapse does not work with multiple destinations'))
+                    _('--collapse does not work with multiple destinations')
+                )
             destrev = next(iter(dests))
-            destancestors = self.repo.changelog.ancestors([destrev],
-                                                          inclusive=True)
+            destancestors = self.repo.changelog.ancestors(
+                [destrev], inclusive=True
+            )
             self.external = externalparent(self.repo, self.state, destancestors)
 
         for destrev in sorted(set(destmap.values())):
@@ -399,14 +437,18 @@
     def _assignworkingcopy(self):
         if self.inmemory:
             from mercurial.context import overlayworkingctx
+
             self.wctx = overlayworkingctx(self.repo)
             self.repo.ui.debug("rebasing in-memory\n")
         else:
             self.wctx = self.repo[None]
             self.repo.ui.debug("rebasing on disk\n")
-        self.repo.ui.log("rebase",
-                         "using in-memory rebase: %r\n", self.inmemory,
-                         rebase_imm_used=self.inmemory)
+        self.repo.ui.log(
+            "rebase",
+            "using in-memory rebase: %r\n",
+            self.inmemory,
+            rebase_imm_used=self.inmemory,
+        )
 
     def _performrebase(self, tr):
         self._assignworkingcopy()
@@ -421,8 +463,9 @@
                 for rev in self.state:
                     branches.add(repo[rev].branch())
                     if len(branches) > 1:
-                        raise error.Abort(_('cannot collapse multiple named '
-                            'branches'))
+                        raise error.Abort(
+                            _('cannot collapse multiple named ' 'branches')
+                        )
 
         # Calculate self.obsoletenotrebased
         obsrevs = _filterobsoleterevs(self.repo, self.state)
@@ -442,12 +485,16 @@
             self.storestatus(tr)
 
         cands = [k for k, v in self.state.iteritems() if v == revtodo]
-        p = repo.ui.makeprogress(_("rebasing"), unit=_('changesets'),
-                                 total=len(cands))
+        p = repo.ui.makeprogress(
+            _("rebasing"), unit=_('changesets'), total=len(cands)
+        )
+
         def progress(ctx):
             p.increment(item=("%d:%s" % (ctx.rev(), ctx)))
+
         allowdivergence = self.ui.configbool(
-            'experimental', 'evolution.allowdivergence')
+            'experimental', 'evolution.allowdivergence'
+        )
         for subset in sortsource(self.destmap):
             sortedrevs = self.repo.revs('sort(%ld, -topo)', subset)
             if not allowdivergence:
@@ -483,21 +530,29 @@
             overrides[('ui', 'allowemptycommit')] = True
         with repo.ui.configoverride(overrides, 'rebase'):
             if self.inmemory:
-                newnode = commitmemorynode(repo, p1, p2,
+                newnode = commitmemorynode(
+                    repo,
+                    p1,
+                    p2,
                     wctx=self.wctx,
                     extra=extra,
                     commitmsg=commitmsg,
                     editor=editor,
                     user=ctx.user(),
-                    date=date)
+                    date=date,
+                )
                 mergemod.mergestate.clean(repo)
             else:
-                newnode = commitnode(repo, p1, p2,
+                newnode = commitnode(
+                    repo,
+                    p1,
+                    p2,
                     extra=extra,
                     commitmsg=commitmsg,
                     editor=editor,
                     user=ctx.user(),
-                    date=date)
+                    date=date,
+                )
 
             if newnode is None:
                 # If it ended up being a no-op commit, then the normal
@@ -513,54 +568,80 @@
         desc = _ctxdesc(ctx)
         if self.state[rev] == rev:
             ui.status(_('already rebased %s\n') % desc)
-        elif (not allowdivergence
-              and rev in self.obsoletewithoutsuccessorindestination):
-            msg = _('note: not rebasing %s and its descendants as '
-                    'this would cause divergence\n') % desc
+        elif (
+            not allowdivergence
+            and rev in self.obsoletewithoutsuccessorindestination
+        ):
+            msg = (
+                _(
+                    'note: not rebasing %s and its descendants as '
+                    'this would cause divergence\n'
+                )
+                % desc
+            )
             repo.ui.status(msg)
             self.skipped.add(rev)
         elif rev in self.obsoletenotrebased:
             succ = self.obsoletenotrebased[rev]
             if succ is None:
-                msg = _('note: not rebasing %s, it has no '
-                        'successor\n') % desc
+                msg = (
+                    _('note: not rebasing %s, it has no ' 'successor\n') % desc
+                )
             else:
                 succdesc = _ctxdesc(repo[succ])
-                msg = (_('note: not rebasing %s, already in '
-                         'destination as %s\n') % (desc, succdesc))
+                msg = _(
+                    'note: not rebasing %s, already in ' 'destination as %s\n'
+                ) % (desc, succdesc)
             repo.ui.status(msg)
             # Make clearrebased aware that state[rev] is not a true successor
             self.skipped.add(rev)
             # Record rev as moved to its desired destination in self.state.
             # This helps bookmark and working parent movement.
-            dest = max(adjustdest(repo, rev, self.destmap, self.state,
-                                  self.skipped))
+            dest = max(
+                adjustdest(repo, rev, self.destmap, self.state, self.skipped)
+            )
             self.state[rev] = dest
         elif self.state[rev] == revtodo:
             ui.status(_('rebasing %s\n') % desc)
             progressfn(ctx)
-            p1, p2, base = defineparents(repo, rev, self.destmap,
-                                         self.state, self.skipped,
-                                         self.obsoletenotrebased)
+            p1, p2, base = defineparents(
+                repo,
+                rev,
+                self.destmap,
+                self.state,
+                self.skipped,
+                self.obsoletenotrebased,
+            )
             if not self.inmemory and len(repo[None].parents()) == 2:
                 repo.ui.debug('resuming interrupted rebase\n')
             else:
                 overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
                 with ui.configoverride(overrides, 'rebase'):
-                    stats = rebasenode(repo, rev, p1, base, self.collapsef,
-                                       dest, wctx=self.wctx)
+                    stats = rebasenode(
+                        repo,
+                        rev,
+                        p1,
+                        base,
+                        self.collapsef,
+                        dest,
+                        wctx=self.wctx,
+                    )
                     if stats.unresolvedcount > 0:
                         if self.inmemory:
                             raise error.InMemoryMergeConflictsError()
                         else:
                             raise error.InterventionRequired(
-                                _('unresolved conflicts (see hg '
-                                  'resolve, then hg rebase --continue)'))
+                                _(
+                                    'unresolved conflicts (see hg '
+                                    'resolve, then hg rebase --continue)'
+                                )
+                            )
             if not self.collapsef:
                 merging = p2 != nullrev
                 editform = cmdutil.mergeeditform(merging, 'rebase')
-                editor = cmdutil.getcommiteditor(editform=editform,
-                                                 **pycompat.strkwargs(opts))
+                editor = cmdutil.getcommiteditor(
+                    editform=editform, **pycompat.strkwargs(opts)
+                )
                 newnode = self._concludenode(rev, p1, p2, editor)
             else:
                 # Skip commit if we are collapsing
@@ -575,14 +656,20 @@
                 ui.debug('rebased as %s\n' % short(newnode))
             else:
                 if not self.collapsef:
-                    ui.warn(_('note: not rebasing %s, its destination already '
-                              'has all its changes\n') % desc)
+                    ui.warn(
+                        _(
+                            'note: not rebasing %s, its destination already '
+                            'has all its changes\n'
+                        )
+                        % desc
+                    )
                     self.skipped.add(rev)
                 self.state[rev] = p1
                 ui.debug('next revision set to %d\n' % p1)
         else:
-            ui.status(_('already rebased %s as %s\n') %
-                      (desc, repo[self.state[rev]]))
+            ui.status(
+                _('already rebased %s as %s\n') % (desc, repo[self.state[rev]])
+            )
         if not tr:
             # When not using single transaction, store state after each
             # commit is completely done. On InterventionRequired, we thus
@@ -595,9 +682,14 @@
         fm = ui.formatter('rebase', opts)
         fm.startitem()
         if self.collapsef:
-            p1, p2, _base = defineparents(repo, min(self.state), self.destmap,
-                                          self.state, self.skipped,
-                                          self.obsoletenotrebased)
+            p1, p2, _base = defineparents(
+                repo,
+                min(self.state),
+                self.destmap,
+                self.state,
+                self.skipped,
+                self.obsoletenotrebased,
+            )
             editopt = opts.get('edit')
             editform = 'rebase.collapse'
             if self.collapsemsg:
@@ -611,8 +703,9 @@
             editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
             revtoreuse = max(self.state)
 
-            newnode = self._concludenode(revtoreuse, p1, self.external,
-                                         editor, commitmsg=commitmsg)
+            newnode = self._concludenode(
+                revtoreuse, p1, self.external, editor, commitmsg=commitmsg
+            )
 
             if newnode is not None:
                 newrev = repo[newnode].rev()
@@ -620,8 +713,7 @@
                     self.state[oldrev] = newrev
 
         if 'qtip' in repo.tags():
-            updatemq(repo, self.state, self.skipped,
-                     **pycompat.strkwargs(opts))
+            updatemq(repo, self.state, self.skipped, **pycompat.strkwargs(opts))
 
         # restore original working directory
         # (we do this before stripping)
@@ -636,8 +728,17 @@
         collapsedas = None
         if self.collapsef and not self.keepf:
             collapsedas = newnode
-        clearrebased(ui, repo, self.destmap, self.state, self.skipped,
-                     collapsedas, self.keepf, fm=fm, backup=self.backupf)
+        clearrebased(
+            ui,
+            repo,
+            self.destmap,
+            self.state,
+            self.skipped,
+            collapsedas,
+            self.keepf,
+            fm=fm,
+            backup=self.backupf,
+        )
 
         clearstatus(repo)
         clearcollapsemsg(repo)
@@ -649,9 +750,12 @@
             ui.note(_("%d revisions have been skipped\n") % skippedlen)
         fm.end()
 
-        if (self.activebookmark and self.activebookmark in repo._bookmarks and
-            repo['.'].node() == repo._bookmarks[self.activebookmark]):
-                bookmarks.activate(repo, self.activebookmark)
+        if (
+            self.activebookmark
+            and self.activebookmark in repo._bookmarks
+            and repo['.'].node() == repo._bookmarks[self.activebookmark]
+        ):
+            bookmarks.activate(repo, self.activebookmark)
 
     def _abort(self, backup=True, suppwarns=False):
         '''Restore the repository to its original state.'''
@@ -662,29 +766,39 @@
             # rebase, their values within the state mapping will be the dest
             # rev id. The rebased list must not contain the dest rev
             # (issue4896)
-            rebased = [s for r, s in self.state.items()
-                       if s >= 0 and s != r and s != self.destmap[r]]
+            rebased = [
+                s
+                for r, s in self.state.items()
+                if s >= 0 and s != r and s != self.destmap[r]
+            ]
             immutable = [d for d in rebased if not repo[d].mutable()]
             cleanup = True
             if immutable:
-                repo.ui.warn(_("warning: can't clean up public changesets %s\n")
-                             % ', '.join(bytes(repo[r]) for r in immutable),
-                             hint=_("see 'hg help phases' for details"))
+                repo.ui.warn(
+                    _("warning: can't clean up public changesets %s\n")
+                    % ', '.join(bytes(repo[r]) for r in immutable),
+                    hint=_("see 'hg help phases' for details"),
+                )
                 cleanup = False
 
             descendants = set()
             if rebased:
                 descendants = set(repo.changelog.descendants(rebased))
             if descendants - set(rebased):
-                repo.ui.warn(_("warning: new changesets detected on "
-                               "destination branch, can't strip\n"))
+                repo.ui.warn(
+                    _(
+                        "warning: new changesets detected on "
+                        "destination branch, can't strip\n"
+                    )
+                )
                 cleanup = False
 
             if cleanup:
                 shouldupdate = False
                 if rebased:
                     strippoints = [
-                        c.node() for c in repo.set('roots(%ld)', rebased)]
+                        c.node() for c in repo.set('roots(%ld)', rebased)
+                    ]
 
                 updateifonnodes = set(rebased)
                 updateifonnodes.update(self.destmap.values())
@@ -693,8 +807,9 @@
 
                 # Update away from the rebase if necessary
                 if shouldupdate or needupdate(repo, self.state):
-                    mergemod.update(repo, self.originalwd, branchmerge=False,
-                                    force=True)
+                    mergemod.update(
+                        repo, self.originalwd, branchmerge=False, force=True
+                    )
 
                 # Strip from the first rebased revision
                 if rebased:
@@ -710,36 +825,66 @@
                 repo.ui.warn(_('rebase aborted\n'))
         return 0
 
-@command('rebase',
-    [('s', 'source', '',
-     _('rebase the specified changeset and descendants'), _('REV')),
-    ('b', 'base', '',
-     _('rebase everything from branching point of specified changeset'),
-     _('REV')),
-    ('r', 'rev', [],
-     _('rebase these revisions'),
-     _('REV')),
-    ('d', 'dest', '',
-     _('rebase onto the specified changeset'), _('REV')),
-    ('', 'collapse', False, _('collapse the rebased changesets')),
-    ('m', 'message', '',
-     _('use text as collapse commit message'), _('TEXT')),
-    ('e', 'edit', False, _('invoke editor on commit messages')),
-    ('l', 'logfile', '',
-     _('read collapse commit message from file'), _('FILE')),
-    ('k', 'keep', False, _('keep original changesets')),
-    ('', 'keepbranches', False, _('keep original branch names')),
-    ('D', 'detach', False, _('(DEPRECATED)')),
-    ('i', 'interactive', False, _('(DEPRECATED)')),
-    ('t', 'tool', '', _('specify merge tool')),
-    ('', 'stop', False, _('stop interrupted rebase')),
-    ('c', 'continue', False, _('continue an interrupted rebase')),
-    ('a', 'abort', False, _('abort an interrupted rebase')),
-    ('', 'auto-orphans', '', _('automatically rebase orphan revisions '
-                               'in the specified revset (EXPERIMENTAL)')),
-     ] + cmdutil.dryrunopts + cmdutil.formatteropts + cmdutil.confirmopts,
+
+@command(
+    'rebase',
+    [
+        (
+            's',
+            'source',
+            '',
+            _('rebase the specified changeset and descendants'),
+            _('REV'),
+        ),
+        (
+            'b',
+            'base',
+            '',
+            _('rebase everything from branching point of specified changeset'),
+            _('REV'),
+        ),
+        ('r', 'rev', [], _('rebase these revisions'), _('REV')),
+        ('d', 'dest', '', _('rebase onto the specified changeset'), _('REV')),
+        ('', 'collapse', False, _('collapse the rebased changesets')),
+        (
+            'm',
+            'message',
+            '',
+            _('use text as collapse commit message'),
+            _('TEXT'),
+        ),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (
+            'l',
+            'logfile',
+            '',
+            _('read collapse commit message from file'),
+            _('FILE'),
+        ),
+        ('k', 'keep', False, _('keep original changesets')),
+        ('', 'keepbranches', False, _('keep original branch names')),
+        ('D', 'detach', False, _('(DEPRECATED)')),
+        ('i', 'interactive', False, _('(DEPRECATED)')),
+        ('t', 'tool', '', _('specify merge tool')),
+        ('', 'stop', False, _('stop interrupted rebase')),
+        ('c', 'continue', False, _('continue an interrupted rebase')),
+        ('a', 'abort', False, _('abort an interrupted rebase')),
+        (
+            '',
+            'auto-orphans',
+            '',
+            _(
+                'automatically rebase orphan revisions '
+                'in the specified revset (EXPERIMENTAL)'
+            ),
+        ),
+    ]
+    + cmdutil.dryrunopts
+    + cmdutil.formatteropts
+    + cmdutil.confirmopts,
     _('[-s REV | -b REV] [-d REV] [OPTION]'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def rebase(ui, repo, **opts):
     """move changeset (and descendants) to a different branch
 
@@ -869,8 +1014,9 @@
     confirm = opts.get('confirm')
     selactions = [k for k in ['abort', 'stop', 'continue'] if opts.get(k)]
     if len(selactions) > 1:
-        raise error.Abort(_('cannot use --%s with --%s')
-                          % tuple(selactions[:2]))
+        raise error.Abort(
+            _('cannot use --%s with --%s') % tuple(selactions[:2])
+        )
     action = selactions[0] if selactions else None
     if dryrun and action:
         raise error.Abort(_('cannot specify both --dry-run and --%s') % action)
@@ -888,8 +1034,9 @@
     if opts.get('auto_orphans'):
         for key in opts:
             if key != 'auto_orphans' and opts.get(key):
-                raise error.Abort(_('--auto-orphans is incompatible with %s') %
-                                  ('--' + key))
+                raise error.Abort(
+                    _('--auto-orphans is incompatible with %s') % ('--' + key)
+                )
         userrevs = list(repo.revs(opts.get('auto_orphans')))
         opts['rev'] = [revsetlang.formatspec('%ld and orphan()', userrevs)]
         opts['dest'] = '_destautoorphanrebase(SRC)'
@@ -904,11 +1051,17 @@
                 raise error.Abort(_("cannot stop in --collapse session"))
             allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
             if not (rbsrt.keepf or allowunstable):
-                raise error.Abort(_("cannot remove original changesets with"
-                                    " unrebased descendants"),
-                    hint=_('either enable obsmarkers to allow unstable '
-                           'revisions or use --keep to keep original '
-                           'changesets'))
+                raise error.Abort(
+                    _(
+                        "cannot remove original changesets with"
+                        " unrebased descendants"
+                    ),
+                    hint=_(
+                        'either enable obsmarkers to allow unstable '
+                        'revisions or use --keep to keep original '
+                        'changesets'
+                    ),
+                )
             if needupdate(repo, rbsrt.state):
                 # update to the current working revision
                 # to clear interrupted merge
@@ -923,8 +1076,12 @@
             with ui.configoverride(overrides, 'rebase'):
                 return _dorebase(ui, repo, action, opts, inmemory=inmemory)
         except error.InMemoryMergeConflictsError:
-            ui.warn(_('hit merge conflicts; re-running rebase without in-memory'
-                      ' merge\n'))
+            ui.warn(
+                _(
+                    'hit merge conflicts; re-running rebase without in-memory'
+                    ' merge\n'
+                )
+            )
             # TODO: Make in-memory merge not use the on-disk merge state, so
             # we don't have to clean it here
             mergemod.mergestate.clean(repo)
@@ -934,21 +1091,30 @@
     else:
         return _dorebase(ui, repo, action, opts)
 
+
 def _dryrunrebase(ui, repo, action, opts):
     rbsrt = rebaseruntime(repo, ui, inmemory=True, opts=opts)
     confirm = opts.get('confirm')
     if confirm:
         ui.status(_('starting in-memory rebase\n'))
     else:
-        ui.status(_('starting dry-run rebase; repository will not be '
-                    'changed\n'))
+        ui.status(
+            _('starting dry-run rebase; repository will not be ' 'changed\n')
+        )
     with repo.wlock(), repo.lock():
         needsabort = True
         try:
             overrides = {('rebase', 'singletransaction'): True}
             with ui.configoverride(overrides, 'rebase'):
-                _origrebase(ui, repo, action, opts, rbsrt, inmemory=True,
-                            leaveunfinished=True)
+                _origrebase(
+                    ui,
+                    repo,
+                    action,
+                    opts,
+                    rbsrt,
+                    inmemory=True,
+                    leaveunfinished=True,
+                )
         except error.InMemoryMergeConflictsError:
             ui.status(_('hit a merge conflict\n'))
             return 1
@@ -958,30 +1124,40 @@
         else:
             if confirm:
                 ui.status(_('rebase completed successfully\n'))
-                if not ui.promptchoice(_(b'apply changes (yn)?'
-                                         b'$$ &Yes $$ &No')):
+                if not ui.promptchoice(
+                    _(b'apply changes (yn)?' b'$$ &Yes $$ &No')
+                ):
                     # finish unfinished rebase
                     rbsrt._finishrebase()
                 else:
-                    rbsrt._prepareabortorcontinue(isabort=True, backup=False,
-                                                  suppwarns=True)
+                    rbsrt._prepareabortorcontinue(
+                        isabort=True, backup=False, suppwarns=True
+                    )
                 needsabort = False
             else:
-                ui.status(_('dry-run rebase completed successfully; run without'
-                            ' -n/--dry-run to perform this rebase\n'))
+                ui.status(
+                    _(
+                        'dry-run rebase completed successfully; run without'
+                        ' -n/--dry-run to perform this rebase\n'
+                    )
+                )
             return 0
         finally:
             if needsabort:
                 # no need to store backup in case of dryrun
-                rbsrt._prepareabortorcontinue(isabort=True, backup=False,
-                                              suppwarns=True)
+                rbsrt._prepareabortorcontinue(
+                    isabort=True, backup=False, suppwarns=True
+                )
+
 
 def _dorebase(ui, repo, action, opts, inmemory=False):
     rbsrt = rebaseruntime(repo, ui, inmemory, opts)
     return _origrebase(ui, repo, action, opts, rbsrt, inmemory=inmemory)
 
-def _origrebase(ui, repo, action, opts, rbsrt, inmemory=False,
-                leaveunfinished=False):
+
+def _origrebase(
+    ui, repo, action, opts, rbsrt, inmemory=False, leaveunfinished=False
+):
     assert action != 'stop'
     with repo.wlock(), repo.lock():
         # Validate input and define rebasing points
@@ -999,21 +1175,27 @@
             except KeyError:
                 enablehistedit = " --config extensions.histedit="
             help = "hg%s help -e histedit" % enablehistedit
-            msg = _("interactive history editing is supported by the "
-                    "'histedit' extension (see \"%s\")") % help
+            msg = (
+                _(
+                    "interactive history editing is supported by the "
+                    "'histedit' extension (see \"%s\")"
+                )
+                % help
+            )
             raise error.Abort(msg)
 
         if rbsrt.collapsemsg and not rbsrt.collapsef:
-            raise error.Abort(
-                _('message can only be specified with collapse'))
+            raise error.Abort(_('message can only be specified with collapse'))
 
         if action:
             if rbsrt.collapsef:
                 raise error.Abort(
-                    _('cannot use collapse with continue or abort'))
+                    _('cannot use collapse with continue or abort')
+                )
             if srcf or basef or destf:
                 raise error.Abort(
-                    _('abort and continue do not allow specifying revisions'))
+                    _('abort and continue do not allow specifying revisions')
+                )
             if action == 'abort' and opts.get('tool', False):
                 ui.warn(_('tool option will be ignored\n'))
             if action == 'continue':
@@ -1024,8 +1206,16 @@
             if retcode is not None:
                 return retcode
         else:
-            destmap = _definedestmap(ui, repo, inmemory, destf, srcf, basef,
-                                     revf, destspace=destspace)
+            destmap = _definedestmap(
+                ui,
+                repo,
+                inmemory,
+                destf,
+                srcf,
+                basef,
+                revf,
+                destspace=destspace,
+            )
             retcode = rbsrt._preparenewrebase(destmap)
             if retcode is not None:
                 return retcode
@@ -1051,8 +1241,17 @@
                 if not leaveunfinished:
                     rbsrt._finishrebase()
 
-def _definedestmap(ui, repo, inmemory, destf=None, srcf=None, basef=None,
-                   revf=None, destspace=None):
+
+def _definedestmap(
+    ui,
+    repo,
+    inmemory,
+    destf=None,
+    srcf=None,
+    basef=None,
+    revf=None,
+    destspace=None,
+):
     """use revisions argument to define destmap {srcrev: destrev}"""
     if revf is None:
         revf = []
@@ -1071,8 +1270,9 @@
         cmdutil.bailifchanged(repo)
 
     if ui.configbool('commands', 'rebase.requiredest') and not destf:
-        raise error.Abort(_('you must specify a destination'),
-                          hint=_('use: hg rebase -d REV'))
+        raise error.Abort(
+            _('you must specify a destination'), hint=_('use: hg rebase -d REV')
+        )
 
     dest = None
 
@@ -1091,8 +1291,9 @@
     else:
         base = scmutil.revrange(repo, [basef or '.'])
         if not base:
-            ui.status(_('empty "base" revision set - '
-                        "can't compute rebase set\n"))
+            ui.status(
+                _('empty "base" revision set - ' "can't compute rebase set\n")
+            )
             return None
         if destf:
             # --base does not support multiple destinations
@@ -1101,16 +1302,16 @@
             dest = repo[_destrebase(repo, base, destspace=destspace)]
             destf = bytes(dest)
 
-        roots = [] # selected children of branching points
-        bpbase = {} # {branchingpoint: [origbase]}
-        for b in base: # group bases by branching points
+        roots = []  # selected children of branching points
+        bpbase = {}  # {branchingpoint: [origbase]}
+        for b in base:  # group bases by branching points
             bp = repo.revs('ancestor(%d, %d)', b, dest.rev()).first()
             bpbase[bp] = bpbase.get(bp, []) + [b]
         if None in bpbase:
             # emulate the old behavior, showing "nothing to rebase" (a better
             # behavior may be abort with "cannot find branching point" error)
             bpbase.clear()
-        for bp, bs in bpbase.iteritems(): # calculate roots
+        for bp, bs in bpbase.iteritems():  # calculate roots
             roots += list(repo.revs('children(%d) & ancestors(%ld)', bp, bs))
 
         rebaseset = repo.revs('%ld::', roots)
@@ -1121,30 +1322,53 @@
             # smartset.
             if list(base) == [dest.rev()]:
                 if basef:
-                    ui.status(_('nothing to rebase - %s is both "base"'
-                                ' and destination\n') % dest)
+                    ui.status(
+                        _(
+                            'nothing to rebase - %s is both "base"'
+                            ' and destination\n'
+                        )
+                        % dest
+                    )
                 else:
-                    ui.status(_('nothing to rebase - working directory '
-                                'parent is also destination\n'))
+                    ui.status(
+                        _(
+                            'nothing to rebase - working directory '
+                            'parent is also destination\n'
+                        )
+                    )
             elif not repo.revs('%ld - ::%d', base, dest.rev()):
                 if basef:
-                    ui.status(_('nothing to rebase - "base" %s is '
-                                'already an ancestor of destination '
-                                '%s\n') %
-                              ('+'.join(bytes(repo[r]) for r in base),
-                               dest))
+                    ui.status(
+                        _(
+                            'nothing to rebase - "base" %s is '
+                            'already an ancestor of destination '
+                            '%s\n'
+                        )
+                        % ('+'.join(bytes(repo[r]) for r in base), dest)
+                    )
                 else:
-                    ui.status(_('nothing to rebase - working '
-                                'directory parent is already an '
-                                'ancestor of destination %s\n') % dest)
-            else: # can it happen?
-                ui.status(_('nothing to rebase from %s to %s\n') %
-                          ('+'.join(bytes(repo[r]) for r in base), dest))
+                    ui.status(
+                        _(
+                            'nothing to rebase - working '
+                            'directory parent is already an '
+                            'ancestor of destination %s\n'
+                        )
+                        % dest
+                    )
+            else:  # can it happen?
+                ui.status(
+                    _('nothing to rebase from %s to %s\n')
+                    % ('+'.join(bytes(repo[r]) for r in base), dest)
+                )
             return None
 
     rebasingwcp = repo['.'].rev() in rebaseset
-    ui.log("rebase", "rebasing working copy parent: %r\n", rebasingwcp,
-           rebase_rebasing_wcp=rebasingwcp)
+    ui.log(
+        "rebase",
+        "rebasing working copy parent: %r\n",
+        rebasingwcp,
+        rebase_rebasing_wcp=rebasingwcp,
+    )
     if inmemory and rebasingwcp:
         # Check these since we did not before.
         cmdutil.checkunfinished(repo)
@@ -1175,13 +1399,15 @@
                 elif size == 0:
                     ui.note(_('skipping %s - empty destination\n') % repo[r])
                 else:
-                    raise error.Abort(_('rebase destination for %s is not '
-                                        'unique') % repo[r])
+                    raise error.Abort(
+                        _('rebase destination for %s is not ' 'unique')
+                        % repo[r]
+                    )
 
     if dest is not None:
         # single-dest case: assign dest to each rev in rebaseset
         destrev = dest.rev()
-        destmap = {r: destrev for r in rebaseset} # {srcrev: destrev}
+        destmap = {r: destrev for r in rebaseset}  # {srcrev: destrev}
 
     if not destmap:
         ui.status(_('nothing to rebase - empty destination\n'))
@@ -1189,6 +1415,7 @@
 
     return destmap
 
+
 def externalparent(repo, state, destancestors):
     """Return the revision that should be used as the second parent
     when the revisions in state are collapsed on top of destancestors.
@@ -1200,17 +1427,20 @@
         if rev == source:
             continue
         for p in repo[rev].parents():
-            if (p.rev() not in state
-                        and p.rev() not in destancestors):
+            if p.rev() not in state and p.rev() not in destancestors:
                 parents.add(p.rev())
     if not parents:
         return nullrev
     if len(parents) == 1:
         return parents.pop()
-    raise error.Abort(_('unable to collapse on top of %d, there is more '
-                       'than one external parent: %s') %
-                     (max(destancestors),
-                      ', '.join("%d" % p for p in sorted(parents))))
+    raise error.Abort(
+        _(
+            'unable to collapse on top of %d, there is more '
+            'than one external parent: %s'
+        )
+        % (max(destancestors), ', '.join("%d" % p for p in sorted(parents)))
+    )
+
 
 def commitmemorynode(repo, p1, p2, wctx, editor, extra, user, date, commitmsg):
     '''Commit the memory changes with parents p1 and p2.
@@ -1225,12 +1455,20 @@
     if 'branch' in extra:
         branch = extra['branch']
 
-    memctx = wctx.tomemctx(commitmsg, parents=(p1, p2), date=date,
-        extra=extra, user=user, branch=branch, editor=editor)
+    memctx = wctx.tomemctx(
+        commitmsg,
+        parents=(p1, p2),
+        date=date,
+        extra=extra,
+        user=user,
+        branch=branch,
+        editor=editor,
+    )
     commitres = repo.commitctx(memctx)
-    wctx.clean() # Might be reused
+    wctx.clean()  # Might be reused
     return commitres
 
+
 def commitnode(repo, p1, p2, editor, extra, user, date, commitmsg):
     '''Commit the wd changes with parents p1 and p2.
     Return node of committed revision.'''
@@ -1241,12 +1479,14 @@
         repo.setparents(repo[p1].node(), repo[p2].node())
 
         # Commit might fail if unresolved files exist
-        newnode = repo.commit(text=commitmsg, user=user, date=date,
-                              extra=extra, editor=editor)
+        newnode = repo.commit(
+            text=commitmsg, user=user, date=date, extra=extra, editor=editor
+        )
 
         repo.dirstate.setbranch(repo[newnode].branch())
         return newnode
 
+
 def rebasenode(repo, rev, p1, base, collapse, dest, wctx):
     'Rebase a single revision rev on top of p1 using base as merge ancestor'
     # Merge phase
@@ -1268,9 +1508,16 @@
         repo.ui.debug("   detach base %d:%s\n" % (base, repo[base]))
     # When collapsing in-place, the parent is the common ancestor, so we
     # have to allow merging with it.
-    stats = mergemod.update(repo, rev, branchmerge=True, force=True,
-                            ancestor=base, mergeancestor=collapse,
-                            labels=['dest', 'source'], wc=wctx)
+    stats = mergemod.update(
+        repo,
+        rev,
+        branchmerge=True,
+        force=True,
+        ancestor=base,
+        mergeancestor=collapse,
+        labels=['dest', 'source'],
+        wc=wctx,
+    )
     if collapse:
         copies.duplicatecopies(repo, wctx, rev, dest)
     else:
@@ -1283,6 +1530,7 @@
         copies.duplicatecopies(repo, wctx, rev, p1rev, skiprev=dest)
     return stats
 
+
 def adjustdest(repo, rev, destmap, state, skipped):
     r"""adjust rebase destination given the current rebase state
 
@@ -1337,8 +1585,11 @@
     """
     # pick already rebased revs with same dest from state as interesting source
     dest = destmap[rev]
-    source = [s for s, d in state.items()
-              if d > 0 and destmap[s] == dest and s not in skipped]
+    source = [
+        s
+        for s, d in state.items()
+        if d > 0 and destmap[s] == dest and s not in skipped
+    ]
 
     result = []
     for prev in repo.changelog.parentrevs(rev):
@@ -1352,10 +1603,12 @@
             if adjusted == revtodo:
                 # sortsource should produce an order that makes this impossible
                 raise error.ProgrammingError(
-                    'rev %d should be rebased already at this time' % dest)
+                    'rev %d should be rebased already at this time' % dest
+                )
         result.append(adjusted)
     return result
 
+
 def _checkobsrebase(repo, ui, rebaseobsrevs, rebaseobsskipped):
     """
     Abort if rebase will create divergence or rebase is noop because of markers
@@ -1365,19 +1618,19 @@
     successors in destination or no non-obsolete successor.
     """
     # Obsolete node with successors not in dest leads to divergence
-    divergenceok = ui.configbool('experimental',
-                                 'evolution.allowdivergence')
+    divergenceok = ui.configbool('experimental', 'evolution.allowdivergence')
     divergencebasecandidates = rebaseobsrevs - rebaseobsskipped
 
     if divergencebasecandidates and not divergenceok:
-        divhashes = (bytes(repo[r])
-                     for r in divergencebasecandidates)
-        msg = _("this rebase will cause "
-                "divergences from: %s")
-        h = _("to force the rebase please set "
-              "experimental.evolution.allowdivergence=True")
+        divhashes = (bytes(repo[r]) for r in divergencebasecandidates)
+        msg = _("this rebase will cause " "divergences from: %s")
+        h = _(
+            "to force the rebase please set "
+            "experimental.evolution.allowdivergence=True"
+        )
         raise error.Abort(msg % (",".join(divhashes),), hint=h)
 
+
 def successorrevs(unfi, rev):
     """yield revision numbers for successors of rev"""
     assert unfi.filtername is None
@@ -1386,6 +1639,7 @@
         if s in nodemap:
             yield nodemap[s]
 
+
 def defineparents(repo, rev, destmap, state, skipped, obsskipped):
     """Return new parents and optionally a merge base for rev being rebased
 
@@ -1407,10 +1661,10 @@
     isancestor = cl.isancestorrev
 
     dest = destmap[rev]
-    oldps = repo.changelog.parentrevs(rev) # old parents
-    newps = [nullrev, nullrev] # new parents
+    oldps = repo.changelog.parentrevs(rev)  # old parents
+    newps = [nullrev, nullrev]  # new parents
     dests = adjustdest(repo, rev, destmap, state, skipped)
-    bases = list(oldps) # merge base candidates, initially just old parents
+    bases = list(oldps)  # merge base candidates, initially just old parents
 
     if all(r == nullrev for r in oldps[1:]):
         # For non-merge changeset, just move p to adjusted dest as requested.
@@ -1439,7 +1693,7 @@
         # The loop tries not to rely on the fact that a Mercurial node has
         # at most 2 parents.
         for i, p in enumerate(oldps):
-            np = p # new parent
+            np = p  # new parent
             if any(isancestor(x, dests[i]) for x in successorrevs(repo, p)):
                 np = dests[i]
             elif p in state and state[p] > 0:
@@ -1465,9 +1719,9 @@
             for j, x in enumerate(newps[:i]):
                 if x == nullrev:
                     continue
-                if isancestor(np, x): # CASE-1
+                if isancestor(np, x):  # CASE-1
                     np = nullrev
-                elif isancestor(x, np): # CASE-2
+                elif isancestor(x, np):  # CASE-2
                     newps[j] = np
                     np = nullrev
                     # New parents forming an ancestor relationship does not
@@ -1492,9 +1746,13 @@
         #    /|    # None of A and B will be changed to D and rebase fails.
         #   A B D
         if set(newps) == set(oldps) and dest not in newps:
-            raise error.Abort(_('cannot rebase %d:%s without '
-                                'moving at least one of its parents')
-                              % (rev, repo[rev]))
+            raise error.Abort(
+                _(
+                    'cannot rebase %d:%s without '
+                    'moving at least one of its parents'
+                )
+                % (rev, repo[rev])
+            )
 
     # Source should not be ancestor of dest. The check here guarantees it's
     # impossible. With multi-dest, the initial check does not cover complex
@@ -1524,28 +1782,37 @@
     # better than the default (ancestor(F, Z) == null). Therefore still
     # pick one (so choose p1 above).
     if sum(1 for b in bases if b != nullrev) > 1:
-        unwanted = [None, None] # unwanted[i]: unwanted revs if choose bases[i]
+        unwanted = [None, None]  # unwanted[i]: unwanted revs if choose bases[i]
         for i, base in enumerate(bases):
             if base == nullrev:
                 continue
             # Revisions in the side (not chosen as merge base) branch that
             # might contain "surprising" contents
-            siderevs = list(repo.revs('((%ld-%d) %% (%d+%d))',
-                                      bases, base, base, dest))
+            siderevs = list(
+                repo.revs('((%ld-%d) %% (%d+%d))', bases, base, base, dest)
+            )
 
             # If those revisions are covered by rebaseset, the result is good.
             # A merge in rebaseset would be considered to cover its ancestors.
             if siderevs:
-                rebaseset = [r for r, d in state.items()
-                             if d > 0 and r not in obsskipped]
-                merges = [r for r in rebaseset
-                          if cl.parentrevs(r)[1] != nullrev]
-                unwanted[i] = list(repo.revs('%ld - (::%ld) - %ld',
-                                             siderevs, merges, rebaseset))
+                rebaseset = [
+                    r for r, d in state.items() if d > 0 and r not in obsskipped
+                ]
+                merges = [
+                    r for r in rebaseset if cl.parentrevs(r)[1] != nullrev
+                ]
+                unwanted[i] = list(
+                    repo.revs(
+                        '%ld - (::%ld) - %ld', siderevs, merges, rebaseset
+                    )
+                )
 
         # Choose a merge base that has a minimal number of unwanted revs.
-        l, i = min((len(revs), i)
-                   for i, revs in enumerate(unwanted) if revs is not None)
+        l, i = min(
+            (len(revs), i)
+            for i, revs in enumerate(unwanted)
+            if revs is not None
+        )
         base = bases[i]
 
         # newps[0] should match merge base if possible. Currently, if newps[i]
@@ -1559,16 +1826,22 @@
         # we have a more advanced merge algorithm that handles multiple bases.
         if l > 0:
             unwanteddesc = _(' or ').join(
-                (', '.join('%d:%s' % (r, repo[r]) for r in revs)
-                 for revs in unwanted if revs is not None))
+                (
+                    ', '.join('%d:%s' % (r, repo[r]) for r in revs)
+                    for revs in unwanted
+                    if revs is not None
+                )
+            )
             raise error.Abort(
                 _('rebasing %d:%s will include unwanted changes from %s')
-                % (rev, repo[rev], unwanteddesc))
+                % (rev, repo[rev], unwanteddesc)
+            )
 
     repo.ui.debug(" future parents are %d and %d\n" % tuple(newps))
 
     return newps[0], newps[1], base
 
+
 def isagitpatch(repo, patchname):
     'Return true if the given patch is in git format'
     mqpatch = os.path.join(repo.mq.path, patchname)
@@ -1577,6 +1850,7 @@
             return True
     return False
 
+
 def updatemq(repo, state, skipped, **opts):
     'Update rebased mq patches - finalize and then import them'
     mqrebase = {}
@@ -1587,8 +1861,10 @@
     for p in mq.applied:
         rev = repo[p.node].rev()
         if rev in state:
-            repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
-                                        (rev, p.name))
+            repo.ui.debug(
+                'revision %d is an mq patch (%s), finalize it.\n'
+                % (rev, p.name)
+            )
             mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
         else:
             # Applied but not rebased, not sure this should happen
@@ -1601,10 +1877,13 @@
         for rev in sorted(mqrebase, reverse=True):
             if rev not in skipped:
                 name, isgit = mqrebase[rev]
-                repo.ui.note(_('updating mq patch %s to %d:%s\n') %
-                             (name, state[rev], repo[state[rev]]))
-                mq.qimport(repo, (), patchname=name, git=isgit,
-                                rev=["%d" % state[rev]])
+                repo.ui.note(
+                    _('updating mq patch %s to %d:%s\n')
+                    % (name, state[rev], repo[state[rev]])
+                )
+                mq.qimport(
+                    repo, (), patchname=name, git=isgit, rev=["%d" % state[rev]]
+                )
             else:
                 # Rebased and skipped
                 skippedpatches.add(mqrebase[rev][0])
@@ -1612,12 +1891,16 @@
         # Patches were either applied and rebased and imported in
         # order, applied and removed or unapplied. Discard the removed
         # ones while preserving the original series order and guards.
-        newseries = [s for s in original_series
-                     if mq.guard_re.split(s, 1)[0] not in skippedpatches]
+        newseries = [
+            s
+            for s in original_series
+            if mq.guard_re.split(s, 1)[0] not in skippedpatches
+        ]
         mq.fullseries[:] = newseries
         mq.seriesdirty = True
         mq.savedirty()
 
+
 def storecollapsemsg(repo, collapsemsg):
     'Store the collapse message to allow recovery'
     collapsemsg = collapsemsg or ''
@@ -1625,10 +1908,12 @@
     f.write("%s\n" % collapsemsg)
     f.close()
 
+
 def clearcollapsemsg(repo):
     'Remove collapse message file'
     repo.vfs.unlinkpath("last-message.txt", ignoremissing=True)
 
+
 def restorecollapsemsg(repo, isabort):
     'Restore previously stored collapse message'
     try:
@@ -1645,6 +1930,7 @@
             raise error.Abort(_('missing .hg/last-message.txt for rebase'))
     return collapsemsg
 
+
 def clearstatus(repo):
     'Remove the status files'
     # Make sure the active transaction won't write the state file
@@ -1653,6 +1939,7 @@
         tr.removefilegenerator('rebasestate')
     repo.vfs.unlinkpath("rebasestate", ignoremissing=True)
 
+
 def needupdate(repo, state):
     '''check whether we should `update --clean` away from a merge, or if
     somehow the working dir got forcibly updated, e.g. by older hg'''
@@ -1663,13 +1950,15 @@
         return False
 
     # We should be standing on the first as-of-yet unrebased commit.
-    firstunrebased = min([old for old, new in state.iteritems()
-                          if new == nullrev])
+    firstunrebased = min(
+        [old for old, new in state.iteritems() if new == nullrev]
+    )
     if firstunrebased in parents:
         return True
 
     return False
 
+
 def sortsource(destmap):
     """yield source revisions in an order that we only rebase things once
 
@@ -1695,6 +1984,7 @@
         srcset -= set(result)
         yield result
 
+
 def buildstate(repo, destmap, collapse):
     '''Define which revisions are going to be rebased and where
 
@@ -1713,7 +2003,7 @@
             raise error.Abort(_('cannot rebase onto an applied mq patch'))
 
     # Get "cycle" error early by exhausting the generator.
-    sortedsrc = list(sortsource(destmap)) # a list of sorted revs
+    sortedsrc = list(sortsource(destmap))  # a list of sorted revs
     if not sortedsrc:
         raise error.Abort(_('no matching revisions'))
 
@@ -1724,11 +2014,13 @@
     roots = list(repo.set('roots(%ld)', sortedsrc[0]))
     if not roots:
         raise error.Abort(_('no matching revisions'))
+
     def revof(r):
         return r.rev()
+
     roots = sorted(roots, key=revof)
     state = dict.fromkeys(rebaseset, revtodo)
-    emptyrebase = (len(sortedsrc) == 1)
+    emptyrebase = len(sortedsrc) == 1
     for root in roots:
         dest = repo[destmap[root.rev()]]
         commonbase = root.ancestor(dest)
@@ -1759,8 +2051,18 @@
             state[rev] = rev
     return originalwd, destmap, state
 
-def clearrebased(ui, repo, destmap, state, skipped, collapsedas=None,
-                 keepf=False, fm=None, backup=True):
+
+def clearrebased(
+    ui,
+    repo,
+    destmap,
+    state,
+    skipped,
+    collapsedas=None,
+    keepf=False,
+    fm=None,
+    backup=True,
+):
     """dispose of rebased revision at the end of the rebase
 
     If `collapsedas` is not None, the rebase was a collapse whose result is the
@@ -1809,6 +2111,7 @@
         replacements = {}
     scmutil.cleanupnodes(repo, replacements, 'rebase', moves, backup=backup)
 
+
 def pullrebase(orig, ui, repo, *args, **opts):
     'Call rebase after pull if the latter has been invoked with --rebase'
     if opts.get(r'rebase'):
@@ -1820,17 +2123,26 @@
         with repo.wlock(), repo.lock():
             if opts.get(r'update'):
                 del opts[r'update']
-                ui.debug('--update and --rebase are not compatible, ignoring '
-                         'the update flag\n')
+                ui.debug(
+                    '--update and --rebase are not compatible, ignoring '
+                    'the update flag\n'
+                )
 
             cmdutil.checkunfinished(repo, skipmerge=True)
-            cmdutil.bailifchanged(repo, hint=_('cannot pull with rebase: '
-                'please commit or shelve your changes first'))
+            cmdutil.bailifchanged(
+                repo,
+                hint=_(
+                    'cannot pull with rebase: '
+                    'please commit or shelve your changes first'
+                ),
+            )
 
             revsprepull = len(repo)
             origpostincoming = commands.postincoming
+
             def _dummy(*args, **kwargs):
                 pass
+
             commands.postincoming = _dummy
             try:
                 ret = orig(ui, repo, *args, **opts)
@@ -1868,10 +2180,12 @@
 
     return ret
 
+
 def _filterobsoleterevs(repo, revs):
     """returns a set of the obsolete revisions in revs"""
     return set(r for r in revs if repo[r].obsolete())
 
+
 def _computeobsoletenotrebased(repo, rebaseobsrevs, destmap):
     """Return (obsoletenotrebased, obsoletewithoutsuccessorindestination).
 
@@ -1924,11 +2238,13 @@
         obsoleteextinctsuccessors,
     )
 
+
 def abortrebase(ui, repo):
     with repo.wlock(), repo.lock():
         rbsrt = rebaseruntime(repo, ui)
         rbsrt._prepareabortorcontinue(isabort=True)
 
+
 def continuerebase(ui, repo):
     with repo.wlock(), repo.lock():
         rbsrt = rebaseruntime(repo, ui)
@@ -1940,6 +2256,7 @@
         rbsrt._performrebase(None)
         rbsrt._finishrebase()
 
+
 def summaryhook(ui, repo):
     if not repo.vfs.exists('rebasestate'):
         return
@@ -1954,19 +2271,29 @@
         return
     numrebased = len([i for i in state.itervalues() if i >= 0])
     # i18n: column positioning for "hg summary"
-    ui.write(_('rebase: %s, %s (rebase --continue)\n') %
-             (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
-              ui.label(_('%d remaining'), 'rebase.remaining') %
-              (len(state) - numrebased)))
+    ui.write(
+        _('rebase: %s, %s (rebase --continue)\n')
+        % (
+            ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
+            ui.label(_('%d remaining'), 'rebase.remaining')
+            % (len(state) - numrebased),
+        )
+    )
+
 
 def uisetup(ui):
-    #Replace pull with a decorator to provide --rebase option
+    # Replace pull with a decorator to provide --rebase option
     entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
-    entry[1].append(('', 'rebase', None,
-                     _("rebase working directory to branch head")))
-    entry[1].append(('t', 'tool', '',
-                     _("specify merge tool for rebase")))
+    entry[1].append(
+        ('', 'rebase', None, _("rebase working directory to branch head"))
+    )
+    entry[1].append(('t', 'tool', '', _("specify merge tool for rebase")))
     cmdutil.summaryhooks.add('rebase', summaryhook)
-    statemod.addunfinished('rebase', fname='rebasestate', stopflag=True,
-                            continueflag=True, abortfunc=abortrebase,
-                            continuefunc=continuerebase)
+    statemod.addunfinished(
+        'rebase',
+        fname='rebasestate',
+        stopflag=True,
+        continueflag=True,
+        abortfunc=abortrebase,
+        continuefunc=continuerebase,
+    )
--- a/hgext/record.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/record.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,12 +30,14 @@
 testedwith = 'ships-with-hg-core'
 
 
-@command("record",
-         # same options as commit + white space diff options
-        [c for c in commands.table['commit|ci'][1][:]
-            if c[1] != "interactive"] + cmdutil.diffwsopts,
-          _('hg record [OPTION]... [FILE]...'),
-        helpcategory=command.CATEGORY_COMMITTING)
+@command(
+    "record",
+    # same options as commit + white space diff options
+    [c for c in commands.table['commit|ci'][1][:] if c[1] != "interactive"]
+    + cmdutil.diffwsopts,
+    _('hg record [OPTION]... [FILE]...'),
+    helpcategory=command.CATEGORY_COMMITTING,
+)
 def record(ui, repo, *pats, **opts):
     '''interactively select changes to commit
 
@@ -66,14 +68,16 @@
     This command is not available when committing a merge.'''
 
     if not ui.interactive():
-        raise error.Abort(_('running non-interactively, use %s instead') %
-                         'commit')
+        raise error.Abort(
+            _('running non-interactively, use %s instead') % 'commit'
+        )
 
     opts[r"interactive"] = True
     overrides = {('experimental', 'crecord'): False}
     with ui.configoverride(overrides, 'record'):
         return commands.commit(ui, repo, *pats, **opts)
 
+
 def qrefresh(origfn, ui, repo, *pats, **opts):
     if not opts[r'interactive']:
         return origfn(ui, repo, *pats, **opts)
@@ -88,15 +92,19 @@
         mq.refresh(ui, repo, **opts)
 
     # backup all changed files
-    cmdutil.dorecord(ui, repo, committomq, None, True,
-                    cmdutil.recordfilter, *pats, **opts)
+    cmdutil.dorecord(
+        ui, repo, committomq, None, True, cmdutil.recordfilter, *pats, **opts
+    )
+
 
 # This command registration is replaced during uisetup().
-@command('qrecord',
+@command(
+    'qrecord',
     [],
     _('hg qrecord [OPTION]... PATCH [FILE]...'),
     helpcategory=command.CATEGORY_COMMITTING,
-    inferrepo=True)
+    inferrepo=True,
+)
 def qrecord(ui, repo, patch, *pats, **opts):
     '''interactively record a new patch
 
@@ -105,6 +113,7 @@
     '''
     return _qrecord('qnew', ui, repo, patch, *pats, **opts)
 
+
 def _qrecord(cmdsuggest, ui, repo, patch, *pats, **opts):
     try:
         mq = extensions.find('mq')
@@ -120,8 +129,17 @@
     overrides = {('experimental', 'crecord'): False}
     with ui.configoverride(overrides, 'record'):
         cmdutil.checkunfinished(repo)
-        cmdutil.dorecord(ui, repo, committomq, cmdsuggest, False,
-                         cmdutil.recordfilter, *pats, **opts)
+        cmdutil.dorecord(
+            ui,
+            repo,
+            committomq,
+            cmdsuggest,
+            False,
+            cmdutil.recordfilter,
+            *pats,
+            **opts
+        )
+
 
 def qnew(origfn, ui, repo, patch, *args, **opts):
     if opts[r'interactive']:
@@ -140,11 +158,17 @@
         # same options as qnew, but copy them so we don't get
         # -i/--interactive for qrecord and add white space diff options
         mq.cmdtable['qnew'][1][:] + cmdutil.diffwsopts,
-        _('hg qrecord [OPTION]... PATCH [FILE]...'))
+        _('hg qrecord [OPTION]... PATCH [FILE]...'),
+    )
 
     _wrapcmd('qnew', mq.cmdtable, qnew, _("interactively record a new patch"))
-    _wrapcmd('qrefresh', mq.cmdtable, qrefresh,
-             _("interactively select changes to refresh"))
+    _wrapcmd(
+        'qrefresh',
+        mq.cmdtable,
+        qrefresh,
+        _("interactively select changes to refresh"),
+    )
+
 
 def _wrapcmd(cmd, table, wrapfn, msg):
     entry = extensions.wrapcommand(table, cmd, wrapfn)
--- a/hgext/releasenotes.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/releasenotes.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,15 +28,14 @@
     scmutil,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 cmdtable = {}
 command = registrar.command(cmdtable)
 
 try:
     import fuzzywuzzy.fuzz as fuzz
+
     fuzz.token_set_ratio
 except ImportError:
     fuzz = None
@@ -60,6 +59,7 @@
 
 BULLET_SECTION = _('Other Changes')
 
+
 class parsedreleasenotes(object):
     def __init__(self):
         self.sections = {}
@@ -103,19 +103,25 @@
         This is used to combine multiple sources of release notes together.
         """
         if not fuzz:
-            ui.warn(_("module 'fuzzywuzzy' not found, merging of similar "
-                      "releasenotes is disabled\n"))
+            ui.warn(
+                _(
+                    "module 'fuzzywuzzy' not found, merging of similar "
+                    "releasenotes is disabled\n"
+                )
+            )
 
         for section in other:
-            existingnotes = (
-                converttitled(self.titledforsection(section)) +
-                convertnontitled(self.nontitledforsection(section)))
+            existingnotes = converttitled(
+                self.titledforsection(section)
+            ) + convertnontitled(self.nontitledforsection(section))
             for title, paragraphs in other.titledforsection(section):
                 if self.hastitledinsection(section, title):
                     # TODO prompt for resolution if different and running in
                     # interactive mode.
-                    ui.write(_('%s already exists in %s section; ignoring\n') %
-                             (title, section))
+                    ui.write(
+                        _('%s already exists in %s section; ignoring\n')
+                        % (title, section)
+                    )
                     continue
 
                 incoming_str = converttitled([(title, paragraphs)])[0]
@@ -146,6 +152,7 @@
 
                 self.addnontitleditem(section, paragraphs)
 
+
 class releasenotessections(object):
     def __init__(self, ui, repo=None):
         if repo:
@@ -170,6 +177,7 @@
 
         return None
 
+
 def converttitled(titledparagraphs):
     """
     Convert titled paragraphs to strings
@@ -182,6 +190,7 @@
         string_list.append(' '.join(lines))
     return string_list
 
+
 def convertnontitled(nontitledparagraphs):
     """
     Convert non-titled bullets to strings
@@ -194,6 +203,7 @@
         string_list.append(' '.join(lines))
     return string_list
 
+
 def getissuenum(incoming_str):
     """
     Returns issue number from the incoming string if it exists
@@ -203,6 +213,7 @@
         issue = issue.group()
     return issue
 
+
 def findissue(ui, existing, issue):
     """
     Returns true if issue number already exists in notes.
@@ -213,6 +224,7 @@
     else:
         return False
 
+
 def similar(ui, existing, incoming_str):
     """
     Returns true if similar note found in existing notes.
@@ -220,14 +232,17 @@
     if len(incoming_str.split()) > 10:
         merge = similaritycheck(incoming_str, existing)
         if not merge:
-            ui.write(_('"%s" already exists in notes file; ignoring\n')
-                     % incoming_str)
+            ui.write(
+                _('"%s" already exists in notes file; ignoring\n')
+                % incoming_str
+            )
             return True
         else:
             return False
     else:
         return False
 
+
 def similaritycheck(incoming_str, existingnotes):
     """
     Returns false when note fragment can be merged to existing notes.
@@ -244,6 +259,7 @@
             break
     return merge
 
+
 def getcustomadmonitions(repo):
     ctx = repo['.']
     p = config.config()
@@ -253,13 +269,15 @@
             data = ctx[f].data()
             p.parse(f, data, sections, remap, read)
         else:
-            raise error.Abort(_(".hgreleasenotes file \'%s\' not found") %
-                              repo.pathto(f))
+            raise error.Abort(
+                _(".hgreleasenotes file \'%s\' not found") % repo.pathto(f)
+            )
 
     if '.hgreleasenotes' in ctx:
         read('.hgreleasenotes')
     return p['sections']
 
+
 def checkadmonitions(ui, repo, directives, revs):
     """
     Checks the commit messages for admonitions and their validity.
@@ -280,10 +298,13 @@
             if admonition.group(1) in directives:
                 continue
             else:
-                ui.write(_("Invalid admonition '%s' present in changeset %s"
-                           "\n") % (admonition.group(1), ctx.hex()[:12]))
-                sim = lambda x: difflib.SequenceMatcher(None,
-                    admonition.group(1), x).ratio()
+                ui.write(
+                    _("Invalid admonition '%s' present in changeset %s" "\n")
+                    % (admonition.group(1), ctx.hex()[:12])
+                )
+                sim = lambda x: difflib.SequenceMatcher(
+                    None, admonition.group(1), x
+                ).ratio()
 
                 similar = [s for s in directives if sim(s) > 0.6]
                 if len(similar) == 1:
@@ -292,18 +313,21 @@
                     ss = ", ".join(sorted(similar))
                     ui.write(_("(did you mean one of %s?)\n") % ss)
 
+
 def _getadmonitionlist(ui, sections):
     for section in sections:
         ui.write("%s: %s\n" % (section[0], section[1]))
 
+
 def parsenotesfromrevisions(repo, directives, revs):
     notes = parsedreleasenotes()
 
     for rev in revs:
         ctx = repo[rev]
 
-        blocks, pruned = minirst.parse(ctx.description(),
-                                       admonitions=directives)
+        blocks, pruned = minirst.parse(
+            ctx.description(), admonitions=directives
+        )
 
         for i, block in enumerate(blocks):
             if block['type'] != 'admonition':
@@ -313,8 +337,13 @@
             title = block['lines'][0].strip() if block['lines'] else None
 
             if i + 1 == len(blocks):
-                raise error.Abort(_('changeset %s: release notes directive %s '
-                        'lacks content') % (ctx, directive))
+                raise error.Abort(
+                    _(
+                        'changeset %s: release notes directive %s '
+                        'lacks content'
+                    )
+                    % (ctx, directive)
+                )
 
             # Now search ahead and find all paragraphs attached to this
             # admonition.
@@ -330,8 +359,13 @@
                     break
 
                 if pblock['type'] != 'paragraph':
-                    repo.ui.warn(_('changeset %s: unexpected block in release '
-                        'notes directive %s\n') % (ctx, directive))
+                    repo.ui.warn(
+                        _(
+                            'changeset %s: unexpected block in release '
+                            'notes directive %s\n'
+                        )
+                        % (ctx, directive)
+                    )
 
                 if pblock['indent'] > 0:
                     paragraphs.append(pblock['lines'])
@@ -340,8 +374,10 @@
 
             # TODO consider using title as paragraph for more concise notes.
             if not paragraphs:
-                repo.ui.warn(_("error parsing releasenotes for revision: "
-                               "'%s'\n") % node.hex(ctx.node()))
+                repo.ui.warn(
+                    _("error parsing releasenotes for revision: " "'%s'\n")
+                    % node.hex(ctx.node())
+                )
             if title:
                 notes.addtitleditem(directive, title, paragraphs)
             else:
@@ -349,6 +385,7 @@
 
     return notes
 
+
 def parsereleasenotesfile(sections, text):
     """Parse text content containing generated release notes."""
     notes = parsedreleasenotes()
@@ -375,7 +412,7 @@
                 else:
                     lines = [[l[1:].strip() for l in block['lines']]]
 
-                    for block in blocks[i + 1:]:
+                    for block in blocks[i + 1 :]:
                         if block['type'] in ('bullet', 'section'):
                             break
                         if block['type'] == 'paragraph':
@@ -383,8 +420,10 @@
                     notefragment.append(lines)
                     continue
             elif block['type'] != 'paragraph':
-                raise error.Abort(_('unexpected block type in release notes: '
-                                    '%s') % block['type'])
+                raise error.Abort(
+                    _('unexpected block type in release notes: %s')
+                    % block['type']
+                )
             if title:
                 notefragment.append(block['lines'])
 
@@ -402,8 +441,9 @@
         if block['underline'] == '=':  # main section
             name = sections.sectionfromtitle(title)
             if not name:
-                raise error.Abort(_('unknown release notes section: %s') %
-                                  title)
+                raise error.Abort(
+                    _('unknown release notes section: %s') % title
+                )
 
             currentsection = name
             bullet_points = gatherparagraphsbullets(i)
@@ -424,6 +464,7 @@
 
     return notes
 
+
 def serializenotes(sections, notes):
     """Serialize release notes from parsed fragments and notes.
 
@@ -449,8 +490,9 @@
             for i, para in enumerate(paragraphs):
                 if i:
                     lines.append('')
-                lines.extend(stringutil.wrap(' '.join(para),
-                                             width=78).splitlines())
+                lines.extend(
+                    stringutil.wrap(' '.join(para), width=78).splitlines()
+                )
 
             lines.append('')
 
@@ -468,17 +510,25 @@
             lines.append('')
 
         for paragraphs in nontitled:
-            lines.extend(stringutil.wrap(' '.join(paragraphs[0]),
-                                         width=78,
-                                         initindent='* ',
-                                         hangindent='  ').splitlines())
+            lines.extend(
+                stringutil.wrap(
+                    ' '.join(paragraphs[0]),
+                    width=78,
+                    initindent='* ',
+                    hangindent='  ',
+                ).splitlines()
+            )
 
             for para in paragraphs[1:]:
                 lines.append('')
-                lines.extend(stringutil.wrap(' '.join(para),
-                                             width=78,
-                                             initindent='  ',
-                                             hangindent='  ').splitlines())
+                lines.extend(
+                    stringutil.wrap(
+                        ' '.join(para),
+                        width=78,
+                        initindent='  ',
+                        hangindent='  ',
+                    ).splitlines()
+                )
 
             lines.append('')
 
@@ -487,14 +537,29 @@
 
     return '\n'.join(lines)
 
-@command('releasenotes',
-    [('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
-    ('c', 'check', False, _('checks for validity of admonitions (if any)'),
-        _('REV')),
-    ('l', 'list', False, _('list the available admonitions with their title'),
-        None)],
+
+@command(
+    'releasenotes',
+    [
+        ('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
+        (
+            'c',
+            'check',
+            False,
+            _('checks for validity of admonitions (if any)'),
+            _('REV'),
+        ),
+        (
+            'l',
+            'list',
+            False,
+            _('list the available admonitions with their title'),
+            None,
+        ),
+    ],
     _('hg releasenotes [-r REV] [-c] FILE'),
-    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
+    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
+)
 def releasenotes(ui, repo, file_=None, **opts):
     """parse release notes from commit messages into an output file
 
@@ -616,6 +681,7 @@
     with open(file_, 'wb') as fh:
         fh.write(serializenotes(sections, notes))
 
+
 @command('debugparsereleasenotes', norepo=True)
 def debugparsereleasenotes(ui, path, repo=None):
     """parse release notes and print resulting data structure"""
--- a/hgext/relink.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/relink.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,9 +18,7 @@
     registrar,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -30,6 +28,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 @command('relink', [], _('[ORIGIN]'), helpcategory=command.CATEGORY_MAINTENANCE)
 def relink(ui, repo, origin=None, **opts):
     """recreate hardlinks between two repositories
@@ -56,11 +55,14 @@
     command is running. (Both repositories will be locked against
     writes.)
     """
-    if (not util.safehasattr(util, 'samefile') or
-        not util.safehasattr(util, 'samedevice')):
+    if not util.safehasattr(util, 'samefile') or not util.safehasattr(
+        util, 'samedevice'
+    ):
         raise error.Abort(_('hardlinks are not supported on this system'))
-    src = hg.repository(repo.baseui, ui.expandpath(origin or 'default-relink',
-                                          origin or 'default'))
+    src = hg.repository(
+        repo.baseui,
+        ui.expandpath(origin or 'default-relink', origin or 'default'),
+    )
     ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
     if repo.root == src.root:
         ui.status(_('there is nothing to relink\n'))
@@ -75,6 +77,7 @@
         targets = prune(candidates, src.store.path, repo.store.path, ui)
         do_relink(src.store.path, repo.store.path, targets, ui)
 
+
 def collect(src, ui):
     seplen = len(os.path.sep)
     candidates = []
@@ -89,11 +92,13 @@
     src = src.store.path
     progress = ui.makeprogress(_('collecting'), unit=_('files'), total=total)
     pos = 0
-    ui.status(_("tip has %d files, estimated total number of files: %d\n")
-              % (live, total))
+    ui.status(
+        _("tip has %d files, estimated total number of files: %d\n")
+        % (live, total)
+    )
     for dirpath, dirnames, filenames in os.walk(src):
         dirnames.sort()
-        relpath = dirpath[len(src) + seplen:]
+        relpath = dirpath[len(src) + seplen :]
         for filename in sorted(filenames):
             if filename[-2:] not in ('.d', '.i'):
                 continue
@@ -108,6 +113,7 @@
     ui.status(_('collected %d candidate storage files\n') % len(candidates))
     return candidates
 
+
 def prune(candidates, src, dst, ui):
     def linkfilter(src, dst, st):
         try:
@@ -120,14 +126,16 @@
         if not util.samedevice(src, dst):
             # No point in continuing
             raise error.Abort(
-                _('source and destination are on different devices'))
+                _('source and destination are on different devices')
+            )
         if st.st_size != ts.st_size:
             return False
         return st
 
     targets = []
-    progress = ui.makeprogress(_('pruning'), unit=_('files'),
-                               total=len(candidates))
+    progress = ui.makeprogress(
+        _('pruning'), unit=_('files'), total=len(candidates)
+    )
     pos = 0
     for fn, st in candidates:
         pos += 1
@@ -144,6 +152,7 @@
     ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
     return targets
 
+
 def do_relink(src, dst, files, ui):
     def relinkfile(src, dst):
         bak = dst + '.bak'
@@ -159,8 +168,9 @@
     relinked = 0
     savedbytes = 0
 
-    progress = ui.makeprogress(_('relinking'), unit=_('files'),
-                               total=len(files))
+    progress = ui.makeprogress(
+        _('relinking'), unit=_('files'), total=len(files)
+    )
     pos = 0
     for f, sz in files:
         pos += 1
@@ -190,5 +200,7 @@
 
     progress.complete()
 
-    ui.status(_('relinked %d files (%s reclaimed)\n') %
-              (relinked, util.bytecount(savedbytes)))
+    ui.status(
+        _('relinked %d files (%s reclaimed)\n')
+        % (relinked, util.bytecount(savedbytes))
+    )
--- a/hgext/remotefilelog/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -194,8 +194,12 @@
 configitem('remotefilelog', 'cacheprocess.includepath', default=None)
 configitem("remotefilelog", "cachelimit", default="1000 GB")
 
-configitem('remotefilelog', 'fallbackpath', default=configitems.dynamicdefault,
-           alias=[('remotefilelog', 'fallbackrepo')])
+configitem(
+    'remotefilelog',
+    'fallbackpath',
+    default=configitems.dynamicdefault,
+    alias=[('remotefilelog', 'fallbackrepo')],
+)
 
 configitem('remotefilelog', 'validatecachelog', default=None)
 configitem('remotefilelog', 'validatecache', default='on')
@@ -231,8 +235,7 @@
 configitem('remotefilelog', 'nodettl', default=_defaultlimit)
 
 configitem('remotefilelog', 'data.gencountlimit', default=2)
-configitem('remotefilelog', 'data.generations',
-           default=['1GB', '100MB', '1MB'])
+configitem('remotefilelog', 'data.generations', default=['1GB', '100MB', '1MB'])
 configitem('remotefilelog', 'data.maxrepackpacks', default=50)
 configitem('remotefilelog', 'data.repackmaxpacksize', default='4GB')
 configitem('remotefilelog', 'data.repacksizelimit', default='100MB')
@@ -254,6 +257,7 @@
 
 isenabled = shallowutil.isenabled
 
+
 def uisetup(ui):
     """Wraps user facing Mercurial commands to swap them out with shallow
     versions.
@@ -261,23 +265,31 @@
     hg.wirepeersetupfuncs.append(fileserverclient.peersetup)
 
     entry = extensions.wrapcommand(commands.table, 'clone', cloneshallow)
-    entry[1].append(('', 'shallow', None,
-                     _("create a shallow clone which uses remote file "
-                       "history")))
+    entry[1].append(
+        (
+            '',
+            'shallow',
+            None,
+            _("create a shallow clone which uses remote file " "history"),
+        )
+    )
 
-    extensions.wrapcommand(commands.table, 'debugindex',
-        debugcommands.debugindex)
-    extensions.wrapcommand(commands.table, 'debugindexdot',
-        debugcommands.debugindexdot)
+    extensions.wrapcommand(
+        commands.table, 'debugindex', debugcommands.debugindex
+    )
+    extensions.wrapcommand(
+        commands.table, 'debugindexdot', debugcommands.debugindexdot
+    )
     extensions.wrapcommand(commands.table, 'log', log)
     extensions.wrapcommand(commands.table, 'pull', pull)
 
     # Prevent 'hg manifest --all'
     def _manifest(orig, ui, repo, *args, **opts):
-        if (isenabled(repo) and opts.get(r'all')):
+        if isenabled(repo) and opts.get(r'all'):
             raise error.Abort(_("--all is not supported in a shallow repo"))
 
         return orig(ui, repo, *args, **opts)
+
     extensions.wrapcommand(commands.table, "manifest", _manifest)
 
     # Wrap remotefilelog with lfs code
@@ -290,6 +302,7 @@
         if lfsmod:
             lfsmod.wrapfilelog(remotefilelog.remotefilelog)
             fileserverclient._lfsmod = lfsmod
+
     extensions.afterloaded('lfs', _lfsloaded)
 
     # debugdata needs remotefilelog.len to work
@@ -297,18 +310,21 @@
 
     changegroup.cgpacker = shallowbundle.shallowcg1packer
 
-    extensions.wrapfunction(changegroup, '_addchangegroupfiles',
-                            shallowbundle.addchangegroupfiles)
     extensions.wrapfunction(
-        changegroup, 'makechangegroup', shallowbundle.makechangegroup)
+        changegroup, '_addchangegroupfiles', shallowbundle.addchangegroupfiles
+    )
+    extensions.wrapfunction(
+        changegroup, 'makechangegroup', shallowbundle.makechangegroup
+    )
     extensions.wrapfunction(localrepo, 'makestore', storewrapper)
     extensions.wrapfunction(exchange, 'pull', exchangepull)
     extensions.wrapfunction(merge, 'applyupdates', applyupdates)
     extensions.wrapfunction(merge, '_checkunknownfiles', checkunknownfiles)
     extensions.wrapfunction(context.workingctx, '_checklookup', checklookup)
     extensions.wrapfunction(scmutil, '_findrenames', findrenames)
-    extensions.wrapfunction(copies, '_computeforwardmissing',
-                            computeforwardmissing)
+    extensions.wrapfunction(
+        copies, '_computeforwardmissing', computeforwardmissing
+    )
     extensions.wrapfunction(dispatch, 'runcommand', runcommand)
     extensions.wrapfunction(repair, '_collectbrokencsets', _collectbrokencsets)
     extensions.wrapfunction(context.changectx, 'filectx', filectx)
@@ -327,6 +343,7 @@
 def cloneshallow(orig, ui, repo, *args, **opts):
     if opts.get(r'shallow'):
         repos = []
+
         def pull_shallow(orig, self, *args, **kwargs):
             if not isenabled(self):
                 repos.append(self.unfiltered())
@@ -336,8 +353,10 @@
                 # setupclient fixed the class on the repo itself
                 # but we also need to fix it on the repoview
                 if isinstance(self, repoview.repoview):
-                    self.__class__.__bases__ = (self.__class__.__bases__[0],
-                                                self.unfiltered().__class__)
+                    self.__class__.__bases__ = (
+                        self.__class__.__bases__[0],
+                        self.unfiltered().__class__,
+                    )
                 self.requirements.add(constants.SHALLOWREPO_REQUIREMENT)
                 self._writerequirements()
 
@@ -346,6 +365,7 @@
                 return exchangepull(orig, self, *args, **kwargs)
             else:
                 return orig(self, *args, **kwargs)
+
         extensions.wrapfunction(exchange, 'pull', pull_shallow)
 
         # Wrap the stream logic to add requirements and to pass include/exclude
@@ -364,12 +384,16 @@
                     return remote._callstream('stream_out_shallow', **opts)
                 else:
                     return orig()
+
             extensions.wrapfunction(remote, 'stream_out', stream_out_shallow)
+
         def stream_wrap(orig, op):
             setup_streamout(op.repo, op.remote)
             return orig(op)
+
         extensions.wrapfunction(
-            streamclone, 'maybeperformlegacystreamclone', stream_wrap)
+            streamclone, 'maybeperformlegacystreamclone', stream_wrap
+        )
 
         def canperformstreamclone(orig, pullop, bundle2=False):
             # remotefilelog is currently incompatible with the
@@ -377,16 +401,18 @@
             # v1 instead.
             if 'v2' in pullop.remotebundle2caps.get('stream', []):
                 pullop.remotebundle2caps['stream'] = [
-                    c for c in pullop.remotebundle2caps['stream']
-                    if c != 'v2']
+                    c for c in pullop.remotebundle2caps['stream'] if c != 'v2'
+                ]
             if bundle2:
                 return False, None
             supported, requirements = orig(pullop, bundle2=bundle2)
             if requirements is not None:
                 requirements.add(constants.SHALLOWREPO_REQUIREMENT)
             return supported, requirements
+
         extensions.wrapfunction(
-            streamclone, 'canperformstreamclone', canperformstreamclone)
+            streamclone, 'canperformstreamclone', canperformstreamclone
+        )
 
     try:
         orig(ui, repo, *args, **opts)
@@ -396,6 +422,7 @@
                 if util.safehasattr(r, 'fileservice'):
                     r.fileservice.close()
 
+
 def debugdatashallow(orig, *args, **kwds):
     oldlen = remotefilelog.remotefilelog.__len__
     try:
@@ -404,6 +431,7 @@
     finally:
         remotefilelog.remotefilelog.__len__ = oldlen
 
+
 def reposetup(ui, repo):
     if not repo.local():
         return
@@ -424,6 +452,7 @@
     if isserverenabled:
         remotefilelogserver.setupserver(ui, repo)
 
+
 def setupclient(ui, repo):
     if not isinstance(repo, localrepo.localrepository):
         return
@@ -436,6 +465,7 @@
     shallowrepo.wraprepo(repo)
     repo.store = shallowstore.wrapstore(repo.store)
 
+
 def storewrapper(orig, requirements, path, vfstype):
     s = orig(requirements, path, vfstype)
     if constants.SHALLOWREPO_REQUIREMENT in requirements:
@@ -443,9 +473,11 @@
 
     return s
 
+
 # prefetch files before update
-def applyupdates(orig, repo, actions, wctx, mctx, overwrite, wantfiledata,
-                 labels=None):
+def applyupdates(
+    orig, repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
+):
     if isenabled(repo):
         manifest = mctx.manifest()
         files = []
@@ -453,12 +485,13 @@
             files.append((f, hex(manifest[f])))
         # batch fetch the needed files from the server
         repo.fileservice.prefetch(files)
-    return orig(repo, actions, wctx, mctx, overwrite, wantfiledata,
-                labels=labels)
+    return orig(
+        repo, actions, wctx, mctx, overwrite, wantfiledata, labels=labels
+    )
+
 
 # Prefetch merge checkunknownfiles
-def checkunknownfiles(orig, repo, wctx, mctx, force, actions,
-    *args, **kwargs):
+def checkunknownfiles(orig, repo, wctx, mctx, force, actions, *args, **kwargs):
     if isenabled(repo):
         files = []
         sparsematch = repo.maybesparsematch(mctx.rev())
@@ -474,6 +507,7 @@
         repo.fileservice.prefetch(files)
     return orig(repo, wctx, mctx, force, actions, *args, **kwargs)
 
+
 # Prefetch files before status attempts to look at their size and contents
 def checklookup(orig, self, files):
     repo = self._repo
@@ -487,6 +521,7 @@
         repo.fileservice.prefetch(prefetchfiles)
     return orig(self, files)
 
+
 # Prefetch files before the rename logic compares added and removed files
 def findrenames(orig, repo, matcher, added, removed, *args, **kwargs):
     if isenabled(repo):
@@ -499,6 +534,7 @@
         repo.fileservice.prefetch(files)
     return orig(repo, matcher, added, removed, *args, **kwargs)
 
+
 # prefetch files before pathcopies check
 def computeforwardmissing(orig, a, b, match=None):
     missing = orig(a, b, match=match)
@@ -520,6 +556,7 @@
         repo.fileservice.prefetch(files)
     return missing
 
+
 # close cache miss server connection after the command has finished
 def runcommand(orig, lui, repo, *args, **kwargs):
     fileservice = None
@@ -534,31 +571,48 @@
         if fileservice:
             fileservice.close()
 
+
 # prevent strip from stripping remotefilelogs
 def _collectbrokencsets(orig, repo, files, striprev):
     if isenabled(repo):
         files = list([f for f in files if not repo.shallowmatch(f)])
     return orig(repo, files, striprev)
 
+
 # changectx wrappers
 def filectx(orig, self, path, fileid=None, filelog=None):
     if fileid is None:
         fileid = self.filenode(path)
-    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-        return remotefilectx.remotefilectx(self._repo, path, fileid=fileid,
-                                           changectx=self, filelog=filelog)
+    if isenabled(self._repo) and self._repo.shallowmatch(path):
+        return remotefilectx.remotefilectx(
+            self._repo, path, fileid=fileid, changectx=self, filelog=filelog
+        )
     return orig(self, path, fileid=fileid, filelog=filelog)
 
+
 def workingfilectx(orig, self, path, filelog=None):
-    if (isenabled(self._repo) and self._repo.shallowmatch(path)):
-        return remotefilectx.remoteworkingfilectx(self._repo, path,
-                                                  workingctx=self,
-                                                  filelog=filelog)
+    if isenabled(self._repo) and self._repo.shallowmatch(path):
+        return remotefilectx.remoteworkingfilectx(
+            self._repo, path, workingctx=self, filelog=filelog
+        )
     return orig(self, path, filelog=filelog)
 
+
 # prefetch required revisions before a diff
-def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
-    copy, getfilectx, *args, **kwargs):
+def trydiff(
+    orig,
+    repo,
+    revs,
+    ctx1,
+    ctx2,
+    modified,
+    added,
+    removed,
+    copy,
+    getfilectx,
+    *args,
+    **kwargs
+):
     if isenabled(repo):
         prefetch = []
         mf1 = ctx1.manifest()
@@ -575,8 +629,20 @@
 
         repo.fileservice.prefetch(prefetch)
 
-    return orig(repo, revs, ctx1, ctx2, modified, added, removed, copy,
-                getfilectx, *args, **kwargs)
+    return orig(
+        repo,
+        revs,
+        ctx1,
+        ctx2,
+        modified,
+        added,
+        removed,
+        copy,
+        getfilectx,
+        *args,
+        **kwargs
+    )
+
 
 # Prevent verify from processing files
 # a stub for mercurial.hg.verify()
@@ -589,6 +655,8 @@
 
 
 clientonetime = False
+
+
 def onetimeclientsetup(ui):
     global clientonetime
     if clientonetime:
@@ -600,18 +668,53 @@
     # This violates Mercurial's filelog->manifest->changelog write order,
     # but is generally fine for client repos.
     pendingfilecommits = []
-    def addrawrevision(orig, self, rawtext, transaction, link, p1, p2, node,
-                       flags, cachedelta=None, _metatuple=None):
+
+    def addrawrevision(
+        orig,
+        self,
+        rawtext,
+        transaction,
+        link,
+        p1,
+        p2,
+        node,
+        flags,
+        cachedelta=None,
+        _metatuple=None,
+    ):
         if isinstance(link, int):
             pendingfilecommits.append(
-                (self, rawtext, transaction, link, p1, p2, node, flags,
-                 cachedelta, _metatuple))
+                (
+                    self,
+                    rawtext,
+                    transaction,
+                    link,
+                    p1,
+                    p2,
+                    node,
+                    flags,
+                    cachedelta,
+                    _metatuple,
+                )
+            )
             return node
         else:
-            return orig(self, rawtext, transaction, link, p1, p2, node, flags,
-                        cachedelta, _metatuple=_metatuple)
+            return orig(
+                self,
+                rawtext,
+                transaction,
+                link,
+                p1,
+                p2,
+                node,
+                flags,
+                cachedelta,
+                _metatuple=_metatuple,
+            )
+
     extensions.wrapfunction(
-        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision)
+        remotefilelog.remotefilelog, 'addrawrevision', addrawrevision
+    )
 
     def changelogadd(orig, self, *args):
         oldlen = len(self)
@@ -625,18 +728,22 @@
                     log.addrawrevision(rt, tr, linknode, p1, p2, n, fl, c, m)
                 else:
                     raise error.ProgrammingError(
-                        'pending multiple integer revisions are not supported')
+                        'pending multiple integer revisions are not supported'
+                    )
         else:
             # "link" is actually wrong here (it is set to len(changelog))
             # if changelog remains unchanged, skip writing file revisions
             # but still do a sanity check about pending multiple revisions
             if len(set(x[3] for x in pendingfilecommits)) > 1:
                 raise error.ProgrammingError(
-                    'pending multiple integer revisions are not supported')
+                    'pending multiple integer revisions are not supported'
+                )
         del pendingfilecommits[:]
         return node
+
     extensions.wrapfunction(changelog.changelog, 'add', changelogadd)
 
+
 def getrenamedfn(orig, repo, endrev=None):
     if not isenabled(repo) or copies.usechangesetcentricalgo(repo):
         return orig(repo, endrev)
@@ -665,6 +772,7 @@
 
     return getrenamed
 
+
 def walkfilerevs(orig, repo, match, follow, revs, fncache):
     if not isenabled(repo):
         return orig(repo, match, follow, revs, fncache)
@@ -680,8 +788,10 @@
     pctx = repo['.']
     for filename in match.files():
         if filename not in pctx:
-            raise error.Abort(_('cannot follow file not in parent '
-                               'revision: "%s"') % filename)
+            raise error.Abort(
+                _('cannot follow file not in parent ' 'revision: "%s"')
+                % filename
+            )
         fctx = pctx[filename]
 
         linkrev = fctx.linkrev()
@@ -697,6 +807,7 @@
 
     return wanted
 
+
 def filelogrevset(orig, repo, subset, x):
     """``filelog(pattern)``
     Changesets connected to the specified filelog.
@@ -711,8 +822,9 @@
 
     # i18n: "filelog" is a keyword
     pat = revset.getstring(x, _("filelog requires a pattern"))
-    m = match.match(repo.root, repo.getcwd(), [pat], default='relpath',
-                       ctx=repo[None])
+    m = match.match(
+        repo.root, repo.getcwd(), [pat], default='relpath', ctx=repo[None]
+    )
     s = set()
 
     if not match.patkind(pat):
@@ -735,6 +847,7 @@
 
     return smartset.baseset([r for r in subset if r in s])
 
+
 @command('gc', [], _('hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
     '''garbage collect the client and server filelog caches
@@ -773,6 +886,7 @@
     for repo in repos:
         remotefilelogserver.gcserver(ui, repo._repo)
 
+
 def gcclient(ui, cachepath):
     # get list of repos that use this cache
     repospath = os.path.join(cachepath, 'repos')
@@ -792,8 +906,9 @@
     filesrepacked = False
 
     count = 0
-    progress = ui.makeprogress(_("analyzing repositories"), unit="repos",
-                               total=len(repos))
+    progress = ui.makeprogress(
+        _("analyzing repositories"), unit="repos", total=len(repos)
+    )
     for path in repos:
         progress.update(count)
         count += 1
@@ -843,6 +958,7 @@
         # Compute a keepset which is not garbage collected
         def keyfn(fname, fnode):
             return fileserverclient.getcachekey(reponame, fname, hex(fnode))
+
         keepkeys = repackmod.keepset(repo, keyfn=keyfn, lastkeepkeys=keepkeys)
 
     progress.complete()
@@ -862,6 +978,7 @@
     elif not filesrepacked:
         ui.warn(_("warning: no valid repos in repofile\n"))
 
+
 def log(orig, ui, repo, *pats, **opts):
     if not isenabled(repo):
         return orig(ui, repo, *pats, **opts)
@@ -887,11 +1004,16 @@
                         break
 
             if isfile:
-                ui.warn(_("warning: file log can be slow on large repos - " +
-                          "use -f to speed it up\n"))
+                ui.warn(
+                    _(
+                        "warning: file log can be slow on large repos - "
+                        + "use -f to speed it up\n"
+                    )
+                )
 
     return orig(ui, repo, *pats, **opts)
 
+
 def revdatelimit(ui, revset):
     """Update revset so that only changesets no older than 'prefetchdays' days
     are included. The default value is set to 14 days. If 'prefetchdays' is set
@@ -902,6 +1024,7 @@
         revset = '(%s) & date(-%s)' % (revset, days)
     return revset
 
+
 def readytofetch(repo):
     """Check that enough time has passed since the last background prefetch.
     This only relates to prefetches after operations that change the working
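
For illustration, the revset rewrite that revdatelimit above performs, assuming the documented 14-day default (the real value comes from ui.configint):

revset = 'bgprefetchrevs()'
days = 14       # stand-in for ui.configint('remotefilelog', 'prefetchdays')
if days > 0:
    revset = '(%s) & date(-%s)' % (revset, days)
assert revset == '(bgprefetchrevs()) & date(-14)'
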
@@ -920,6 +1043,7 @@
 
     return ready
 
+
 def wcpprefetch(ui, repo, **kwargs):
     """Prefetches in background revisions specified by bgprefetchrevs revset.
     Does background repack if backgroundrepack flag is set in config.
@@ -943,6 +1067,7 @@
 
     repo._afterlock(anon)
 
+
 def pull(orig, ui, repo, *pats, **opts):
     result = orig(ui, repo, *pats, **opts)
 
@@ -958,29 +1083,35 @@
             revs = scmutil.revrange(repo, [prefetchrevset])
             base = repo['.'].rev()
             if bgprefetch:
-                repo.backgroundprefetch(prefetchrevset, repack=bgrepack,
-                                        ensurestart=ensurestart)
+                repo.backgroundprefetch(
+                    prefetchrevset, repack=bgrepack, ensurestart=ensurestart
+                )
             else:
                 repo.prefetch(revs, base=base)
                 if bgrepack:
-                    repackmod.backgroundrepack(repo, incremental=True,
-                                               ensurestart=ensurestart)
+                    repackmod.backgroundrepack(
+                        repo, incremental=True, ensurestart=ensurestart
+                    )
         elif bgrepack:
-            repackmod.backgroundrepack(repo, incremental=True,
-                                       ensurestart=ensurestart)
+            repackmod.backgroundrepack(
+                repo, incremental=True, ensurestart=ensurestart
+            )
 
     return result
 
+
 def exchangepull(orig, repo, remote, *args, **kwargs):
     # Hook into the callstream/getbundle to insert bundle capabilities
     # during a pull.
-    def localgetbundle(orig, source, heads=None, common=None, bundlecaps=None,
-                       **kwargs):
+    def localgetbundle(
+        orig, source, heads=None, common=None, bundlecaps=None, **kwargs
+    ):
         if not bundlecaps:
             bundlecaps = set()
         bundlecaps.add(constants.BUNDLE2_CAPABLITY)
-        return orig(source, heads=heads, common=common, bundlecaps=bundlecaps,
-                    **kwargs)
+        return orig(
+            source, heads=heads, common=common, bundlecaps=bundlecaps, **kwargs
+        )
 
     if util.safehasattr(remote, '_callstream'):
         remote._localrepo = repo
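
A minimal sketch of the capability-injection wrapper above, with a fake orig and a stand-in capability string instead of constants.BUNDLE2_CAPABLITY:

def localgetbundle(orig, source, bundlecaps=None, **kwargs):
    bundlecaps = set(bundlecaps or ())
    bundlecaps.add('remotefilelog')    # stand-in for BUNDLE2_CAPABLITY
    return orig(source, bundlecaps=bundlecaps, **kwargs)

def fakeorig(source, bundlecaps=None):
    return sorted(bundlecaps)

assert localgetbundle(fakeorig, 'src') == ['remotefilelog']
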
@@ -989,6 +1120,7 @@
 
     return orig(repo, remote, *args, **kwargs)
 
+
 def _fileprefetchhook(repo, revs, match):
     if isenabled(repo):
         allfiles = []
@@ -1003,49 +1135,65 @@
                     allfiles.append((path, hex(mf[path])))
         repo.fileservice.prefetch(allfiles)
 
-@command('debugremotefilelog', [
-    ('d', 'decompress', None, _('decompress the filelog first')),
-    ], _('hg debugremotefilelog <path>'), norepo=True)
+
+@command(
+    'debugremotefilelog',
+    [('d', 'decompress', None, _('decompress the filelog first')),],
+    _('hg debugremotefilelog <path>'),
+    norepo=True,
+)
 def debugremotefilelog(ui, path, **opts):
     return debugcommands.debugremotefilelog(ui, path, **opts)
 
-@command('verifyremotefilelog', [
-    ('d', 'decompress', None, _('decompress the filelogs first')),
-    ], _('hg verifyremotefilelogs <directory>'), norepo=True)
+
+@command(
+    'verifyremotefilelog',
+    [('d', 'decompress', None, _('decompress the filelogs first')),],
+    _('hg verifyremotefilelogs <directory>'),
+    norepo=True,
+)
 def verifyremotefilelog(ui, path, **opts):
     return debugcommands.verifyremotefilelog(ui, path, **opts)
 
-@command('debugdatapack', [
-    ('', 'long', None, _('print the long hashes')),
-    ('', 'node', '', _('dump the contents of node'), 'NODE'),
-    ], _('hg debugdatapack <paths>'), norepo=True)
+
+@command(
+    'debugdatapack',
+    [
+        ('', 'long', None, _('print the long hashes')),
+        ('', 'node', '', _('dump the contents of node'), 'NODE'),
+    ],
+    _('hg debugdatapack <paths>'),
+    norepo=True,
+)
 def debugdatapack(ui, *paths, **opts):
     return debugcommands.debugdatapack(ui, *paths, **opts)
 
-@command('debughistorypack', [
-    ], _('hg debughistorypack <path>'), norepo=True)
+
+@command('debughistorypack', [], _('hg debughistorypack <path>'), norepo=True)
 def debughistorypack(ui, path, **opts):
     return debugcommands.debughistorypack(ui, path)
 
-@command('debugkeepset', [
-    ], _('hg debugkeepset'))
+
+@command('debugkeepset', [], _('hg debugkeepset'))
 def debugkeepset(ui, repo, **opts):
     # The command is used to measure keepset computation time
     def keyfn(fname, fnode):
         return fileserverclient.getcachekey(repo.name, fname, hex(fnode))
+
     repackmod.keepset(repo, keyfn)
     return
 
-@command('debugwaitonrepack', [
-    ], _('hg debugwaitonrepack'))
+
+@command('debugwaitonrepack', [], _('hg debugwaitonrepack'))
 def debugwaitonrepack(ui, repo, **opts):
     return debugcommands.debugwaitonrepack(repo)
 
-@command('debugwaitonprefetch', [
-    ], _('hg debugwaitonprefetch'))
+
+@command('debugwaitonprefetch', [], _('hg debugwaitonprefetch'))
 def debugwaitonprefetch(ui, repo, **opts):
     return debugcommands.debugwaitonprefetch(repo)
 
+
 def resolveprefetchopts(ui, opts):
     if not opts.get('rev'):
         revset = ['.', 'draft()']
@@ -1068,11 +1216,17 @@
 
     return opts
 
-@command('prefetch', [
-    ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
-    ('', 'repack', False, _('run repack after prefetch')),
-    ('b', 'base', '', _("rev that is assumed to already be local")),
-    ] + commands.walkopts, _('hg prefetch [OPTIONS] [FILE...]'))
+
+@command(
+    'prefetch',
+    [
+        ('r', 'rev', [], _('prefetch the specified revisions'), _('REV')),
+        ('', 'repack', False, _('run repack after prefetch')),
+        ('b', 'base', '', _("rev that is assumed to already be local")),
+    ]
+    + commands.walkopts,
+    _('hg prefetch [OPTIONS] [FILE...]'),
+)
 def prefetch(ui, repo, *pats, **opts):
     """prefetch file revisions from the server
 
@@ -1095,20 +1249,35 @@
 
     # Run repack in background
     if opts.get('repack'):
-        repackmod.backgroundrepack(repo, incremental=True,
-                                   ensurestart=ensurestart)
+        repackmod.backgroundrepack(
+            repo, incremental=True, ensurestart=ensurestart
+        )
+
 
-@command('repack', [
-     ('', 'background', None, _('run in a background process'), None),
-     ('', 'incremental', None, _('do an incremental repack'), None),
-     ('', 'packsonly', None, _('only repack packs (skip loose objects)'), None),
-    ], _('hg repack [OPTIONS]'))
+@command(
+    'repack',
+    [
+        ('', 'background', None, _('run in a background process'), None),
+        ('', 'incremental', None, _('do an incremental repack'), None),
+        (
+            '',
+            'packsonly',
+            None,
+            _('only repack packs (skip loose objects)'),
+            None,
+        ),
+    ],
+    _('hg repack [OPTIONS]'),
+)
 def repack_(ui, repo, *pats, **opts):
     if opts.get(r'background'):
         ensurestart = repo.ui.configbool('devel', 'remotefilelog.ensurestart')
-        repackmod.backgroundrepack(repo, incremental=opts.get(r'incremental'),
-                                   packsonly=opts.get(r'packsonly', False),
-                                   ensurestart=ensurestart)
+        repackmod.backgroundrepack(
+            repo,
+            incremental=opts.get(r'incremental'),
+            packsonly=opts.get(r'packsonly', False),
+            ensurestart=ensurestart,
+        )
         return
 
     options = {'packsonly': opts.get(r'packsonly')}
--- a/hgext/remotefilelog/basepack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/basepack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -45,7 +45,7 @@
 # bisect) with (8 step fanout scan + 1 step bisect)
 # 5 step bisect = log(2^16 / 8 / 255)  # fanout
 # 10 step fanout scan = 2^16 / (2^16 / 8)  # fanout space divided by entries
-SMALLFANOUTCUTOFF = 2**16 // 8
+SMALLFANOUTCUTOFF = 2 ** 16 // 8
 
 # The amount of time to wait between checking for new packs. This prevents an
 # exception when data is moved to a new pack after the process has already
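
Worked numbers for SMALLFANOUTCUTOFF, following the comment above (a one-byte, 256-entry small fanout assumed):

import math

SMALLFANOUTCUTOFF = 2 ** 16 // 8
assert SMALLFANOUTCUTOFF == 8192
# roughly five bisect steps remain within each 256-entry fanout bucket:
assert round(math.log(SMALLFANOUTCUTOFF / 256, 2)) == 5
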
@@ -60,6 +60,7 @@
 else:
     PACKOPENMODE = 'rb'
 
+
 class _cachebackedpacks(object):
     def __init__(self, packs, cachesize):
         self._packs = set(packs)
@@ -105,6 +106,7 @@
         # Data not found in any pack.
         self._lastpack = None
 
+
 class basepackstore(object):
     # Default cache size limit for the pack files.
     DEFAULTCACHESIZE = 100
@@ -161,11 +163,14 @@
                 # (the index file and the pack file), we can yield once we see
                 # it twice.
                 if id:
-                    sizes[id] += stat.st_size # Sum both files' sizes together
+                    sizes[id] += stat.st_size  # Sum both files' sizes together
                     mtimes[id].append(stat.st_mtime)
                     if id in ids:
-                        yield (os.path.join(self.path, id), max(mtimes[id]),
-                            sizes[id])
+                        yield (
+                            os.path.join(self.path, id),
+                            max(mtimes[id]),
+                            sizes[id],
+                        )
                     else:
                         ids.add(id)
         except OSError as ex:
@@ -259,6 +264,7 @@
 
         return newpacks
 
+
 class versionmixin(object):
     # Mix-in for classes with multiple supported versions
     VERSION = None
@@ -274,10 +280,11 @@
         else:
             raise RuntimeError('unsupported version: %d' % version)
 
+
 class basepack(versionmixin):
     # The maximum amount we should read via mmap before re-mmapping so the old
     # pages can be released (100MB)
-    MAXPAGEDIN = 100 * 1024**2
+    MAXPAGEDIN = 100 * 1024 ** 2
 
     SUPPORTED_VERSIONS = [2]
 
@@ -291,7 +298,7 @@
 
         self._index = None
         self._data = None
-        self.freememory() # initialize the mmap
+        self.freememory()  # initialize the mmap
 
         version = struct.unpack('!B', self._data[:PACKVERSIONSIZE])[0]
         self._checkversion(version)
@@ -307,18 +314,19 @@
     @util.propertycache
     def _fanouttable(self):
         params = self.params
-        rawfanout = self._index[FANOUTSTART:FANOUTSTART + params.fanoutsize]
+        rawfanout = self._index[FANOUTSTART : FANOUTSTART + params.fanoutsize]
         fanouttable = []
         for i in pycompat.xrange(0, params.fanoutcount):
             loc = i * 4
-            fanoutentry = struct.unpack('!I', rawfanout[loc:loc + 4])[0]
+            fanoutentry = struct.unpack('!I', rawfanout[loc : loc + 4])[0]
             fanouttable.append(fanoutentry)
         return fanouttable
 
     @util.propertycache
     def _indexend(self):
-        nodecount = struct.unpack_from('!Q', self._index,
-                                       self.params.indexstart - 8)[0]
+        nodecount = struct.unpack_from(
+            '!Q', self._index, self.params.indexstart - 8
+        )[0]
         return self.params.indexstart + nodecount * self.INDEXENTRYLENGTH
 
     def freememory(self):
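
A self-contained sketch of the fanout decode in _fanouttable above, using a fabricated four-entry table instead of a real pack index:

import struct

fanoutcount = 4
rawfanout = struct.pack('!4I', 0, 10, 10, 42)    # one '!I' entry per prefix
fanouttable = []
for i in range(fanoutcount):
    loc = i * 4
    fanouttable.append(struct.unpack('!I', rawfanout[loc : loc + 4])[0])
assert fanouttable == [0, 10, 10, 42]
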
@@ -335,8 +343,9 @@
         # TODO: use an opener/vfs to access these paths
         with open(self.indexpath, PACKOPENMODE) as indexfp:
             # memory-map the file, size 0 means whole file
-            self._index = mmap.mmap(indexfp.fileno(), 0,
-                                    access=mmap.ACCESS_READ)
+            self._index = mmap.mmap(
+                indexfp.fileno(), 0, access=mmap.ACCESS_READ
+            )
         with open(self.packpath, PACKOPENMODE) as datafp:
             self._data = mmap.mmap(datafp.fileno(), 0, access=mmap.ACCESS_READ)
 
@@ -358,8 +367,8 @@
     def iterentries(self):
         raise NotImplementedError()
 
+
 class mutablebasepack(versionmixin):
-
     def __init__(self, ui, packdir, version=2):
         self._checkversion(version)
         # TODO(augie): make this configurable
@@ -372,9 +381,11 @@
 
         shallowutil.mkstickygroupdir(ui, packdir)
         self.packfp, self.packpath = opener.mkstemp(
-            suffix=self.PACKSUFFIX + '-tmp')
+            suffix=self.PACKSUFFIX + '-tmp'
+        )
         self.idxfp, self.idxpath = opener.mkstemp(
-            suffix=self.INDEXSUFFIX + '-tmp')
+            suffix=self.INDEXSUFFIX + '-tmp'
+        )
         self.packfp = os.fdopen(self.packfp, r'wb+')
         self.idxfp = os.fdopen(self.idxfp, r'wb+')
         self.sha = hashlib.sha1()
@@ -389,7 +400,7 @@
         # Write header
         # TODO: make it extensible (ex: allow specifying compression algorithm,
         # a flexible key/value header, delta algorithm, fanout size, etc)
-        versionbuf = struct.pack('!B', self.VERSION) # unsigned 1 byte int
+        versionbuf = struct.pack('!B', self.VERSION)  # unsigned 1 byte int
         self.writeraw(versionbuf)
 
     def __enter__(self):
@@ -474,8 +485,9 @@
             count += 1
 
             # Must use [0] on the unpack result since it's always a tuple.
-            fanoutkey = struct.unpack(params.fanoutstruct,
-                                      node[:params.fanoutprefix])[0]
+            fanoutkey = struct.unpack(
+                params.fanoutstruct, node[: params.fanoutprefix]
+            )[0]
             if fanouttable[fanoutkey] == EMPTYFANOUT:
                 fanouttable[fanoutkey] = location
 
@@ -511,9 +523,15 @@
             config = 0b10000000
         self.idxfp.write(struct.pack('!BB', self.VERSION, config))
 
+
 class indexparams(object):
-    __slots__ = (r'fanoutprefix', r'fanoutstruct', r'fanoutcount',
-                 r'fanoutsize', r'indexstart')
+    __slots__ = (
+        r'fanoutprefix',
+        r'fanoutstruct',
+        r'fanoutcount',
+        r'fanoutsize',
+        r'indexstart',
+    )
 
     def __init__(self, prefixsize, version):
         self.fanoutprefix = prefixsize
@@ -529,7 +547,7 @@
             raise ValueError("invalid fanout prefix size: %s" % prefixsize)
 
         # The number of fanout table entries
-        self.fanoutcount = 2**(prefixsize * 8)
+        self.fanoutcount = 2 ** (prefixsize * 8)
 
         # The total bytes used by the fanout table
         self.fanoutsize = self.fanoutcount * 4
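
For reference, the fanout table sizes implied by the two prefix sizes accepted above:

for prefixsize in (1, 2):
    fanoutcount = 2 ** (prefixsize * 8)   # number of fanout entries
    fanoutsize = fanoutcount * 4          # four bytes ('!I') per entry
    print(prefixsize, fanoutcount, fanoutsize)
# 1 -> 256 entries (1024 bytes); 2 -> 65536 entries (262144 bytes)
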
--- a/hgext/remotefilelog/basestore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/basestore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
     shallowutil,
 )
 
+
 class basestore(object):
     def __init__(self, repo, path, reponame, shared=False):
         """Creates a remotefilelog store object for the given repo name.
@@ -37,10 +38,12 @@
         self._shared = shared
         self._uid = os.getuid() if not pycompat.iswindows else None
 
-        self._validatecachelog = self.ui.config("remotefilelog",
-                                                "validatecachelog")
-        self._validatecache = self.ui.config("remotefilelog", "validatecache",
-                                             'on')
+        self._validatecachelog = self.ui.config(
+            "remotefilelog", "validatecachelog"
+        )
+        self._validatecache = self.ui.config(
+            "remotefilelog", "validatecache", 'on'
+        )
         if self._validatecache not in ('on', 'strict', 'off'):
             self._validatecache = 'on'
         if self._validatecache == 'off':
@@ -54,8 +57,11 @@
         for name, node in keys:
             filepath = self._getfilepath(name, node)
             exists = os.path.exists(filepath)
-            if (exists and self._validatecache == 'strict' and
-                not self._validatekey(filepath, 'contains')):
+            if (
+                exists
+                and self._validatecache == 'strict'
+                and not self._validatekey(filepath, 'contains')
+            ):
                 exists = False
             if not exists:
                 missing.append((name, node))
@@ -77,8 +83,9 @@
         ui = self.ui
         entries = ledger.sources.get(self, [])
         count = 0
-        progress = ui.makeprogress(_("cleaning up"), unit="files",
-                                   total=len(entries))
+        progress = ui.makeprogress(
+            _("cleaning up"), unit="files", total=len(entries)
+        )
         for entry in entries:
             if entry.gced or (entry.datarepacked and entry.historyrepacked):
                 progress.update(count)
@@ -178,8 +185,11 @@
         return filenames
 
     def _getrepocachepath(self):
-        return os.path.join(
-            self._path, self._reponame) if self._shared else self._path
+        return (
+            os.path.join(self._path, self._reponame)
+            if self._shared
+            else self._path
+        )
 
     def _listkeys(self):
         """List all the remotefilelog keys that exist in the store.
@@ -219,8 +229,9 @@
                 os.rename(filepath, filepath + ".corrupt")
                 raise KeyError("corrupt local cache file %s" % filepath)
         except IOError:
-            raise KeyError("no file found at %s for %s:%s" % (filepath, name,
-                                                              hex(node)))
+            raise KeyError(
+                "no file found at %s for %s:%s" % (filepath, name, hex(node))
+            )
 
         return data
 
@@ -244,8 +255,9 @@
 
             if self._validatecache:
                 if not self._validatekey(filepath, 'write'):
-                    raise error.Abort(_("local cache write was corrupted %s") %
-                                      filepath)
+                    raise error.Abort(
+                        _("local cache write was corrupted %s") % filepath
+                    )
         finally:
             os.umask(oldumask)
 
@@ -288,7 +300,7 @@
 
                 # extract the node from the metadata
                 offset += size
-                datanode = data[offset:offset + 20]
+                datanode = data[offset : offset + 20]
 
                 # and compare against the path
                 if os.path.basename(path) == hex(datanode):
@@ -314,8 +326,9 @@
         # keep files newer than a day even if they aren't needed
         limit = time.time() - (60 * 60 * 24)
 
-        progress = ui.makeprogress(_("removing unnecessary files"),
-                                   unit="files")
+        progress = ui.makeprogress(
+            _("removing unnecessary files"), unit="files"
+        )
         progress.update(0)
         for root, dirs, files in os.walk(cachepath):
             for file in files:
@@ -352,8 +365,10 @@
                         # errno.ENOENT = no such file or directory
                         if e.errno != errno.ENOENT:
                             raise
-                        msg = _("warning: file %s was removed by another "
-                                "process\n")
+                        msg = _(
+                            "warning: file %s was removed by another "
+                            "process\n"
+                        )
                         ui.warn(msg % path)
                         continue
                     removed += 1
@@ -363,8 +378,9 @@
         limit = ui.configbytes("remotefilelog", "cachelimit")
         if size > limit:
             excess = size - limit
-            progress = ui.makeprogress(_("enforcing cache limit"), unit="bytes",
-                                       total=excess)
+            progress = ui.makeprogress(
+                _("enforcing cache limit"), unit="bytes", total=excess
+            )
             removedexcess = 0
             while queue and size > limit and size > 0:
                 progress.update(removedexcess)
@@ -382,10 +398,16 @@
                 removedexcess += oldpathstat.st_size
             progress.complete()
 
-        ui.status(_("finished: removed %d of %d files (%0.2f GB to %0.2f GB)\n")
-                  % (removed, count,
-                     float(originalsize) / 1024.0 / 1024.0 / 1024.0,
-                     float(size) / 1024.0 / 1024.0 / 1024.0))
+        ui.status(
+            _("finished: removed %d of %d files (%0.2f GB to %0.2f GB)\n")
+            % (
+                removed,
+                count,
+                float(originalsize) / 1024.0 / 1024.0 / 1024.0,
+                float(size) / 1024.0 / 1024.0 / 1024.0,
+            )
+        )
+
 
 class baseunionstore(object):
     def __init__(self, *args, **kwargs):
@@ -407,6 +429,7 @@
     def retriable(fn):
         def noop(*args):
             pass
+
         def wrapped(self, *args, **kwargs):
             retrylog = self.retrylog or noop
             funcname = fn.__name__
@@ -421,7 +444,10 @@
                 except KeyError:
                     if i == self.numattempts:
                         # retries exhausted
-                        retrylog('retries exhausted in %s, raising KeyError\n' %
-                                 pycompat.sysbytes(funcname))
+                        retrylog(
+                            'retries exhausted in %s, raising KeyError\n'
+                            % pycompat.sysbytes(funcname)
+                        )
                         raise
+
         return wrapped
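
A standalone sketch of the retry pattern above, with a toy store class in place of the Mercurial base class (the real decorator also logs each retry via retrylog):

def retriable(fn):
    def wrapped(self, *args, **kwargs):
        for i in range(self.numattempts):
            try:
                return fn(self, *args, **kwargs)
            except KeyError:
                if i == self.numattempts - 1:
                    raise                  # retries exhausted
                self.markforrefresh()      # pick up new packs, then retry
    return wrapped

class toystore(object):
    numattempts = 3

    def __init__(self):
        self.calls = 0

    def markforrefresh(self):
        pass

    @retriable
    def get(self, key):
        self.calls += 1
        if self.calls < 3:
            raise KeyError(key)
        return 'value'

assert toystore().get('k') == 'value'
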
--- a/hgext/remotefilelog/connectionpool.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/connectionpool.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,6 +16,7 @@
 
 _sshv1peer = sshpeer.sshv1peer
 
+
 class connectionpool(object):
     def __init__(self, repo):
         self._repo = repo
@@ -40,6 +41,7 @@
                 pass
 
         if conn is None:
+
             def _cleanup(orig):
                 # close pipee first so peer.cleanup reading it won't deadlock,
                 # if there are other processes with pipeo open (i.e. us).
@@ -62,6 +64,7 @@
                 conn.close()
             del pathpool[:]
 
+
 class connection(object):
     def __init__(self, pool, peer):
         self._pool = pool
--- a/hgext/remotefilelog/constants.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/constants.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,8 +22,8 @@
 PATHCOUNTSTRUCT = '!I'
 PATHCOUNTSIZE = struct.calcsize(PATHCOUNTSTRUCT)
 
-FILEPACK_CATEGORY=""
-TREEPACK_CATEGORY="manifests"
+FILEPACK_CATEGORY = ""
+TREEPACK_CATEGORY = "manifests"
 
 ALL_CATEGORIES = [FILEPACK_CATEGORY, TREEPACK_CATEGORY]
 
@@ -31,11 +31,13 @@
 METAKEYFLAG = 'f'  # revlog flag
 METAKEYSIZE = 's'  # full rawtext size
 
+
 def getunits(category):
     if category == FILEPACK_CATEGORY:
         return _("files")
     if category == TREEPACK_CATEGORY:
         return _("trees")
 
+
 # Repack options passed to ``markledger``.
 OPTION_PACKSONLY = 'packsonly'
--- a/hgext/remotefilelog/contentstore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/contentstore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,9 +14,11 @@
     shallowutil,
 )
 
+
 class ChainIndicies(object):
     """A static class for easy reference to the delta chain indicies.
     """
+
     # The filename of this revision delta
     NAME = 0
     # The mercurial file node for this revision delta
@@ -31,6 +33,7 @@
     # The actual delta or full text data.
     DATA = 4
 
+
 class unioncontentstore(basestore.baseunionstore):
     def __init__(self, *args, **kwargs):
         super(unioncontentstore, self).__init__(*args, **kwargs)
@@ -132,8 +135,9 @@
         raise KeyError((name, hex(node)))
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add content only to remotefilelog "
-                           "contentstore")
+        raise RuntimeError(
+            "cannot add content only to remotefilelog " "contentstore"
+        )
 
     def getmissing(self, keys):
         missing = keys
@@ -152,6 +156,7 @@
         for store in self.stores:
             store.markledger(ledger, options)
 
+
 class remotefilelogcontentstore(basestore.basestore):
     def __init__(self, *args, **kwargs):
         super(remotefilelogcontentstore, self).__init__(*args, **kwargs)
@@ -162,7 +167,7 @@
         data = self._getdata(name, node)
 
         offset, size, flags = shallowutil.parsesizeflags(data)
-        content = data[offset:offset + size]
+        content = data[offset : offset + size]
 
         ancestormap = shallowutil.ancestormap(data)
         p1, p2, linknode, copyfrom = ancestormap[node]
@@ -202,22 +207,23 @@
         return self._threaddata.metacache[1]
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add content only to remotefilelog "
-                           "contentstore")
+        raise RuntimeError(
+            "cannot add content only to remotefilelog " "contentstore"
+        )
 
     def _sanitizemetacache(self):
         metacache = getattr(self._threaddata, 'metacache', None)
         if metacache is None:
-            self._threaddata.metacache = (None, None) # (node, meta)
+            self._threaddata.metacache = (None, None)  # (node, meta)
 
     def _updatemetacache(self, node, size, flags):
         self._sanitizemetacache()
         if node == self._threaddata.metacache[0]:
             return
-        meta = {constants.METAKEYFLAG: flags,
-                constants.METAKEYSIZE: size}
+        meta = {constants.METAKEYFLAG: flags, constants.METAKEYSIZE: size}
         self._threaddata.metacache = (node, meta)
 
+
 class remotecontentstore(object):
     def __init__(self, ui, fileservice, shared):
         self._fileservice = fileservice
@@ -225,8 +231,9 @@
         self._shared = shared
 
     def get(self, name, node):
-        self._fileservice.prefetch([(name, hex(node))], force=True,
-                                   fetchdata=True)
+        self._fileservice.prefetch(
+            [(name, hex(node))], force=True, fetchdata=True
+        )
         return self._shared.get(name, node)
 
     def getdelta(self, name, node):
@@ -242,8 +249,9 @@
         return [(name, node, None, nullid, revision)]
 
     def getmeta(self, name, node):
-        self._fileservice.prefetch([(name, hex(node))], force=True,
-                                   fetchdata=True)
+        self._fileservice.prefetch(
+            [(name, hex(node))], force=True, fetchdata=True
+        )
         return self._shared.getmeta(name, node)
 
     def add(self, name, node, data):
@@ -255,6 +263,7 @@
     def markledger(self, ledger, options=None):
         pass
 
+
 class manifestrevlogstore(object):
     def __init__(self, repo):
         self._store = repo.store
@@ -277,8 +286,10 @@
     def getmeta(self, name, node):
         rl = self._revlog(name)
         rev = rl.rev(node)
-        return {constants.METAKEYFLAG: rl.flags(rev),
-                constants.METAKEYSIZE: rl.rawsize(rev)}
+        return {
+            constants.METAKEYFLAG: rl.flags(rev),
+            constants.METAKEYSIZE: rl.rawsize(rev),
+        }
 
     def getancestors(self, name, node, known=None):
         if known is None:
@@ -359,7 +370,7 @@
             if path[:5] != 'meta/' or path[-2:] != '.i':
                 continue
 
-            treename = path[5:-len('/00manifest.i')]
+            treename = path[5 : -len('/00manifest.i')]
 
             rl = revlog.revlog(self._svfs, path)
             for rev in pycompat.xrange(len(rl) - 1, -1, -1):
--- a/hgext/remotefilelog/datapack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/datapack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,6 +24,7 @@
 INDEXSUFFIX = '.dataidx'
 PACKSUFFIX = '.datapack'
 
+
 class datapackstore(basepack.basepackstore):
     INDEXSUFFIX = INDEXSUFFIX
     PACKSUFFIX = PACKSUFFIX
@@ -85,6 +86,7 @@
     def add(self, name, node, data):
         raise RuntimeError("cannot add to datapackstore")
 
+
 class datapack(basepack.basepack):
     INDEXSUFFIX = INDEXSUFFIX
     PACKSUFFIX = PACKSUFFIX
@@ -106,8 +108,9 @@
         return missing
 
     def get(self, name, node):
-        raise RuntimeError("must use getdeltachain with datapack (%s:%s)"
-                           % (name, hex(node)))
+        raise RuntimeError(
+            "must use getdeltachain with datapack (%s:%s)" % (name, hex(node))
+        )
 
     def getmeta(self, name, node):
         value = self._find(node)
@@ -115,18 +118,18 @@
             raise KeyError((name, hex(node)))
 
         node, deltabaseoffset, offset, size = value
-        rawentry = self._data[offset:offset + size]
+        rawentry = self._data[offset : offset + size]
 
         # see docstring of mutabledatapack for the format
         offset = 0
-        offset += struct.unpack_from('!H', rawentry, offset)[0] + 2 # filename
-        offset += 40 # node, deltabase node
-        offset += struct.unpack_from('!Q', rawentry, offset)[0] + 8 # delta
+        offset += struct.unpack_from('!H', rawentry, offset)[0] + 2  # filename
+        offset += 40  # node, deltabase node
+        offset += struct.unpack_from('!Q', rawentry, offset)[0] + 8  # delta
 
         metalen = struct.unpack_from('!I', rawentry, offset)[0]
         offset += 4
 
-        meta = shallowutil.parsepackmeta(rawentry[offset:offset + metalen])
+        meta = shallowutil.parsepackmeta(rawentry[offset : offset + metalen])
 
         return meta
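
A self-contained sketch of the entry layout walked by getmeta above, built from fabricated data (uncompressed delta, empty metadata; real packs compress the delta before writing it):

import struct

name, node, deltabase = b'a.txt', b'\x11' * 20, b'\x00' * 20
delta, meta = b'data', b''
rawentry = (
    struct.pack('!H', len(name)) + name          # <2 byte len> + <filename>
    + node + deltabase                           # two 20 byte nodes
    + struct.pack('!Q', len(delta)) + delta      # <8 byte len> + <delta>
    + struct.pack('!I', len(meta)) + meta        # <4 byte len> + <metadata>
)

offset = struct.unpack_from('!H', rawentry, 0)[0] + 2         # skip filename
offset += 40                                                  # node, deltabase
offset += struct.unpack_from('!Q', rawentry, offset)[0] + 8   # skip delta
assert struct.unpack_from('!I', rawentry, offset)[0] == len(meta)
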
 
@@ -155,11 +158,14 @@
         chain = [value]
         deltabaseoffset = value[1]
         entrylen = self.INDEXENTRYLENGTH
-        while (deltabaseoffset != FULLTEXTINDEXMARK
-               and deltabaseoffset != NOBASEINDEXMARK):
+        while (
+            deltabaseoffset != FULLTEXTINDEXMARK
+            and deltabaseoffset != NOBASEINDEXMARK
+        ):
             loc = params.indexstart + deltabaseoffset
-            value = struct.unpack(self.INDEXFORMAT,
-                                  self._index[loc:loc + entrylen])
+            value = struct.unpack(
+                self.INDEXFORMAT, self._index[loc : loc + entrylen]
+            )
             deltabaseoffset = value[1]
             chain.append(value)
 
@@ -175,33 +181,33 @@
         return deltachain
 
     def _readentry(self, offset, size, getmeta=False):
-        rawentry = self._data[offset:offset + size]
+        rawentry = self._data[offset : offset + size]
         self._pagedin += len(rawentry)
 
         # <2 byte len> + <filename>
         lengthsize = 2
         filenamelen = struct.unpack('!H', rawentry[:2])[0]
-        filename = rawentry[lengthsize:lengthsize + filenamelen]
+        filename = rawentry[lengthsize : lengthsize + filenamelen]
 
         # <20 byte node> + <20 byte deltabase>
         nodestart = lengthsize + filenamelen
         deltabasestart = nodestart + NODELENGTH
         node = rawentry[nodestart:deltabasestart]
-        deltabasenode = rawentry[deltabasestart:deltabasestart + NODELENGTH]
+        deltabasenode = rawentry[deltabasestart : deltabasestart + NODELENGTH]
 
         # <8 byte len> + <delta>
         deltastart = deltabasestart + NODELENGTH
-        rawdeltalen = rawentry[deltastart:deltastart + 8]
+        rawdeltalen = rawentry[deltastart : deltastart + 8]
         deltalen = struct.unpack('!Q', rawdeltalen)[0]
 
-        delta = rawentry[deltastart + 8:deltastart + 8 + deltalen]
+        delta = rawentry[deltastart + 8 : deltastart + 8 + deltalen]
         delta = self._decompress(delta)
 
         if getmeta:
             metastart = deltastart + 8 + deltalen
             metalen = struct.unpack_from('!I', rawentry, metastart)[0]
 
-            rawmeta = rawentry[metastart + 4:metastart + 4 + metalen]
+            rawmeta = rawentry[metastart + 4 : metastart + 4 + metalen]
             meta = shallowutil.parsepackmeta(rawmeta)
             return filename, node, deltabasenode, delta, meta
         else:
@@ -215,8 +221,9 @@
 
     def _find(self, node):
         params = self.params
-        fanoutkey = struct.unpack(params.fanoutstruct,
-                                  node[:params.fanoutprefix])[0]
+        fanoutkey = struct.unpack(
+            params.fanoutstruct, node[: params.fanoutprefix]
+        )[0]
         fanout = self._fanouttable
 
         start = fanout[fanoutkey] + params.indexstart
@@ -233,20 +240,20 @@
 
         # Bisect between start and end to find node
         index = self._index
-        startnode = index[start:start + NODELENGTH]
-        endnode = index[end:end + NODELENGTH]
+        startnode = index[start : start + NODELENGTH]
+        endnode = index[end : end + NODELENGTH]
         entrylen = self.INDEXENTRYLENGTH
         if startnode == node:
-            entry = index[start:start + entrylen]
+            entry = index[start : start + entrylen]
         elif endnode == node:
-            entry = index[end:end + entrylen]
+            entry = index[end : end + entrylen]
         else:
             while start < end - entrylen:
                 mid = start + (end - start) // 2
                 mid = mid - ((mid - params.indexstart) % entrylen)
-                midnode = index[mid:mid + NODELENGTH]
+                midnode = index[mid : mid + NODELENGTH]
                 if midnode == node:
-                    entry = index[mid:mid + entrylen]
+                    entry = index[mid : mid + entrylen]
                     break
                 if node > midnode:
                     start = mid
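
A toy bisect over fixed-width index entries in the spirit of _find above, with fabricated 24-byte entries (a 20-byte node plus a '!I' offset) and no fanout stage:

import struct

NODELENGTH, ENTRYLEN = 20, 24
nodes = [b'\x03' * 20, b'\x07' * 20, b'\x09' * 20, b'\x0c' * 20]
index = b''.join(n + struct.pack('!I', i) for i, n in enumerate(nodes))

def find(node):
    start, end = 0, len(index) - ENTRYLEN
    while start <= end:
        mid = start + (end - start) // 2
        mid -= mid % ENTRYLEN                 # align to an entry boundary
        midnode = index[mid : mid + NODELENGTH]
        if midnode == node:
            return struct.unpack_from('!I', index, mid + NODELENGTH)[0]
        if node > midnode:
            start = mid + ENTRYLEN
        else:
            end = mid - ENTRYLEN
    return None

assert find(b'\x09' * 20) == 2
assert find(b'\x04' * 20) is None
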
@@ -264,8 +271,9 @@
     def cleanup(self, ledger):
         entries = ledger.sources.get(self, [])
         allkeys = set(self)
-        repackedkeys = set((e.filename, e.node) for e in entries if
-                           e.datarepacked or e.gced)
+        repackedkeys = set(
+            (e.filename, e.node) for e in entries if e.datarepacked or e.gced
+        )
 
         if len(allkeys - repackedkeys) == 0:
             if self.path not in ledger.created:
@@ -284,27 +292,28 @@
             oldoffset = offset
 
             # <2 byte len> + <filename>
-            filenamelen = struct.unpack('!H', data[offset:offset + 2])[0]
+            filenamelen = struct.unpack('!H', data[offset : offset + 2])[0]
             offset += 2
-            filename = data[offset:offset + filenamelen]
+            filename = data[offset : offset + filenamelen]
             offset += filenamelen
 
             # <20 byte node>
-            node = data[offset:offset + constants.NODESIZE]
+            node = data[offset : offset + constants.NODESIZE]
             offset += constants.NODESIZE
             # <20 byte deltabase>
-            deltabase = data[offset:offset + constants.NODESIZE]
+            deltabase = data[offset : offset + constants.NODESIZE]
             offset += constants.NODESIZE
 
             # <8 byte len> + <delta>
-            rawdeltalen = data[offset:offset + 8]
+            rawdeltalen = data[offset : offset + 8]
             deltalen = struct.unpack('!Q', rawdeltalen)[0]
             offset += 8
 
             # TODO(augie): we should store a header that is the
             # uncompressed size.
-            uncompressedlen = len(self._decompress(
-                data[offset:offset + deltalen]))
+            uncompressedlen = len(
+                self._decompress(data[offset : offset + deltalen])
+            )
             offset += deltalen
 
             # <4 byte len> + <metadata-list>
@@ -318,6 +327,7 @@
             if self.freememory():
                 data = self._data
 
+
 class mutabledatapack(basepack.mutablebasepack):
     """A class for constructing and serializing a datapack file and index.
 
@@ -388,6 +398,7 @@
 
     [1]: new in version 1.
     """
+
     INDEXSUFFIX = INDEXSUFFIX
     PACKSUFFIX = PACKSUFFIX
 
@@ -403,7 +414,7 @@
 
     def add(self, name, node, deltabasenode, delta, metadata=None):
         # metadata is a dict, ex. {METAKEYFLAG: flag}
-        if len(name) > 2**16:
+        if len(name) > 2 ** 16:
             raise RuntimeError(_("name too long %s") % name)
         if len(node) != 20:
             raise RuntimeError(_("node should be 20 bytes %s") % node)
@@ -415,18 +426,20 @@
         # TODO: allow configurable compression
         delta = self._compress(delta)
 
-        rawdata = ''.join((
-            struct.pack('!H', len(name)), # unsigned 2 byte int
-            name,
-            node,
-            deltabasenode,
-            struct.pack('!Q', len(delta)), # unsigned 8 byte int
-            delta,
-        ))
+        rawdata = ''.join(
+            (
+                struct.pack('!H', len(name)),  # unsigned 2 byte int
+                name,
+                node,
+                deltabasenode,
+                struct.pack('!Q', len(delta)),  # unsigned 8 byte int
+                delta,
+            )
+        )
 
         # v1 support metadata
         rawmeta = shallowutil.buildpackmeta(metadata)
-        rawdata += struct.pack('!I', len(rawmeta)) # unsigned 4 byte
+        rawdata += struct.pack('!I', len(rawmeta))  # unsigned 4 byte
         rawdata += rawmeta
 
         offset = self.packfp.tell()
@@ -438,8 +451,9 @@
         self.writeraw(rawdata)
 
     def createindex(self, nodelocations, indexoffset):
-        entries = sorted((n, db, o, s) for n, (db, o, s)
-                         in self.entries.iteritems())
+        entries = sorted(
+            (n, db, o, s) for n, (db, o, s) in self.entries.iteritems()
+        )
 
         rawindex = ''
         fmt = self.INDEXFORMAT
@@ -449,8 +463,9 @@
             else:
                 # Instead of storing the deltabase node in the index, let's
                 # store a pointer directly to the index entry for the deltabase.
-                deltabaselocation = nodelocations.get(deltabase,
-                                                      NOBASEINDEXMARK)
+                deltabaselocation = nodelocations.get(
+                    deltabase, NOBASEINDEXMARK
+                )
 
             entry = struct.pack(fmt, node, deltabaselocation, offset, size)
             rawindex += entry
--- a/hgext/remotefilelog/debugcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/debugcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,29 +29,35 @@
     shallowutil,
 )
 
+
 def debugremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
     size, firstnode, mapping = parsefileblob(path, decompress)
 
-    ui.status(_("size: %d bytes\n") % (size))
-    ui.status(_("path: %s \n") % (path))
+    ui.status(_("size: %d bytes\n") % size)
+    ui.status(_("path: %s \n") % path)
     ui.status(_("key: %s \n") % (short(firstnode)))
     ui.status(_("\n"))
-    ui.status(_("%12s => %12s %13s %13s %12s\n") %
-              ("node", "p1", "p2", "linknode", "copyfrom"))
+    ui.status(
+        _("%12s => %12s %13s %13s %12s\n")
+        % ("node", "p1", "p2", "linknode", "copyfrom")
+    )
 
     queue = [firstnode]
     while queue:
         node = queue.pop(0)
         p1, p2, linknode, copyfrom = mapping[node]
-        ui.status(_("%s => %s  %s  %s  %s\n") %
-            (short(node), short(p1), short(p2), short(linknode), copyfrom))
+        ui.status(
+            _("%s => %s  %s  %s  %s\n")
+            % (short(node), short(p1), short(p2), short(linknode), copyfrom)
+        )
         if p1 != nullid:
             queue.append(p1)
         if p2 != nullid:
             queue.append(p2)
 
+
 def buildtemprevlog(repo, file):
     # get filename key
     filekey = nodemod.hex(hashlib.sha1(file).digest())
@@ -74,6 +80,7 @@
     class faket(object):
         def add(self, a, b, c):
             pass
+
     t = faket()
     for fctx in fctxs:
         if fctx.node() not in repo:
@@ -89,13 +96,16 @@
 
     return r
 
+
 def debugindex(orig, ui, repo, file_=None, **opts):
     """dump the contents of an index file"""
-    if (opts.get(r'changelog') or
-        opts.get(r'manifest') or
-        opts.get(r'dir') or
-        not shallowutil.isenabled(repo) or
-        not repo.shallowmatch(file_)):
+    if (
+        opts.get(r'changelog')
+        or opts.get(r'manifest')
+        or opts.get(r'dir')
+        or not shallowutil.isenabled(repo)
+        or not repo.shallowmatch(file_)
+    ):
         return orig(ui, repo, file_, **opts)
 
     r = buildtemprevlog(repo, file_)
@@ -112,12 +122,20 @@
         basehdr = '  base'
 
     if format == 0:
-        ui.write(("   rev    offset  length " + basehdr + " linkrev"
-                  " nodeid       p1           p2\n"))
+        ui.write(
+            (
+                "   rev    offset  length " + basehdr + " linkrev"
+                " nodeid       p1           p2\n"
+            )
+        )
     elif format == 1:
-        ui.write(("   rev flag   offset   length"
-                  "     size " + basehdr + "   link     p1     p2"
-                  "       nodeid\n"))
+        ui.write(
+            (
+                "   rev flag   offset   length"
+                "     size " + basehdr + "   link     p1     p2"
+                "       nodeid\n"
+            )
+        )
 
     for i in r:
         node = r.node(i)
@@ -130,14 +148,37 @@
                 pp = r.parents(node)
             except Exception:
                 pp = [nullid, nullid]
-            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
-                    i, r.start(i), r.length(i), base, r.linkrev(i),
-                    short(node), short(pp[0]), short(pp[1])))
+            ui.write(
+                "% 6d % 9d % 7d % 6d % 7d %s %s %s\n"
+                % (
+                    i,
+                    r.start(i),
+                    r.length(i),
+                    base,
+                    r.linkrev(i),
+                    short(node),
+                    short(pp[0]),
+                    short(pp[1]),
+                )
+            )
         elif format == 1:
             pr = r.parentrevs(i)
-            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
-                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
-                    base, r.linkrev(i), pr[0], pr[1], short(node)))
+            ui.write(
+                "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n"
+                % (
+                    i,
+                    r.flags(i),
+                    r.start(i),
+                    r.length(i),
+                    r.rawsize(i),
+                    base,
+                    r.linkrev(i),
+                    pr[0],
+                    pr[1],
+                    short(node),
+                )
+            )
+
 
 def debugindexdot(orig, ui, repo, file_):
     """dump an index DAG as a graphviz dot file"""
@@ -146,7 +187,7 @@
 
     r = buildtemprevlog(repo, os.path.basename(file_)[:-2])
 
-    ui.write(("digraph G {\n"))
+    ui.write("digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
@@ -155,6 +196,7 @@
             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write("}\n")
 
+
 def verifyremotefilelog(ui, path, **opts):
     decompress = opts.get(r'decompress')
 
@@ -167,14 +209,18 @@
             for p1, p2, linknode, copyfrom in mapping.itervalues():
                 if linknode == nullid:
                     actualpath = os.path.relpath(root, path)
-                    key = fileserverclient.getcachekey("reponame", actualpath,
-                                                       file)
-                    ui.status("%s %s\n" % (key, os.path.relpath(filepath,
-                                                                path)))
+                    key = fileserverclient.getcachekey(
+                        "reponame", actualpath, file
+                    )
+                    ui.status(
+                        "%s %s\n" % (key, os.path.relpath(filepath, path))
+                    )
+
 
 def _decompressblob(raw):
     return zlib.decompress(raw)
 
+
 def parsefileblob(path, decompress):
     f = open(path, "rb")
     try:
@@ -194,24 +240,25 @@
     while start < len(raw):
         divider = raw.index('\0', start + 80)
 
-        currentnode = raw[start:(start + 20)]
+        currentnode = raw[start : (start + 20)]
         if not firstnode:
             firstnode = currentnode
 
-        p1 = raw[(start + 20):(start + 40)]
-        p2 = raw[(start + 40):(start + 60)]
-        linknode = raw[(start + 60):(start + 80)]
-        copyfrom = raw[(start + 80):divider]
+        p1 = raw[(start + 20) : (start + 40)]
+        p2 = raw[(start + 40) : (start + 60)]
+        linknode = raw[(start + 60) : (start + 80)]
+        copyfrom = raw[(start + 80) : divider]
 
         mapping[currentnode] = (p1, p2, linknode, copyfrom)
         start = divider + 1
 
     return size, firstnode, mapping
 
+
 def debugdatapack(ui, *paths, **opts):
     for path in paths:
         if '.data' in path:
-            path = path[:path.index('.data')]
+            path = path[: path.index('.data')]
         ui.write("%s:\n" % path)
         dpack = datapack.datapack(path)
         node = opts.get(r'node')
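
A sketch of one ancestor record in the format parsefileblob walks above: four 20-byte nodes followed by a NUL-terminated copyfrom path, all values fabricated:

node, p1 = b'\x01' * 20, b'\x02' * 20
p2, linknode = b'\x03' * 20, b'\x04' * 20
raw = node + p1 + p2 + linknode + b'old/path' + b'\x00'

start = 0
divider = raw.index(b'\x00', start + 80)
currentnode = raw[start : start + 20]
copyfrom = raw[start + 80 : divider]
assert currentnode == node and copyfrom == b'old/path'
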
@@ -230,6 +277,7 @@
         lastfilename = None
         totaldeltasize = 0
         totalblobsize = 0
+
         def printtotals():
             if lastfilename is not None:
                 ui.write("\n")
@@ -238,14 +286,18 @@
             difference = totalblobsize - totaldeltasize
             deltastr = "%0.1f%% %s" % (
                 (100.0 * abs(difference) / totalblobsize),
-                ("smaller" if difference > 0 else "bigger"))
+                ("smaller" if difference > 0 else "bigger"),
+            )
 
-            ui.write(("Total:%s%s  %s (%s)\n") % (
-                "".ljust(2 * hashlen - len("Total:")),
-                ('%d' % totaldeltasize).ljust(12),
-                ('%d' % totalblobsize).ljust(9),
-                deltastr
-            ))
+            ui.write(
+                "Total:%s%s  %s (%s)\n"
+                % (
+                    "".ljust(2 * hashlen - len("Total:")),
+                    ('%d' % totaldeltasize).ljust(12),
+                    ('%d' % totalblobsize).ljust(9),
+                    deltastr,
+                )
+            )
 
         bases = {}
         nodes = set()
@@ -260,11 +312,15 @@
                 printtotals()
                 name = '(empty name)' if filename == '' else filename
                 ui.write("%s:\n" % name)
-                ui.write("%s%s%s%s\n" % (
-                    "Node".ljust(hashlen),
-                    "Delta Base".ljust(hashlen),
-                    "Delta Length".ljust(14),
-                    "Blob Size".ljust(9)))
+                ui.write(
+                    "%s%s%s%s\n"
+                    % (
+                        "Node".ljust(hashlen),
+                        "Delta Base".ljust(hashlen),
+                        "Delta Length".ljust(14),
+                        "Blob Size".ljust(9),
+                    )
+                )
                 lastfilename = filename
                 totalblobsize = 0
                 totaldeltasize = 0
@@ -277,11 +333,15 @@
                 totalblobsize += blobsize
             else:
                 blobsize = "(missing)"
-            ui.write("%s  %s  %s%s\n" % (
-                hashformatter(node),
-                hashformatter(deltabase),
-                ('%d' % deltalen).ljust(14),
-                pycompat.bytestr(blobsize)))
+            ui.write(
+                "%s  %s  %s%s\n"
+                % (
+                    hashformatter(node),
+                    hashformatter(deltabase),
+                    ('%d' % deltalen).ljust(14),
+                    pycompat.bytestr(blobsize),
+                )
+            )
 
         if filename is not None:
             printtotals()
@@ -291,6 +351,7 @@
             ui.warn(("%d failures\n" % failures))
             return 1
 
+
 def _sanitycheck(ui, nodes, bases):
     """
     Does some basic sanity checking on a packfile with ``nodes`` and ``bases`` (a
@@ -307,14 +368,22 @@
 
         while deltabase != nullid:
             if deltabase not in nodes:
-                ui.warn(("Bad entry: %s has an unknown deltabase (%s)\n" %
-                        (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has an unknown deltabase (%s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
             if deltabase in seen:
-                ui.warn(("Bad entry: %s has a cycle (at %s)\n" %
-                        (short(node), short(deltabase))))
+                ui.warn(
+                    (
+                        "Bad entry: %s has a cycle (at %s)\n"
+                        % (short(node), short(deltabase))
+                    )
+                )
                 failures += 1
                 break
 
@@ -326,6 +395,7 @@
         bases[node] = nullid
     return failures
 
+
 def dumpdeltachain(ui, deltachain, **opts):
     hashformatter = hex
     hashlen = 40
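
A toy version of the chain walk in _sanitycheck above, following deltabase pointers and flagging unknown bases and cycles (nullid marks a full text, i.e. the end of a chain):

nullid = b'\x00' * 20

def sanitycheck(bases):
    failures = 0
    for node in list(bases):
        seen = set()
        deltabase = bases[node]
        while deltabase != nullid:
            if deltabase not in bases:
                failures += 1              # unknown deltabase
                break
            if deltabase in seen:
                failures += 1              # cycle in the chain
                break
            seen.add(deltabase)
            deltabase = bases[deltabase]
    return failures

a, b = b'\x01' * 20, b'\x02' * 20
assert sanitycheck({a: nullid, b: a}) == 0
assert sanitycheck({a: b, b: a}) == 2      # the cycle is hit from both nodes
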
@@ -335,22 +405,30 @@
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
             lastfilename = filename
-        ui.write("%s  %s  %s  %s\n" % (
-            "Node".ljust(hashlen),
-            "Delta Base".ljust(hashlen),
-            "Delta SHA1".ljust(hashlen),
-            "Delta Length".ljust(6),
-        ))
+        ui.write(
+            "%s  %s  %s  %s\n"
+            % (
+                "Node".ljust(hashlen),
+                "Delta Base".ljust(hashlen),
+                "Delta SHA1".ljust(hashlen),
+                "Delta Length".ljust(6),
+            )
+        )
 
-        ui.write("%s  %s  %s  %d\n" % (
-            hashformatter(node),
-            hashformatter(deltabasenode),
-            nodemod.hex(hashlib.sha1(delta).digest()),
-            len(delta)))
+        ui.write(
+            "%s  %s  %s  %d\n"
+            % (
+                hashformatter(node),
+                hashformatter(deltabasenode),
+                nodemod.hex(hashlib.sha1(delta).digest()),
+                len(delta),
+            )
+        )
+
 
 def debughistorypack(ui, path):
     if '.hist' in path:
-        path = path[:path.index('.hist')]
+        path = path[: path.index('.hist')]
     hpack = historypack.historypack(path)
 
     lastfilename = None
@@ -358,21 +436,41 @@
         filename, node, p1node, p2node, linknode, copyfrom = entry
         if filename != lastfilename:
             ui.write("\n%s\n" % filename)
-            ui.write("%s%s%s%s%s\n" % (
-                "Node".ljust(14),
-                "P1 Node".ljust(14),
-                "P2 Node".ljust(14),
-                "Link Node".ljust(14),
-                "Copy From"))
+            ui.write(
+                "%s%s%s%s%s\n"
+                % (
+                    "Node".ljust(14),
+                    "P1 Node".ljust(14),
+                    "P2 Node".ljust(14),
+                    "Link Node".ljust(14),
+                    "Copy From",
+                )
+            )
             lastfilename = filename
-        ui.write("%s  %s  %s  %s  %s\n" % (short(node), short(p1node),
-            short(p2node), short(linknode), copyfrom))
+        ui.write(
+            "%s  %s  %s  %s  %s\n"
+            % (
+                short(node),
+                short(p1node),
+                short(p2node),
+                short(linknode),
+                copyfrom,
+            )
+        )
+
 
 def debugwaitonrepack(repo):
     with lockmod.lock(repack.repacklockvfs(repo), "repacklock", timeout=-1):
         return
 
+
 def debugwaitonprefetch(repo):
-    with repo._lock(repo.svfs, "prefetchlock", True, None,
-                         None, _('prefetching in %s') % repo.origroot):
+    with repo._lock(
+        repo.svfs,
+        "prefetchlock",
+        True,
+        None,
+        None,
+        _('prefetching in %s') % repo.origroot,
+    ):
         pass
--- a/hgext/remotefilelog/fileserverclient.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/fileserverclient.py	Sun Oct 06 09:45:02 2019 -0400
@@ -43,22 +43,25 @@
 
 _lfsmod = None
 
+
 def getcachekey(reponame, file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(reponame, pathhash[:2], pathhash[2:], id)
 
+
 def getlocalkey(file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(pathhash, id)
 
+
 def peersetup(ui, peer):
-
     class remotefilepeer(peer.__class__):
         @wireprotov1peer.batchable
         def x_rfl_getfile(self, file, node):
             if not self.capable('x_rfl_getfile'):
                 raise error.Abort(
-                    'configured remotefile server does not support getfile')
+                    'configured remotefile server does not support getfile'
+                )
             f = wireprotov1peer.future()
             yield {'file': file, 'node': node}, f
             code, data = f.value.split('\0', 1)
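
The cache path scheme used by getcachekey above, sketched with a fabricated repo name and node id (hexdigest stands in for node.hex of the digest):

import hashlib
import os

filename, nodeid, reponame = b'foo/bar.txt', '0' * 40, 'myrepo'
pathhash = hashlib.sha1(filename).hexdigest()
key = os.path.join(reponame, pathhash[:2], pathhash[2:], nodeid)
print(key)   # myrepo/<2 hex chars>/<38 hex chars>/<40 hex chars>
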
@@ -69,8 +72,10 @@
         @wireprotov1peer.batchable
         def x_rfl_getflogheads(self, path):
             if not self.capable('x_rfl_getflogheads'):
-                raise error.Abort('configured remotefile server does not '
-                                  'support getflogheads')
+                raise error.Abort(
+                    'configured remotefile server does not '
+                    'support getflogheads'
+                )
             f = wireprotov1peer.future()
             yield {'path': path}, f
             heads = f.value.split('\n') if f.value else []
@@ -79,13 +84,17 @@
         def _updatecallstreamopts(self, command, opts):
             if command != 'getbundle':
                 return
-            if (constants.NETWORK_CAP_LEGACY_SSH_GETFILES
-                not in self.capabilities()):
+            if (
+                constants.NETWORK_CAP_LEGACY_SSH_GETFILES
+                not in self.capabilities()
+            ):
                 return
             if not util.safehasattr(self, '_localrepo'):
                 return
-            if (constants.SHALLOWREPO_REQUIREMENT
-                not in self._localrepo.requirements):
+            if (
+                constants.SHALLOWREPO_REQUIREMENT
+                not in self._localrepo.requirements
+            ):
                 return
 
             bundlecaps = opts.get('bundlecaps')
@@ -114,8 +123,9 @@
 
         def _sendrequest(self, command, args, **opts):
             self._updatecallstreamopts(command, args)
-            return super(remotefilepeer, self)._sendrequest(command, args,
-                                                            **opts)
+            return super(remotefilepeer, self)._sendrequest(
+                command, args, **opts
+            )
 
         def _callstream(self, command, **opts):
             supertype = super(remotefilepeer, self)
@@ -125,11 +135,13 @@
 
     peer.__class__ = remotefilepeer
 
+
 class cacheconnection(object):
     """The connection for communicating with the remote cache. Performs
     gets and sets by communicating with an external process that has the
     cache-specific implementation.
     """
+
     def __init__(self):
         self.pipeo = self.pipei = self.pipee = None
         self.subprocess = None
@@ -138,8 +150,9 @@
     def connect(self, cachecommand):
         if self.pipeo:
             raise error.Abort(_("cache connection already open"))
-        self.pipei, self.pipeo, self.pipee, self.subprocess = (
-            procutil.popen4(cachecommand))
+        self.pipei, self.pipeo, self.pipee, self.subprocess = procutil.popen4(
+            cachecommand
+        )
         self.connected = True
 
     def close(self):
@@ -148,6 +161,7 @@
                 pipe.close()
             except Exception:
                 pass
+
         if self.connected:
             try:
                 self.pipei.write("exit\n")
@@ -190,8 +204,10 @@
 
         return result
 
+
 def _getfilesbatch(
-        remote, receivemissing, progresstick, missed, idmap, batchsize):
+    remote, receivemissing, progresstick, missed, idmap, batchsize
+):
     # Over http(s), iterbatch is a streamy method and we can start
     # looking at results early. This means we send one (potentially
     # large) request, but then we show nice progress as we process
@@ -205,10 +221,11 @@
     with remote.commandexecutor() as e:
         futures = []
         for m in missed:
-            futures.append(e.callcommand('x_rfl_getfile', {
-                'file': idmap[m],
-                'node': m[-40:]
-            }))
+            futures.append(
+                e.callcommand(
+                    'x_rfl_getfile', {'file': idmap[m], 'node': m[-40:]}
+                )
+            )
 
         for i, m in enumerate(missed):
             r = futures[i].result()
@@ -218,8 +235,10 @@
             receivemissing(io.BytesIO('%d\n%s' % (len(r), r)), file_, node)
             progresstick()
 
+
 def _getfiles_optimistic(
-    remote, receivemissing, progresstick, missed, idmap, step):
+    remote, receivemissing, progresstick, missed, idmap, step
+):
     remote._callstream("x_rfl_getfiles")
     i = 0
     pipeo = remote._pipeo
@@ -248,8 +267,10 @@
     pipeo.write('\n')
     pipeo.flush()
 
+
 def _getfiles_threaded(
-    remote, receivemissing, progresstick, missed, idmap, step):
+    remote, receivemissing, progresstick, missed, idmap, step
+):
     remote._callstream("getfiles")
     pipeo = remote._pipeo
     pipei = remote._pipei
@@ -261,6 +282,7 @@
             sshrequest = "%s%s\n" % (versionid, file)
             pipeo.write(sshrequest)
         pipeo.flush()
+
     writerthread = threading.Thread(target=writer)
     writerthread.daemon = True
     writerthread.start()
@@ -276,9 +298,11 @@
     pipeo.write('\n')
     pipeo.flush()
 
+
 class fileserverclient(object):
     """A client for requesting files from the remote file server.
     """
+
     def __init__(self, repo):
         ui = repo.ui
         self.repo = repo
@@ -290,7 +314,8 @@
         # This option causes remotefilelog to pass the full file path to the
         # cacheprocess instead of a hashed key.
         self.cacheprocesspasspath = ui.configbool(
-            "remotefilelog", "cacheprocess.includepath")
+            "remotefilelog", "cacheprocess.includepath"
+        )
 
         self.debugoutput = ui.configbool("remotefilelog", "debug")
 
@@ -341,8 +366,12 @@
                 for missingid in idmap:
                     if not missingid in missedset:
                         missed.append(missingid)
-                self.ui.warn(_("warning: cache connection closed early - " +
-                    "falling back to server\n"))
+                self.ui.warn(
+                    _(
+                        "warning: cache connection closed early - "
+                        "falling back to server\n"
+                    )
+                )
                 break
             if missingid == "0":
                 break
@@ -359,8 +388,14 @@
 
         fromcache = total - len(missed)
         progress.update(fromcache, total=total)
-        self.ui.log("remotefilelog", "remote cache hit rate is %r of %r\n",
-                    fromcache, total, hit=fromcache, total=total)
+        self.ui.log(
+            "remotefilelog",
+            "remote cache hit rate is %r of %r\n",
+            fromcache,
+            total,
+            hit=fromcache,
+            total=total,
+        )
 
         oldumask = os.umask(0o002)
         try:
@@ -375,46 +410,69 @@
                     with self._connect() as conn:
                         remote = conn.peer
                         if remote.capable(
-                                constants.NETWORK_CAP_LEGACY_SSH_GETFILES):
+                            constants.NETWORK_CAP_LEGACY_SSH_GETFILES
+                        ):
                             if not isinstance(remote, _sshv1peer):
-                                raise error.Abort('remotefilelog requires ssh '
-                                                  'servers')
-                            step = self.ui.configint('remotefilelog',
-                                                     'getfilesstep')
-                            getfilestype = self.ui.config('remotefilelog',
-                                                          'getfilestype')
+                                raise error.Abort(
+                                    'remotefilelog requires ssh servers'
+                                )
+                            step = self.ui.configint(
+                                'remotefilelog', 'getfilesstep'
+                            )
+                            getfilestype = self.ui.config(
+                                'remotefilelog', 'getfilestype'
+                            )
                             if getfilestype == 'threaded':
                                 _getfiles = _getfiles_threaded
                             else:
                                 _getfiles = _getfiles_optimistic
-                            _getfiles(remote, self.receivemissing,
-                                      progress.increment, missed, idmap, step)
+                            _getfiles(
+                                remote,
+                                self.receivemissing,
+                                progress.increment,
+                                missed,
+                                idmap,
+                                step,
+                            )
                         elif remote.capable("x_rfl_getfile"):
                             if remote.capable('batch'):
                                 batchdefault = 100
                             else:
                                 batchdefault = 10
                             batchsize = self.ui.configint(
-                                'remotefilelog', 'batchsize', batchdefault)
+                                'remotefilelog', 'batchsize', batchdefault
+                            )
                             self.ui.debug(
                                 b'requesting %d files from '
-                                b'remotefilelog server...\n' % len(missed))
+                                b'remotefilelog server...\n' % len(missed)
+                            )
                             _getfilesbatch(
-                                remote, self.receivemissing, progress.increment,
-                                missed, idmap, batchsize)
+                                remote,
+                                self.receivemissing,
+                                progress.increment,
+                                missed,
+                                idmap,
+                                batchsize,
+                            )
                         else:
-                            raise error.Abort("configured remotefilelog server"
-                                             " does not support remotefilelog")
+                            raise error.Abort(
+                                "configured remotefilelog server"
+                                " does not support remotefilelog"
+                            )
 
-                    self.ui.log("remotefilefetchlog",
-                                "Success\n",
-                                fetched_files = progress.pos - fromcache,
-                                total_to_fetch = total - fromcache)
+                    self.ui.log(
+                        "remotefilefetchlog",
+                        "Success\n",
+                        fetched_files=progress.pos - fromcache,
+                        total_to_fetch=total - fromcache,
+                    )
                 except Exception:
-                    self.ui.log("remotefilefetchlog",
-                                "Fail\n",
-                                fetched_files = progress.pos - fromcache,
-                                total_to_fetch = total - fromcache)
+                    self.ui.log(
+                        "remotefilefetchlog",
+                        "Fail\n",
+                        fetched_files=progress.pos - fromcache,
+                        total_to_fetch=total - fromcache,
+                    )
                     raise
                 finally:
                     self.ui.verbose = verbose
@@ -432,17 +490,21 @@
     def receivemissing(self, pipe, filename, node):
         line = pipe.readline()[:-1]
         if not line:
-            raise error.ResponseError(_("error downloading file contents:"),
-                                      _("connection closed early"))
+            raise error.ResponseError(
+                _("error downloading file contents:"),
+                _("connection closed early"),
+            )
         size = int(line)
         data = pipe.read(size)
         if len(data) != size:
-            raise error.ResponseError(_("error downloading file contents:"),
-                                      _("only received %s of %s bytes")
-                                      % (len(data), size))
+            raise error.ResponseError(
+                _("error downloading file contents:"),
+                _("only received %s of %s bytes") % (len(data), size),
+            )
 
-        self.writedata.addremotefilelognode(filename, bin(node),
-                                             zlib.decompress(data))
+        self.writedata.addremotefilelognode(
+            filename, bin(node), zlib.decompress(data)
+        )
 
     def connect(self):
         if self.cacheprocess:
@@ -477,26 +539,33 @@
 
     def close(self):
         if fetches:
-            msg = ("%d files fetched over %d fetches - " +
-                   "(%d misses, %0.2f%% hit ratio) over %0.2fs\n") % (
-                       fetched,
-                       fetches,
-                       fetchmisses,
-                       float(fetched - fetchmisses) / float(fetched) * 100.0,
-                       fetchcost)
+            msg = (
+                "%d files fetched over %d fetches - "
+                "(%d misses, %0.2f%% hit ratio) over %0.2fs\n"
+            ) % (
+                fetched,
+                fetches,
+                fetchmisses,
+                float(fetched - fetchmisses) / float(fetched) * 100.0,
+                fetchcost,
+            )
             if self.debugoutput:
                 self.ui.warn(msg)
-            self.ui.log("remotefilelog.prefetch", msg.replace("%", "%%"),
+            self.ui.log(
+                "remotefilelog.prefetch",
+                msg.replace("%", "%%"),
                 remotefilelogfetched=fetched,
                 remotefilelogfetches=fetches,
                 remotefilelogfetchmisses=fetchmisses,
-                remotefilelogfetchtime=fetchcost * 1000)
+                remotefilelogfetchtime=fetchcost * 1000,
+            )
 
         if self.remotecache.connected:
             self.remotecache.close()
 
-    def prefetch(self, fileids, force=False, fetchdata=True,
-                 fetchhistory=False):
+    def prefetch(
+        self, fileids, force=False, fetchdata=True, fetchhistory=False
+    ):
         """downloads the given file versions to the cache
         """
         repo = self.repo
@@ -506,8 +575,11 @@
             # - we don't use .hgtags
             # - workingctx produces ids with length 42,
             #   which we skip since they aren't in any cache
-            if (file == '.hgtags' or len(id) == 42
-                or not repo.shallowmatch(file)):
+            if (
+                file == '.hgtags'
+                or len(id) == 42
+                or not repo.shallowmatch(file)
+            ):
                 continue
 
             idstocheck.append((file, bin(id)))
@@ -517,7 +589,8 @@
         if force:
             datastore = contentstore.unioncontentstore(*repo.shareddatastores)
             historystore = metadatastore.unionmetadatastore(
-                *repo.sharedhistorystores)
+                *repo.sharedhistorystores
+            )
 
         missingids = set()
         if fetchdata:
@@ -531,9 +604,12 @@
         if nullids:
             missingids = [(f, id) for f, id in missingids if id != nullid]
             repo.ui.develwarn(
-                ('remotefilelog not fetching %d null revs'
-                 ' - this is likely hiding bugs' % nullids),
-                config='remotefilelog-ext')
+                (
+                    'remotefilelog not fetching %d null revs'
+                    ' - this is likely hiding bugs' % nullids
+                ),
+                config='remotefilelog-ext',
+            )
         if missingids:
             global fetches, fetched, fetchcost
             fetches += 1
@@ -542,8 +618,9 @@
             # let's log that information for debugging.
             if fetches >= 15 and fetches < 18:
                 if fetches == 15:
-                    fetchwarning = self.ui.config('remotefilelog',
-                                                  'fetchwarning')
+                    fetchwarning = self.ui.config(
+                        'remotefilelog', 'fetchwarning'
+                    )
                     if fetchwarning:
                         self.ui.warn(fetchwarning + '\n')
                 self.logstacktrace()
@@ -552,14 +629,16 @@
             start = time.time()
             missingids = self.request(missingids)
             if missingids:
-                raise error.Abort(_("unable to download %d files") %
-                                  len(missingids))
+                raise error.Abort(
+                    _("unable to download %d files") % len(missingids)
+                )
             fetchcost += time.time() - start
             self._lfsprefetch(fileids)
 
     def _lfsprefetch(self, fileids):
         if not _lfsmod or not util.safehasattr(
-                self.repo.svfs, 'lfslocalblobstore'):
+            self.repo.svfs, 'lfslocalblobstore'
+        ):
             return
         if not _lfsmod.wrapper.candownload(self.repo):
             return
@@ -580,5 +659,9 @@
 
     def logstacktrace(self):
         import traceback
-        self.ui.log('remotefilelog', 'excess remotefilelog fetching:\n%s\n',
-                    ''.join(traceback.format_stack()))
+
+        self.ui.log(
+            'remotefilelog',
+            'excess remotefilelog fetching:\n%s\n',
+            ''.join(traceback.format_stack()),
+        )
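
[editor's note] For reference, the getcachekey helper near the top of this file shards the shared cache by the sha1 of the file path, splitting the hex digest 2/38 so entries fan out across 256 top-level directories. The same computation, re-expressed with hexdigest() instead of node.hex() (a sketch, not the module itself):

    import hashlib
    import os

    def getcachekey(reponame, file, id):
        # sha1 the file path, then split the hex digest 2/38 so cache
        # entries fan out across 256 top-level directories
        pathhash = hashlib.sha1(file).hexdigest()
        return os.path.join(reponame, pathhash[:2], pathhash[2:], id)

    key = getcachekey('myrepo', b'foo/bar.py', '0123456789abcdef')
    # -> myrepo/<2 hex chars>/<38 hex chars>/0123456789abcdef
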
--- a/hgext/remotefilelog/historypack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/historypack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,6 +37,7 @@
 ANC_LINKNODE = 3
 ANC_COPYFROM = 4
 
+
 class historypackstore(basepack.basepackstore):
     INDEXSUFFIX = INDEXSUFFIX
     PACKSUFFIX = PACKSUFFIX
@@ -75,8 +76,10 @@
         raise KeyError((name, node))
 
     def add(self, filename, node, p1, p2, linknode, copyfrom):
-        raise RuntimeError("cannot add to historypackstore (%s:%s)"
-                           % (filename, hex(node)))
+        raise RuntimeError(
+            "cannot add to historypackstore (%s:%s)" % (filename, hex(node))
+        )
+
 
 class historypack(basepack.basepack):
     INDEXSUFFIX = INDEXSUFFIX
@@ -153,17 +156,20 @@
 
     def _readentry(self, offset):
         data = self._data
-        entry = struct.unpack(PACKFORMAT, data[offset:offset + PACKENTRYLENGTH])
+        entry = struct.unpack(
+            PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
+        )
         copyfrom = None
         copyfromlen = entry[ANC_COPYFROM]
         if copyfromlen != 0:
             offset += PACKENTRYLENGTH
-            copyfrom = data[offset:offset + copyfromlen]
+            copyfrom = data[offset : offset + copyfromlen]
         return entry, copyfrom
 
     def add(self, filename, node, p1, p2, linknode, copyfrom):
-        raise RuntimeError("cannot add to historypack (%s:%s)" %
-                           (filename, hex(node)))
+        raise RuntimeError(
+            "cannot add to historypack (%s:%s)" % (filename, hex(node))
+        )
 
     def _findnode(self, name, node):
         if self.VERSION == 0:
@@ -174,9 +180,12 @@
         else:
             section = self._findsection(name)
             nodeindexoffset, nodeindexsize = section[3:]
-            entry = self._bisect(node, nodeindexoffset,
-                                 nodeindexoffset + nodeindexsize,
-                                 NODEINDEXENTRYLENGTH)
+            entry = self._bisect(
+                node,
+                nodeindexoffset,
+                nodeindexoffset + nodeindexsize,
+                NODEINDEXENTRYLENGTH,
+            )
             if entry is not None:
                 node, offset = struct.unpack(NODEINDEXFORMAT, entry)
                 entry, copyfrom = self._readentry(offset)
@@ -189,8 +198,9 @@
     def _findsection(self, name):
         params = self.params
         namehash = hashlib.sha1(name).digest()
-        fanoutkey = struct.unpack(params.fanoutstruct,
-                                  namehash[:params.fanoutprefix])[0]
+        fanoutkey = struct.unpack(
+            params.fanoutstruct, namehash[: params.fanoutprefix]
+        )[0]
         fanout = self._fanouttable
 
         start = fanout[fanoutkey] + params.indexstart
@@ -209,54 +219,65 @@
 
         rawentry = struct.unpack(self.INDEXFORMAT, entry)
         x, offset, size, nodeindexoffset, nodeindexsize = rawentry
-        rawnamelen = self._index[nodeindexoffset:nodeindexoffset +
-                                                 constants.FILENAMESIZE]
+        rawnamelen = self._index[
+            nodeindexoffset : nodeindexoffset + constants.FILENAMESIZE
+        ]
         actualnamelen = struct.unpack('!H', rawnamelen)[0]
         nodeindexoffset += constants.FILENAMESIZE
-        actualname = self._index[nodeindexoffset:nodeindexoffset +
-                                                 actualnamelen]
+        actualname = self._index[
+            nodeindexoffset : nodeindexoffset + actualnamelen
+        ]
         if actualname != name:
-            raise KeyError("found file name %s when looking for %s" %
-                           (actualname, name))
+            raise KeyError(
+                "found file name %s when looking for %s" % (actualname, name)
+            )
         nodeindexoffset += actualnamelen
 
-        filenamelength = struct.unpack('!H', self._data[offset:offset +
-                                                    constants.FILENAMESIZE])[0]
+        filenamelength = struct.unpack(
+            '!H', self._data[offset : offset + constants.FILENAMESIZE]
+        )[0]
         offset += constants.FILENAMESIZE
 
-        actualname = self._data[offset:offset + filenamelength]
+        actualname = self._data[offset : offset + filenamelength]
         offset += filenamelength
 
         if name != actualname:
-            raise KeyError("found file name %s when looking for %s" %
-                           (actualname, name))
+            raise KeyError(
+                "found file name %s when looking for %s" % (actualname, name)
+            )
 
         # Skip entry list size
         offset += ENTRYCOUNTSIZE
 
         nodelistoffset = offset
-        nodelistsize = (size - constants.FILENAMESIZE - filenamelength -
-                        ENTRYCOUNTSIZE)
-        return (name, nodelistoffset, nodelistsize,
-                nodeindexoffset, nodeindexsize)
+        nodelistsize = (
+            size - constants.FILENAMESIZE - filenamelength - ENTRYCOUNTSIZE
+        )
+        return (
+            name,
+            nodelistoffset,
+            nodelistsize,
+            nodeindexoffset,
+            nodeindexsize,
+        )
 
     def _bisect(self, node, start, end, entrylen):
         # Bisect between start and end to find node
         origstart = start
-        startnode = self._index[start:start + NODELENGTH]
-        endnode = self._index[end:end + NODELENGTH]
+        startnode = self._index[start : start + NODELENGTH]
+        endnode = self._index[end : end + NODELENGTH]
 
         if startnode == node:
-            return self._index[start:start + entrylen]
+            return self._index[start : start + entrylen]
         elif endnode == node:
-            return self._index[end:end + entrylen]
+            return self._index[end : end + entrylen]
         else:
             while start < end - entrylen:
                 mid = start + (end - start) // 2
                 mid = mid - ((mid - origstart) % entrylen)
-                midnode = self._index[mid:mid + NODELENGTH]
+                midnode = self._index[mid : mid + NODELENGTH]
                 if midnode == node:
-                    return self._index[mid:mid + entrylen]
+                    return self._index[mid : mid + entrylen]
                 if node > midnode:
                     start = mid
                 elif node < midnode:
@@ -270,8 +291,9 @@
     def cleanup(self, ledger):
         entries = ledger.sources.get(self, [])
         allkeys = set(self)
-        repackedkeys = set((e.filename, e.node) for e in entries if
-                           e.historyrepacked)
+        repackedkeys = set(
+            (e.filename, e.node) for e in entries if e.historyrepacked
+        )
 
         if len(allkeys - repackedkeys) == 0:
             if self.path not in ledger.created:
@@ -288,32 +310,42 @@
         while offset < self.datasize:
             data = self._data
             # <2 byte len> + <filename>
-            filenamelen = struct.unpack('!H', data[offset:offset +
-                                                   constants.FILENAMESIZE])[0]
+            filenamelen = struct.unpack(
+                '!H', data[offset : offset + constants.FILENAMESIZE]
+            )[0]
             offset += constants.FILENAMESIZE
-            filename = data[offset:offset + filenamelen]
+            filename = data[offset : offset + filenamelen]
             offset += filenamelen
 
-            revcount = struct.unpack('!I', data[offset:offset +
-                                                ENTRYCOUNTSIZE])[0]
+            revcount = struct.unpack(
+                '!I', data[offset : offset + ENTRYCOUNTSIZE]
+            )[0]
             offset += ENTRYCOUNTSIZE
 
             for i in pycompat.xrange(revcount):
-                entry = struct.unpack(PACKFORMAT, data[offset:offset +
-                                                              PACKENTRYLENGTH])
+                entry = struct.unpack(
+                    PACKFORMAT, data[offset : offset + PACKENTRYLENGTH]
+                )
                 offset += PACKENTRYLENGTH
 
-                copyfrom = data[offset:offset + entry[ANC_COPYFROM]]
+                copyfrom = data[offset : offset + entry[ANC_COPYFROM]]
                 offset += entry[ANC_COPYFROM]
 
-                yield (filename, entry[ANC_NODE], entry[ANC_P1NODE],
-                        entry[ANC_P2NODE], entry[ANC_LINKNODE], copyfrom)
+                yield (
+                    filename,
+                    entry[ANC_NODE],
+                    entry[ANC_P1NODE],
+                    entry[ANC_P2NODE],
+                    entry[ANC_LINKNODE],
+                    copyfrom,
+                )
 
                 self._pagedin += PACKENTRYLENGTH
 
             # If we've read a lot of data from the mmap, free some memory.
             self.freememory()
 
+
 class mutablehistorypack(basepack.mutablebasepack):
     """A class for constructing and serializing a histpack file and index.
 
@@ -389,6 +421,7 @@
 
     [1]: new in version 1.
     """
+
     INDEXSUFFIX = INDEXSUFFIX
     PACKSUFFIX = PACKSUFFIX
 
@@ -409,10 +442,9 @@
     def add(self, filename, node, p1, p2, linknode, copyfrom):
         copyfrom = copyfrom or ''
         copyfromlen = struct.pack('!H', len(copyfrom))
-        self.fileentries.setdefault(filename, []).append((node, p1, p2,
-                                                          linknode,
-                                                          copyfromlen,
-                                                          copyfrom))
+        self.fileentries.setdefault(filename, []).append(
+            (node, p1, p2, linknode, copyfromlen, copyfrom)
+        )
 
     def _write(self):
         for filename in sorted(self.fileentries):
@@ -421,6 +453,7 @@
 
             # Write the file section content
             entrymap = dict((e[0], e) for e in entries)
+
             def parentfunc(node):
                 x, p1, p2, x, x, x = entrymap[node]
                 parents = []
@@ -430,16 +463,21 @@
                     parents.append(p2)
                 return parents
 
-            sortednodes = list(reversed(shallowutil.sortnodes(
-                (e[0] for e in entries),
-                parentfunc)))
+            sortednodes = list(
+                reversed(
+                    shallowutil.sortnodes((e[0] for e in entries), parentfunc)
+                )
+            )
 
             # Write the file section header
-            self.writeraw("%s%s%s" % (
-                struct.pack('!H', len(filename)),
-                filename,
-                struct.pack('!I', len(sortednodes)),
-            ))
+            self.writeraw(
+                "%s%s%s"
+                % (
+                    struct.pack('!H', len(filename)),
+                    filename,
+                    struct.pack('!I', len(sortednodes)),
+                )
+            )
 
             sectionlen = constants.FILENAMESIZE + len(filename) + 4
 
@@ -478,14 +516,20 @@
         nodeindexformat = self.NODEINDEXFORMAT
         nodeindexlength = self.NODEINDEXENTRYLENGTH
 
-        files = ((hashlib.sha1(filename).digest(), filename, offset, size)
-                for filename, (offset, size) in self.files.iteritems())
+        files = (
+            (hashlib.sha1(filename).digest(), filename, offset, size)
+            for filename, (offset, size) in self.files.iteritems()
+        )
         files = sorted(files)
 
         # node index is after file index size, file index, and node index size
         indexlensize = struct.calcsize('!Q')
-        nodeindexoffset = (indexoffset + indexlensize +
-                           (len(files) * fileindexlength) + indexlensize)
+        nodeindexoffset = (
+            indexoffset
+            + indexlensize
+            + (len(files) * fileindexlength)
+            + indexlensize
+        )
 
         fileindexentries = []
         nodeindexentries = []
@@ -496,16 +540,24 @@
 
             nodeindexsize = len(nodelocations) * nodeindexlength
 
-            rawentry = struct.pack(fileindexformat, namehash, offset, size,
-                                   nodeindexoffset, nodeindexsize)
+            rawentry = struct.pack(
+                fileindexformat,
+                namehash,
+                offset,
+                size,
+                nodeindexoffset,
+                nodeindexsize,
+            )
             # Node index
-            nodeindexentries.append(struct.pack(constants.FILENAMESTRUCT,
-                                                len(filename)) + filename)
+            nodeindexentries.append(
+                struct.pack(constants.FILENAMESTRUCT, len(filename)) + filename
+            )
             nodeindexoffset += constants.FILENAMESIZE + len(filename)
 
             for node, location in sorted(nodelocations.iteritems()):
-                nodeindexentries.append(struct.pack(nodeindexformat, node,
-                                                    location))
+                nodeindexentries.append(
+                    struct.pack(nodeindexformat, node, location)
+                )
                 nodecount += 1
 
             nodeindexoffset += len(nodelocations) * nodeindexlength
@@ -513,5 +565,6 @@
             fileindexentries.append(rawentry)
 
         nodecountraw = struct.pack('!Q', nodecount)
-        return (''.join(fileindexentries) + nodecountraw +
-                ''.join(nodeindexentries))
+        return (
+            ''.join(fileindexentries) + nodecountraw + ''.join(nodeindexentries)
+        )
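
[editor's note] The _bisect reformatting above reads more easily with the algorithm spelled out: index entries are fixed-width records whose first NODELENGTH bytes are the key, and the midpoint is re-aligned to an entry boundary on every step. A toy-sized sketch (4-byte keys, key-only entries, not the real pack format):

    NODELENGTH = 4   # toy size; the real packs use 20-byte sha1 nodes
    ENTRYLEN = 4     # key-only entries to keep the sketch short

    def bisect_fixed(index, node, start, end, entrylen):
        # start/end are byte offsets of the first and last entries
        origstart = start
        if index[start : start + NODELENGTH] == node:
            return index[start : start + entrylen]
        if index[end : end + NODELENGTH] == node:
            return index[end : end + entrylen]
        while start < end - entrylen:
            mid = start + (end - start) // 2
            mid -= (mid - origstart) % entrylen  # realign to an entry boundary
            midnode = index[mid : mid + NODELENGTH]
            if midnode == node:
                return index[mid : mid + entrylen]
            if node > midnode:
                start = mid
            else:
                end = mid
        return None

    index = b'aaaabbbbccccdddd'
    assert bisect_fixed(index, b'cccc', 0, len(index) - ENTRYLEN, ENTRYLEN) == b'cccc'
    assert bisect_fixed(index, b'zzzz', 0, len(index) - ENTRYLEN, ENTRYLEN) is None
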
--- a/hgext/remotefilelog/metadatastore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/metadatastore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,6 +6,7 @@
     shallowutil,
 )
 
+
 class unionmetadatastore(basestore.baseunionstore):
     def __init__(self, *args, **kwargs):
         super(unionmetadatastore, self).__init__(*args, **kwargs)
@@ -32,6 +33,7 @@
             return []
 
         ancestors = {}
+
         def traverse(curname, curnode):
             # TODO: this algorithm has the potential to traverse parts of
             # history twice. Ex: with A->B->C->F and A->B->D->F, both D and C
@@ -59,8 +61,9 @@
         while missing:
             curname, curnode = missing.pop()
             try:
-                ancestors.update(self._getpartialancestors(curname, curnode,
-                                                           known=known))
+                ancestors.update(
+                    self._getpartialancestors(curname, curnode, known=known)
+                )
                 newmissing = traverse(curname, curnode)
                 missing.extend(newmissing)
             except KeyError:
@@ -95,8 +98,9 @@
         raise KeyError((name, hex(node)))
 
     def add(self, name, node, data):
-        raise RuntimeError("cannot add content only to remotefilelog "
-                           "contentstore")
+        raise RuntimeError(
+            "cannot add content only to remotefilelog " "contentstore"
+        )
 
     def getmissing(self, keys):
         missing = keys
@@ -113,6 +117,7 @@
         metrics = [s.getmetrics() for s in self.stores]
         return shallowutil.sumdicts(*metrics)
 
+
 class remotefilelogmetadatastore(basestore.basestore):
     def getancestors(self, name, node, known=None):
         """Returns as many ancestors as we're aware of.
@@ -130,8 +135,10 @@
         return self.getancestors(name, node)[node]
 
     def add(self, name, node, parents, linknode):
-        raise RuntimeError("cannot add metadata only to remotefilelog "
-                           "metadatastore")
+        raise RuntimeError(
+            "cannot add metadata only to remotefilelog " "metadatastore"
+        )
+
 
 class remotemetadatastore(object):
     def __init__(self, ui, fileservice, shared):
@@ -139,8 +146,9 @@
         self._shared = shared
 
     def getancestors(self, name, node, known=None):
-        self._fileservice.prefetch([(name, hex(node))], force=True,
-                                   fetchdata=False, fetchhistory=True)
+        self._fileservice.prefetch(
+            [(name, hex(node))], force=True, fetchdata=False, fetchhistory=True
+        )
         return self._shared.getancestors(name, node, known=known)
 
     def getnodeinfo(self, name, node):
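
[editor's note] The union-store pattern running through this file is worth a standalone illustration: getmissing threads the key list through every backing store, so a key is reported missing only when no store can satisfy it. A toy in-memory version (hypothetical memorystore, not the real basestore API):

    class memorystore(object):
        def __init__(self, keys):
            self._keys = set(keys)

        def getmissing(self, keys):
            return [k for k in keys if k not in self._keys]

    class unionstore(object):
        def __init__(self, *stores):
            self.stores = stores

        def getmissing(self, keys):
            # each store filters the list in turn, the same shape as
            # unionmetadatastore.getmissing above
            missing = keys
            for store in self.stores:
                missing = store.getmissing(missing)
            return missing

    shared = memorystore([('f.py', 'n1')])
    local = memorystore([('f.py', 'n2')])
    union = unionstore(shared, local)
    assert union.getmissing(
        [('f.py', 'n1'), ('f.py', 'n2'), ('f.py', 'n3')]
    ) == [('f.py', 'n3')]
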
--- a/hgext/remotefilelog/remotefilectx.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/remotefilectx.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,15 +22,25 @@
 propertycache = util.propertycache
 FASTLOG_TIMEOUT_IN_SECS = 0.5
 
+
 class remotefilectx(context.filectx):
-    def __init__(self, repo, path, changeid=None, fileid=None,
-                 filelog=None, changectx=None, ancestormap=None):
+    def __init__(
+        self,
+        repo,
+        path,
+        changeid=None,
+        fileid=None,
+        filelog=None,
+        changectx=None,
+        ancestormap=None,
+    ):
         if fileid == nullrev:
             fileid = nullid
         if fileid and len(fileid) == 40:
             fileid = bin(fileid)
-        super(remotefilectx, self).__init__(repo, path, changeid,
-            fileid, filelog, changectx)
+        super(remotefilectx, self).__init__(
+            repo, path, changeid, fileid, filelog, changectx
+        )
         self._ancestormap = ancestormap
 
     def size(self):
@@ -45,8 +55,9 @@
         elif r'_descendantrev' in self.__dict__:
             # this file context was created from a revision with a known
             # descendant, we can (lazily) correct for linkrev aliases
-            linknode = self._adjustlinknode(self._path, self._filelog,
-                                            self._filenode, self._descendantrev)
+            linknode = self._adjustlinknode(
+                self._path, self._filelog, self._filenode, self._descendantrev
+            )
             return self._repo.unfiltered().changelog.rev(linknode)
         else:
             return self.linkrev()
@@ -54,8 +65,13 @@
     def filectx(self, fileid, changeid=None):
         '''opens an arbitrary revision of the file without
         opening a new filelog'''
-        return remotefilectx(self._repo, self._path, fileid=fileid,
-                             filelog=self._filelog, changeid=changeid)
+        return remotefilectx(
+            self._repo,
+            self._path,
+            fileid=fileid,
+            filelog=self._filelog,
+            changeid=changeid,
+        )
 
     def linkrev(self):
         return self._linkrev
@@ -79,8 +95,10 @@
 
         for rev in range(len(cl) - 1, 0, -1):
             node = cl.node(rev)
-            data = cl.read(node) # get changeset data (we avoid object creation)
-            if path in data[3]: # checking the 'files' field.
+            data = cl.read(
+                node
+            )  # get changeset data (we avoid object creation)
+            if path in data[3]:  # checking the 'files' field.
                 # The file has been touched, check if the hash is what we're
                 # looking for.
                 if fileid == mfl[data[0]].readfast().get(path):
@@ -104,9 +122,13 @@
         noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
         if noctx or self.rev() == lkr:
             return lkr
-        linknode = self._adjustlinknode(self._path, self._filelog,
-                                        self._filenode, self.rev(),
-                                        inclusive=True)
+        linknode = self._adjustlinknode(
+            self._path,
+            self._filelog,
+            self._filenode,
+            self.rev(),
+            inclusive=True,
+        )
         return self._repo.changelog.rev(linknode)
 
     def renamed(self):
@@ -155,16 +177,18 @@
         if p1 != nullid:
             path = copyfrom or self._path
             flog = repo.file(path)
-            p1ctx = remotefilectx(repo, path, fileid=p1, filelog=flog,
-                                  ancestormap=ancestormap)
+            p1ctx = remotefilectx(
+                repo, path, fileid=p1, filelog=flog, ancestormap=ancestormap
+            )
             p1ctx._descendantrev = self.rev()
             results.append(p1ctx)
 
         if p2 != nullid:
             path = self._path
             flog = repo.file(path)
-            p2ctx = remotefilectx(repo, path, fileid=p2, filelog=flog,
-                                  ancestormap=ancestormap)
+            p2ctx = remotefilectx(
+                repo, path, fileid=p2, filelog=flog, ancestormap=ancestormap
+            )
             p2ctx._descendantrev = self.rev()
             results.append(p2ctx)
 
@@ -172,7 +196,7 @@
 
     def _nodefromancrev(self, ancrev, cl, mfl, path, fnode):
         """returns the node for <path> in <ancrev> if content matches <fnode>"""
-        ancctx = cl.read(ancrev) # This avoids object creation.
+        ancctx = cl.read(ancrev)  # This avoids object creation.
         manifestnode, files = ancctx[0], ancctx[3]
         # If the file was touched in this ancestor, and the content is similar
         # to the one we are searching for.
@@ -214,7 +238,7 @@
         if srcrev is None:
             # wctx case, used by workingfilectx during mergecopy
             revs = [p.rev() for p in self._repo[None].parents()]
-            inclusive = True # we skipped the real (revless) source
+            inclusive = True  # we skipped the real (revless) source
         else:
             revs = [srcrev]
 
@@ -247,16 +271,16 @@
                 # TODO: there used to be a codepath to fetch linknodes
                 # from a server as a fast path, but it appeared to
                 # depend on an API FB added to their phabricator.
-                lnode = self._forceprefetch(repo, path, fnode, revs,
-                                            commonlogkwargs)
+                lnode = self._forceprefetch(
+                    repo, path, fnode, revs, commonlogkwargs
+                )
                 if lnode:
                     return lnode
                 seenpublic = True
 
         return linknode
 
-    def _forceprefetch(self, repo, path, fnode, revs,
-                       commonlogkwargs):
+    def _forceprefetch(self, repo, path, fnode, revs, commonlogkwargs):
         # This next part is super non-obvious, so big comment block time!
         #
         # It is possible to get extremely bad performance here when a fairly
@@ -307,7 +331,7 @@
             # we need to rebuild the ancestor map to recompute the
             # linknodes.
             self._ancestormap = None
-            linknode = self.ancestormap()[fnode][2] # 2 is linknode
+            linknode = self.ancestormap()[fnode][2]  # 2 is linknode
             if self._verifylinknode(revs, linknode):
                 logmsg = 'remotefilelog prefetching succeeded'
                 return linknode
@@ -318,8 +342,12 @@
             return None
         finally:
             elapsed = time.time() - start
-            repo.ui.log('linkrevfixup', logmsg + '\n', elapsed=elapsed * 1000,
-                        **commonlogkwargs)
+            repo.ui.log(
+                'linkrevfixup',
+                logmsg + '\n',
+                elapsed=elapsed * 1000,
+                **commonlogkwargs
+            )
 
     def _verifylinknode(self, revs, linknode):
         """
@@ -370,7 +398,7 @@
 
         # Sort by linkrev
         # The copy tracing algorithm depends on these coming out in order
-        ancestors = sorted(ancestors, reverse=True, key=lambda x:x.linkrev())
+        ancestors = sorted(ancestors, reverse=True, key=lambda x: x.linkrev())
 
         for ancestor in ancestors:
             yield ancestor
@@ -404,8 +432,7 @@
         result = ancestor.genericancestor(a, b, parents)
         if result:
             f, n = result
-            r = remotefilectx(self._repo, f, fileid=n,
-                                 ancestormap=amap)
+            r = remotefilectx(self._repo, f, fileid=n, ancestormap=amap)
             return r
 
         return None
@@ -417,11 +444,14 @@
             # use introrev so prefetchskip can be accurately tested
             introrev = self.introrev()
             if self.rev() != introrev:
-                introctx = remotefilectx(self._repo, self._path,
-                                         changeid=introrev,
-                                         fileid=self._filenode,
-                                         filelog=self._filelog,
-                                         ancestormap=self._ancestormap)
+                introctx = remotefilectx(
+                    self._repo,
+                    self._path,
+                    changeid=introrev,
+                    fileid=self._filenode,
+                    filelog=self._filelog,
+                    ancestormap=self._ancestormap,
+                )
 
         # like self.ancestors, but append to "fetch" and skip visiting parents
         # of nodes in "prefetchskip".
@@ -442,8 +472,9 @@
                     seen.add(parent.node())
                     queue.append(parent)
 
-        self._repo.ui.debug('remotefilelog: prefetching %d files '
-                            'for annotate\n' % len(fetch))
+        self._repo.ui.debug(
+            'remotefilelog: prefetching %d files for annotate\n' % len(fetch)
+        )
         if fetch:
             self._repo.fileservice.prefetch(fetch)
         return super(remotefilectx, self).annotate(*args, **kwargs)
@@ -452,11 +483,13 @@
     def children(self):
         return []
 
+
 class remoteworkingfilectx(context.workingfilectx, remotefilectx):
     def __init__(self, repo, path, filelog=None, workingctx=None):
         self._ancestormap = None
-        super(remoteworkingfilectx, self).__init__(repo, path, filelog,
-                                                   workingctx)
+        super(remoteworkingfilectx, self).__init__(
+            repo, path, filelog, workingctx
+        )
 
     def parents(self):
         return remotefilectx.parents(self)
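
[editor's note] Much of remotefilectx revolves around the ancestormap, a dict of {filenode: (p1, p2, linknode, copyfrom)}. A compact sketch of walking that structure breadth-first, with toy one-byte nodes and nullid marking an absent parent (illustrative shape only, not the extension's code):

    import collections

    nullid = b'\x00'
    ancestormap = {
        b'c': (b'b', nullid, b'C', None),  # node: (p1, p2, linknode, copyfrom)
        b'b': (b'a', nullid, b'B', None),
        b'a': (nullid, nullid, b'A', None),
    }

    def walkancestors(start):
        queue = collections.deque([start])
        seen = set()
        while queue:
            node = queue.popleft()
            if node == nullid or node in seen:
                continue
            seen.add(node)
            p1, p2, linknode, copyfrom = ancestormap[node]
            yield node, linknode
            queue.extend([p1, p2])

    assert list(walkancestors(b'c')) == [
        (b'c', b'C'), (b'b', b'B'), (b'a', b'A')
    ]
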
--- a/hgext/remotefilelog/remotefilelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/remotefilelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -33,6 +33,7 @@
     shallowutil,
 )
 
+
 class remotefilelognodemap(object):
     def __init__(self, filename, store):
         self._filename = filename
@@ -47,6 +48,7 @@
             raise KeyError(node)
         return node
 
+
 class remotefilelog(object):
 
     _generaldelta = True
@@ -68,15 +70,17 @@
         if not t.startswith('\1\n'):
             return t
         s = t.index('\1\n', 2)
-        return t[s + 2:]
+        return t[s + 2 :]
 
     def add(self, text, meta, transaction, linknode, p1=None, p2=None):
         # hash with the metadata, like in vanilla filelogs
-        hashtext = shallowutil.createrevlogtext(text, meta.get('copy'),
-                                                meta.get('copyrev'))
+        hashtext = shallowutil.createrevlogtext(
+            text, meta.get('copy'), meta.get('copyrev')
+        )
         node = storageutil.hashrevisionsha1(hashtext, p1, p2)
-        return self.addrevision(hashtext, transaction, linknode, p1, p2,
-                                node=node)
+        return self.addrevision(
+            hashtext, transaction, linknode, p1, p2, node=node
+        )
 
     def _createfileblob(self, text, meta, flags, p1, p2, node, linknode):
         # text passed to "_createfileblob" does not include filelog metadata
@@ -117,7 +121,12 @@
 
             pacopyfrom = pacopyfrom or ''
             ancestortext += "%s%s%s%s%s\0" % (
-                c, pa1, pa2, ancestorlinknode, pacopyfrom)
+                c,
+                pa1,
+                pa2,
+                ancestorlinknode,
+                pacopyfrom,
+            )
 
             if pa1 != nullid and pa1 not in visited:
                 queue.append(pa1)
@@ -130,9 +139,18 @@
 
         return data
 
-    def addrevision(self, text, transaction, linknode, p1, p2, cachedelta=None,
-                    node=None, flags=revlog.REVIDX_DEFAULT_FLAGS,
-                    sidedata=None):
+    def addrevision(
+        self,
+        text,
+        transaction,
+        linknode,
+        p1,
+        p2,
+        cachedelta=None,
+        node=None,
+        flags=revlog.REVIDX_DEFAULT_FLAGS,
+        sidedata=None,
+    ):
         # text passed to "addrevision" includes hg filelog metadata header
         if node is None:
             node = storageutil.hashrevisionsha1(text, p1, p2)
@@ -140,14 +158,33 @@
             sidedata = {}
 
         meta, metaoffset = storageutil.parsemeta(text)
-        rawtext, validatehash = flagutil.processflagswrite(self, text, flags,
-                                                           sidedata=sidedata)
-        return self.addrawrevision(rawtext, transaction, linknode, p1, p2,
-                                   node, flags, cachedelta,
-                                   _metatuple=(meta, metaoffset))
+        rawtext, validatehash = flagutil.processflagswrite(
+            self, text, flags, sidedata=sidedata
+        )
+        return self.addrawrevision(
+            rawtext,
+            transaction,
+            linknode,
+            p1,
+            p2,
+            node,
+            flags,
+            cachedelta,
+            _metatuple=(meta, metaoffset),
+        )
 
-    def addrawrevision(self, rawtext, transaction, linknode, p1, p2, node,
-                       flags, cachedelta=None, _metatuple=None):
+    def addrawrevision(
+        self,
+        rawtext,
+        transaction,
+        linknode,
+        p1,
+        p2,
+        node,
+        flags,
+        cachedelta=None,
+        _metatuple=None,
+    ):
         if _metatuple:
             # _metatuple: used by "addrevision" internally by remotefilelog
             # meta was parsed confidently
@@ -173,8 +210,9 @@
             blobtext = rawtext[metaoffset:]
         else:
             blobtext = rawtext
-        data = self._createfileblob(blobtext, meta, flags, p1, p2, node,
-                                    linknode)
+        data = self._createfileblob(
+            blobtext, meta, flags, p1, p2, node, linknode
+        )
         self.repo.contentstore.addremotefilelognode(self.filename, node, data)
 
         return node
@@ -226,7 +264,8 @@
     def flags(self, node):
         if isinstance(node, int):
             raise error.ProgrammingError(
-                'remotefilelog does not accept integer rev for flags')
+                'remotefilelog does not accept integer rev for flags'
+            )
         store = self.repo.contentstore
         return store.getmeta(self.filename, node).get(constants.METAKEYFLAG, 0)
 
@@ -254,9 +293,15 @@
     def linkrev(self, node):
         return self.repo.unfiltered().changelog.rev(self.linknode(node))
 
-    def emitrevisions(self, nodes, nodesorder=None, revisiondata=False,
-                      assumehaveparentrevisions=False, deltaprevious=False,
-                      deltamode=None):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltaprevious=False,
+        deltamode=None,
+    ):
         # we don't use any of these parameters here
         del nodesorder, revisiondata, assumehaveparentrevisions, deltaprevious
         del deltamode
@@ -283,18 +328,18 @@
                 baserevisionsize=None,
                 revision=revision,
                 delta=delta,
-                )
+            )
 
     def revdiff(self, node1, node2):
-        return mdiff.textdiff(self.rawdata(node1),
-                              self.rawdata(node2))
+        return mdiff.textdiff(self.rawdata(node1), self.rawdata(node2))
 
     def lookup(self, node):
         if len(node) == 40:
             node = bin(node)
         if len(node) != 20:
-            raise error.LookupError(node, self.filename,
-                                    _('invalid lookup input'))
+            raise error.LookupError(
+                node, self.filename, _('invalid lookup input')
+            )
 
         return node
 
@@ -306,18 +351,19 @@
         # This is a hack.
         if isinstance(rev, int):
             raise error.ProgrammingError(
-                'remotefilelog does not convert integer rev to node')
+                'remotefilelog does not convert integer rev to node'
+            )
         return rev
 
     def _processflags(self, text, flags, operation, raw=False):
         """deprecated entry point to access flag processors"""
-        msg = ('_processflag(...) use the specialized variant')
+        msg = '_processflag(...) use the specialized variant'
         util.nouideprecwarn(msg, '5.2', stacklevel=2)
         if raw:
             return text, flagutil.processflagsraw(self, text, flags)
         elif operation == 'read':
             return flagutil.processflagsread(self, text, flags)
-        else: # write operation
+        else:  # write operation
             return flagutil.processflagswrite(self, text, flags)
 
     def revision(self, node, raw=False):
@@ -329,8 +375,9 @@
         if node == nullid:
             return ""
         if len(node) != 20:
-            raise error.LookupError(node, self.filename,
-                                    _('invalid revision input'))
+            raise error.LookupError(
+                node, self.filename, _('invalid revision input')
+            )
         if node == wdirid or node in wdirfilenodeids:
             raise error.WdirUnsupported
 
@@ -350,8 +397,9 @@
         """reads the raw file blob from disk, cache, or server"""
         fileservice = self.repo.fileservice
         localcache = fileservice.localcache
-        cachekey = fileserverclient.getcachekey(self.repo.name, self.filename,
-                                                id)
+        cachekey = fileserverclient.getcachekey(
+            self.repo.name, self.filename, id
+        )
         try:
             return localcache.read(cachekey)
         except KeyError:
@@ -426,8 +474,9 @@
         # Breadth first traversal to build linkrev graph
         parentrevs = collections.defaultdict(list)
         revmap = {}
-        queue = collections.deque(((None, n) for n in parentsmap
-                 if n not in allparents))
+        queue = collections.deque(
+            ((None, n) for n in parentsmap if n not in allparents)
+        )
         while queue:
             prevrev, current = queue.pop()
             if current in revmap:
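
[editor's note] One detail reformatted above deserves a gloss: remotefilelog.content() strips the standard filelog metadata envelope, where copy information sits between two "\1\n" markers at the start of the text. A self-contained sketch of that framing:

    def stripmeta(t):
        # copy metadata, when present, is framed by a pair of "\1\n"
        # markers; the file text starts right after the second one
        if not t.startswith(b'\x01\n'):
            return t
        s = t.index(b'\x01\n', 2)
        return t[s + 2 :]

    raw = b'\x01\ncopy: old/name.py\ncopyrev: 0123abcd\n\x01\nreal file text'
    assert stripmeta(raw) == b'real file text'
    assert stripmeta(b'no metadata here') == b'no metadata here'
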
--- a/hgext/remotefilelog/remotefilelogserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/remotefilelogserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,45 +28,53 @@
     wireprototypes,
     wireprotov1server,
 )
-from .  import (
+from . import (
     constants,
     shallowutil,
 )
 
 _sshv1server = wireprotoserver.sshv1protocolhandler
 
+
 def setupserver(ui, repo):
     """Sets up a normal Mercurial repo so it can serve files to shallow repos.
     """
     onetimesetup(ui)
 
     # don't send files to shallow clients during pulls
-    def generatefiles(orig, self, changedfiles, linknodes, commonrevs, source,
-                      *args, **kwargs):
+    def generatefiles(
+        orig, self, changedfiles, linknodes, commonrevs, source, *args, **kwargs
+    ):
         caps = self._bundlecaps or []
         if constants.BUNDLE2_CAPABLITY in caps:
             # only send files that don't match the specified patterns
             includepattern = None
             excludepattern = None
-            for cap in (self._bundlecaps or []):
+            for cap in self._bundlecaps or []:
                 if cap.startswith("includepattern="):
-                    includepattern = cap[len("includepattern="):].split('\0')
+                    includepattern = cap[len("includepattern=") :].split('\0')
                 elif cap.startswith("excludepattern="):
-                    excludepattern = cap[len("excludepattern="):].split('\0')
+                    excludepattern = cap[len("excludepattern=") :].split('\0')
 
             m = match.always()
             if includepattern or excludepattern:
-                m = match.match(repo.root, '', None,
-                    includepattern, excludepattern)
+                m = match.match(
+                    repo.root, '', None, includepattern, excludepattern
+                )
 
             changedfiles = list([f for f in changedfiles if not m(f)])
-        return orig(self, changedfiles, linknodes, commonrevs, source,
-                    *args, **kwargs)
+        return orig(
+            self, changedfiles, linknodes, commonrevs, source, *args, **kwargs
+        )
 
     extensions.wrapfunction(
-        changegroup.cgpacker, 'generatefiles', generatefiles)
+        changegroup.cgpacker, 'generatefiles', generatefiles
+    )
+
 
 onetime = False
+
+
 def onetimesetup(ui):
     """Configures the wireprotocol for both clients and servers.
     """
@@ -77,16 +85,20 @@
 
     # support file content requests
     wireprotov1server.wireprotocommand(
-        'x_rfl_getflogheads', 'path', permission='pull')(getflogheads)
+        'x_rfl_getflogheads', 'path', permission='pull'
+    )(getflogheads)
+    wireprotov1server.wireprotocommand('x_rfl_getfiles', '', permission='pull')(
+        getfiles
+    )
     wireprotov1server.wireprotocommand(
-        'x_rfl_getfiles', '', permission='pull')(getfiles)
-    wireprotov1server.wireprotocommand(
-        'x_rfl_getfile', 'file node', permission='pull')(getfile)
+        'x_rfl_getfile', 'file node', permission='pull'
+    )(getfile)
 
     class streamstate(object):
         match = None
         shallowremote = False
         noflatmf = False
+
     state = streamstate()
 
     def stream_out_shallow(repo, proto, other):
@@ -107,19 +119,22 @@
             state.match = match.always()
             state.noflatmf = other.get('noflatmanifest') == 'True'
             if includepattern or excludepattern:
-                state.match = match.match(repo.root, '', None,
-                    includepattern, excludepattern)
+                state.match = match.match(
+                    repo.root, '', None, includepattern, excludepattern
+                )
             streamres = wireprotov1server.stream(repo, proto)
 
             # Force the first value to execute, so the file list is computed
             # within the try/finally scope
             first = next(streamres.gen)
             second = next(streamres.gen)
+
             def gen():
                 yield first
                 yield second
                 for value in streamres.gen:
                     yield value
+
             return wireprototypes.streamres(gen())
         finally:
             state.shallowremote = oldshallow
@@ -149,8 +164,9 @@
 
             if 'treemanifest' in repo.requirements:
                 for (u, e, s) in repo.store.datafiles():
-                    if (u.startswith('meta/') and
-                        (u.endswith('.i') or u.endswith('.d'))):
+                    if u.startswith('meta/') and (
+                        u.endswith('.i') or u.endswith('.d')
+                    ):
                         yield (u, e, s)
 
             # Return .d and .i files that do not match the shallow pattern
@@ -170,8 +186,9 @@
             # don't allow cloning from a shallow repo to a full repo
             # since it would require fetching every version of every
             # file in order to create the revlogs.
-            raise error.Abort(_("Cannot clone from a shallow repo "
-                                "to a full repo."))
+            raise error.Abort(
+                _("Cannot clone from a shallow repo " "to a full repo.")
+            )
         else:
             for x in orig(repo, matcher):
                 yield x
@@ -181,14 +198,16 @@
     # expose remotefilelog capabilities
     def _capabilities(orig, repo, proto):
         caps = orig(repo, proto)
-        if (shallowutil.isenabled(repo) or ui.configbool('remotefilelog',
-                                                         'server')):
+        if shallowutil.isenabled(repo) or ui.configbool(
+            'remotefilelog', 'server'
+        ):
             if isinstance(proto, _sshv1server):
                 # legacy getfiles method which only works over ssh
                 caps.append(constants.NETWORK_CAP_LEGACY_SSH_GETFILES)
             caps.append('x_rfl_getflogheads')
             caps.append('x_rfl_getfile')
         return caps
+
     extensions.wrapfunction(wireprotov1server, '_capabilities', _capabilities)
 
     def _adjustlinkrev(orig, self, *args, **kwargs):
@@ -200,7 +219,8 @@
         return orig(self, *args, **kwargs)
 
     extensions.wrapfunction(
-        context.basefilectx, '_adjustlinkrev', _adjustlinkrev)
+        context.basefilectx, '_adjustlinkrev', _adjustlinkrev
+    )
 
     def _iscmd(orig, cmd):
         if cmd == 'x_rfl_getfiles':
@@ -209,6 +229,7 @@
 
     extensions.wrapfunction(wireprotoserver, 'iscmd', _iscmd)
 
+
 def _loadfileblob(repo, cachepath, path, node):
     filecachepath = os.path.join(cachepath, path, hex(node))
     if not os.path.exists(filecachepath) or os.path.getsize(filecachepath) == 0:
@@ -250,6 +271,7 @@
             text = f.read()
     return text
 
+
 def getflogheads(repo, proto, path):
     """A server api for requesting a filelog's heads
     """
@@ -257,6 +279,7 @@
     heads = flog.heads()
     return '\n'.join((hex(head) for head in heads if head != nullid))
 
+
 def getfile(repo, proto, file, node):
     """A server api for requesting a particular version of a file. Can be used
     in batches to request many files at once. The return protocol is:
@@ -276,6 +299,7 @@
         return '0\0'
     return '0\0' + _loadfileblob(repo, cachepath, file, node)
 
+
 def getfiles(repo, proto):
     """A server api for requesting particular versions of particular files.
     """
@@ -310,8 +334,10 @@
             # it would be better to only flush after processing a whole batch
             # but currently we don't know if there are more requests coming
             proto._fout.flush()
+
     return wireprototypes.streamres(streamer())
 
+
 def createfileblob(filectx):
     """
     format:
@@ -361,8 +387,12 @@
                 copyname = rename[0]
             linknode = ancestorctx.node()
             ancestortext += "%s%s%s%s%s\0" % (
-                ancestorctx.filenode(), p1, p2, linknode,
-                copyname)
+                ancestorctx.filenode(),
+                p1,
+                p2,
+                linknode,
+                copyname,
+            )
     finally:
         repo.forcelinkrev = False
 
@@ -370,6 +400,7 @@
 
     return "%s\0%s%s" % (header, text, ancestortext)
 
+
 def gcserver(ui, repo):
     if not repo.ui.configbool("remotefilelog", "server"):
         return
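
Every hunk in this file is black re-wrapping existing tokens; nothing changes behavior. A minimal sketch of the wrapping rule at work, using a hypothetical frobnicate() rather than anything from this file: black keeps a call on one line when it fits, then tries all arguments on a single indented continuation line, and only then gives each argument its own line with a trailing comma.

    def frobnicate(*args, **kwargs):
        """Hypothetical stand-in so the sketch runs on its own."""
        return args, kwargs

    # Fits within the line limit: left on one line.
    frobnicate('x_rfl_getfile', 'file node', permission='pull')

    # Too long on one line: all arguments move to one indented line,
    # with the closing parenthesis on its own line, as in the
    # wireprotocommand() hunks above.
    frobnicate(
        'x_rfl_getflogheads', 'path', permission='pull'
    )

    # Too long even as a group: one argument per line, trailing comma,
    # as in the ancestortext hunk in createfileblob() above.
    frobnicate(
        'x_rfl_getfiles',
        'file node',
        permission='pull',
        another_fairly_long_keyword_argument=None,
    )
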
--- a/hgext/remotefilelog/repack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/repack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,11 +31,14 @@
 
 osutil = policy.importmod(r'osutil')
 
+
 class RepackAlreadyRunning(error.Abort):
     pass
 
-def backgroundrepack(repo, incremental=True, packsonly=False,
-                     ensurestart=False):
+
+def backgroundrepack(
+    repo, incremental=True, packsonly=False, ensurestart=False
+):
     cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'repack']
     msg = _("(running background repack)\n")
     if incremental:
@@ -47,21 +50,27 @@
     # We know this command will find a binary, so don't block on it starting.
     procutil.runbgcommand(cmd, encoding.environ, ensurestart=ensurestart)
 
+
 def fullrepack(repo, options=None):
     """If ``packsonly`` is True, stores creating only loose objects are skipped.
     """
     if util.safehasattr(repo, 'shareddatastores'):
-        datasource = contentstore.unioncontentstore(
-            *repo.shareddatastores)
+        datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
-            *repo.sharedhistorystores,
-            allowincomplete=True)
+            *repo.sharedhistorystores, allowincomplete=True
+        )
 
         packpath = shallowutil.getcachepackpath(
+            repo, constants.FILEPACK_CATEGORY
+        )
+        _runrepack(
             repo,
-            constants.FILEPACK_CATEGORY)
-        _runrepack(repo, datasource, historysource, packpath,
-                   constants.FILEPACK_CATEGORY, options=options)
+            datasource,
+            historysource,
+            packpath,
+            constants.FILEPACK_CATEGORY,
+            options=options,
+        )
 
     if util.safehasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
@@ -71,20 +80,33 @@
         # Repack the shared manifest store
         datasource = contentstore.unioncontentstore(*sdstores)
         historysource = metadatastore.unionmetadatastore(
-                        *shstores,
-                        allowincomplete=True)
-        _runrepack(repo, datasource, historysource, spackpath,
-                   constants.TREEPACK_CATEGORY, options=options)
+            *shstores, allowincomplete=True
+        )
+        _runrepack(
+            repo,
+            datasource,
+            historysource,
+            spackpath,
+            constants.TREEPACK_CATEGORY,
+            options=options,
+        )
 
         # Repack the local manifest store
         datasource = contentstore.unioncontentstore(
-                        *ldstores,
-                        allowincomplete=True)
+            *ldstores, allowincomplete=True
+        )
         historysource = metadatastore.unionmetadatastore(
-                        *lhstores,
-                        allowincomplete=True)
-        _runrepack(repo, datasource, historysource, lpackpath,
-                   constants.TREEPACK_CATEGORY, options=options)
+            *lhstores, allowincomplete=True
+        )
+        _runrepack(
+            repo,
+            datasource,
+            historysource,
+            lpackpath,
+            constants.TREEPACK_CATEGORY,
+            options=options,
+        )
+
 
 def incrementalrepack(repo, options=None):
     """This repacks the repo by looking at the distribution of pack files in the
@@ -92,14 +114,16 @@
     """
     if util.safehasattr(repo, 'shareddatastores'):
         packpath = shallowutil.getcachepackpath(
+            repo, constants.FILEPACK_CATEGORY
+        )
+        _incrementalrepack(
             repo,
-            constants.FILEPACK_CATEGORY)
-        _incrementalrepack(repo,
-                           repo.shareddatastores,
-                           repo.sharedhistorystores,
-                           packpath,
-                           constants.FILEPACK_CATEGORY,
-                           options=options)
+            repo.shareddatastores,
+            repo.sharedhistorystores,
+            packpath,
+            constants.FILEPACK_CATEGORY,
+            options=options,
+        )
 
     if util.safehasattr(repo.manifestlog, 'datastore'):
         localdata, shareddata = _getmanifeststores(repo)
@@ -107,21 +131,26 @@
         spackpath, sdstores, shstores = shareddata
 
         # Repack the shared manifest store
-        _incrementalrepack(repo,
-                           sdstores,
-                           shstores,
-                           spackpath,
-                           constants.TREEPACK_CATEGORY,
-                           options=options)
+        _incrementalrepack(
+            repo,
+            sdstores,
+            shstores,
+            spackpath,
+            constants.TREEPACK_CATEGORY,
+            options=options,
+        )
 
         # Repack the local manifest store
-        _incrementalrepack(repo,
-                           ldstores,
-                           lhstores,
-                           lpackpath,
-                           constants.TREEPACK_CATEGORY,
-                           allowincompletedata=True,
-                           options=options)
+        _incrementalrepack(
+            repo,
+            ldstores,
+            lhstores,
+            lpackpath,
+            constants.TREEPACK_CATEGORY,
+            allowincompletedata=True,
+            options=options,
+        )
+
 
 def _getmanifeststores(repo):
     shareddatastores = repo.manifestlog.shareddatastores
@@ -129,19 +158,25 @@
     sharedhistorystores = repo.manifestlog.sharedhistorystores
     localhistorystores = repo.manifestlog.localhistorystores
 
-    sharedpackpath = shallowutil.getcachepackpath(repo,
-                                            constants.TREEPACK_CATEGORY)
-    localpackpath = shallowutil.getlocalpackpath(repo.svfs.vfs.base,
-                                            constants.TREEPACK_CATEGORY)
+    sharedpackpath = shallowutil.getcachepackpath(
+        repo, constants.TREEPACK_CATEGORY
+    )
+    localpackpath = shallowutil.getlocalpackpath(
+        repo.svfs.vfs.base, constants.TREEPACK_CATEGORY
+    )
 
-    return ((localpackpath, localdatastores, localhistorystores),
-            (sharedpackpath, shareddatastores, sharedhistorystores))
+    return (
+        (localpackpath, localdatastores, localhistorystores),
+        (sharedpackpath, shareddatastores, sharedhistorystores),
+    )
+
 
 def _topacks(packpath, files, constructor):
     paths = list(os.path.join(packpath, p) for p in files)
     packs = list(constructor(p) for p in paths)
     return packs
 
+
 def _deletebigpacks(repo, folder, files):
     """Deletes packfiles that are bigger than ``packs.maxpacksize``.
 
@@ -156,96 +191,128 @@
 
     # Either an oversize index or datapack will trigger cleanup of the whole
     # pack:
-    oversized = {os.path.splitext(path)[0] for path, ftype, stat in files
-        if (stat.st_size > maxsize and (os.path.splitext(path)[1]
-                                        in VALIDEXTS))}
+    oversized = {
+        os.path.splitext(path)[0]
+        for path, ftype, stat in files
+        if (stat.st_size > maxsize and (os.path.splitext(path)[1] in VALIDEXTS))
+    }
 
     for rootfname in oversized:
         rootpath = os.path.join(folder, rootfname)
         for ext in VALIDEXTS:
             path = rootpath + ext
-            repo.ui.debug('removing oversize packfile %s (%s)\n' %
-                          (path, util.bytecount(os.stat(path).st_size)))
+            repo.ui.debug(
+                'removing oversize packfile %s (%s)\n'
+                % (path, util.bytecount(os.stat(path).st_size))
+            )
             os.unlink(path)
     return [row for row in files if os.path.basename(row[0]) not in oversized]
 
-def _incrementalrepack(repo, datastore, historystore, packpath, category,
-        allowincompletedata=False, options=None):
+
+def _incrementalrepack(
+    repo,
+    datastore,
+    historystore,
+    packpath,
+    category,
+    allowincompletedata=False,
+    options=None,
+):
     shallowutil.mkstickygroupdir(repo.ui, packpath)
 
     files = osutil.listdir(packpath, stat=True)
     files = _deletebigpacks(repo, packpath, files)
-    datapacks = _topacks(packpath,
-        _computeincrementaldatapack(repo.ui, files),
-        datapack.datapack)
-    datapacks.extend(s for s in datastore
-                     if not isinstance(s, datapack.datapackstore))
+    datapacks = _topacks(
+        packpath, _computeincrementaldatapack(repo.ui, files), datapack.datapack
+    )
+    datapacks.extend(
+        s for s in datastore if not isinstance(s, datapack.datapackstore)
+    )
 
-    historypacks = _topacks(packpath,
+    historypacks = _topacks(
+        packpath,
         _computeincrementalhistorypack(repo.ui, files),
-        historypack.historypack)
-    historypacks.extend(s for s in historystore
-                        if not isinstance(s, historypack.historypackstore))
+        historypack.historypack,
+    )
+    historypacks.extend(
+        s
+        for s in historystore
+        if not isinstance(s, historypack.historypackstore)
+    )
 
     # ``allhistory{files,packs}`` contains all known history packs, even ones we
     # don't plan to repack. They are used during the datapack repack to ensure
     # good ordering of nodes.
-    allhistoryfiles = _allpackfileswithsuffix(files, historypack.PACKSUFFIX,
-                            historypack.INDEXSUFFIX)
-    allhistorypacks = _topacks(packpath,
+    allhistoryfiles = _allpackfileswithsuffix(
+        files, historypack.PACKSUFFIX, historypack.INDEXSUFFIX
+    )
+    allhistorypacks = _topacks(
+        packpath,
         (f for f, mode, stat in allhistoryfiles),
-        historypack.historypack)
-    allhistorypacks.extend(s for s in historystore
-                        if not isinstance(s, historypack.historypackstore))
-    _runrepack(repo,
-               contentstore.unioncontentstore(
-                   *datapacks,
-                   allowincomplete=allowincompletedata),
-               metadatastore.unionmetadatastore(
-                   *historypacks,
-                   allowincomplete=True),
-               packpath, category,
-               fullhistory=metadatastore.unionmetadatastore(
-                   *allhistorypacks,
-                   allowincomplete=True),
-                options=options)
+        historypack.historypack,
+    )
+    allhistorypacks.extend(
+        s
+        for s in historystore
+        if not isinstance(s, historypack.historypackstore)
+    )
+    _runrepack(
+        repo,
+        contentstore.unioncontentstore(
+            *datapacks, allowincomplete=allowincompletedata
+        ),
+        metadatastore.unionmetadatastore(*historypacks, allowincomplete=True),
+        packpath,
+        category,
+        fullhistory=metadatastore.unionmetadatastore(
+            *allhistorypacks, allowincomplete=True
+        ),
+        options=options,
+    )
+
 
 def _computeincrementaldatapack(ui, files):
     opts = {
-        'gencountlimit' : ui.configint(
-            'remotefilelog', 'data.gencountlimit'),
-        'generations' : ui.configlist(
-            'remotefilelog', 'data.generations'),
-        'maxrepackpacks' : ui.configint(
-            'remotefilelog', 'data.maxrepackpacks'),
-        'repackmaxpacksize' : ui.configbytes(
-            'remotefilelog', 'data.repackmaxpacksize'),
-        'repacksizelimit' : ui.configbytes(
-            'remotefilelog', 'data.repacksizelimit'),
+        'gencountlimit': ui.configint('remotefilelog', 'data.gencountlimit'),
+        'generations': ui.configlist('remotefilelog', 'data.generations'),
+        'maxrepackpacks': ui.configint('remotefilelog', 'data.maxrepackpacks'),
+        'repackmaxpacksize': ui.configbytes(
+            'remotefilelog', 'data.repackmaxpacksize'
+        ),
+        'repacksizelimit': ui.configbytes(
+            'remotefilelog', 'data.repacksizelimit'
+        ),
     }
 
     packfiles = _allpackfileswithsuffix(
-        files, datapack.PACKSUFFIX, datapack.INDEXSUFFIX)
+        files, datapack.PACKSUFFIX, datapack.INDEXSUFFIX
+    )
     return _computeincrementalpack(packfiles, opts)
 
+
 def _computeincrementalhistorypack(ui, files):
     opts = {
-        'gencountlimit' : ui.configint(
-            'remotefilelog', 'history.gencountlimit'),
-        'generations' : ui.configlist(
-            'remotefilelog', 'history.generations', ['100MB']),
-        'maxrepackpacks' : ui.configint(
-            'remotefilelog', 'history.maxrepackpacks'),
-        'repackmaxpacksize' : ui.configbytes(
-            'remotefilelog', 'history.repackmaxpacksize', '400MB'),
-        'repacksizelimit' : ui.configbytes(
-            'remotefilelog', 'history.repacksizelimit'),
+        'gencountlimit': ui.configint('remotefilelog', 'history.gencountlimit'),
+        'generations': ui.configlist(
+            'remotefilelog', 'history.generations', ['100MB']
+        ),
+        'maxrepackpacks': ui.configint(
+            'remotefilelog', 'history.maxrepackpacks'
+        ),
+        'repackmaxpacksize': ui.configbytes(
+            'remotefilelog', 'history.repackmaxpacksize', '400MB'
+        ),
+        'repacksizelimit': ui.configbytes(
+            'remotefilelog', 'history.repacksizelimit'
+        ),
     }
 
     packfiles = _allpackfileswithsuffix(
-        files, historypack.PACKSUFFIX, historypack.INDEXSUFFIX)
+        files, historypack.PACKSUFFIX, historypack.INDEXSUFFIX
+    )
     return _computeincrementalpack(packfiles, opts)
 
+
 def _allpackfileswithsuffix(files, packsuffix, indexsuffix):
     result = []
     fileset = set(fn for fn, mode, stat in files)
@@ -253,7 +320,7 @@
         if not filename.endswith(packsuffix):
             continue
 
-        prefix = filename[:-len(packsuffix)]
+        prefix = filename[: -len(packsuffix)]
 
         # Don't process a pack if it doesn't have an index.
         if (prefix + indexsuffix) not in fileset:
@@ -262,6 +329,7 @@
 
     return result
 
+
 def _computeincrementalpack(files, opts):
     """Given a set of pack files along with the configuration options, this
     function computes the list of files that should be packed as part of an
@@ -272,8 +340,9 @@
     over time).
     """
 
-    limits = list(sorted((util.sizetoint(s) for s in opts['generations']),
-                                reverse=True))
+    limits = list(
+        sorted((util.sizetoint(s) for s in opts['generations']), reverse=True)
+    )
     limits.append(0)
 
     # Group the packs by generation (i.e. by size)
@@ -303,23 +372,29 @@
     for i, limit in enumerate(limits):
         if len(generations[i]) > opts['gencountlimit']:
             # Sort to be smallest last, for easy popping later
-            genpacks.extend(sorted(generations[i], reverse=True,
-                                   key=lambda x: sizes[x]))
+            genpacks.extend(
+                sorted(generations[i], reverse=True, key=lambda x: sizes[x])
+            )
             break
 
     # Take as many packs from the generation as we can
     chosenpacks = genpacks[-3:]
     genpacks = genpacks[:-3]
     repacksize = sum(sizes[n] for n in chosenpacks)
-    while (repacksize < opts['repacksizelimit'] and genpacks and
-           len(chosenpacks) < opts['maxrepackpacks']):
+    while (
+        repacksize < opts['repacksizelimit']
+        and genpacks
+        and len(chosenpacks) < opts['maxrepackpacks']
+    ):
         chosenpacks.append(genpacks.pop())
         repacksize += sizes[chosenpacks[-1]]
 
     return chosenpacks
 
-def _runrepack(repo, data, history, packpath, category, fullhistory=None,
-               options=None):
+
+def _runrepack(
+    repo, data, history, packpath, category, fullhistory=None, options=None
+):
     shallowutil.mkstickygroupdir(repo.ui, packpath)
 
     def isold(repo, filename, node):
@@ -337,16 +412,26 @@
     garbagecollect = repo.ui.configbool('remotefilelog', 'gcrepack')
     if not fullhistory:
         fullhistory = history
-    packer = repacker(repo, data, history, fullhistory, category,
-                      gc=garbagecollect, isold=isold, options=options)
+    packer = repacker(
+        repo,
+        data,
+        history,
+        fullhistory,
+        category,
+        gc=garbagecollect,
+        isold=isold,
+        options=options,
+    )
 
     with datapack.mutabledatapack(repo.ui, packpath) as dpack:
         with historypack.mutablehistorypack(repo.ui, packpath) as hpack:
             try:
                 packer.run(dpack, hpack)
             except error.LockHeld:
-                raise RepackAlreadyRunning(_("skipping repack - another repack "
-                                             "is already running"))
+                raise RepackAlreadyRunning(
+                    _("skipping repack - another repack " "is already running")
+                )
+
 
 def keepset(repo, keyfn, lastkeepkeys=None):
     """Computes a keepset which is not garbage collected.
@@ -409,12 +494,23 @@
 
     return keepkeys
 
+
 class repacker(object):
     """Class for orchestrating the repack of data and history information into a
     new format.
     """
-    def __init__(self, repo, data, history, fullhistory, category, gc=False,
-                 isold=None, options=None):
+
+    def __init__(
+        self,
+        repo,
+        data,
+        history,
+        fullhistory,
+        category,
+        gc=False,
+        isold=None,
+        options=None,
+    ):
         self.repo = repo
         self.data = data
         self.history = history
@@ -426,14 +522,15 @@
             if not isold:
                 raise ValueError("Function 'isold' is not properly specified")
             # use (filename, node) tuple as a keepset key
-            self.keepkeys = keepset(repo, lambda f, n : (f, n))
+            self.keepkeys = keepset(repo, lambda f, n: (f, n))
             self.isold = isold
 
     def run(self, targetdata, targethistory):
         ledger = repackledger()
 
-        with lockmod.lock(repacklockvfs(self.repo), "repacklock", desc=None,
-                          timeout=0):
+        with lockmod.lock(
+            repacklockvfs(self.repo), "repacklock", desc=None, timeout=0
+        ):
             self.repo.hook('prerepack')
 
             # Populate ledger from source
@@ -473,8 +570,10 @@
         orphans = sorted(orphans)
         orphans = list(sorted(orphans, key=getsize, reverse=True))
         if ui.debugflag:
-            ui.debug("%s: orphan chain: %s\n" % (filename,
-                ", ".join([short(s) for s in orphans])))
+            ui.debug(
+                "%s: orphan chain: %s\n"
+                % (filename, ", ".join([short(s) for s in orphans]))
+            )
 
         # Create one contiguous chain and reassign deltabases.
         for i, node in enumerate(orphans):
@@ -497,23 +596,28 @@
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
 
         count = 0
-        repackprogress = ui.makeprogress(_("repacking data"), unit=self.unit,
-                                            total=len(byfile))
+        repackprogress = ui.makeprogress(
+            _("repacking data"), unit=self.unit, total=len(byfile)
+        )
         for filename, entries in sorted(byfile.iteritems()):
             repackprogress.update(count)
 
             ancestors = {}
             nodes = list(node for node in entries)
             nohistory = []
-            buildprogress = ui.makeprogress(_("building history"), unit='nodes',
-                                            total=len(nodes))
+            buildprogress = ui.makeprogress(
+                _("building history"), unit='nodes', total=len(nodes)
+            )
             for i, node in enumerate(nodes):
                 if node in ancestors:
                     continue
                 buildprogress.update(i)
                 try:
-                    ancestors.update(self.fullhistory.getancestors(filename,
-                        node, known=ancestors))
+                    ancestors.update(
+                        self.fullhistory.getancestors(
+                            filename, node, known=ancestors
+                        )
+                    )
                 except KeyError:
                     # Since we're packing data entries, we may not have the
                     # corresponding history entries for them. It's not a big
@@ -524,14 +628,16 @@
             # Order the nodes children first, so we can produce reverse deltas
             orderednodes = list(reversed(self._toposort(ancestors)))
             if len(nohistory) > 0:
-                ui.debug('repackdata: %d nodes without history\n' %
-                         len(nohistory))
+                ui.debug(
+                    'repackdata: %d nodes without history\n' % len(nohistory)
+                )
             orderednodes.extend(sorted(nohistory))
 
             # Filter orderednodes to just the nodes we want to serialize (it
             # currently also has the edge nodes' ancestors).
-            orderednodes = list(filter(lambda node: node in nodes,
-                                orderednodes))
+            orderednodes = list(
+                filter(lambda node: node in nodes, orderednodes)
+            )
 
             # Garbage collect old nodes:
             if self.garbagecollect:
@@ -539,8 +645,9 @@
                 for node in orderednodes:
                     # If the node is old and is not in the keepset, we skip it,
                     # and mark as garbage collected
-                    if ((filename, node) not in self.keepkeys and
-                        self.isold(self.repo, filename, node)):
+                    if (filename, node) not in self.keepkeys and self.isold(
+                        self.repo, filename, node
+                    ):
                         entries[node].gced = True
                         continue
                     neworderednodes.append(node)
@@ -551,9 +658,9 @@
             nobase = set()
             referenced = set()
             nodes = set(nodes)
-            processprogress = ui.makeprogress(_("processing nodes"),
-                                              unit='nodes',
-                                              total=len(orderednodes))
+            processprogress = ui.makeprogress(
+                _("processing nodes"), unit='nodes', total=len(orderednodes)
+            )
             for i, node in enumerate(orderednodes):
                 processprogress.update(i)
                 # Find delta base
@@ -593,8 +700,9 @@
             # experimental config: repack.chainorphansbysize
             if ui.configbool('repack', 'chainorphansbysize'):
                 orphans = nobase - referenced
-                orderednodes = self._chainorphans(ui, filename, orderednodes,
-                    orphans, deltabases)
+                orderednodes = self._chainorphans(
+                    ui, filename, orderednodes, orphans, deltabases
+                )
 
             # Compute deltas and write to the pack
             for i, node in enumerate(orderednodes):
@@ -607,8 +715,11 @@
                     deltaentry = self.data.getdelta(filename, node)
                     delta, deltabasename, origdeltabase, meta = deltaentry
                     size = meta.get(constants.METAKEYSIZE)
-                    if (deltabasename != filename or origdeltabase != deltabase
-                        or size is None):
+                    if (
+                        deltabasename != filename
+                        or origdeltabase != deltabase
+                        or size is None
+                    ):
                         deltabasetext = self.data.get(filename, deltabase)
                         original = self.data.get(filename, node)
                         size = len(original)
@@ -639,8 +750,9 @@
             if entry.historysource:
                 byfile.setdefault(entry.filename, {})[entry.node] = entry
 
-        progress = ui.makeprogress(_("repacking history"), unit=self.unit,
-                                   total=len(byfile))
+        progress = ui.makeprogress(
+            _("repacking history"), unit=self.unit, total=len(byfile)
+        )
         for filename, entries in sorted(byfile.iteritems()):
             ancestors = {}
             nodes = list(node for node in entries)
@@ -648,8 +760,9 @@
             for node in nodes:
                 if node in ancestors:
                     continue
-                ancestors.update(self.history.getancestors(filename, node,
-                                                           known=ancestors))
+                ancestors.update(
+                    self.history.getancestors(filename, node, known=ancestors)
+                )
 
             # Order the nodes children first
             orderednodes = reversed(self._toposort(ancestors))
@@ -702,11 +815,13 @@
         sortednodes = shallowutil.sortnodes(ancestors.keys(), parentfunc)
         return sortednodes
 
+
 class repackledger(object):
     """Storage for all the bookkeeping that happens during a repack. It contains
     the list of revisions being repacked, what happened to each revision, and
     which source store contained which revision originally (for later cleanup).
     """
+
     def __init__(self):
         self.entries = {}
         self.sources = {}
@@ -748,11 +863,21 @@
     def addcreated(self, value):
         self.created.add(value)
 
+
 class repackentry(object):
     """Simple class representing a single revision entry in the repackledger.
     """
-    __slots__ = (r'filename', r'node', r'datasource', r'historysource',
-                 r'datarepacked', r'historyrepacked', r'gced')
+
+    __slots__ = (
+        r'filename',
+        r'node',
+        r'datasource',
+        r'historysource',
+        r'datarepacked',
+        r'historyrepacked',
+        r'gced',
+    )
+
     def __init__(self, filename, node):
         self.filename = filename
         self.node = node
@@ -767,13 +892,14 @@
         # If garbage collected
         self.gced = False
 
+
 def repacklockvfs(repo):
     if util.safehasattr(repo, 'name'):
         # Lock in the shared cache so repacks across multiple copies of the same
         # repo are coordinated.
         sharedcachepath = shallowutil.getcachepackpath(
-            repo,
-            constants.FILEPACK_CATEGORY)
+            repo, constants.FILEPACK_CATEGORY
+        )
         return vfs.vfs(sharedcachepath)
     else:
         return repo.svfs
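
The repack.py hunks also show how black splits a long boolean condition: the whole expression is wrapped in parentheses and broken with one operand per line, operators leading, as in the `while` loop of _computeincrementalpack() above. A small self-contained sketch of that shape (the values are made up; only the layout matters):

    repacksize = 0
    genpacks = [300, 200, 100]
    chosenpacks = []
    opts = {'repacksizelimit': 450, 'maxrepackpacks': 50}

    while (
        repacksize < opts['repacksizelimit']
        and genpacks
        and len(chosenpacks) < opts['maxrepackpacks']
    ):
        chosenpacks.append(genpacks.pop())
        repacksize += chosenpacks[-1]

    assert chosenpacks == [100, 200, 300] and repacksize == 600
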
--- a/hgext/remotefilelog/shallowbundle.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/shallowbundle.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,10 +26,10 @@
 LocalFiles = 1
 AllFiles = 2
 
+
 def shallowgroup(cls, self, nodelist, rlog, lookup, units=None, reorder=None):
     if not isinstance(rlog, remotefilelog.remotefilelog):
-        for c in super(cls, self).group(nodelist, rlog, lookup,
-                                        units=units):
+        for c in super(cls, self).group(nodelist, rlog, lookup, units=units):
             yield c
         return
 
@@ -52,17 +52,20 @@
 
     yield self.close()
 
+
 class shallowcg1packer(changegroup.cgpacker):
     def generate(self, commonrevs, clnodes, fastpathlinkrev, source):
         if shallowutil.isenabled(self._repo):
             fastpathlinkrev = False
 
-        return super(shallowcg1packer, self).generate(commonrevs, clnodes,
-            fastpathlinkrev, source)
+        return super(shallowcg1packer, self).generate(
+            commonrevs, clnodes, fastpathlinkrev, source
+        )
 
     def group(self, nodelist, rlog, lookup, units=None, reorder=None):
-        return shallowgroup(shallowcg1packer, self, nodelist, rlog, lookup,
-                            units=units)
+        return shallowgroup(
+            shallowcg1packer, self, nodelist, rlog, lookup, units=units
+        )
 
     def generatefiles(self, changedfiles, *args):
         try:
@@ -78,16 +81,18 @@
                 # Force load the filelog data.
                 bundlerepo.bundlerepository.file(repo, 'foo')
                 if repo._cgfilespos:
-                    raise error.Abort("cannot pull from full bundles",
-                                      hint="use `hg unbundle` instead")
+                    raise error.Abort(
+                        "cannot pull from full bundles",
+                        hint="use `hg unbundle` instead",
+                    )
                 return []
             filestosend = self.shouldaddfilegroups(source)
             if filestosend == NoFiles:
-                changedfiles = list([f for f in changedfiles
-                                     if not repo.shallowmatch(f)])
+                changedfiles = list(
+                    [f for f in changedfiles if not repo.shallowmatch(f)]
+                )
 
-        return super(shallowcg1packer, self).generatefiles(
-            changedfiles, *args)
+        return super(shallowcg1packer, self).generatefiles(changedfiles, *args)
 
     def shouldaddfilegroups(self, source):
         repo = self._repo
@@ -110,8 +115,9 @@
 
     def prune(self, rlog, missing, commonrevs):
         if not isinstance(rlog, remotefilelog.remotefilelog):
-            return super(shallowcg1packer, self).prune(rlog, missing,
-                commonrevs)
+            return super(shallowcg1packer, self).prune(
+                rlog, missing, commonrevs
+            )
 
         repo = self._repo
         results = []
@@ -138,6 +144,7 @@
         yield meta
         yield delta
 
+
 def makechangegroup(orig, repo, outgoing, version, source, *args, **kwargs):
     if not shallowutil.isenabled(repo):
         return orig(repo, outgoing, version, source, *args, **kwargs)
@@ -149,24 +156,26 @@
             bundlecaps = kwargs.get(r'bundlecaps')
             includepattern = None
             excludepattern = None
-            for cap in (bundlecaps or []):
+            for cap in bundlecaps or []:
                 if cap.startswith("includepattern="):
-                    raw = cap[len("includepattern="):]
+                    raw = cap[len("includepattern=") :]
                     if raw:
                         includepattern = raw.split('\0')
                 elif cap.startswith("excludepattern="):
-                    raw = cap[len("excludepattern="):]
+                    raw = cap[len("excludepattern=") :]
                     if raw:
                         excludepattern = raw.split('\0')
             if includepattern or excludepattern:
-                repo.shallowmatch = match.match(repo.root, '', None,
-                    includepattern, excludepattern)
+                repo.shallowmatch = match.match(
+                    repo.root, '', None, includepattern, excludepattern
+                )
             else:
                 repo.shallowmatch = match.always()
         return orig(repo, outgoing, version, source, *args, **kwargs)
     finally:
         repo.shallowmatch = original
 
+
 def addchangegroupfiles(orig, repo, source, revmap, trp, expectedfiles, *args):
     if not shallowutil.isenabled(repo):
         return orig(repo, source, revmap, trp, expectedfiles, *args)
@@ -218,6 +227,7 @@
             raise error.Abort(_("received file revlog group is empty"))
 
     processed = set()
+
     def available(f, node, depf, depnode):
         if depnode != nullid and (depf, depnode) not in processed:
             if not (depf, depnode) in revisiondatas:
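
The `cap[len("includepattern=") :]` hunks above are PEP 8 slice spacing: when a slice bound is an expression rather than a simple name or number, black treats the colon as a binary operator and puts a space on both sides. A runnable sketch with hypothetical values:

    suffix = 'histpack'
    filename = 'abc123.histpack'

    stem = filename[:6]              # simple bound, compact form kept
    ext = filename[-len(suffix) :]   # expression bound, spaced colon
    base = filename[: -len(suffix)]  # same rule on the other side

    assert stem == 'abc123' and ext == 'histpack'
    assert base == 'abc123.'
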
--- a/hgext/remotefilelog/shallowrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/shallowrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -43,19 +43,24 @@
 
     # Instantiate local data stores
     localcontent = contentstore.remotefilelogcontentstore(
-        repo, localpath, repo.name, shared=False)
+        repo, localpath, repo.name, shared=False
+    )
     localmetadata = metadatastore.remotefilelogmetadatastore(
-        repo, localpath, repo.name, shared=False)
+        repo, localpath, repo.name, shared=False
+    )
     return localcontent, localmetadata
 
+
 def makecachestores(repo):
     """Typically machine-wide, cache of remote data; can be discarded."""
     # Instantiate shared cache stores
     cachepath = shallowutil.getcachepath(repo.ui)
     cachecontent = contentstore.remotefilelogcontentstore(
-        repo, cachepath, repo.name, shared=True)
+        repo, cachepath, repo.name, shared=True
+    )
     cachemetadata = metadatastore.remotefilelogmetadatastore(
-        repo, cachepath, repo.name, shared=True)
+        repo, cachepath, repo.name, shared=True
+    )
 
     repo.sharedstore = cachecontent
     repo.shareddatastores.append(cachecontent)
@@ -63,30 +68,35 @@
 
     return cachecontent, cachemetadata
 
+
 def makeremotestores(repo, cachecontent, cachemetadata):
     """These stores fetch data from a remote server."""
     # Instantiate remote stores
     repo.fileservice = fileserverclient.fileserverclient(repo)
     remotecontent = contentstore.remotecontentstore(
-        repo.ui, repo.fileservice, cachecontent)
+        repo.ui, repo.fileservice, cachecontent
+    )
     remotemetadata = metadatastore.remotemetadatastore(
-        repo.ui, repo.fileservice, cachemetadata)
+        repo.ui, repo.fileservice, cachemetadata
+    )
     return remotecontent, remotemetadata
 
+
 def makepackstores(repo):
     """Packs are more efficient (to read from) cache stores."""
     # Instantiate pack stores
-    packpath = shallowutil.getcachepackpath(repo,
-                                            constants.FILEPACK_CATEGORY)
+    packpath = shallowutil.getcachepackpath(repo, constants.FILEPACK_CATEGORY)
     packcontentstore = datapack.datapackstore(repo.ui, packpath)
     packmetadatastore = historypack.historypackstore(repo.ui, packpath)
 
     repo.shareddatastores.append(packcontentstore)
     repo.sharedhistorystores.append(packmetadatastore)
-    shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
-        packmetadatastore)
+    shallowutil.reportpackmetrics(
+        repo.ui, 'filestore', packcontentstore, packmetadatastore
+    )
     return packcontentstore, packmetadatastore
 
+
 def makeunionstores(repo):
     """Union stores iterate the other stores and return the first result."""
     repo.shareddatastores = []
@@ -95,23 +105,38 @@
     packcontentstore, packmetadatastore = makepackstores(repo)
     cachecontent, cachemetadata = makecachestores(repo)
     localcontent, localmetadata = makelocalstores(repo)
-    remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
-                                                     cachemetadata)
+    remotecontent, remotemetadata = makeremotestores(
+        repo, cachecontent, cachemetadata
+    )
 
     # Instantiate union stores
     repo.contentstore = contentstore.unioncontentstore(
-        packcontentstore, cachecontent,
-        localcontent, remotecontent, writestore=localcontent)
+        packcontentstore,
+        cachecontent,
+        localcontent,
+        remotecontent,
+        writestore=localcontent,
+    )
     repo.metadatastore = metadatastore.unionmetadatastore(
-        packmetadatastore, cachemetadata, localmetadata, remotemetadata,
-        writestore=localmetadata)
+        packmetadatastore,
+        cachemetadata,
+        localmetadata,
+        remotemetadata,
+        writestore=localmetadata,
+    )
 
     fileservicedatawrite = cachecontent
     fileservicehistorywrite = cachemetadata
-    repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
-                              fileservicedatawrite, fileservicehistorywrite)
-    shallowutil.reportpackmetrics(repo.ui, 'filestore',
-        packcontentstore, packmetadatastore)
+    repo.fileservice.setstore(
+        repo.contentstore,
+        repo.metadatastore,
+        fileservicedatawrite,
+        fileservicehistorywrite,
+    )
+    shallowutil.reportpackmetrics(
+        repo.ui, 'filestore', packcontentstore, packmetadatastore
+    )
+
 
 def wraprepo(repo):
     class shallowrepository(repo.__class__):
@@ -121,11 +146,16 @@
 
         @util.propertycache
         def fallbackpath(self):
-            path = repo.ui.config("remotefilelog", "fallbackpath",
-                                  repo.ui.config('paths', 'default'))
+            path = repo.ui.config(
+                "remotefilelog",
+                "fallbackpath",
+                repo.ui.config('paths', 'default'),
+            )
             if not path:
-                raise error.Abort("no remotefilelog server "
-                    "configured - is your .hg/hgrc trusted?")
+                raise error.Abort(
+                    "no remotefilelog server "
+                    "configured - is your .hg/hgrc trusted?"
+                )
 
             return path
 
@@ -157,8 +187,9 @@
             if self.shallowmatch(path):
                 return remotefilectx.remotefilectx(self, path, *args, **kwargs)
             else:
-                return super(shallowrepository, self).filectx(path, *args,
-                                                              **kwargs)
+                return super(shallowrepository, self).filectx(
+                    path, *args, **kwargs
+                )
 
         @localrepo.unfilteredmethod
         def commitctx(self, ctx, error=False, origctx=None):
@@ -178,12 +209,19 @@
                     if fparent1 != nullid:
                         files.append((f, hex(fparent1)))
                 self.fileservice.prefetch(files)
-            return super(shallowrepository, self).commitctx(ctx,
-                                                            error=error,
-                                                            origctx=origctx)
+            return super(shallowrepository, self).commitctx(
+                ctx, error=error, origctx=origctx
+            )
 
-        def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
-                               opts=None, ensurestart=False):
+        def backgroundprefetch(
+            self,
+            revs,
+            base=None,
+            repack=False,
+            pats=None,
+            opts=None,
+            ensurestart=False,
+        ):
             """Runs prefetch in background with optional repack
             """
             cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'prefetch']
@@ -193,15 +231,22 @@
                 cmd += ['-r', revs]
             # We know this command will find a binary, so don't block
             # on it starting.
-            procutil.runbgcommand(cmd, encoding.environ,
-                                  ensurestart=ensurestart)
+            procutil.runbgcommand(
+                cmd, encoding.environ, ensurestart=ensurestart
+            )
 
         def prefetch(self, revs, base=None, pats=None, opts=None):
             """Prefetches all the necessary file revisions for the given revs
             Optionally runs repack in background
             """
-            with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
-                            _('prefetching in %s') % repo.origroot):
+            with repo._lock(
+                repo.svfs,
+                'prefetchlock',
+                True,
+                None,
+                None,
+                _('prefetching in %s') % repo.origroot,
+            ):
                 self._prefetch(revs, base, pats, opts)
 
         def _prefetch(self, revs, base=None, pats=None, opts=None):
@@ -212,8 +257,9 @@
                 # become obsolete if the local commits are stripped.
                 localrevs = repo.revs('outgoing(%s)', fallbackpath)
                 if base is not None and base != nullrev:
-                    serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
-                                                base, localrevs))
+                    serverbase = list(
+                        repo.revs('first(reverse(::%s) - %ld)', base, localrevs)
+                    )
                     if serverbase:
                         base = serverbase[0]
             else:
@@ -290,13 +336,16 @@
 
     makeunionstores(repo)
 
-    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
-                                             None)
-    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
-                                             None)
+    repo.includepattern = repo.ui.configlist(
+        "remotefilelog", "includepattern", None
+    )
+    repo.excludepattern = repo.ui.configlist(
+        "remotefilelog", "excludepattern", None
+    )
     if not util.safehasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)
 
     if repo.includepattern or repo.excludepattern:
-        repo.shallowmatch = match.match(repo.root, '', None,
-            repo.includepattern, repo.excludepattern)
+        repo.shallowmatch = match.match(
+            repo.root, '', None, repo.includepattern, repo.excludepattern
+        )
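
Function and method signatures follow the same staged wrapping as calls: when the parameter list overflows, each parameter gets its own line plus a trailing comma, as in backgroundprefetch() above. A hypothetical example of the resulting shape:

    def backgroundfetch(
        repo,
        revs,
        base=None,
        repack=False,
        pats=None,
        opts=None,
        ensurestart=False,
    ):
        """Hypothetical function; only the signature layout is the point."""
        return (repo, revs, base, repack, pats, opts, ensurestart)
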
--- a/hgext/remotefilelog/shallowstore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/shallowstore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,6 +6,7 @@
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import
 
+
 def wrapstore(store):
     class shallowstore(store.__class__):
         def __contains__(self, path):
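
The one hunk in shallowstore.py is blank-line normalization, which accounts for most of the lone added lines throughout this commit: two blank lines before a top-level definition (including after the imports), one before a method, and one between a class docstring and the code that follows. A sketch with hypothetical names:

    from __future__ import absolute_import


    def first():
        return 1


    class example(object):
        """Two blank lines above this class, one after this docstring."""

        def method(self):
            return self
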
--- a/hgext/remotefilelog/shallowutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/shallowutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,28 +31,34 @@
 if not pycompat.iswindows:
     import grp
 
+
 def isenabled(repo):
     """returns whether the repository is remotefilelog enabled or not"""
     return constants.SHALLOWREPO_REQUIREMENT in repo.requirements
 
+
 def getcachekey(reponame, file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(reponame, pathhash[:2], pathhash[2:], id)
 
+
 def getlocalkey(file, id):
     pathhash = node.hex(hashlib.sha1(file).digest())
     return os.path.join(pathhash, id)
 
+
 def getcachepath(ui, allowempty=False):
     cachepath = ui.config("remotefilelog", "cachepath")
     if not cachepath:
         if allowempty:
             return None
         else:
-            raise error.Abort(_("could not find config option "
-                                "remotefilelog.cachepath"))
+            raise error.Abort(
+                _("could not find config option " "remotefilelog.cachepath")
+            )
     return util.expandpath(cachepath)
 
+
 def getcachepackpath(repo, category):
     cachepath = getcachepath(repo.ui)
     if category != constants.FILEPACK_CATEGORY:
@@ -60,9 +66,11 @@
     else:
         return os.path.join(cachepath, repo.name, 'packs')
 
+
 def getlocalpackpath(base, category):
     return os.path.join(base, 'packs', category)
 
+
 def createrevlogtext(text, copyfrom=None, copyrev=None):
     """returns a string that matches the revlog contents in a
     traditional revlog
@@ -76,14 +84,16 @@
 
     return text
 
+
 def parsemeta(text):
     """parse mercurial filelog metadata"""
     meta, size = storageutil.parsemeta(text)
     if text.startswith('\1\n'):
         s = text.index('\1\n', 2)
-        text = text[s + 2:]
+        text = text[s + 2 :]
     return meta or {}, text
 
+
 def sumdicts(*dicts):
     """Adds all the values of *dicts together into one dictionary. This assumes
     the values in *dicts are all summable.
@@ -96,6 +106,7 @@
             result[k] += v
     return result
 
+
 def prefixkeys(dict, prefix):
     """Returns ``dict`` with ``prefix`` prepended to all its keys."""
     result = {}
@@ -103,11 +114,13 @@
         result[prefix + k] = v
     return result
 
+
 def reportpackmetrics(ui, prefix, *stores):
     dicts = [s.getmetrics() for s in stores]
     dict = prefixkeys(sumdicts(*dicts), prefix + '_')
     ui.log(prefix + "_packsizes", "\n", **pycompat.strkwargs(dict))
 
+
 def _parsepackmeta(metabuf):
     """parse datapack meta, bytes (<metadata-list>) -> dict
 
@@ -121,19 +134,20 @@
     offset = 0
     buflen = len(metabuf)
     while buflen - offset >= 3:
-        key = metabuf[offset:offset + 1]
+        key = metabuf[offset : offset + 1]
         offset += 1
         metalen = struct.unpack_from('!H', metabuf, offset)[0]
         offset += 2
         if offset + metalen > buflen:
             raise ValueError('corrupted metadata: incomplete buffer')
-        value = metabuf[offset:offset + metalen]
+        value = metabuf[offset : offset + metalen]
         metadict[key] = value
         offset += metalen
     if offset != buflen:
         raise ValueError('corrupted metadata: redundant data')
     return metadict
 
+
 def _buildpackmeta(metadict):
     """reverse of _parsepackmeta, dict -> bytes (<metadata-list>)
 
@@ -148,9 +162,10 @@
     for k, v in sorted((metadict or {}).iteritems()):
         if len(k) != 1:
             raise error.ProgrammingError('packmeta: illegal key: %s' % k)
-        if len(v) > 0xfffe:
-            raise ValueError('metadata value is too long: 0x%x > 0xfffe'
-                             % len(v))
+        if len(v) > 0xFFFE:
+            raise ValueError(
+                'metadata value is too long: 0x%x > 0xfffe' % len(v)
+            )
         metabuf += k
         metabuf += struct.pack('!H', len(v))
         metabuf += v
@@ -158,11 +173,13 @@
     # only 256 keys, and for each value, len(value) <= 0xfffe.
     return metabuf
 
+
 _metaitemtypes = {
     constants.METAKEYFLAG: (int, pycompat.long),
     constants.METAKEYSIZE: (int, pycompat.long),
 }
 
+
 def buildpackmeta(metadict):
     """like _buildpackmeta, but typechecks metadict and normalize it.
 
@@ -183,6 +200,7 @@
         newmeta[k] = v
     return _buildpackmeta(newmeta)
 
+
 def parsepackmeta(metabuf):
     """like _parsepackmeta, but convert fields to desired types automatically.
 
@@ -195,14 +213,16 @@
             metadict[k] = bin2int(v)
     return metadict
 
+
 def int2bin(n):
     """convert a non-negative integer to raw binary buffer"""
     buf = bytearray()
     while n > 0:
-        buf.insert(0, n & 0xff)
+        buf.insert(0, n & 0xFF)
         n >>= 8
     return bytes(buf)
 
+
 def bin2int(buf):
     """the reverse of int2bin, convert a binary buffer to an integer"""
     x = 0
@@ -211,6 +231,7 @@
         x |= b
     return x
 
+
 def parsesizeflags(raw):
     """given a remotefilelog blob, return (headersize, rawtextsize, flags)
 
@@ -227,12 +248,13 @@
             if header.startswith('v1\n'):
                 for s in header.split('\n'):
                     if s.startswith(constants.METAKEYSIZE):
-                        size = int(s[len(constants.METAKEYSIZE):])
+                        size = int(s[len(constants.METAKEYSIZE) :])
                     elif s.startswith(constants.METAKEYFLAG):
-                        flags = int(s[len(constants.METAKEYFLAG):])
+                        flags = int(s[len(constants.METAKEYFLAG) :])
             else:
-                raise RuntimeError('unsupported remotefilelog header: %s'
-                                   % header)
+                raise RuntimeError(
+                    'unsupported remotefilelog header: %s' % header
+                )
         else:
             # v0, str(int(size)) is the header
             size = int(header)
@@ -242,6 +264,7 @@
         raise RuntimeError(r"unexpected remotefilelog header: no size found")
     return index + 1, size, flags
 
+
 def buildfileblobheader(size, flags, version=None):
     """return the header of a remotefilelog blob.
 
@@ -254,9 +277,12 @@
     if version is None:
         version = int(bool(flags))
     if version == 1:
-        header = ('v1\n%s%d\n%s%d'
-                  % (constants.METAKEYSIZE, size,
-                     constants.METAKEYFLAG, flags))
+        header = 'v1\n%s%d\n%s%d' % (
+            constants.METAKEYSIZE,
+            size,
+            constants.METAKEYFLAG,
+            flags,
+        )
     elif version == 0:
         if flags:
             raise error.ProgrammingError('fileblob v0 does not support flag')
@@ -265,6 +291,7 @@
         raise error.ProgrammingError('unknown fileblob version %d' % version)
     return header
 
+
 def ancestormap(raw):
     offset, size, flags = parsesizeflags(raw)
     start = offset + size
@@ -273,17 +300,18 @@
     while start < len(raw):
         divider = raw.index('\0', start + 80)
 
-        currentnode = raw[start:(start + 20)]
-        p1 = raw[(start + 20):(start + 40)]
-        p2 = raw[(start + 40):(start + 60)]
-        linknode = raw[(start + 60):(start + 80)]
-        copyfrom = raw[(start + 80):divider]
+        currentnode = raw[start : (start + 20)]
+        p1 = raw[(start + 20) : (start + 40)]
+        p2 = raw[(start + 40) : (start + 60)]
+        linknode = raw[(start + 60) : (start + 80)]
+        copyfrom = raw[(start + 80) : divider]
 
         mapping[currentnode] = (p1, p2, linknode, copyfrom)
         start = divider + 1
 
     return mapping
 
+
 def readfile(path):
     f = open(path, 'rb')
     try:
@@ -298,12 +326,14 @@
     finally:
         f.close()
 
+
 def unlinkfile(filepath):
     if pycompat.iswindows:
         # On Windows, os.unlink cannot delete readonly files
         os.chmod(filepath, stat.S_IWUSR)
     os.unlink(filepath)
 
+
 def renamefile(source, destination):
     if pycompat.iswindows:
         # On Windows, os.rename cannot rename readonly files
@@ -315,6 +345,7 @@
 
     os.rename(source, destination)
 
+
 def writefile(path, content, readonly=False):
     dirname, filename = os.path.split(path)
     if not os.path.exists(dirname):
@@ -352,6 +383,7 @@
             pass
         raise
 
+
 def sortnodes(nodes, parentfunc):
     """Topologically sorts the nodes, using the parentfunc to find
     the parents of nodes."""
@@ -388,36 +420,43 @@
 
     return results
 
+
 def readexactly(stream, n):
     '''read n bytes from stream.read and abort if less was available'''
     s = stream.read(n)
     if len(s) < n:
-        raise error.Abort(_("stream ended unexpectedly"
-                           " (got %d bytes, expected %d)")
-                          % (len(s), n))
+        raise error.Abort(
+            _("stream ended unexpectedly" " (got %d bytes, expected %d)")
+            % (len(s), n)
+        )
     return s
 
+
 def readunpack(stream, fmt):
     data = readexactly(stream, struct.calcsize(fmt))
     return struct.unpack(fmt, data)
 
+
 def readpath(stream):
     rawlen = readexactly(stream, constants.FILENAMESIZE)
     pathlen = struct.unpack(constants.FILENAMESTRUCT, rawlen)[0]
     return readexactly(stream, pathlen)
 
+
 def readnodelist(stream):
     rawlen = readexactly(stream, constants.NODECOUNTSIZE)
     nodecount = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)[0]
     for i in pycompat.xrange(nodecount):
         yield readexactly(stream, constants.NODESIZE)
 
+
 def readpathlist(stream):
     rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
     pathcount = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)[0]
     for i in pycompat.xrange(pathcount):
         yield readpath(stream)
 
+
 def getgid(groupname):
     try:
         gid = grp.getgrnam(pycompat.fsdecode(groupname)).gr_gid
@@ -425,6 +464,7 @@
     except KeyError:
         return None
 
+
 def setstickygroupdir(path, gid, warn=None):
     if gid is None:
         return
@@ -435,6 +475,7 @@
         if warn:
             warn(_('unable to chown/chmod on %s: %s\n') % (path, ex))
 
+
 def mkstickygroupdir(ui, path):
     """Creates the given directory (if it doesn't exist) and give it a
     particular group with setgid enabled."""
@@ -479,12 +520,14 @@
     finally:
         os.umask(oldumask)
 
+
 def getusername(ui):
     try:
         return stringutil.shortuser(ui.username())
     except Exception:
         return 'unknown'
 
+
 def getreponame(ui):
     reponame = ui.config('paths', 'default')
     if reponame:
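
Two small things are worth calling out in the shallowutil.py hunks: hex literals are normalized to uppercase digits while the value is untouched, and string literals keep their original quoting throughout (the single quotes survive, so string normalization was evidently not applied on this run). The numeric change, reduced to a runnable check:

    MAXMETALEN = 0xFFFE  # formerly written 0xfffe; same value
    LOWBYTE = 0xFF       # formerly 0xff

    assert MAXMETALEN == 65534
    assert LOWBYTE == 255
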
--- a/hgext/remotefilelog/shallowverifier.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotefilelog/shallowverifier.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,9 +9,11 @@
 from mercurial.i18n import _
 from mercurial import verify
 
+
 class shallowverifier(verify.verifier):
     def _verifyfiles(self, filenodes, filelinkrevs):
         """Skips files verification since repo's not guaranteed to have them"""
         self.repo.ui.status(
-            _("skipping filelog check since remotefilelog is used\n"))
+            _("skipping filelog check since remotefilelog is used\n")
+        )
         return 0, 0
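
A side effect visible in several hunks above (for example the error.Abort and RepackAlreadyRunning messages): adjacent string literals are a single constant to the Python parser, and black re-indents them without merging them, so splits like "Cannot clone from a shallow repo " "to a full repo." survive even where the result would now fit on one line. A runnable check:

    msg = "skipping repack - another repack " "is already running"
    assert msg == "skipping repack - another repack is already running"
    # The compiler concatenates adjacent literals before the code runs,
    # so both spellings produce the identical constant.
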
--- a/hgext/remotenames.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/remotenames.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,9 +28,7 @@
 
 from mercurial.i18n import _
 
-from mercurial.node import (
-    bin,
-)
+from mercurial.node import bin
 from mercurial import (
     bookmarks,
     error,
@@ -45,15 +43,15 @@
     util,
 )
 
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 if pycompat.ispy3:
     import collections.abc
+
     mutablemapping = collections.abc.MutableMapping
 else:
     import collections
+
     mutablemapping = collections.MutableMapping
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -67,16 +65,17 @@
 templatekeyword = registrar.templatekeyword()
 revsetpredicate = registrar.revsetpredicate()
 
-configitem('remotenames', 'bookmarks',
-    default=True,
+configitem(
+    'remotenames', 'bookmarks', default=True,
 )
-configitem('remotenames', 'branches',
-    default=True,
+configitem(
+    'remotenames', 'branches', default=True,
 )
-configitem('remotenames', 'hoistedpeer',
-    default='default',
+configitem(
+    'remotenames', 'hoistedpeer', default='default',
 )
 
+
 class lazyremotenamedict(mutablemapping):
     """
     Read-only dict-like Class to lazily resolve remotename entries
@@ -88,10 +87,11 @@
     is in self.potentialentries we resolve it and store the result in
     self.cache. We cannot be lazy when asked for all the entries (keys).
     """
+
     def __init__(self, kind, repo):
         self.cache = {}
         self.potentialentries = {}
-        self._kind = kind # bookmarks or branches
+        self._kind = kind  # bookmarks or branches
         self._repo = repo
         self.loaded = False
 
@@ -99,8 +99,9 @@
         """ Read the remotenames file, store entries matching selected kind """
         self.loaded = True
         repo = self._repo
-        for node, rpath, rname in logexchange.readremotenamefile(repo,
-                                                                self._kind):
+        for node, rpath, rname in logexchange.readremotenamefile(
+            repo, self._kind
+        ):
             name = rpath + '/' + rname
             self.potentialentries[name] = (node, rpath, name)
 
@@ -117,7 +118,7 @@
         except LookupError:
             return None
         # Skip closed branches
-        if (self._kind == 'branches' and repo[binnode].closesbranch()):
+        if self._kind == 'branches' and repo[binnode].closesbranch():
             return None
         return [binnode]
 
@@ -169,6 +170,7 @@
 
     items = iteritems
 
+
 class remotenames(object):
     """
     This class encapsulates all the remotenames state. It also contains
@@ -223,7 +225,7 @@
             hoist += '/'
             for name, node in marktonodes.iteritems():
                 if name.startswith(hoist):
-                    name = name[len(hoist):]
+                    name = name[len(hoist) :]
                     self._hoisttonodes[name] = node
         return self._hoisttonodes
 
@@ -234,10 +236,11 @@
             hoist += '/'
             for name, node in marktonodes.iteritems():
                 if name.startswith(hoist):
-                    name = name[len(hoist):]
+                    name = name[len(hoist) :]
                     self._nodetohoists.setdefault(node[0], []).append(name)
         return self._nodetohoists
 
+
 def wrapprintbookmarks(orig, ui, repo, fm, bmarks):
     if 'remotebookmarks' not in repo.names:
         return
@@ -253,9 +256,11 @@
 
     return orig(ui, repo, fm, bmarks)
 
+
 def extsetup(ui):
     extensions.wrapfunction(bookmarks, '_printbookmarks', wrapprintbookmarks)
 
+
 def reposetup(ui, repo):
 
     # set the config option to store remotenames
@@ -274,10 +279,13 @@
             colorname='remotebookmark',
             logfmt='remote bookmark:  %s\n',
             listnames=lambda repo: repo._remotenames.bmarktonodes().keys(),
-            namemap=lambda repo, name:
-                repo._remotenames.bmarktonodes().get(name, []),
-            nodemap=lambda repo, node:
-                repo._remotenames.nodetobmarks().get(node, []))
+            namemap=lambda repo, name: repo._remotenames.bmarktonodes().get(
+                name, []
+            ),
+            nodemap=lambda repo, node: repo._remotenames.nodetobmarks().get(
+                node, []
+            ),
+        )
         repo.names.addnamespace(remotebookmarkns)
 
         # hoisting only works if there are remote bookmarks
@@ -288,12 +296,16 @@
                 templatename='hoistednames',
                 colorname='hoistedname',
                 logfmt='hoisted name:  %s\n',
-                listnames = lambda repo:
-                    repo._remotenames.hoisttonodes(hoist).keys(),
-                namemap = lambda repo, name:
-                    repo._remotenames.hoisttonodes(hoist).get(name, []),
-                nodemap = lambda repo, node:
-                    repo._remotenames.nodetohoists(hoist).get(node, []))
+                listnames=lambda repo: repo._remotenames.hoisttonodes(
+                    hoist
+                ).keys(),
+                namemap=lambda repo, name: repo._remotenames.hoisttonodes(
+                    hoist
+                ).get(name, []),
+                nodemap=lambda repo, node: repo._remotenames.nodetohoists(
+                    hoist
+                ).get(node, []),
+            )
             repo.names.addnamespace(hoistednamens)
 
     if ui.configbool('remotenames', 'branches'):
@@ -302,13 +314,17 @@
             templatename='remotebranches',
             colorname='remotebranch',
             logfmt='remote branch:  %s\n',
-            listnames = lambda repo: repo._remotenames.branchtonodes().keys(),
-            namemap = lambda repo, name:
-                repo._remotenames.branchtonodes().get(name, []),
-            nodemap = lambda repo, node:
-                repo._remotenames.nodetobranch().get(node, []))
+            listnames=lambda repo: repo._remotenames.branchtonodes().keys(),
+            namemap=lambda repo, name: repo._remotenames.branchtonodes().get(
+                name, []
+            ),
+            nodemap=lambda repo, node: repo._remotenames.nodetobranch().get(
+                node, []
+            ),
+        )
         repo.names.addnamespace(remotebranchns)
 
+
 @templatekeyword('remotenames', requires={'repo', 'ctx'})
 def remotenameskw(context, mapping):
     """List of strings. Remote names associated with the changeset."""
@@ -322,8 +338,10 @@
     if 'remotebranches' in repo.names:
         remotenames += repo.names['remotebranches'].names(repo, ctx.node())
 
-    return templateutil.compatlist(context, mapping, 'remotename', remotenames,
-                                   plural='remotenames')
+    return templateutil.compatlist(
+        context, mapping, 'remotename', remotenames, plural='remotenames'
+    )
+
 
 @templatekeyword('remotebookmarks', requires={'repo', 'ctx'})
 def remotebookmarkskw(context, mapping):
@@ -335,8 +353,14 @@
     if 'remotebookmarks' in repo.names:
         remotebmarks = repo.names['remotebookmarks'].names(repo, ctx.node())
 
-    return templateutil.compatlist(context, mapping, 'remotebookmark',
-                                   remotebmarks, plural='remotebookmarks')
+    return templateutil.compatlist(
+        context,
+        mapping,
+        'remotebookmark',
+        remotebmarks,
+        plural='remotebookmarks',
+    )
+
 
 @templatekeyword('remotebranches', requires={'repo', 'ctx'})
 def remotebrancheskw(context, mapping):
@@ -348,15 +372,22 @@
     if 'remotebranches' in repo.names:
         remotebranches = repo.names['remotebranches'].names(repo, ctx.node())
 
-    return templateutil.compatlist(context, mapping, 'remotebranch',
-                                   remotebranches, plural='remotebranches')
+    return templateutil.compatlist(
+        context,
+        mapping,
+        'remotebranch',
+        remotebranches,
+        plural='remotebranches',
+    )
+
 
 def _revsetutil(repo, subset, x, rtypes):
     """utility function to return a set of revs based on the rtypes"""
     args = revsetlang.getargs(x, 0, 1, _('only one argument accepted'))
     if args:
         kind, pattern, matcher = stringutil.stringmatcher(
-            revsetlang.getstring(args[0], _('argument must be a string')))
+            revsetlang.getstring(args[0], _('argument must be a string'))
+        )
     else:
         kind = pattern = None
         matcher = util.always
@@ -371,12 +402,14 @@
                     continue
                 nodes.update(ns.nodes(repo, name))
     if kind == 'literal' and not nodes:
-        raise error.RepoLookupError(_("remote name '%s' does not exist")
-                                    % pattern)
+        raise error.RepoLookupError(
+            _("remote name '%s' does not exist") % pattern
+        )
 
     revs = (cl.rev(n) for n in nodes if cl.hasnode(n))
     return subset & smartset.baseset(revs)
 
+
 @revsetpredicate('remotenames([name])')
 def remotenamesrevset(repo, subset, x):
     """All changesets which have a remotename on them. If `name` is
@@ -386,6 +419,7 @@
     """
     return _revsetutil(repo, subset, x, ('remotebookmarks', 'remotebranches'))
 
+
 @revsetpredicate('remotebranches([name])')
 def remotebranchesrevset(repo, subset, x):
     """All changesets which are branch heads on remotes. If `name` is
@@ -395,6 +429,7 @@
     """
     return _revsetutil(repo, subset, x, ('remotebranches',))
 
+
 @revsetpredicate('remotebookmarks([name])')
 def remotebmarksrevset(repo, subset, x):
     """All changesets which have bookmarks on remotes. If `name` is
--- a/hgext/schemes.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/schemes.py	Sun Oct 06 09:45:02 2019 -0400
@@ -65,6 +65,7 @@
 
 _partre = re.compile(br'\{(\d+)\}')
 
+
 class ShortRepository(object):
     def __init__(self, url, scheme, templater):
         self.scheme = scheme
@@ -80,8 +81,9 @@
 
     def instance(self, ui, url, create, intents=None, createopts=None):
         url = self.resolve(url)
-        return hg._peerlookup(url).instance(ui, url, create, intents=intents,
-                                            createopts=createopts)
+        return hg._peerlookup(url).instance(
+            ui, url, create, intents=intents, createopts=createopts
+        )
 
     def resolve(self, url):
         # Should this use the util.url class, or is manual parsing better?
@@ -98,6 +100,7 @@
         context = dict(('%d' % (i + 1), v) for i, v in enumerate(parts))
         return ''.join(self.templater.process(self.url, context)) + tail
 
+
 def hasdriveletter(orig, path):
     if path:
         for scheme in schemes:
@@ -105,26 +108,35 @@
                 return False
     return orig(path)
 
+
 schemes = {
     'py': 'http://hg.python.org/',
     'bb': 'https://bitbucket.org/',
     'bb+ssh': 'ssh://hg@bitbucket.org/',
     'gcode': 'https://{1}.googlecode.com/hg/',
-    'kiln': 'https://{1}.kilnhg.com/Repo/'
-    }
+    'kiln': 'https://{1}.kilnhg.com/Repo/',
+}
+
 
 def extsetup(ui):
     schemes.update(dict(ui.configitems('schemes')))
     t = templater.engine(templater.parse)
     for scheme, url in schemes.items():
-        if (pycompat.iswindows and len(scheme) == 1 and scheme.isalpha()
-            and os.path.exists('%s:\\' % scheme)):
-            raise error.Abort(_('custom scheme %s:// conflicts with drive '
-                               'letter %s:\\\n') % (scheme, scheme.upper()))
+        if (
+            pycompat.iswindows
+            and len(scheme) == 1
+            and scheme.isalpha()
+            and os.path.exists('%s:\\' % scheme)
+        ):
+            raise error.Abort(
+                _('custom scheme %s:// conflicts with drive ' 'letter %s:\\\n')
+                % (scheme, scheme.upper())
+            )
         hg.schemes[scheme] = ShortRepository(url, scheme, t)
 
     extensions.wrapfunction(util, 'hasdriveletter', hasdriveletter)
 
+
 @command('debugexpandscheme', norepo=True)
 def expandscheme(ui, url, **opts):
     """given a repo path, provide the scheme-expanded path
--- a/hgext/share.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/share.py	Sun Oct 06 09:45:02 2019 -0400
@@ -60,17 +60,26 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('share',
-    [('U', 'noupdate', None, _('do not create a working directory')),
-     ('B', 'bookmarks', None, _('also share bookmarks')),
-     ('', 'relative', None, _('point to source using a relative path '
-                              '(EXPERIMENTAL)')),
+
+@command(
+    'share',
+    [
+        ('U', 'noupdate', None, _('do not create a working directory')),
+        ('B', 'bookmarks', None, _('also share bookmarks')),
+        (
+            '',
+            'relative',
+            None,
+            _('point to source using a relative path ' '(EXPERIMENTAL)'),
+        ),
     ],
     _('[-U] [-B] SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
-    norepo=True)
-def share(ui, source, dest=None, noupdate=False, bookmarks=False,
-          relative=False):
+    norepo=True,
+)
+def share(
+    ui, source, dest=None, noupdate=False, bookmarks=False, relative=False
+):
     """create a new shared repository
 
     Initialize a new repository and working directory that shares its
@@ -88,10 +97,17 @@
        the broken clone to reset it to a changeset that still exists.
     """
 
-    hg.share(ui, source, dest=dest, update=not noupdate,
-             bookmarks=bookmarks, relative=relative)
+    hg.share(
+        ui,
+        source,
+        dest=dest,
+        update=not noupdate,
+        bookmarks=bookmarks,
+        relative=relative,
+    )
     return 0
 
+
 @command('unshare', [], '', helpcategory=command.CATEGORY_MAINTENANCE)
 def unshare(ui, repo):
     """convert a shared repository to a normal one
@@ -104,6 +120,7 @@
 
     hg.unshare(ui, repo)
 
+
 # Wrap clone command to pass auto share options.
 def clone(orig, ui, source, *args, **opts):
     pool = ui.config('share', 'pool')
@@ -117,12 +134,14 @@
 
     return orig(ui, source, *args, **opts)
 
+
 def extsetup(ui):
     extensions.wrapfunction(bookmarks, '_getbkfile', getbkfile)
     extensions.wrapfunction(bookmarks.bmstore, '_recordchange', recordchange)
     extensions.wrapfunction(bookmarks.bmstore, '_writerepo', writerepo)
     extensions.wrapcommand(commands.table, 'clone', clone)
 
+
 def _hassharedbookmarks(repo):
     """Returns whether this repo has shared bookmarks"""
     if bookmarks.bookmarksinstore(repo):
@@ -137,6 +156,7 @@
         return False
     return hg.sharedbookmarks in shared
 
+
 def getbkfile(orig, repo):
     if _hassharedbookmarks(repo):
         srcrepo = hg.sharedreposource(repo)
@@ -144,8 +164,9 @@
             # just orig(srcrepo) doesn't work as expected, because
             # HG_PENDING refers repo.root.
             try:
-                fp, pending = txnutil.trypending(repo.root, repo.vfs,
-                                                 'bookmarks')
+                fp, pending = txnutil.trypending(
+                    repo.root, repo.vfs, 'bookmarks'
+                )
                 if pending:
                     # only in this case, bookmark information in repo
                     # is up-to-date.
@@ -165,6 +186,7 @@
             # See also https://www.mercurial-scm.org/wiki/SharedRepository
     return orig(repo)
 
+
 def recordchange(orig, self, tr):
     # Continue with write to local bookmarks file as usual
     orig(self, tr)
@@ -175,6 +197,7 @@
             category = 'share-bookmarks'
             tr.addpostclose(category, lambda tr: self._writerepo(srcrepo))
 
+
 def writerepo(orig, self, repo):
     # First write local bookmarks file in case we ever unshare
     orig(self, repo)
--- a/hgext/show.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/show.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,9 +28,7 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
-from mercurial.node import (
-    nullrev,
-)
+from mercurial.node import nullrev
 from mercurial import (
     cmdutil,
     commands,
@@ -58,6 +56,7 @@
 
 revsetpredicate = registrar.revsetpredicate()
 
+
 class showcmdfunc(registrar._funcregistrarbase):
     """Register a function to be invoked for an `hg show <thing>`."""
 
@@ -84,17 +83,23 @@
         func._fmtopic = fmtopic
         func._csettopic = csettopic
 
+
 showview = showcmdfunc()
 
-@command('show', [
-    # TODO: Switch this template flag to use cmdutil.formatteropts if
-    # 'hg show' becomes stable before --template/-T is stable. For now,
-    # we are putting it here without the '(EXPERIMENTAL)' flag because it
-    # is an important part of the 'hg show' user experience and the entire
-    # 'hg show' experience is experimental.
-    ('T', 'template', '', ('display with template'), _('TEMPLATE')),
-    ], _('VIEW'),
-    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
+
+@command(
+    'show',
+    [
+        # TODO: Switch this template flag to use cmdutil.formatteropts if
+        # 'hg show' becomes stable before --template/-T is stable. For now,
+        # we are putting it here without the '(EXPERIMENTAL)' flag because it
+        # is an important part of the 'hg show' user experience and the entire
+        # 'hg show' experience is experimental.
+        ('T', 'template', '', 'display with template', _('TEMPLATE')),
+    ],
+    _('VIEW'),
+    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
+)
 def show(ui, repo, view=None, template=None):
     """show various repository information
 
@@ -127,16 +132,20 @@
         ui.write('\n')
 
         for name, func in sorted(views.items()):
-            ui.write(('%s\n') % pycompat.sysbytes(func.__doc__))
+            ui.write('%s\n' % pycompat.sysbytes(func.__doc__))
 
         ui.write('\n')
-        raise error.Abort(_('no view requested'),
-                          hint=_('use "hg show VIEW" to choose a view'))
+        raise error.Abort(
+            _('no view requested'),
+            hint=_('use "hg show VIEW" to choose a view'),
+        )
 
     # TODO use same logic as dispatch to perform prefix matching.
     if view not in views:
-        raise error.Abort(_('unknown view: %s') % view,
-                          hint=_('run "hg show" to see available views'))
+        raise error.Abort(
+            _('unknown view: %s') % view,
+            hint=_('run "hg show" to see available views'),
+        )
 
     template = template or 'show'
 
@@ -155,6 +164,7 @@
     else:
         return fn(ui, repo)
 
+
 @showview('bookmarks', fmtopic='bookmarks')
 def showbookmarks(ui, repo, fm):
     """bookmarks and their associated changeset"""
@@ -177,21 +187,27 @@
         fm.context(ctx=repo[node])
         fm.write('bookmark', '%s', bm)
         fm.write('node', fm.hexfunc(node), fm.hexfunc(node))
-        fm.data(active=bm == active,
-                longestbookmarklen=longestname,
-                nodelen=nodelen)
+        fm.data(
+            active=bm == active, longestbookmarklen=longestname, nodelen=nodelen
+        )
+
 
 @showview('stack', csettopic='stack')
 def showstack(ui, repo, displayer):
     """current line of work"""
     wdirctx = repo['.']
     if wdirctx.rev() == nullrev:
-        raise error.Abort(_('stack view only available when there is a '
-                            'working directory'))
+        raise error.Abort(
+            _('stack view only available when there is a ' 'working directory')
+        )
 
     if wdirctx.phase() == phases.public:
-        ui.write(_('(empty stack; working directory parent is a published '
-                   'changeset)\n'))
+        ui.write(
+            _(
+                '(empty stack; working directory parent is a published '
+                'changeset)\n'
+            )
+        )
         return
 
     # TODO extract "find stack" into a function to facilitate
@@ -238,8 +254,11 @@
     # merge or rebase targets.
     if basectx:
         # TODO make this customizable?
-        newheads = set(repo.revs('heads(%d::) - %ld - not public()',
-                                 basectx.rev(), stackrevs))
+        newheads = set(
+            repo.revs(
+                'heads(%d::) - %ld - not public()', basectx.rev(), stackrevs
+            )
+        )
     else:
         newheads = set()
 
@@ -258,8 +277,10 @@
     # TODO use proper graph symbols from graphmod
 
     tres = formatter.templateresources(ui, repo)
-    shortesttmpl = formatter.maketemplater(ui, '{shortest(node, %d)}' % nodelen,
-                                           resources=tres)
+    shortesttmpl = formatter.maketemplater(
+        ui, '{shortest(node, %d)}' % nodelen, resources=tres
+    )
+
     def shortest(ctx):
         return shortesttmpl.renderdefault({'ctx': ctx, 'node': ctx.hex()})
 
@@ -278,8 +299,9 @@
 
         sourcectx = repo[stackrevs[-1]]
 
-        sortedheads = sorted(newheads, key=lambda x: revdistance[x],
-                             reverse=True)
+        sortedheads = sorted(
+            newheads, key=lambda x: revdistance[x], reverse=True
+        )
 
         for i, rev in enumerate(sortedheads):
             ctx = repo[rev]
@@ -289,7 +311,7 @@
             else:
                 ui.write('  ')
 
-            ui.write(('o  '))
+            ui.write('o  ')
             displayer.show(ctx, nodelen=nodelen)
             displayer.flush(ctx)
             ui.write('\n')
@@ -300,15 +322,21 @@
                 ui.write(' /')
 
             ui.write('    (')
-            ui.write(_('%d commits ahead') % revdistance[rev],
-                     label='stack.commitdistance')
+            ui.write(
+                _('%d commits ahead') % revdistance[rev],
+                label='stack.commitdistance',
+            )
 
             if haverebase:
                 # TODO may be able to omit --source in some scenarios
                 ui.write('; ')
-                ui.write(('hg rebase --source %s --dest %s' % (
-                         shortest(sourcectx), shortest(ctx))),
-                         label='stack.rebasehint')
+                ui.write(
+                    (
+                        'hg rebase --source %s --dest %s'
+                        % (shortest(sourcectx), shortest(ctx))
+                    ),
+                    label='stack.rebasehint',
+                )
 
             ui.write(')\n')
 
@@ -345,12 +373,13 @@
             ui.write(' /   ')
 
         ui.write(_('(stack base)'), '\n', label='stack.label')
-        ui.write(('o  '))
+        ui.write('o  ')
 
         displayer.show(basectx, nodelen=nodelen)
         displayer.flush(basectx)
         ui.write('\n')
 
+
 @revsetpredicate('_underway([commitage[, headage]])')
 def underwayrevset(repo, subset, x):
     args = revset.getargsdict(x, 'underway', 'commitage headage')
@@ -374,8 +403,11 @@
     rsargs = []
     if args['commitage']:
         rs += ' and date(%s)'
-        rsargs.append(revsetlang.getstring(args['commitage'],
-                                           _('commitage requires a string')))
+        rsargs.append(
+            revsetlang.getstring(
+                args['commitage'], _('commitage requires a string')
+            )
+        )
 
     mutable = repo.revs(rs, *rsargs)
     relevant = revset.baseset(mutable)
@@ -389,8 +421,11 @@
     rsargs = []
     if args['headage']:
         rs += ' and date(%s)'
-        rsargs.append(revsetlang.getstring(args['headage'],
-                                           _('headage requires a string')))
+        rsargs.append(
+            revsetlang.getstring(
+                args['headage'], _('headage requires a string')
+            )
+        )
 
     relevant += repo.revs(rs, *rsargs)
 
@@ -401,6 +436,7 @@
 
     return subset & relevant
 
+
 @showview('work', csettopic='work')
 def showwork(ui, repo, displayer):
     """changesets that aren't finished"""
@@ -411,8 +447,15 @@
     revdag = graphmod.dagwalker(repo, revs)
 
     ui.setconfig('experimental', 'graphshorten', True)
-    logcmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges,
-                            props={'nodelen': nodelen})
+    logcmdutil.displaygraph(
+        ui,
+        repo,
+        revdag,
+        displayer,
+        graphmod.asciiedges,
+        props={'nodelen': nodelen},
+    )
+
 
 def extsetup(ui):
     # Alias `hg <prefix><view>` to `hg show <view>`.
@@ -420,8 +463,9 @@
         for view in showview._table:
             name = '%s%s' % (prefix, view)
 
-            choice, allcommands = cmdutil.findpossible(name, commands.table,
-                                                       strict=True)
+            choice, allcommands = cmdutil.findpossible(
+                name, commands.table, strict=True
+            )
 
             # This alias is already a command name. Don't set it.
             if name in choice:
@@ -433,6 +477,7 @@
 
             ui.setconfig('alias', name, 'show %s' % view, source='show')
 
+
 def longestshortest(repo, revs, minlen=4):
     """Return the length of the longest shortest node to identify revisions.
 
@@ -448,8 +493,11 @@
     if not revs:
         return minlen
     cl = repo.changelog
-    return max(len(scmutil.shortesthexnodeidprefix(repo, cl.node(r), minlen))
-               for r in revs)
+    return max(
+        len(scmutil.shortesthexnodeidprefix(repo, cl.node(r), minlen))
+        for r in revs
+    )
+
 
 # Adjust the docstring of the show command so it shows all registered views.
 # This is a bit hacky because it runs at the end of module load. When moved
@@ -460,11 +508,18 @@
     longest = max(map(len, showview._table.keys()))
     entries = []
     for key in sorted(showview._table.keys()):
-        entries.append(r'    %s   %s' % (
-            pycompat.sysstr(key.ljust(longest)), showview._table[key]._origdoc))
+        entries.append(
+            r'    %s   %s'
+            % (
+                pycompat.sysstr(key.ljust(longest)),
+                showview._table[key]._origdoc,
+            )
+        )
 
     cmdtable['show'][0].__doc__ = pycompat.sysstr('%s\n\n%s\n    ') % (
         cmdtable['show'][0].__doc__.rstrip(),
-        pycompat.sysstr('\n\n').join(entries))
+        pycompat.sysstr('\n\n').join(entries),
+    )
+
 
 _updatedocstring()
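
Another pattern visible throughout show.py is the trailing comma: once a call is exploded one argument per line, the final argument also ends with a comma, so appending an argument later touches only a single line in a future diff. A sketch with a hypothetical function:

    # Hypothetical function; mirrors the exploded displaygraph call above.
    def displaygraph(ui, repo, revdag, displayer, edges, props=None):
        return props

    displaygraph(
        None,
        None,
        [],
        None,
        None,
        props={'nodelen': 4},
    )
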
--- a/hgext/sparse.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/sparse.py	Sun Oct 06 09:45:02 2019 -0400
@@ -97,6 +97,7 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 def extsetup(ui):
     sparse.enabled = True
 
@@ -105,6 +106,7 @@
     _setupadd(ui)
     _setupdirstate(ui)
 
+
 def replacefilecache(cls, propname, replacement):
     """Replace a filecache property with a new class. This allows changing the
     cache invalidation condition."""
@@ -118,25 +120,37 @@
         cls = cls.__bases__[0]
 
     if cls is object:
-        raise AttributeError(_("type '%s' has no property '%s'") % (origcls,
-                             propname))
+        raise AttributeError(
+            _("type '%s' has no property '%s'") % (origcls, propname)
+        )
+
 
 def _setuplog(ui):
     entry = commands.table['log|history']
-    entry[1].append(('', 'sparse', None,
-        "limit to changesets affecting the sparse checkout"))
+    entry[1].append(
+        (
+            '',
+            'sparse',
+            None,
+            "limit to changesets affecting the sparse checkout",
+        )
+    )
 
     def _initialrevs(orig, repo, opts):
         revs = orig(repo, opts)
         if opts.get('sparse'):
             sparsematch = sparse.matcher(repo)
+
             def ctxmatch(rev):
                 ctx = repo[rev]
                 return any(f for f in ctx.files() if sparsematch(f))
+
             revs = revs.filter(ctxmatch)
         return revs
+
     extensions.wrapfunction(logcmdutil, '_initialrevs', _initialrevs)
 
+
 def _clonesparsecmd(orig, ui, repo, *args, **opts):
     include_pat = opts.get(r'include')
     exclude_pat = opts.get(r'exclude')
@@ -157,28 +171,41 @@
     # if --narrow is passed, it means they are includes and excludes for narrow
     # clone
     if not narrow_pat and (include or exclude or enableprofile):
+
         def clonesparse(orig, self, node, overwrite, *args, **kwargs):
-            sparse.updateconfig(self.unfiltered(), pat, {}, include=include,
-                                exclude=exclude, enableprofile=enableprofile,
-                                usereporootpaths=True)
+            sparse.updateconfig(
+                self.unfiltered(),
+                pat,
+                {},
+                include=include,
+                exclude=exclude,
+                enableprofile=enableprofile,
+                usereporootpaths=True,
+            )
             return orig(self, node, overwrite, *args, **kwargs)
+
         extensions.wrapfunction(hg, 'updaterepo', clonesparse)
     return orig(ui, repo, *args, **opts)
 
+
 def _setupclone(ui):
     entry = commands.table['clone']
-    entry[1].append(('', 'enable-profile', [],
-                    'enable a sparse profile'))
-    entry[1].append(('', 'include', [],
-                    'include sparse pattern'))
-    entry[1].append(('', 'exclude', [],
-                    'exclude sparse pattern'))
+    entry[1].append(('', 'enable-profile', [], 'enable a sparse profile'))
+    entry[1].append(('', 'include', [], 'include sparse pattern'))
+    entry[1].append(('', 'exclude', [], 'exclude sparse pattern'))
     extensions.wrapcommand(commands.table, 'clone', _clonesparsecmd)
 
+
 def _setupadd(ui):
     entry = commands.table['add']
-    entry[1].append(('s', 'sparse', None,
-                    'also include directories of added files in sparse config'))
+    entry[1].append(
+        (
+            's',
+            'sparse',
+            None,
+            'also include directories of added files in sparse config',
+        )
+    )
 
     def _add(orig, ui, repo, *pats, **opts):
         if opts.get(r'sparse'):
@@ -191,6 +218,7 @@
 
     extensions.wrapcommand(commands.table, 'add', _add)
 
+
 def _setupdirstate(ui):
     """Modify the dirstate to prevent stat'ing excluded files,
     and to prevent modifications to files outside the checkout.
@@ -221,39 +249,63 @@
                 changedfiles = dirstatefilestoremove.union(changedfiles)
 
         return orig(self, parent, allfiles, changedfiles)
+
     extensions.wrapfunction(dirstate.dirstate, 'rebuild', _rebuild)
 
     # Prevent adding files that are outside the sparse checkout
     editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
-    hint = _('include file with `hg debugsparse --include <pattern>` or use ' +
-             '`hg add -s <file>` to include file directory while adding')
+    hint = _(
+        'include file with `hg debugsparse --include <pattern>` or use '
+        + '`hg add -s <file>` to include file directory while adding'
+    )
     for func in editfuncs:
+
         def _wrapper(orig, self, *args, **kwargs):
             sparsematch = self._sparsematcher
             if not sparsematch.always():
                 for f in args:
-                    if (f is not None and not sparsematch(f) and
-                        f not in self):
-                        raise error.Abort(_("cannot add '%s' - it is outside "
-                                            "the sparse checkout") % f,
-                                          hint=hint)
+                    if f is not None and not sparsematch(f) and f not in self:
+                        raise error.Abort(
+                            _(
+                                "cannot add '%s' - it is outside "
+                                "the sparse checkout"
+                            )
+                            % f,
+                            hint=hint,
+                        )
             return orig(self, *args, **kwargs)
+
         extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
 
-@command('debugsparse', [
-    ('I', 'include', False, _('include files in the sparse checkout')),
-    ('X', 'exclude', False, _('exclude files in the sparse checkout')),
-    ('d', 'delete', False, _('delete an include/exclude rule')),
-    ('f', 'force', False, _('allow changing rules even with pending changes')),
-    ('', 'enable-profile', False, _('enables the specified profile')),
-    ('', 'disable-profile', False, _('disables the specified profile')),
-    ('', 'import-rules', False, _('imports rules from a file')),
-    ('', 'clear-rules', False, _('clears local include/exclude rules')),
-    ('', 'refresh', False, _('updates the working directory after sparseness changes')),
-    ('', 'reset', False, _('makes the repo full again')),
-    ] + commands.templateopts,
+
+@command(
+    'debugsparse',
+    [
+        ('I', 'include', False, _('include files in the sparse checkout')),
+        ('X', 'exclude', False, _('exclude files in the sparse checkout')),
+        ('d', 'delete', False, _('delete an include/exclude rule')),
+        (
+            'f',
+            'force',
+            False,
+            _('allow changing rules even with pending changes'),
+        ),
+        ('', 'enable-profile', False, _('enables the specified profile')),
+        ('', 'disable-profile', False, _('disables the specified profile')),
+        ('', 'import-rules', False, _('imports rules from a file')),
+        ('', 'clear-rules', False, _('clears local include/exclude rules')),
+        (
+            '',
+            'refresh',
+            False,
+            _('updates the working directory after sparseness changes'),
+        ),
+        ('', 'reset', False, _('makes the repo full again')),
+    ]
+    + commands.templateopts,
     _('[--OPTION] PATTERN...'),
-    helpbasic=True)
+    helpbasic=True,
+)
 def debugsparse(ui, repo, *pats, **opts):
     """make the current checkout sparse, or edit the existing checkout
 
@@ -306,8 +358,19 @@
     delete = opts.get('delete')
     refresh = opts.get('refresh')
     reset = opts.get('reset')
-    count = sum([include, exclude, enableprofile, disableprofile, delete,
-                 importrules, refresh, clearrules, reset])
+    count = sum(
+        [
+            include,
+            exclude,
+            enableprofile,
+            disableprofile,
+            delete,
+            importrules,
+            refresh,
+            clearrules,
+            reset,
+        ]
+    )
     if count > 1:
         raise error.Abort(_("too many flags specified"))
 
@@ -320,14 +383,26 @@
                 ui.status(("\n".join(temporaryincludes) + "\n"))
             return
         else:
-            raise error.Abort(_('the debugsparse command is only supported on'
-                                ' sparse repositories'))
+            raise error.Abort(
+                _(
+                    'the debugsparse command is only supported on'
+                    ' sparse repositories'
+                )
+            )
 
     if include or exclude or delete or reset or enableprofile or disableprofile:
-        sparse.updateconfig(repo, pats, opts, include=include, exclude=exclude,
-                            reset=reset, delete=delete,
-                            enableprofile=enableprofile,
-                            disableprofile=disableprofile, force=force)
+        sparse.updateconfig(
+            repo,
+            pats,
+            opts,
+            include=include,
+            exclude=exclude,
+            reset=reset,
+            delete=delete,
+            enableprofile=enableprofile,
+            disableprofile=disableprofile,
+            force=force,
+        )
 
     if importrules:
         sparse.importfromfiles(repo, opts, pats, force=force)
@@ -340,9 +415,16 @@
             wlock = repo.wlock()
             fcounts = map(
                 len,
-                sparse.refreshwdir(repo, repo.status(), sparse.matcher(repo),
-                                   force=force))
-            sparse.printchanges(ui, opts, added=fcounts[0], dropped=fcounts[1],
-                                conflicting=fcounts[2])
+                sparse.refreshwdir(
+                    repo, repo.status(), sparse.matcher(repo), force=force
+                ),
+            )
+            sparse.printchanges(
+                ui,
+                opts,
+                added=fcounts[0],
+                dropped=fcounts[1],
+                conflicting=fcounts[2],
+            )
         finally:
             wlock.release()
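
The sum() hunk above also shows how wrapped collection literals are handled here: the list stays exploded, one element per line with a trailing comma, and the enclosing call wraps around it rather than being flattened back onto one line. A runnable sketch with hypothetical flag values:

    # Hypothetical flag values; booleans sum as 0 or 1.
    include = exclude = False
    reset = True
    count = sum(
        [
            include,
            exclude,
            reset,
        ]
    )
    assert count == 1
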
--- a/hgext/split.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/split.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,9 +31,7 @@
 )
 
 # allow people to use split without explicitly enabling rebase extension
-from . import (
-    rebase,
-)
+from . import rebase
 
 cmdtable = {}
 command = registrar.command(cmdtable)
@@ -44,12 +42,18 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-@command('split',
-    [('r', 'rev', '', _("revision to split"), _('REV')),
-     ('', 'rebase', True, _('rebase descendants after split')),
-    ] + cmdutil.commitopts2,
+
+@command(
+    'split',
+    [
+        ('r', 'rev', '', _("revision to split"), _('REV')),
+        ('', 'rebase', True, _('rebase descendants after split')),
+    ]
+    + cmdutil.commitopts2,
     _('hg split [--no-rebase] [[-r] REV]'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, helpbasic=True)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+    helpbasic=True,
+)
 def split(ui, repo, *revs, **opts):
     """split a changeset into smaller ones
 
@@ -87,23 +91,30 @@
         #
         # So only "public" check is useful and it's checked directly here.
         if ctx.phase() == phases.public:
-            raise error.Abort(_('cannot split public changeset'),
-                              hint=_("see 'hg help phases' for details"))
+            raise error.Abort(
+                _('cannot split public changeset'),
+                hint=_("see 'hg help phases' for details"),
+            )
 
         descendants = list(repo.revs('(%d::) - (%d)', rev, rev))
         alloworphaned = obsolete.isenabled(repo, obsolete.allowunstableopt)
         if opts.get('rebase'):
             # Skip obsoleted descendants and their descendants so the rebase
             # won't cause conflicts for sure.
-            torebase = list(repo.revs('%ld - (%ld & obsolete())::',
-                                      descendants, descendants))
+            torebase = list(
+                repo.revs(
+                    '%ld - (%ld & obsolete())::', descendants, descendants
+                )
+            )
             if not alloworphaned and len(torebase) != len(descendants):
-                raise error.Abort(_('split would leave orphaned changesets '
-                                    'behind'))
+                raise error.Abort(
+                    _('split would leave orphaned changesets ' 'behind')
+                )
         else:
             if not alloworphaned and descendants:
                 raise error.Abort(
-                    _('cannot split changeset with children without rebase'))
+                    _('cannot split changeset with children without rebase')
+                )
             torebase = ()
 
         if len(ctx.parents()) > 1:
@@ -130,8 +141,9 @@
         if torebase and top:
             dorebase(ui, repo, torebase, top)
 
+
 def dosplit(ui, repo, tr, ctx, opts):
-    committed = [] # [ctx]
+    committed = []  # [ctx]
 
     # Set working parent to ctx.p1(), and keep working copy as ctx's content
     if ctx.node() != repo.dirstate.p1():
@@ -145,21 +157,27 @@
     # Main split loop
     while incomplete(repo):
         if committed:
-            header = (_('HG: Splitting %s. So far it has been split into:\n')
-                      % short(ctx.node()))
+            header = _(
+                'HG: Splitting %s. So far it has been split into:\n'
+            ) % short(ctx.node())
             for c in committed:
                 firstline = c.description().split('\n', 1)[0]
                 header += _('HG: - %s: %s\n') % (short(c.node()), firstline)
-            header += _('HG: Write commit message for the next split '
-                        'changeset.\n')
+            header += _(
+                'HG: Write commit message for the next split ' 'changeset.\n'
+            )
         else:
-            header = _('HG: Splitting %s. Write commit message for the '
-                       'first split changeset.\n') % short(ctx.node())
-        opts.update({
-            'edit': True,
-            'interactive': True,
-            'message': header + ctx.description(),
-        })
+            header = _(
+                'HG: Splitting %s. Write commit message for the '
+                'first split changeset.\n'
+            ) % short(ctx.node())
+        opts.update(
+            {
+                'edit': True,
+                'interactive': True,
+                'message': header + ctx.description(),
+            }
+        )
         commands.commit(ui, repo, **pycompat.strkwargs(opts))
         newctx = repo['.']
         committed.append(newctx)
@@ -167,11 +185,20 @@
     if not committed:
         raise error.Abort(_('cannot split an empty revision'))
 
-    scmutil.cleanupnodes(repo, {ctx.node(): [c.node() for c in committed]},
-                         operation='split', fixphase=True)
+    scmutil.cleanupnodes(
+        repo,
+        {ctx.node(): [c.node() for c in committed]},
+        operation='split',
+        fixphase=True,
+    )
 
     return committed[-1]
 
+
 def dorebase(ui, repo, src, destctx):
-    rebase.rebase(ui, repo, rev=[revsetlang.formatspec('%ld', src)],
-                  dest=revsetlang.formatspec('%d', destctx.rev()))
+    rebase.rebase(
+        ui,
+        repo,
+        rev=[revsetlang.formatspec('%ld', src)],
+        dest=revsetlang.formatspec('%d', destctx.rev()),
+    )
--- a/hgext/sqlitestore.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/sqlitestore.py	Sun Oct 06 09:45:02 2019 -0400
@@ -57,9 +57,7 @@
     nullrev,
     short,
 )
-from mercurial.thirdparty import (
-    attr,
-)
+from mercurial.thirdparty import attr
 from mercurial import (
     ancestor,
     dagop,
@@ -77,12 +75,11 @@
     repository,
     util as interfaceutil,
 )
-from mercurial.utils import (
-    storageutil,
-)
+from mercurial.utils import storageutil
 
 try:
     from mercurial import zstd
+
     zstd.__version__
 except ImportError:
     zstd = None
@@ -91,9 +88,12 @@
 configitem = registrar.configitem(configtable)
 
 # experimental config: storage.sqlite.compression
-configitem('storage', 'sqlite.compression',
-           default='zstd' if zstd else 'zlib',
-           experimental=True)
+configitem(
+    'storage',
+    'sqlite.compression',
+    default='zstd' if zstd else 'zlib',
+    experimental=True,
+)
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -121,24 +121,19 @@
     # Deltas are stored as content-indexed blobs.
     # compression column holds COMPRESSION_* constant for how the
     # delta is encoded.
-
     r'CREATE TABLE delta ('
     r'    id INTEGER PRIMARY KEY, '
     r'    compression INTEGER NOT NULL, '
     r'    hash BLOB UNIQUE ON CONFLICT ABORT, '
     r'    delta BLOB NOT NULL '
     r')',
-
     # Tracked paths are denormalized to integers to avoid redundant
     # storage of the path name.
     r'CREATE TABLE filepath ('
     r'    id INTEGER PRIMARY KEY, '
     r'    path BLOB NOT NULL '
     r')',
-
-    r'CREATE UNIQUE INDEX filepath_path '
-    r'    ON filepath (path)',
-
+    r'CREATE UNIQUE INDEX filepath_path ' r'    ON filepath (path)',
     # We have a single table for all file revision data.
     # Each file revision is uniquely described by a (path, rev) and
     # (path, node).
@@ -162,13 +157,10 @@
     r'    deltabaseid INTEGER REFERENCES fileindex(id), '
     r'    node BLOB NOT NULL '
     r')',
-
     r'CREATE UNIQUE INDEX fileindex_pathrevnum '
     r'    ON fileindex (pathid, revnum)',
-
     r'CREATE UNIQUE INDEX fileindex_pathnode '
     r'    ON fileindex (pathid, node)',
-
     # Provide a view over all file data for convenience.
     r'CREATE VIEW filedata AS '
     r'SELECT '
@@ -185,12 +177,11 @@
     r'    fileindex.deltabaseid AS deltabaseid '
     r'FROM filepath, fileindex '
     r'WHERE fileindex.pathid=filepath.id',
-
     r'PRAGMA user_version=%d' % CURRENT_SCHEMA_VERSION,
 ]
 
-def resolvedeltachain(db, pathid, node, revisioncache,
-                      stoprids, zstddctx=None):
+
+def resolvedeltachain(db, pathid, node, revisioncache, stoprids, zstddctx=None):
     """Resolve a delta chain for a file node."""
 
     # TODO the "not in ({stops})" here is possibly slowing down the query
@@ -214,8 +205,10 @@
         r'SELECT deltachain.baseid, compression, delta '
         r'FROM deltachain, delta '
         r'WHERE delta.id=deltachain.deltaid'.format(
-            stops=r','.join([r'?'] * len(stoprids))),
-        tuple([pathid, node] + list(stoprids.keys())))
+            stops=r','.join([r'?'] * len(stoprids))
+        ),
+        tuple([pathid, node] + list(stoprids.keys())),
+    )
 
     deltas = []
     lastdeltabaseid = None
@@ -230,8 +223,9 @@
         elif compression == COMPRESSION_ZLIB:
             delta = zlib.decompress(delta)
         else:
-            raise SQLiteStoreError('unhandled compression type: %d' %
-                                   compression)
+            raise SQLiteStoreError(
+                'unhandled compression type: %d' % compression
+            )
 
         deltas.append(delta)
 
@@ -251,20 +245,24 @@
 
     return fulltext
 
+
 def insertdelta(db, compression, hash, delta):
     try:
         return db.execute(
             r'INSERT INTO delta (compression, hash, delta) '
             r'VALUES (?, ?, ?)',
-            (compression, hash, delta)).lastrowid
+            (compression, hash, delta),
+        ).lastrowid
     except sqlite3.IntegrityError:
         return db.execute(
-            r'SELECT id FROM delta WHERE hash=?',
-            (hash,)).fetchone()[0]
+            r'SELECT id FROM delta WHERE hash=?', (hash,)
+        ).fetchone()[0]
+
 
 class SQLiteStoreError(error.StorageError):
     pass
 
+
 @attr.s
 class revisionentry(object):
     rid = attr.ib()
@@ -277,6 +275,7 @@
     linkrev = attr.ib()
     flags = attr.ib()
 
+
 @interfaceutil.implementer(repository.irevisiondelta)
 @attr.s(slots=True)
 class sqliterevisiondelta(object):
@@ -290,6 +289,7 @@
     delta = attr.ib()
     linknode = attr.ib(default=None)
 
+
 @interfaceutil.implementer(repository.iverifyproblem)
 @attr.s(frozen=True)
 class sqliteproblem(object):
@@ -297,6 +297,7 @@
     error = attr.ib(default=None)
     node = attr.ib(default=None)
 
+
 @interfaceutil.implementer(repository.ifilestorage)
 class sqlitefilestore(object):
     """Implements storage for an individual tracked path."""
@@ -332,8 +333,11 @@
         self._nodetorev = {}
         self._revisions = {}
 
-        res = list(self._db.execute(
-            r'SELECT id FROM filepath WHERE path=?', (self._path,)))
+        res = list(
+            self._db.execute(
+                r'SELECT id FROM filepath WHERE path=?', (self._path,)
+            )
+        )
 
         if not res:
             self._pathid = None
@@ -346,14 +350,16 @@
             r'FROM fileindex '
             r'WHERE pathid=? '
             r'ORDER BY revnum ASC',
-            (self._pathid,))
+            (self._pathid,),
+        )
 
         for i, row in enumerate(res):
             rid, rev, node, p1rev, p2rev, linkrev, flags = row
 
             if i != rev:
-                raise SQLiteStoreError(_('sqlite database has inconsistent '
-                                         'revision numbers'))
+                raise SQLiteStoreError(
+                    _('sqlite database has inconsistent ' 'revision numbers')
+                )
 
             if p1rev == nullrev:
                 p1node = nullid
@@ -374,7 +380,8 @@
                 p1node=p1node,
                 p2node=p2node,
                 linkrev=linkrev,
-                flags=flags)
+                flags=flags,
+            )
 
             self._revtonode[rev] = node
             self._nodetorev[node] = rev
@@ -395,8 +402,9 @@
         return node in self._nodetorev
 
     def revs(self, start=0, stop=None):
-        return storageutil.iterrevs(len(self._revisions), start=start,
-                                    stop=stop)
+        return storageutil.iterrevs(
+            len(self._revisions), start=start, stop=stop
+        )
 
     def parents(self, node):
         if node == nullid:
@@ -478,8 +486,9 @@
         startrev = self.rev(start) if start is not None else nullrev
         stoprevs = {self.rev(n) for n in stop or []}
 
-        revs = dagop.headrevssubset(self.revs, self.parentrevs,
-                                    startrev=startrev, stoprevs=stoprevs)
+        revs = dagop.headrevssubset(
+            self.revs, self.parentrevs, startrev=startrev, stoprevs=stoprevs
+        )
 
         return [self.node(rev) for rev in revs]
 
@@ -492,7 +501,8 @@
             r'  FROM filedata '
             r'  WHERE path=? AND (p1rev=? OR p2rev=?) '
             r'  ORDER BY revnum ASC',
-            (self._path, rev, rev))
+            (self._path, rev, rev),
+        )
 
         return [row[0] for row in res]
 
@@ -531,15 +541,19 @@
         # short-circuit delta chain traversal and decompression as soon as
         # we encounter a revision in the cache.
 
-        stoprids = {self._revisions[n].rid: n
-                    for n in self._revisioncache}
+        stoprids = {self._revisions[n].rid: n for n in self._revisioncache}
 
         if not stoprids:
             stoprids[-1] = None
 
-        fulltext = resolvedeltachain(self._db, self._pathid, node,
-                                     self._revisioncache, stoprids,
-                                     zstddctx=self._dctx)
+        fulltext = resolvedeltachain(
+            self._db,
+            self._pathid,
+            node,
+            self._revisioncache,
+            stoprids,
+            zstddctx=self._dctx,
+        )
 
         # Don't verify hashes if parent nodes were rewritten, as the hash
         # wouldn't verify.
@@ -564,12 +578,18 @@
     def cmp(self, node, fulltext):
         return not storageutil.filedataequivalent(self, node, fulltext)
 
-    def emitrevisions(self, nodes, nodesorder=None, revisiondata=False,
-                      assumehaveparentrevisions=False,
-                      deltamode=repository.CG_DELTAMODE_STD):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=repository.CG_DELTAMODE_STD,
+    ):
         if nodesorder not in ('nodes', 'storage', 'linear', None):
-            raise error.ProgrammingError('unhandled value for nodesorder: %s' %
-                                         nodesorder)
+            raise error.ProgrammingError(
+                'unhandled value for nodesorder: %s' % nodesorder
+            )
 
         nodes = [n for n in nodes if n != nullid]
 
@@ -581,23 +601,29 @@
             r'SELECT revnum, deltaid FROM fileindex '
             r'WHERE pathid=? '
             r'    AND node in (%s)' % (r','.join([r'?'] * len(nodes))),
-            tuple([self._pathid] + nodes))
+            tuple([self._pathid] + nodes),
+        )
 
         deltabases = {}
 
         for rev, deltaid in res:
             res = self._db.execute(
                 r'SELECT revnum from fileindex WHERE pathid=? AND deltaid=?',
-                (self._pathid, deltaid))
+                (self._pathid, deltaid),
+            )
             deltabases[rev] = res.fetchone()[0]
 
         # TODO define revdifffn so we can use delta from storage.
         for delta in storageutil.emitrevisions(
-            self, nodes, nodesorder, sqliterevisiondelta,
+            self,
+            nodes,
+            nodesorder,
+            sqliterevisiondelta,
             deltaparentfn=deltabases.__getitem__,
             revisiondata=revisiondata,
             assumehaveparentrevisions=assumehaveparentrevisions,
-            deltamode=deltamode):
+            deltamode=deltamode,
+        ):
 
             yield delta
 
@@ -611,8 +637,17 @@
 
         return self.addrevision(filedata, transaction, linkrev, p1, p2)
 
-    def addrevision(self, revisiondata, transaction, linkrev, p1, p2, node=None,
-                    flags=0, cachedelta=None):
+    def addrevision(
+        self,
+        revisiondata,
+        transaction,
+        linkrev,
+        p1,
+        p2,
+        node=None,
+        flags=0,
+        cachedelta=None,
+    ):
         if flags:
             raise SQLiteStoreError(_('flags not supported on revisions'))
 
@@ -625,14 +660,21 @@
         if node in self._nodetorev:
             return node
 
-        node = self._addrawrevision(node, revisiondata, transaction, linkrev,
-                                    p1, p2)
+        node = self._addrawrevision(
+            node, revisiondata, transaction, linkrev, p1, p2
+        )
 
         self._revisioncache[node] = revisiondata
         return node
 
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
-                 maybemissingparents=False):
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        maybemissingparents=False,
+    ):
         nodes = []
 
         for node, p1, p2, linknode, deltabase, delta, wireflags in deltas:
@@ -663,12 +705,11 @@
                 newlen = len(delta) - hlen
 
                 if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
-                    raise error.CensoredBaseError(self._path,
-                                                  deltabase)
+                    raise error.CensoredBaseError(self._path, deltabase)
 
-            if (not (storeflags & FLAG_CENSORED)
-                and storageutil.deltaiscensored(
-                    delta, baserev, lambda x: len(self.rawdata(x)))):
+            if not (storeflags & FLAG_CENSORED) and storageutil.deltaiscensored(
+                delta, baserev, lambda x: len(self.rawdata(x))
+            ):
                 storeflags |= FLAG_CENSORED
 
             linkrev = linkmapper(linknode)
@@ -685,9 +726,9 @@
                     entry.flags &= ~FLAG_MISSING_P1
 
                     self._db.execute(
-                        r'UPDATE fileindex SET p1rev=?, flags=? '
-                        r'WHERE id=?',
-                        (self._nodetorev[p1], entry.flags, entry.rid))
+                        r'UPDATE fileindex SET p1rev=?, flags=? ' r'WHERE id=?',
+                        (self._nodetorev[p1], entry.flags, entry.rid),
+                    )
 
                 if entry.flags & FLAG_MISSING_P2 and p2 != nullid:
                     entry.p2node = p2
@@ -695,9 +736,9 @@
                     entry.flags &= ~FLAG_MISSING_P2
 
                     self._db.execute(
-                        r'UPDATE fileindex SET p2rev=?, flags=? '
-                        r'WHERE id=?',
-                        (self._nodetorev[p1], entry.flags, entry.rid))
+                        r'UPDATE fileindex SET p2rev=?, flags=? ' r'WHERE id=?',
+                        (self._nodetorev[p1], entry.flags, entry.rid),
+                    )
 
                 continue
 
@@ -708,8 +749,16 @@
                 text = None
                 storedelta = (deltabase, delta)
 
-            self._addrawrevision(node, text, transaction, linkrev, p1, p2,
-                                 storedelta=storedelta, flags=storeflags)
+            self._addrawrevision(
+                node,
+                text,
+                transaction,
+                linkrev,
+                p1,
+                p2,
+                storedelta=storedelta,
+                flags=storeflags,
+            )
 
             if addrevisioncb:
                 addrevisioncb(self, node)
@@ -722,8 +771,9 @@
         # This restriction is cargo culted from revlogs and makes no sense for
         # SQLite, since columns can be resized at will.
         if len(tombstone) > len(self.rawdata(censornode)):
-            raise error.Abort(_('censor tombstone must be no longer than '
-                                'censored data'))
+            raise error.Abort(
+                _('censor tombstone must be no longer than ' 'censored data')
+            )
 
         # We need to replace the censored revision's data with the tombstone.
         # But replacing that data will have implications for delta chains that
@@ -738,21 +788,26 @@
         # Find the delta to be censored.
         censoreddeltaid = self._db.execute(
             r'SELECT deltaid FROM fileindex WHERE id=?',
-            (self._revisions[censornode].rid,)).fetchone()[0]
+            (self._revisions[censornode].rid,),
+        ).fetchone()[0]
 
         # Find all its delta chain children.
         # TODO once we support storing deltas for !files, we'll need to look
         # for those delta chains too.
-        rows = list(self._db.execute(
-            r'SELECT id, pathid, node FROM fileindex '
-            r'WHERE deltabaseid=? OR deltaid=?',
-            (censoreddeltaid, censoreddeltaid)))
+        rows = list(
+            self._db.execute(
+                r'SELECT id, pathid, node FROM fileindex '
+                r'WHERE deltabaseid=? OR deltaid=?',
+                (censoreddeltaid, censoreddeltaid),
+            )
+        )
 
         for row in rows:
             rid, pathid, node = row
 
-            fulltext = resolvedeltachain(self._db, pathid, node, {}, {-1: None},
-                                         zstddctx=self._dctx)
+            fulltext = resolvedeltachain(
+                self._db, pathid, node, {}, {-1: None}, zstddctx=self._dctx
+            )
 
             deltahash = hashlib.sha1(fulltext).digest()
 
@@ -766,8 +821,9 @@
                 deltablob = fulltext
                 compression = COMPRESSION_NONE
             else:
-                raise error.ProgrammingError('unhandled compression engine: %s'
-                                             % self._compengine)
+                raise error.ProgrammingError(
+                    'unhandled compression engine: %s' % self._compengine
+                )
 
             if len(deltablob) >= len(fulltext):
                 deltablob = fulltext
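
The loop above re-materializes every revision whose delta chain passes
through the censored revision before the tombstone can be swapped in. A
minimal standalone sketch of that re-parenting idea, against a deliberately
simplified two-table schema (not the real sqlitestore layout, where
deltabaseid points at fileindex rows):

    import sqlite3

    db = sqlite3.connect(':memory:')
    db.executescript('''
        CREATE TABLE delta (id INTEGER PRIMARY KEY, data TEXT);
        CREATE TABLE fileindex (id INTEGER PRIMARY KEY,
                                deltaid INTEGER, deltabaseid INTEGER);
    ''')
    # rev 1 stores a full text; rev 2 is a delta on top of it
    db.execute("INSERT INTO delta VALUES (1, 'full text of rev 1')")
    db.execute("INSERT INTO delta VALUES (2, 'delta against rev 1')")
    db.execute("INSERT INTO fileindex VALUES (1, 1, NULL)")
    db.execute("INSERT INTO fileindex VALUES (2, 2, 1)")

    # to censor rev 1, first give each dependent row its own full text...
    db.execute("INSERT INTO delta VALUES (3, 'resolved full text of rev 2')")
    db.execute("UPDATE fileindex SET deltaid=3, deltabaseid=NULL WHERE id=2")
    # ...after which nothing references the censored delta and it can
    # become the tombstone
    db.execute("UPDATE delta SET data='TOMBSTONE' WHERE id=1")
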
@@ -777,13 +833,16 @@
 
             self._db.execute(
                 r'UPDATE fileindex SET deltaid=?, deltabaseid=NULL '
-                r'WHERE id=?', (deltaid, rid))
+                r'WHERE id=?',
+                (deltaid, rid),
+            )
 
         # Now create the tombstone delta and replace the delta on the censored
         # node.
         deltahash = hashlib.sha1(tombstone).digest()
-        tombstonedeltaid = insertdelta(self._db, COMPRESSION_NONE,
-                                       deltahash, tombstone)
+        tombstonedeltaid = insertdelta(
+            self._db, COMPRESSION_NONE, deltahash, tombstone
+        )
 
         flags = self._revisions[censornode].flags
         flags |= FLAG_CENSORED
@@ -791,19 +850,22 @@
         self._db.execute(
             r'UPDATE fileindex SET flags=?, deltaid=?, deltabaseid=NULL '
             r'WHERE pathid=? AND node=?',
-            (flags, tombstonedeltaid, self._pathid, censornode))
+            (flags, tombstonedeltaid, self._pathid, censornode),
+        )
 
-        self._db.execute(
-            r'DELETE FROM delta WHERE id=?', (censoreddeltaid,))
+        self._db.execute(r'DELETE FROM delta WHERE id=?', (censoreddeltaid,))
 
         self._refreshindex()
         self._revisioncache.clear()
 
     def getstrippoint(self, minlink):
-        return storageutil.resolvestripinfo(minlink, len(self) - 1,
-                                            [self.rev(n) for n in self.heads()],
-                                            self.linkrev,
-                                            self.parentrevs)
+        return storageutil.resolvestripinfo(
+            minlink,
+            len(self) - 1,
+            [self.rev(n) for n in self.heads()],
+            self.linkrev,
+            self.parentrevs,
+        )
 
     def strip(self, minlink, transaction):
         if not len(self):
@@ -817,7 +879,8 @@
         for rev in self.revs(rev):
             self._db.execute(
                 r'DELETE FROM fileindex WHERE pathid=? AND node=?',
-                (self._pathid, self.node(rev)))
+                (self._pathid, self.node(rev)),
+            )
 
         # TODO how should we garbage collect data in delta table?
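One plausible answer to that TODO, untested against the real schema: since
deltabaseid points at fileindex rows rather than at deltas (per the comment
in _addrawrevision below), deltaid is the only reference into the delta
table, so a hypothetical sweep could be:

    def gcdeltas(db):
        # assumes fileindex.deltaid is the only reference to delta rows
        db.execute(
            r'DELETE FROM delta WHERE id NOT IN ('
            r'    SELECT deltaid FROM fileindex WHERE deltaid IS NOT NULL)'
        )
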
 
@@ -830,9 +893,14 @@
     def files(self):
         return []
 
-    def storageinfo(self, exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        self,
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         d = {}
 
         if exclusivefiles:
@@ -846,8 +914,9 @@
             d['revisionscount'] = len(self)
 
         if trackedsize:
-            d['trackedsize'] = sum(len(self.revision(node))
-                                       for node in self._nodetorev)
+            d['trackedsize'] = sum(
+                len(self.revision(node)) for node in self._nodetorev
+            )
 
         if storedsize:
             # TODO implement this?
@@ -865,8 +934,8 @@
                 self.revision(node)
             except Exception as e:
                 yield sqliteproblem(
-                    error=_('unpacking %s: %s') % (short(node), e),
-                    node=node)
+                    error=_('unpacking %s: %s') % (short(node), e), node=node
+                )
 
                 state['skipread'].add(node)
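
The "unpacking" failures reported above include node hash mismatches: the
store recomputes the SHA-1 that revlogs use, the two parent nodes in sorted
order followed by the revision text, and compares it against the stored
node. A standalone sketch of that scheme:

    import hashlib

    def hashrevision(text, p1, p2):
        # revlog-style node: sha1 over both 20-byte parent nodes in
        # sorted order, then the revision text
        a, b = sorted([p1, p2])
        s = hashlib.sha1(a)
        s.update(b)
        s.update(text)
        return s.digest()

    nullid = b'\x00' * 20
    node = hashrevision(b'copyright 2019\n', nullid, nullid)
    assert len(node) == 20
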
 
@@ -887,14 +956,23 @@
         if storageutil.iscensoredtext(fulltext):
             raise error.CensoredNodeError(self._path, node, fulltext)
 
-        raise SQLiteStoreError(_('integrity check failed on %s') %
-                               self._path)
+        raise SQLiteStoreError(_('integrity check failed on %s') % self._path)
 
-    def _addrawrevision(self, node, revisiondata, transaction, linkrev,
-                        p1, p2, storedelta=None, flags=0):
+    def _addrawrevision(
+        self,
+        node,
+        revisiondata,
+        transaction,
+        linkrev,
+        p1,
+        p2,
+        storedelta=None,
+        flags=0,
+    ):
         if self._pathid is None:
             res = self._db.execute(
-                r'INSERT INTO filepath (path) VALUES (?)', (self._path,))
+                r'INSERT INTO filepath (path) VALUES (?)', (self._path,)
+            )
             self._pathid = res.lastrowid
 
         # For simplicity, always store a delta against p1.
@@ -913,8 +991,9 @@
             if deltabase == nullid:
                 delta = revisiondata
             else:
-                delta = mdiff.textdiff(self.revision(self.rev(deltabase)),
-                                       revisiondata)
+                delta = mdiff.textdiff(
+                    self.revision(self.rev(deltabase)), revisiondata
+                )
 
         # File index stores a pointer to its delta and the parent delta.
         # The parent delta is stored via a pointer to the fileindex PK.
@@ -939,8 +1018,9 @@
             deltablob = delta
             compression = COMPRESSION_NONE
         else:
-            raise error.ProgrammingError('unhandled compression engine: %s' %
-                                         self._compengine)
+            raise error.ProgrammingError(
+                'unhandled compression engine: %s' % self._compengine
+            )
 
         # Don't store compressed data if it isn't practical.
         if len(deltablob) >= len(delta):
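
The check above falls back to storing the raw delta when compression does
not pay, the same fallback the censor path uses earlier in this file. A
standalone restatement using only stdlib zlib (the zstd branch elided;
string labels stand in for the COMPRESSION_* constants):

    import zlib

    def compressdelta(delta, engine='zlib'):
        if engine == 'zlib':
            blob, compression = zlib.compress(delta), 'zlib'
        elif engine == 'none':
            blob, compression = delta, 'none'
        else:
            raise ValueError('unhandled compression engine: %s' % engine)
        # don't store compressed data if it isn't practical
        if len(blob) >= len(delta):
            blob, compression = delta, 'none'
        return blob, compression
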
@@ -966,8 +1046,17 @@
             r'    pathid, revnum, node, p1rev, p2rev, linkrev, flags, '
             r'    deltaid, deltabaseid) '
             r'    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)',
-            (self._pathid, rev, node, p1rev, p2rev, linkrev, flags,
-             deltaid, baseid)
+            (
+                self._pathid,
+                rev,
+                node,
+                p1rev,
+                p2rev,
+                linkrev,
+                flags,
+                deltaid,
+                baseid,
+            ),
         ).lastrowid
 
         entry = revisionentry(
@@ -979,7 +1068,8 @@
             p1node=p1,
             p2node=p2,
             linkrev=linkrev,
-            flags=flags)
+            flags=flags,
+        )
 
         self._nodetorev[node] = rev
         self._revtonode[rev] = node
@@ -987,6 +1077,7 @@
 
         return node
 
+
 class sqliterepository(localrepo.localrepository):
     def cancopy(self):
         return False
@@ -1024,6 +1115,7 @@
 
         return db
 
+
 def makedb(path):
     """Construct a database handle for a database at path."""
 
@@ -1049,6 +1141,7 @@
 
     return db
 
+
 def featuresetup(ui, supported):
     supported.add(REQUIREMENT)
 
@@ -1060,14 +1153,16 @@
     supported.add(REQUIREMENT_SHALLOW_FILES)
     supported.add(repository.NARROW_REQUIREMENT)
 
+
 def newreporequirements(orig, ui, createopts):
     if createopts['backend'] != 'sqlite':
         return orig(ui, createopts)
 
     # This restriction can be lifted once we have more confidence.
     if 'sharedrepo' in createopts:
-        raise error.Abort(_('shared repositories not supported with SQLite '
-                            'store'))
+        raise error.Abort(
+            _('shared repositories not supported with SQLite ' 'store')
+        )
 
     # This filtering is out of an abundance of caution: we want to ensure
     # we honor creation options and we do that by annotating exactly the
@@ -1080,8 +1175,10 @@
 
     unsupported = set(createopts) - known
     if unsupported:
-        raise error.Abort(_('SQLite store does not support repo creation '
-                            'option: %s') % ', '.join(sorted(unsupported)))
+        raise error.Abort(
+            _('SQLite store does not support repo creation ' 'option: %s')
+            % ', '.join(sorted(unsupported))
+        )
 
     # Since we're a hybrid store that still relies on revlogs, we fall back
     # to using the revlogv1 backend's storage requirements then adding our
@@ -1093,9 +1190,13 @@
     compression = ui.config('storage', 'sqlite.compression')
 
     if compression == 'zstd' and not zstd:
-        raise error.Abort(_('storage.sqlite.compression set to "zstd" but '
-                            'zstandard compression not available to this '
-                            'Mercurial install'))
+        raise error.Abort(
+            _(
+                'storage.sqlite.compression set to "zstd" but '
+                'zstandard compression not available to this '
+                'Mercurial install'
+            )
+        )
 
     if compression == 'zstd':
         requirements.add(REQUIREMENT_ZSTD)
@@ -1104,17 +1205,24 @@
     elif compression == 'none':
         requirements.add(REQUIREMENT_NONE)
     else:
-        raise error.Abort(_('unknown compression engine defined in '
-                            'storage.sqlite.compression: %s') % compression)
+        raise error.Abort(
+            _(
+                'unknown compression engine defined in '
+                'storage.sqlite.compression: %s'
+            )
+            % compression
+        )
 
     if createopts.get('shallowfilestore'):
         requirements.add(REQUIREMENT_SHALLOW_FILES)
 
     return requirements
 
+
 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
 class sqlitefilestorage(object):
     """Repository file storage backed by SQLite."""
+
     def file(self, path):
         if path[0] == b'/':
             path = path[1:]
@@ -1126,11 +1234,16 @@
         elif REQUIREMENT_NONE in self.requirements:
             compression = 'none'
         else:
-            raise error.Abort(_('unable to determine what compression engine '
-                                'to use for SQLite storage'))
+            raise error.Abort(
+                _(
+                    'unable to determine what compression engine '
+                    'to use for SQLite storage'
+                )
+            )
 
         return sqlitefilestore(self._dbconn, path, compression)
 
+
 def makefilestorage(orig, requirements, features, **kwargs):
     """Produce a type conforming to ``ilocalrepositoryfilestorage``."""
     if REQUIREMENT in requirements:
@@ -1141,16 +1254,22 @@
     else:
         return orig(requirements=requirements, features=features, **kwargs)
 
+
 def makemain(orig, ui, requirements, **kwargs):
     if REQUIREMENT in requirements:
         if REQUIREMENT_ZSTD in requirements and not zstd:
-            raise error.Abort(_('repository uses zstandard compression, which '
-                                'is not available to this Mercurial install'))
+            raise error.Abort(
+                _(
+                    'repository uses zstandard compression, which '
+                    'is not available to this Mercurial install'
+                )
+            )
 
         return sqliterepository
 
     return orig(requirements=requirements, **kwargs)
 
+
 def verifierinit(orig, self, *args, **kwargs):
     orig(self, *args, **kwargs)
 
@@ -1158,16 +1277,16 @@
     # advertised. So suppress these warnings.
     self.warnorphanstorefiles = False
 
+
 def extsetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
-    extensions.wrapfunction(localrepo, 'newreporequirements',
-                            newreporequirements)
-    extensions.wrapfunction(localrepo, 'makefilestorage',
-                            makefilestorage)
-    extensions.wrapfunction(localrepo, 'makemain',
-                            makemain)
-    extensions.wrapfunction(verify.verifier, '__init__',
-                            verifierinit)
+    extensions.wrapfunction(
+        localrepo, 'newreporequirements', newreporequirements
+    )
+    extensions.wrapfunction(localrepo, 'makefilestorage', makefilestorage)
+    extensions.wrapfunction(localrepo, 'makemain', makemain)
+    extensions.wrapfunction(verify.verifier, '__init__', verifierinit)
+
 
 def reposetup(ui, repo):
     if isinstance(repo, sqliterepository):
--- a/hgext/strip.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/strip.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,6 +20,7 @@
     scmutil,
     util,
 )
+
 nullid = nodemod.nullid
 release = lockmod.release
 
@@ -31,6 +32,7 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
+
 def checklocalchanges(repo, force=False):
     s = repo.status()
     if not force:
@@ -40,26 +42,40 @@
         cmdutil.checkunfinished(repo, skipmerge=True)
     return s
 
+
 def _findupdatetarget(repo, nodes):
     unode, p2 = repo.changelog.parents(nodes[0])
     currentbranch = repo[None].branch()
 
-    if (util.safehasattr(repo, 'mq') and p2 != nullid
-        and p2 in [x.node for x in repo.mq.applied]):
+    if (
+        util.safehasattr(repo, 'mq')
+        and p2 != nullid
+        and p2 in [x.node for x in repo.mq.applied]
+    ):
         unode = p2
     elif currentbranch != repo[unode].branch():
         pwdir = 'parents(wdir())'
         revset = 'max(((parents(%ln::%r) + %r) - %ln::%r) and branch(%s))'
-        branchtarget = repo.revs(revset, nodes, pwdir, pwdir, nodes, pwdir,
-                                 currentbranch)
+        branchtarget = repo.revs(
+            revset, nodes, pwdir, pwdir, nodes, pwdir, currentbranch
+        )
         if branchtarget:
             cl = repo.changelog
             unode = cl.node(branchtarget.first())
 
     return unode
 
-def strip(ui, repo, revs, update=True, backup=True, force=None, bookmarks=None,
-          soft=False):
+
+def strip(
+    ui,
+    repo,
+    revs,
+    update=True,
+    backup=True,
+    force=None,
+    bookmarks=None,
+    soft=False,
+):
     with repo.wlock(), repo.lock():
 
         if update:
@@ -82,26 +98,56 @@
             for bookmark in sorted(bookmarks):
                 ui.write(_("bookmark '%s' deleted\n") % bookmark)
 
-@command("strip",
-         [
-          ('r', 'rev', [], _('strip specified revision (optional, '
-                               'can specify revisions without this '
-                               'option)'), _('REV')),
-          ('f', 'force', None, _('force removal of changesets, discard '
-                                 'uncommitted changes (no backup)')),
-          ('', 'no-backup', None, _('do not save backup bundle')),
-          ('', 'nobackup', None, _('do not save backup bundle '
-                                   '(DEPRECATED)')),
-          ('n', '', None, _('ignored  (DEPRECATED)')),
-          ('k', 'keep', None, _("do not modify working directory during "
-                                "strip")),
-          ('B', 'bookmark', [], _("remove revs only reachable from given"
-                                  " bookmark"), _('BOOKMARK')),
-          ('', 'soft', None,
-          _("simply drop changesets from visible history (EXPERIMENTAL)")),
-         ],
-          _('hg strip [-k] [-f] [-B bookmark] [-r] REV...'),
-          helpcategory=command.CATEGORY_MAINTENANCE)
+
+@command(
+    "strip",
+    [
+        (
+            'r',
+            'rev',
+            [],
+            _(
+                'strip specified revision (optional, '
+                'can specify revisions without this '
+                'option)'
+            ),
+            _('REV'),
+        ),
+        (
+            'f',
+            'force',
+            None,
+            _(
+                'force removal of changesets, discard '
+                'uncommitted changes (no backup)'
+            ),
+        ),
+        ('', 'no-backup', None, _('do not save backup bundle')),
+        ('', 'nobackup', None, _('do not save backup bundle ' '(DEPRECATED)')),
+        ('n', '', None, _('ignored  (DEPRECATED)')),
+        (
+            'k',
+            'keep',
+            None,
+            _("do not modify working directory during " "strip"),
+        ),
+        (
+            'B',
+            'bookmark',
+            [],
+            _("remove revs only reachable from given" " bookmark"),
+            _('BOOKMARK'),
+        ),
+        (
+            '',
+            'soft',
+            None,
+            _("simply drop changesets from visible history (EXPERIMENTAL)"),
+        ),
+    ],
+    _('hg strip [-k] [-f] [-B bookmark] [-r] REV...'),
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def stripcmd(ui, repo, *revs, **opts):
     """strip changesets and all their descendants from the repository
 
@@ -145,8 +191,10 @@
         if bookmarks:
             repomarks = repo._bookmarks
             if not bookmarks.issubset(repomarks):
-                raise error.Abort(_("bookmark '%s' not found") %
-                    ','.join(sorted(bookmarks - set(repomarks.keys()))))
+                raise error.Abort(
+                    _("bookmark '%s' not found")
+                    % ','.join(sorted(bookmarks - set(repomarks.keys())))
+                )
 
             # If the requested bookmark is not the only one pointing to a
             # revision, we have to only delete the bookmark and not strip
@@ -174,8 +222,10 @@
 
         # if one of the wdir parent is stripped we'll need
         # to update away to an earlier revision
-        update = any(p != nullid and cl.rev(p) in strippedrevs
-                     for p in repo.dirstate.parents())
+        update = any(
+            p != nullid and cl.rev(p) in strippedrevs
+            for p in repo.dirstate.parents()
+        )
 
         rootnodes = set(cl.node(r) for r in roots)
 
@@ -222,9 +272,15 @@
 
             update = False
 
-
-        strip(ui, repo, revs, backup=backup, update=update,
-              force=opts.get('force'), bookmarks=bookmarks,
-              soft=opts['soft'])
+        strip(
+            ui,
+            repo,
+            revs,
+            backup=backup,
+            update=update,
+            force=opts.get('force'),
+            bookmarks=bookmarks,
+            soft=opts['soft'],
+        )
 
     return 0
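
With the signature of strip() above now spelled one parameter per line, a
hypothetical third-party caller reads naturally; stripdrafts and its
command table are illustrative only, and revs is assumed to be a list of
binary nodes, as stripcmd passes:

    from mercurial import registrar
    from hgext.strip import strip

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command('stripdrafts', [], 'hg stripdrafts')
    def stripdrafts(ui, repo):
        """strip all draft changesets, keeping a backup bundle"""
        nodes = [repo[r].node() for r in repo.revs('draft()')]
        if nodes:
            strip(ui, repo, nodes, backup=True)
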
--- a/hgext/transplant.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/transplant.py	Sun Oct 06 09:45:02 2019 -0400
@@ -44,9 +44,11 @@
     stringutil,
 )
 
+
 class TransplantError(error.Abort):
     pass
 
+
 cmdtable = {}
 command = registrar.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -58,18 +60,20 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('transplant', 'filter',
-    default=None,
+configitem(
+    'transplant', 'filter', default=None,
 )
-configitem('transplant', 'log',
-    default=None,
+configitem(
+    'transplant', 'log', default=None,
 )
 
+
 class transplantentry(object):
     def __init__(self, lnode, rnode):
         self.lnode = lnode
         self.rnode = rnode
 
+
 class transplants(object):
     def __init__(self, path=None, transplantfile=None, opener=None):
         self.path = path
@@ -116,17 +120,22 @@
             del list[list.index(transplant)]
             self.dirty = True
 
+
 class transplanter(object):
     def __init__(self, ui, repo, opts):
         self.ui = ui
         self.path = repo.vfs.join('transplant')
         self.opener = vfsmod.vfs(self.path)
-        self.transplants = transplants(self.path, 'transplants',
-                                       opener=self.opener)
+        self.transplants = transplants(
+            self.path, 'transplants', opener=self.opener
+        )
+
         def getcommiteditor():
             editform = cmdutil.mergeeditform(repo[None], 'transplant')
-            return cmdutil.getcommiteditor(editform=editform,
-                                           **pycompat.strkwargs(opts))
+            return cmdutil.getcommiteditor(
+                editform=editform, **pycompat.strkwargs(opts)
+            )
+
         self.getcommiteditor = getcommiteditor
 
     def applied(self, repo, node, parent):
@@ -136,8 +145,9 @@
             parentrev = repo.changelog.rev(parent)
         if hasnode(repo, node):
             rev = repo.changelog.rev(node)
-            reachable = repo.changelog.ancestors([parentrev], rev,
-                                                 inclusive=True)
+            reachable = repo.changelog.ancestors(
+                [parentrev], rev, inclusive=True
+            )
             if rev in reachable:
                 return True
         for t in self.transplants.get(node):
@@ -146,8 +156,9 @@
                 self.transplants.remove(t)
                 return False
             lnoderev = repo.changelog.rev(t.lnode)
-            if lnoderev in repo.changelog.ancestors([parentrev], lnoderev,
-                                                    inclusive=True):
+            if lnoderev in repo.changelog.ancestors(
+                [parentrev], lnoderev, inclusive=True
+            ):
                 return True
         return False
 
@@ -170,8 +181,9 @@
                 revstr = '%d:%s' % (rev, nodemod.short(node))
 
                 if self.applied(repo, node, p1):
-                    self.ui.warn(_('skipping already applied revision %s\n') %
-                                 revstr)
+                    self.ui.warn(
+                        _('skipping already applied revision %s\n') % revstr
+                    )
                     continue
 
                 parents = source.changelog.parents(node)
@@ -185,8 +197,9 @@
                     if pulls:
                         if source != repo:
                             exchange.pull(repo, source.peer(), heads=pulls)
-                        merge.update(repo, pulls[-1], branchmerge=False,
-                                     force=False)
+                        merge.update(
+                            repo, pulls[-1], branchmerge=False, force=False
+                        )
                         p1 = repo.dirstate.p1()
                         pulls = []
 
@@ -202,15 +215,18 @@
                 skipmerge = False
                 if parents[1] != revlog.nullid:
                     if not opts.get('parent'):
-                        self.ui.note(_('skipping merge changeset %d:%s\n')
-                                     % (rev, nodemod.short(node)))
+                        self.ui.note(
+                            _('skipping merge changeset %d:%s\n')
+                            % (rev, nodemod.short(node))
+                        )
                         skipmerge = True
                     else:
                         parent = source.lookup(opts['parent'])
                         if parent not in parents:
-                            raise error.Abort(_('%s is not a parent of %s') %
-                                              (nodemod.short(parent),
-                                               nodemod.short(node)))
+                            raise error.Abort(
+                                _('%s is not a parent of %s')
+                                % (nodemod.short(parent), nodemod.short(node))
+                            )
                 else:
                     parent = parents[0]
 
@@ -228,23 +244,30 @@
                 if patchfile or domerge:
                     try:
                         try:
-                            n = self.applyone(repo, node,
-                                              source.changelog.read(node),
-                                              patchfile, merge=domerge,
-                                              log=opts.get('log'),
-                                              filter=opts.get('filter'))
+                            n = self.applyone(
+                                repo,
+                                node,
+                                source.changelog.read(node),
+                                patchfile,
+                                merge=domerge,
+                                log=opts.get('log'),
+                                filter=opts.get('filter'),
+                            )
                         except TransplantError:
                             # Do not rollback, it is up to the user to
                             # fix the merge or cancel everything
                             tr.close()
                             raise
                         if n and domerge:
-                            self.ui.status(_('%s merged at %s\n') % (revstr,
-                                      nodemod.short(n)))
+                            self.ui.status(
+                                _('%s merged at %s\n')
+                                % (revstr, nodemod.short(n))
+                            )
                         elif n:
-                            self.ui.status(_('%s transplanted to %s\n')
-                                           % (nodemod.short(node),
-                                              nodemod.short(n)))
+                            self.ui.status(
+                                _('%s transplanted to %s\n')
+                                % (nodemod.short(node), nodemod.short(n))
+                            )
                     finally:
                         if patchfile:
                             os.unlink(patchfile)
@@ -274,22 +297,30 @@
         fp.close()
 
         try:
-            self.ui.system('%s %s %s' % (filter,
-                                         procutil.shellquote(headerfile),
-                                         procutil.shellquote(patchfile)),
-                           environ={'HGUSER': changelog[1],
-                                    'HGREVISION': nodemod.hex(node),
-                                    },
-                           onerr=error.Abort, errprefix=_('filter failed'),
-                           blockedtag='transplant_filter')
+            self.ui.system(
+                '%s %s %s'
+                % (
+                    filter,
+                    procutil.shellquote(headerfile),
+                    procutil.shellquote(patchfile),
+                ),
+                environ={
+                    'HGUSER': changelog[1],
+                    'HGREVISION': nodemod.hex(node),
+                },
+                onerr=error.Abort,
+                errprefix=_('filter failed'),
+                blockedtag='transplant_filter',
+            )
             user, date, msg = self.parselog(open(headerfile, 'rb'))[1:4]
         finally:
             os.unlink(headerfile)
 
         return (user, date, msg)
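
The ui.system() call above is the whole contract for --filter: the command
receives the header file and the patch file as arguments, plus HGUSER and
HGREVISION in its environment, and transplant parses the header back
afterwards. A hypothetical filter that stamps each message with its source
revision:

    #!/usr/bin/env python
    import os
    import sys

    headerfile = sys.argv[1]  # user/date/message; argv[2] is the patch
    with open(headerfile, 'a') as fp:
        fp.write('\n(transplanted from %s)\n' % os.environ['HGREVISION'])
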
 
-    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
-                 filter=None):
+    def applyone(
+        self, repo, node, cl, patchfile, merge=False, log=False, filter=None
+    ):
         '''apply the patch in patchfile to the repository as a transplant'''
         (manifest, user, (time, timezone), files, message) = cl[:5]
         date = "%d %d" % (time, timezone)
@@ -319,8 +350,12 @@
                 p2 = node
                 self.log(user, date, message, p1, p2, merge=merge)
                 self.ui.write(stringutil.forcebytestr(inst) + '\n')
-                raise TransplantError(_('fix up the working directory and run '
-                                        'hg transplant --continue'))
+                raise TransplantError(
+                    _(
+                        'fix up the working directory and run '
+                        'hg transplant --continue'
+                    )
+                )
         else:
             files = None
         if merge:
@@ -330,11 +365,18 @@
         else:
             m = match.exact(files)
 
-        n = repo.commit(message, user, date, extra=extra, match=m,
-                        editor=self.getcommiteditor())
+        n = repo.commit(
+            message,
+            user,
+            date,
+            extra=extra,
+            match=m,
+            editor=self.getcommiteditor(),
+        )
         if not n:
-            self.ui.warn(_('skipping emptied changeset %s\n') %
-                           nodemod.short(node))
+            self.ui.warn(
+                _('skipping emptied changeset %s\n') % nodemod.short(node)
+            )
             return None
         if not merge:
             self.transplants.set(n, node)
@@ -349,12 +391,14 @@
         if os.path.exists(os.path.join(self.path, 'journal')):
             n, node = self.recover(repo, source, opts)
             if n:
-                self.ui.status(_('%s transplanted as %s\n') %
-                                 (nodemod.short(node),
-                                  nodemod.short(n)))
+                self.ui.status(
+                    _('%s transplanted as %s\n')
+                    % (nodemod.short(node), nodemod.short(n))
+                )
             else:
-                self.ui.status(_('%s skipped due to empty diff\n')
-                               % (nodemod.short(node),))
+                self.ui.status(
+                    _('%s skipped due to empty diff\n') % (nodemod.short(node),)
+                )
         seriespath = os.path.join(self.path, 'series')
         if not os.path.exists(seriespath):
             self.transplants.write()
@@ -380,9 +424,10 @@
             if opts.get('parent'):
                 parent = source.lookup(opts['parent'])
                 if parent not in parents:
-                    raise error.Abort(_('%s is not a parent of %s') %
-                                      (nodemod.short(parent),
-                                       nodemod.short(node)))
+                    raise error.Abort(
+                        _('%s is not a parent of %s')
+                        % (nodemod.short(parent), nodemod.short(node))
+                    )
             else:
                 merge = True
 
@@ -390,14 +435,21 @@
         try:
             p1 = repo.dirstate.p1()
             if p1 != parent:
-                raise error.Abort(_('working directory not at transplant '
-                                   'parent %s') % nodemod.hex(parent))
+                raise error.Abort(
+                    _('working directory not at transplant ' 'parent %s')
+                    % nodemod.hex(parent)
+                )
             if merge:
                 repo.setparents(p1, parents[1])
             modified, added, removed, deleted = repo.status()[:4]
             if merge or modified or added or removed or deleted:
-                n = repo.commit(message, user, date, extra=extra,
-                                editor=self.getcommiteditor())
+                n = repo.commit(
+                    message,
+                    user,
+                    date,
+                    extra=extra,
+                    editor=self.getcommiteditor(),
+                )
                 if not n:
                     raise error.Abort(_('commit failed'))
                 if not merge:
@@ -418,8 +470,9 @@
             startctx = repo['.']
             hg.updaterepo(repo, startctx.node(), overwrite=True)
             ui.status(_("stopped the interrupted transplant\n"))
-            ui.status(_("working directory is now at %s\n") %
-                      startctx.hex()[:12])
+            ui.status(
+                _("working directory is now at %s\n") % startctx.hex()[:12]
+            )
             self.unlog()
             return 0
 
@@ -513,31 +566,35 @@
 
         return matchfn
 
+
 def hasnode(repo, node):
     try:
         return repo.changelog.rev(node) is not None
     except error.StorageError:
         return False
 
+
 def browserevs(ui, repo, nodes, opts):
     '''interactively transplant changesets'''
     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
     transplants = []
     merges = []
-    prompt = _('apply changeset? [ynmpcq?]:'
-               '$$ &yes, transplant this changeset'
-               '$$ &no, skip this changeset'
-               '$$ &merge at this changeset'
-               '$$ show &patch'
-               '$$ &commit selected changesets'
-               '$$ &quit and cancel transplant'
-               '$$ &? (show this help)')
+    prompt = _(
+        'apply changeset? [ynmpcq?]:'
+        '$$ &yes, transplant this changeset'
+        '$$ &no, skip this changeset'
+        '$$ &merge at this changeset'
+        '$$ show &patch'
+        '$$ &commit selected changesets'
+        '$$ &quit and cancel transplant'
+        '$$ &? (show this help)'
+    )
     for node in nodes:
         displayer.show(repo[node])
         action = None
         while not action:
             choice = ui.promptchoice(prompt)
-            action = 'ynmpcq?'[choice:choice + 1]
+            action = 'ynmpcq?'[choice : choice + 1]
             if action == '?':
                 for c, t in ui.extractchoices(prompt)[1]:
                     ui.write('%s: %s\n' % (c, t))
@@ -560,24 +617,41 @@
     displayer.close()
     return (transplants, merges)
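
The prompt string above follows ui.promptchoice()'s convention: the
alternatives are separated by '$$' and '&' marks each response key, which
is why the returned index can be mapped through 'ynmpcq?'. A standalone
illustration of the key extraction:

    prompt = ('apply changeset? [ynq]:'
              '$$ &yes, transplant this changeset'
              '$$ &no, skip this changeset'
              '$$ &quit and cancel transplant')
    # the character after each '&' is that alternative's response key
    keys = [part.split('&', 1)[1][0] for part in prompt.split('$$')[1:]]
    assert ''.join(keys) == 'ynq'  # promptchoice returns an index into this
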
 
-@command('transplant',
-    [('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
-    ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
-    ('a', 'all', None, _('pull all changesets up to the --branch revisions')),
-    ('p', 'prune', [], _('skip over REV'), _('REV')),
-    ('m', 'merge', [], _('merge at REV'), _('REV')),
-    ('', 'parent', '',
-     _('parent to choose when transplanting merge'), _('REV')),
-    ('e', 'edit', False, _('invoke editor on commit messages')),
-    ('', 'log', None, _('append transplant info to log message')),
-    ('', 'stop', False, _('stop interrupted transplant')),
-    ('c', 'continue', None, _('continue last transplant session '
-                              'after fixing conflicts')),
-    ('', 'filter', '',
-     _('filter changesets through command'), _('CMD'))],
-    _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
-      '[-m REV] [REV]...'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+
+@command(
+    'transplant',
+    [
+        ('s', 'source', '', _('transplant changesets from REPO'), _('REPO')),
+        ('b', 'branch', [], _('use this source changeset as head'), _('REV')),
+        (
+            'a',
+            'all',
+            None,
+            _('pull all changesets up to the --branch revisions'),
+        ),
+        ('p', 'prune', [], _('skip over REV'), _('REV')),
+        ('m', 'merge', [], _('merge at REV'), _('REV')),
+        (
+            '',
+            'parent',
+            '',
+            _('parent to choose when transplanting merge'),
+            _('REV'),
+        ),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+        ('', 'log', None, _('append transplant info to log message')),
+        ('', 'stop', False, _('stop interrupted transplant')),
+        (
+            'c',
+            'continue',
+            None,
+            _('continue last transplant session ' 'after fixing conflicts'),
+        ),
+        ('', 'filter', '', _('filter changesets through command'), _('CMD')),
+    ],
+    _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] ' '[-m REV] [REV]...'),
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def transplant(ui, repo, *revs, **opts):
     '''transplant changesets from another branch
 
@@ -632,6 +706,7 @@
     with repo.wlock():
         return _dotransplant(ui, repo, *revs, **opts)
 
+
 def _dotransplant(ui, repo, *revs, **opts):
     def incwalk(repo, csets, match=util.always):
         for node in csets:
@@ -655,24 +730,41 @@
     def checkopts(opts, revs):
         if opts.get('continue'):
             if opts.get('branch') or opts.get('all') or opts.get('merge'):
-                raise error.Abort(_('--continue is incompatible with '
-                                   '--branch, --all and --merge'))
+                raise error.Abort(
+                    _(
+                        '--continue is incompatible with '
+                        '--branch, --all and --merge'
+                    )
+                )
             return
         if opts.get('stop'):
             if opts.get('branch') or opts.get('all') or opts.get('merge'):
-                raise error.Abort(_('--stop is incompatible with '
-                                   '--branch, --all and --merge'))
+                raise error.Abort(
+                    _(
+                        '--stop is incompatible with '
+                        '--branch, --all and --merge'
+                    )
+                )
             return
-        if not (opts.get('source') or revs or
-                opts.get('merge') or opts.get('branch')):
-            raise error.Abort(_('no source URL, branch revision, or revision '
-                               'list provided'))
+        if not (
+            opts.get('source')
+            or revs
+            or opts.get('merge')
+            or opts.get('branch')
+        ):
+            raise error.Abort(
+                _(
+                    'no source URL, branch revision, or revision '
+                    'list provided'
+                )
+            )
         if opts.get('all'):
             if not opts.get('branch'):
                 raise error.Abort(_('--all requires a branch revision'))
             if revs:
-                raise error.Abort(_('--all is incompatible with a '
-                                   'revision list'))
+                raise error.Abort(
+                    _('--all is incompatible with a ' 'revision list')
+                )
 
     opts = pycompat.byteskwargs(opts)
     checkopts(opts, revs)
@@ -710,8 +802,9 @@
                 target.add(peer.lookup(r))
             except error.RepoError:
                 pass
-        source, csets, cleanupfn = bundlerepo.getremotechanges(ui, repo, peer,
-                                    onlyheads=sorted(target), force=True)
+        source, csets, cleanupfn = bundlerepo.getremotechanges(
+            ui, repo, peer, onlyheads=sorted(target), force=True
+        )
     else:
         source = repo
         heads = pycompat.maplist(source.lookup, opts.get('branch', ()))
@@ -724,8 +817,10 @@
 
         tf = tp.transplantfilter(repo, source, p1)
         if opts.get('prune'):
-            prune = set(source[r].node()
-                        for r in scmutil.revrange(source, opts.get('prune')))
+            prune = set(
+                source[r].node()
+                for r in scmutil.revrange(source, opts.get('prune'))
+            )
             matchfn = lambda x: tf(x) and x not in prune
         else:
             matchfn = tf
@@ -738,8 +833,9 @@
             if source != repo:
                 alltransplants = incwalk(source, csets, match=matchfn)
             else:
-                alltransplants = transplantwalk(source, p1, heads,
-                                                match=matchfn)
+                alltransplants = transplantwalk(
+                    source, p1, heads, match=matchfn
+                )
             if opts.get('all'):
                 revs = alltransplants
             else:
@@ -755,6 +851,7 @@
         if cleanupfn:
             cleanupfn()
 
+
 def continuecmd(ui, repo):
     """logic to resume an interrupted transplant using
     'hg continue'"""
@@ -762,8 +859,10 @@
         tp = transplanter(ui, repo, {})
         return tp.resume(repo, repo, {})
 
+
 revsetpredicate = registrar.revsetpredicate()
 
+
 @revsetpredicate('transplanted([set])')
 def revsettransplanted(repo, subset, x):
     """Transplanted changesets in set, or all transplanted changesets.
@@ -772,11 +871,14 @@
         s = revset.getset(repo, subset, x)
     else:
         s = subset
-    return smartset.baseset([r for r in s if
-        repo[r].extra().get('transplant_source')])
+    return smartset.baseset(
+        [r for r in s if repo[r].extra().get('transplant_source')]
+    )
+
 
 templatekeyword = registrar.templatekeyword()
 
+
 @templatekeyword('transplanted', requires={'ctx'})
 def kwtransplanted(context, mapping):
     """String. The node identifier of the transplanted
@@ -785,14 +887,20 @@
     n = ctx.extra().get('transplant_source')
     return n and nodemod.hex(n) or ''
 
+
 def extsetup(ui):
-    statemod.addunfinished (
-        'transplant', fname='transplant/journal', clearable=True,
+    statemod.addunfinished(
+        'transplant',
+        fname='transplant/journal',
+        clearable=True,
         continuefunc=continuecmd,
-        statushint=_('To continue:    hg transplant --continue\n'
-                     'To stop:        hg transplant --stop'),
-        cmdhint=_("use 'hg transplant --continue' or 'hg transplant --stop'")
+        statushint=_(
+            'To continue:    hg transplant --continue\n'
+            'To stop:        hg transplant --stop'
+        ),
+        cmdhint=_("use 'hg transplant --continue' or 'hg transplant --stop'"),
     )
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = [revsettransplanted, kwtransplanted]
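
Once registered, the predicate and keyword above are usable from any revset
or template, e.g. hg log -r 'transplanted()' -T '{transplanted}\n'; a
hypothetical in-process equivalent:

    def showtransplanted(ui, repo):
        # list revisions that carry a transplant_source marker,
        # via the revset predicate registered above
        for rev in repo.revs('transplanted()'):
            ui.write('%d\n' % rev)
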
--- a/hgext/uncommit.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/uncommit.py	Sun Oct 06 09:45:02 2019 -0400
@@ -42,11 +42,11 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('experimental', 'uncommitondirtywdir',
-    default=False,
+configitem(
+    'experimental', 'uncommitondirtywdir', default=False,
 )
-configitem('experimental', 'uncommit.keep',
-    default=False,
+configitem(
+    'experimental', 'uncommit.keep', default=False,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -55,8 +55,10 @@
 # leave the attribute unspecified.
 testedwith = 'ships-with-hg-core'
 
-def _commitfiltered(repo, ctx, match, keepcommit, message=None, user=None,
-                    date=None):
+
+def _commitfiltered(
+    repo, ctx, match, keepcommit, message=None, user=None, date=None
+):
     """Recommit ctx with changed files not in match. Return the new
     node identifier, or None if nothing changed.
     """
@@ -73,19 +75,24 @@
     if not keepcommit:
         return ctx.p1().node()
 
-    files = (initialfiles - exclude)
+    files = initialfiles - exclude
     # Filter copies
     copied = copiesmod.pathcopies(base, ctx)
-    copied = dict((dst, src) for dst, src in copied.iteritems()
-                  if dst in files)
+    copied = dict((dst, src) for dst, src in copied.iteritems() if dst in files)
+
     def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
         if path not in contentctx:
             return None
         fctx = contentctx[path]
-        mctx = context.memfilectx(repo, memctx, fctx.path(), fctx.data(),
-                                  fctx.islink(),
-                                  fctx.isexec(),
-                                  copysource=copied.get(path))
+        mctx = context.memfilectx(
+            repo,
+            memctx,
+            fctx.path(),
+            fctx.data(),
+            fctx.islink(),
+            fctx.isexec(),
+            copysource=copied.get(path),
+        )
         return mctx
 
     if not files:
@@ -98,25 +105,38 @@
     if not date:
         date = ctx.date()
 
-    new = context.memctx(repo,
-                         parents=[base.node(), node.nullid],
-                         text=message,
-                         files=files,
-                         filectxfn=filectxfn,
-                         user=user,
-                         date=date,
-                         extra=ctx.extra())
+    new = context.memctx(
+        repo,
+        parents=[base.node(), node.nullid],
+        text=message,
+        files=files,
+        filectxfn=filectxfn,
+        user=user,
+        date=date,
+        extra=ctx.extra(),
+    )
     return repo.commitctx(new)
 
-@command('uncommit',
-    [('', 'keep', None, _('allow an empty commit after uncommitting')),
-     ('', 'allow-dirty-working-copy', False,
-    _('allow uncommit with outstanding changes')),
-     (b'n', b'note', b'', _(b'store a note on uncommit'), _(b'TEXT'))
-    ] + commands.walkopts + commands.commitopts + commands.commitopts2
+
+@command(
+    'uncommit',
+    [
+        ('', 'keep', None, _('allow an empty commit after uncommitting')),
+        (
+            '',
+            'allow-dirty-working-copy',
+            False,
+            _('allow uncommit with outstanding changes'),
+        ),
+        (b'n', b'note', b'', _(b'store a note on uncommit'), _(b'TEXT')),
+    ]
+    + commands.walkopts
+    + commands.commitopts
+    + commands.commitopts2
     + commands.commitopts3,
     _('[OPTION]... [FILE]...'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def uncommit(ui, repo, *pats, **opts):
     """uncommit part or all of a local changeset
 
@@ -137,11 +157,14 @@
 
         m, a, r, d = repo.status()[:4]
         isdirtypath = any(set(m + a + r + d) & set(pats))
-        allowdirtywcopy = (opts['allow_dirty_working_copy'] or
-                    repo.ui.configbool('experimental', 'uncommitondirtywdir'))
+        allowdirtywcopy = opts[
+            'allow_dirty_working_copy'
+        ] or repo.ui.configbool('experimental', 'uncommitondirtywdir')
         if not allowdirtywcopy and (not pats or isdirtypath):
-            cmdutil.bailifchanged(repo, hint=_('requires '
-                                '--allow-dirty-working-copy to uncommit'))
+            cmdutil.bailifchanged(
+                repo,
+                hint=_('requires ' '--allow-dirty-working-copy to uncommit'),
+            )
         old = repo['.']
         rewriteutil.precheck(repo, [old.rev()], 'uncommit')
         if len(old.parents()) > 1:
@@ -164,15 +187,18 @@
 
             for f in sorted(badfiles):
                 if f in s.clean:
-                    hint = _(b"file was not changed in working directory "
-                             b"parent")
+                    hint = _(
+                        b"file was not changed in working directory " b"parent"
+                    )
                 elif repo.wvfs.exists(f):
                     hint = _(b"file was untracked in working directory parent")
                 else:
                     hint = _(b"file does not exist")
 
-                raise error.Abort(_(b'cannot uncommit "%s"')
-                                  % scmutil.getuipathfn(repo)(f), hint=hint)
+                raise error.Abort(
+                    _(b'cannot uncommit "%s"') % scmutil.getuipathfn(repo)(f),
+                    hint=hint,
+                )
 
         with repo.transaction('uncommit'):
             if not (opts[b'message'] or opts[b'logfile']):
@@ -185,9 +211,15 @@
                     keepcommit = opts.get('keep')
                 else:
                     keepcommit = ui.configbool('experimental', 'uncommit.keep')
-            newid = _commitfiltered(repo, old, match, keepcommit,
-                                    message=message, user=opts.get(b'user'),
-                                    date=opts.get(b'date'))
+            newid = _commitfiltered(
+                repo,
+                old,
+                match,
+                keepcommit,
+                message=message,
+                user=opts.get(b'user'),
+                date=opts.get(b'date'),
+            )
             if newid is None:
                 ui.status(_("nothing to uncommit\n"))
                 return 1
@@ -205,13 +237,19 @@
 
             scmutil.cleanupnodes(repo, mapping, 'uncommit', fixphase=True)
 
+
 def predecessormarkers(ctx):
     """yields the obsolete markers marking the given changeset as a successor"""
     for data in ctx.repo().obsstore.predecessors.get(ctx.node(), ()):
         yield obsutil.marker(ctx.repo(), data)
 
-@command('unamend', [], helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
-         helpbasic=True)
+
+@command(
+    'unamend',
+    [],
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+    helpbasic=True,
+)
 def unamend(ui, repo, **opts):
     """undo the most recent amend operation on a current changeset
 
@@ -250,14 +288,16 @@
                 return None
 
         # Make a new commit same as predctx
-        newctx = context.memctx(repo,
-                                parents=(predctx.p1(), predctx.p2()),
-                                text=predctx.description(),
-                                files=predctx.files(),
-                                filectxfn=filectxfn,
-                                user=predctx.user(),
-                                date=predctx.date(),
-                                extra=extras)
+        newctx = context.memctx(
+            repo,
+            parents=(predctx.p1(), predctx.p2()),
+            text=predctx.description(),
+            files=predctx.files(),
+            filectxfn=filectxfn,
+            user=predctx.user(),
+            date=predctx.date(),
+            extra=extras,
+        )
         newprednode = repo.commitctx(newctx)
         newpredctx = repo[newprednode]
         dirstate = repo.dirstate
--- a/hgext/win32mbcs.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/win32mbcs.py	Sun Oct 06 09:45:02 2019 -0400
@@ -68,11 +68,12 @@
 
 # Encoding.encoding may be updated by --encoding option.
 # Use a lambda to delay the resolution.
-configitem('win32mbcs', 'encoding',
-    default=lambda: encoding.encoding,
+configitem(
+    'win32mbcs', 'encoding', default=lambda: encoding.encoding,
 )
 
-_encoding = None                                # see extsetup
+_encoding = None  # see extsetup
+
 
 def decode(arg):
     if isinstance(arg, str):
@@ -89,6 +90,7 @@
             arg[k] = decode(v)
     return arg
 
+
 def encode(arg):
     if isinstance(arg, pycompat.unicode):
         return arg.encode(_encoding)
@@ -101,6 +103,7 @@
             arg[k] = encode(v)
     return arg
 
+
 def appendsep(s):
     # ensure the path ends with os.sep, appending it if necessary.
     try:
@@ -123,8 +126,11 @@
         # return value.
         return enc(func(*dec(args), **dec(kwds)))
     except UnicodeError:
-        raise error.Abort(_("[win32mbcs] filename conversion failed with"
-                         " %s encoding\n") % (_encoding))
+        raise error.Abort(
+            _("[win32mbcs] filename conversion failed with" " %s encoding\n")
+            % _encoding
+        )
+
 
 def wrapper(func, args, kwds):
     return basewrapper(func, pycompat.unicode, encode, decode, args, kwds)
@@ -133,6 +139,7 @@
 def reversewrapper(func, args, kwds):
     return basewrapper(func, str, decode, encode, args, kwds)
 
+
 def wrapperforlistdir(func, args, kwds):
     # Ensure 'path' argument ends with os.sep to avoid
     # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
@@ -143,15 +150,19 @@
         kwds['path'] = appendsep(kwds['path'])
     return func(*args, **kwds)
 
+
 def wrapname(name, wrapper):
     module, name = name.rsplit('.', 1)
     module = sys.modules[module]
     func = getattr(module, name)
+
     def f(*args, **kwds):
         return wrapper(func, args, kwds)
+
     f.__name__ = func.__name__
     setattr(module, name, f)
 
+
 # List of functions to be wrapped.
 # NOTE: os.path.dirname() and os.path.basename() are safe because
 #       they use result of os.path.split()
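
wrapname() above is a generic monkey-patching helper: resolve a dotted
name, capture the original callable, and install a closure that routes
through a wrapper. A standalone demonstration with a tracing wrapper in
place of the encode/decode ones used in this extension:

    import os
    import sys

    def wrapname(name, wrapper):
        module, name = name.rsplit('.', 1)
        module = sys.modules[module]
        func = getattr(module, name)

        def f(*args, **kwds):
            return wrapper(func, args, kwds)

        f.__name__ = func.__name__
        setattr(module, name, f)

    def tracing(func, args, kwds):
        print('calling %s%r' % (func.__name__, args))
        return func(*args, **kwds)

    wrapname('os.path.basename', tracing)
    os.path.basename('/tmp/x')  # prints: calling basename('/tmp/x',)
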
@@ -177,10 +188,12 @@
  sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
  shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
 
+
 def extsetup(ui):
     # TODO: decide use of config section for this extension
-    if ((not os.path.supports_unicode_filenames) and
-        (pycompat.sysplatform != 'cygwin')):
+    if (not os.path.supports_unicode_filenames) and (
+        pycompat.sysplatform != 'cygwin'
+    ):
         ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
         return
     # determine encoding for filename
@@ -202,5 +215,4 @@
         # command line options is not yet applied when
         # extensions.loadall() is called.
         if '--debug' in sys.argv:
-            ui.write(("[win32mbcs] activated with encoding: %s\n")
-                     % _encoding)
+            ui.write("[win32mbcs] activated with encoding: %s\n" % _encoding)
--- a/hgext/win32text.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/win32text.py	Sun Oct 06 09:45:02 2019 -0400
@@ -45,16 +45,12 @@
 
 import re
 from mercurial.i18n import _
-from mercurial.node import (
-    short,
-)
+from mercurial.node import short
 from mercurial import (
     pycompat,
     registrar,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -65,8 +61,8 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem('win32text', 'warn',
-    default=True,
+configitem(
+    'win32text', 'warn', default=True,
 )
 
 # regexp for single LF without CR preceding.
@@ -75,53 +71,67 @@
 newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
 filterstr = {'\r\n': 'clever', '\r': 'mac'}
 
+
 def checknewline(s, newline, ui=None, repo=None, filename=None):
     # warn if already has 'newline' in repository.
     # it might cause unexpected eol conversion.
     # see issue 302:
     #   https://bz.mercurial-scm.org/302
     if newline in s and ui and filename and repo:
-        ui.warn(_('WARNING: %s already has %s line endings\n'
-                  'and does not need EOL conversion by the win32text plugin.\n'
-                  'Before your next commit, please reconsider your '
-                  'encode/decode settings in \nMercurial.ini or %s.\n') %
-                (filename, newlinestr[newline], repo.vfs.join('hgrc')))
+        ui.warn(
+            _(
+                'WARNING: %s already has %s line endings\n'
+                'and does not need EOL conversion by the win32text plugin.\n'
+                'Before your next commit, please reconsider your '
+                'encode/decode settings in \nMercurial.ini or %s.\n'
+            )
+            % (filename, newlinestr[newline], repo.vfs.join('hgrc'))
+        )
+
 
 def dumbdecode(s, cmd, **kwargs):
     checknewline(s, '\r\n', **kwargs)
     # replace single LF to CRLF
     return re_single_lf.sub('\\1\r\n', s)
 
+
 def dumbencode(s, cmd):
     return s.replace('\r\n', '\n')
 
+
 def macdumbdecode(s, cmd, **kwargs):
     checknewline(s, '\r', **kwargs)
     return s.replace('\n', '\r')
 
+
 def macdumbencode(s, cmd):
     return s.replace('\r', '\n')
 
+
 def cleverdecode(s, cmd, **kwargs):
     if not stringutil.binary(s):
         return dumbdecode(s, cmd, **kwargs)
     return s
 
+
 def cleverencode(s, cmd):
     if not stringutil.binary(s):
         return dumbencode(s, cmd)
     return s
 
+
 def macdecode(s, cmd, **kwargs):
     if not stringutil.binary(s):
         return macdumbdecode(s, cmd, **kwargs)
     return s
 
+
 def macencode(s, cmd):
     if not stringutil.binary(s):
         return macdumbencode(s, cmd)
     return s
 
+
 _filters = {
     'dumbdecode:': dumbdecode,
     'dumbencode:': dumbencode,
@@ -131,7 +141,8 @@
     'macdumbencode:': macdumbencode,
     'macdecode:': macdecode,
     'macencode:': macencode,
-    }
+}
+
 
 def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
     halt = False
@@ -142,8 +153,9 @@
     # changegroup that contains an unacceptable commit followed later
     # by a commit that fixes the problem.
     tip = repo['tip']
-    for rev in pycompat.xrange(repo.changelog.tiprev(),
-                               repo[node].rev() - 1, -1):
+    for rev in pycompat.xrange(
+        repo.changelog.tiprev(), repo[node].rev() - 1, -1
+    ):
         c = repo[rev]
         for f in c.files():
             if f in seen or f not in tip or f not in c:
@@ -152,44 +164,61 @@
             data = c[f].data()
             if not stringutil.binary(data) and newline in data:
                 if not halt:
-                    ui.warn(_('attempt to commit or push text file(s) '
-                              'using %s line endings\n') %
-                              newlinestr[newline])
+                    ui.warn(
+                        _(
+                            'attempt to commit or push text file(s) '
+                            'using %s line endings\n'
+                        )
+                        % newlinestr[newline]
+                    )
                 ui.warn(_('in %s: %s\n') % (short(c.node()), f))
                 halt = True
     if halt and hooktype == 'pretxnchangegroup':
         crlf = newlinestr[newline].lower()
         filter = filterstr[newline]
-        ui.warn(_('\nTo prevent this mistake in your local repository,\n'
-                  'add to Mercurial.ini or .hg/hgrc:\n'
-                  '\n'
-                  '[hooks]\n'
-                  'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
-                  '\n'
-                  'and also consider adding:\n'
-                  '\n'
-                  '[extensions]\n'
-                  'win32text =\n'
-                  '[encode]\n'
-                  '** = %sencode:\n'
-                  '[decode]\n'
-                  '** = %sdecode:\n') % (crlf, crlf, filter, filter))
+        ui.warn(
+            _(
+                '\nTo prevent this mistake in your local repository,\n'
+                'add to Mercurial.ini or .hg/hgrc:\n'
+                '\n'
+                '[hooks]\n'
+                'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
+                '\n'
+                'and also consider adding:\n'
+                '\n'
+                '[extensions]\n'
+                'win32text =\n'
+                '[encode]\n'
+                '** = %sencode:\n'
+                '[decode]\n'
+                '** = %sdecode:\n'
+            )
+            % (crlf, crlf, filter, filter)
+        )
     return halt
 
+
 def forbidcrlf(ui, repo, hooktype, node, **kwargs):
     return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
 
+
 def forbidcr(ui, repo, hooktype, node, **kwargs):
     return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
 
+
 def reposetup(ui, repo):
     if not repo.local():
         return
     for name, fn in _filters.iteritems():
         repo.adddatafilter(name, fn)
 
+
 def extsetup(ui):
     # deprecated config: win32text.warn
     if ui.configbool('win32text', 'warn'):
-        ui.warn(_("win32text is deprecated: "
-                  "https://mercurial-scm.org/wiki/Win32TextExtension\n"))
+        ui.warn(
+            _(
+                "win32text is deprecated: "
+                "https://mercurial-scm.org/wiki/Win32TextExtension\n"
+            )
+        )
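
Two of black's rules account for nearly every hunk in win32text.py above: PEP 8's two blank lines between top-level definitions, and exploding any statement that exceeds the configured line-length limit so each operand of a wrapped expression starts its own line. A minimal, runnable sketch of the second rule, using stand-in values rather than the extension's real ui/_ helpers:

    # Implicit string concatenation is kept line-by-line, and the '%'
    # operator moves to the head of its own line once the call no
    # longer fits.  'example.txt' and 'CRLF' are invented values.
    warning = (
        'WARNING: %s already has %s line endings\n'
        'and does not need EOL conversion.\n'
    ) % ('example.txt', 'CRLF')
    print(warning, end='')
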
--- a/hgext/zeroconf/Zeroconf.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/zeroconf/Zeroconf.py	Sun Oct 06 09:45:02 2019 -0400
@@ -110,23 +110,23 @@
 _MDNS_ADDR = r'224.0.0.251'
 _MDNS_PORT = 5353
 _DNS_PORT = 53
-_DNS_TTL = 60 * 60 # one hour default TTL
+_DNS_TTL = 60 * 60  # one hour default TTL
 
-_MAX_MSG_TYPICAL = 1460 # unused
+_MAX_MSG_TYPICAL = 1460  # unused
 _MAX_MSG_ABSOLUTE = 8972
 
-_FLAGS_QR_MASK = 0x8000 # query response mask
-_FLAGS_QR_QUERY = 0x0000 # query
-_FLAGS_QR_RESPONSE = 0x8000 # response
+_FLAGS_QR_MASK = 0x8000  # query response mask
+_FLAGS_QR_QUERY = 0x0000  # query
+_FLAGS_QR_RESPONSE = 0x8000  # response
 
-_FLAGS_AA = 0x0400 # Authoritative answer
-_FLAGS_TC = 0x0200 # Truncated
-_FLAGS_RD = 0x0100 # Recursion desired
-_FLAGS_RA = 0x8000 # Recursion available
+_FLAGS_AA = 0x0400  # Authoritative answer
+_FLAGS_TC = 0x0200  # Truncated
+_FLAGS_RD = 0x0100  # Recursion desired
+_FLAGS_RA = 0x8000  # Recursion available
 
-_FLAGS_Z = 0x0040 # Zero
-_FLAGS_AD = 0x0020 # Authentic data
-_FLAGS_CD = 0x0010 # Checking disabled
+_FLAGS_Z = 0x0040  # Zero
+_FLAGS_AD = 0x0020  # Authentic data
+_FLAGS_CD = 0x0010  # Checking disabled
 
 _CLASS_IN = 1
 _CLASS_CS = 2
@@ -159,65 +159,80 @@
 
 # Mapping constants to names
 
-_CLASSES = { _CLASS_IN : "in",
-             _CLASS_CS : "cs",
-             _CLASS_CH : "ch",
-             _CLASS_HS : "hs",
-             _CLASS_NONE : "none",
-             _CLASS_ANY : "any" }
+_CLASSES = {
+    _CLASS_IN: "in",
+    _CLASS_CS: "cs",
+    _CLASS_CH: "ch",
+    _CLASS_HS: "hs",
+    _CLASS_NONE: "none",
+    _CLASS_ANY: "any",
+}
 
-_TYPES = { _TYPE_A : "a",
-           _TYPE_NS : "ns",
-           _TYPE_MD : "md",
-           _TYPE_MF : "mf",
-           _TYPE_CNAME : "cname",
-           _TYPE_SOA : "soa",
-           _TYPE_MB : "mb",
-           _TYPE_MG : "mg",
-           _TYPE_MR : "mr",
-           _TYPE_NULL : "null",
-           _TYPE_WKS : "wks",
-           _TYPE_PTR : "ptr",
-           _TYPE_HINFO : "hinfo",
-           _TYPE_MINFO : "minfo",
-           _TYPE_MX : "mx",
-           _TYPE_TXT : "txt",
-           _TYPE_AAAA : "quada",
-           _TYPE_SRV : "srv",
-           _TYPE_ANY : "any" }
+_TYPES = {
+    _TYPE_A: "a",
+    _TYPE_NS: "ns",
+    _TYPE_MD: "md",
+    _TYPE_MF: "mf",
+    _TYPE_CNAME: "cname",
+    _TYPE_SOA: "soa",
+    _TYPE_MB: "mb",
+    _TYPE_MG: "mg",
+    _TYPE_MR: "mr",
+    _TYPE_NULL: "null",
+    _TYPE_WKS: "wks",
+    _TYPE_PTR: "ptr",
+    _TYPE_HINFO: "hinfo",
+    _TYPE_MINFO: "minfo",
+    _TYPE_MX: "mx",
+    _TYPE_TXT: "txt",
+    _TYPE_AAAA: "quada",
+    _TYPE_SRV: "srv",
+    _TYPE_ANY: "any",
+}
 
 # utility functions
 
+
 def currentTimeMillis():
     """Current system time in milliseconds"""
     return time.time() * 1000
 
+
 # Exceptions
 
+
 class NonLocalNameException(Exception):
     pass
 
+
 class NonUniqueNameException(Exception):
     pass
 
+
 class NamePartTooLongException(Exception):
     pass
 
+
 class AbstractMethodException(Exception):
     pass
 
+
 class BadTypeInNameException(Exception):
     pass
 
+
 class BadDomainName(Exception):
     def __init__(self, pos):
         Exception.__init__(self, "at position %s" % pos)
 
+
 class BadDomainNameCircular(BadDomainName):
     pass
 
+
 # implementation classes
 
+
 class DNSEntry(object):
     """A DNS entry"""
 
@@ -231,8 +246,11 @@
     def __eq__(self, other):
         """Equality test on name, type, and class"""
         if isinstance(other, DNSEntry):
-            return (self.name == other.name and self.type == other.type and
-                    self.clazz == other.clazz)
+            return (
+                self.name == other.name
+                and self.type == other.type
+                and self.clazz == other.clazz
+            )
         return 0
 
     def __ne__(self, other):
@@ -244,30 +262,34 @@
         try:
             return _CLASSES[clazz]
         except KeyError:
-            return "?(%s)" % (clazz)
+            return "?(%s)" % clazz
 
     def getType(self, type):
         """Type accessor"""
         try:
             return _TYPES[type]
         except KeyError:
-            return "?(%s)" % (type)
+            return "?(%s)" % type
 
     def toString(self, hdr, other):
         """String representation with additional information"""
-        result = ("%s[%s,%s" %
-            (hdr, self.getType(self.type), self.getClazz(self.clazz)))
+        result = "%s[%s,%s" % (
+            hdr,
+            self.getType(self.type),
+            self.getClazz(self.clazz),
+        )
         if self.unique:
             result += "-unique,"
         else:
             result += ","
         result += self.name
         if other is not None:
-            result += ",%s]" % (other)
+            result += ",%s]" % other
         else:
             result += "]"
         return result
 
+
 class DNSQuestion(DNSEntry):
     """A DNS question entry"""
 
@@ -280,9 +302,11 @@
 
     def answeredBy(self, rec):
         """Returns true if the question is answered by the record"""
-        return (self.clazz == rec.clazz and
-                (self.type == rec.type or self.type == _TYPE_ANY) and
-                self.name == rec.name)
+        return (
+            self.clazz == rec.clazz
+            and (self.type == rec.type or self.type == _TYPE_ANY)
+            and self.name == rec.name
+        )
 
     def __repr__(self):
         """String representation"""
@@ -347,10 +371,14 @@
 
     def toString(self, other):
         """String representation with additional information"""
-        arg = ("%s/%s,%s" %
-            (self.ttl, self.getRemainingTTL(currentTimeMillis()), other))
+        arg = "%s/%s,%s" % (
+            self.ttl,
+            self.getRemainingTTL(currentTimeMillis()),
+            other,
+        )
         return DNSEntry.toString(self, "record", arg)
 
+
 class DNSAddress(DNSRecord):
     """A DNS address record"""
 
@@ -375,6 +403,7 @@
         except Exception:
             return self.address
 
+
 class DNSHinfo(DNSRecord):
     """A DNS host information record"""
 
@@ -398,6 +427,7 @@
         """String representation"""
         return self.cpu + " " + self.os
 
+
 class DNSPointer(DNSRecord):
     """A DNS pointer record"""
 
@@ -419,6 +449,7 @@
         """String representation"""
         return self.toString(self.alias)
 
+
 class DNSText(DNSRecord):
     """A DNS text record"""
 
@@ -443,6 +474,7 @@
         else:
             return self.toString(self.text)
 
+
 class DNSService(DNSRecord):
     """A DNS service record"""
 
@@ -463,16 +495,19 @@
     def __eq__(self, other):
         """Tests equality on priority, weight, port and server"""
         if isinstance(other, DNSService):
-            return (self.priority == other.priority and
-                    self.weight == other.weight and
-                    self.port == other.port and
-                    self.server == other.server)
+            return (
+                self.priority == other.priority
+                and self.weight == other.weight
+                and self.port == other.port
+                and self.server == other.server
+            )
         return 0
 
     def __repr__(self):
         """String representation"""
         return self.toString("%s:%s" % (self.server, self.port))
 
+
 class DNSIncoming(object):
     """Object representation of an incoming DNS packet"""
 
@@ -495,8 +530,9 @@
         """Reads header portion of packet"""
         format = '!HHHHHH'
         length = struct.calcsize(format)
-        info = struct.unpack(format,
-                             self.data[self.offset:self.offset + length])
+        info = struct.unpack(
+            format, self.data[self.offset : self.offset + length]
+        )
         self.offset += length
 
         self.id = info[0]
@@ -512,8 +548,9 @@
         length = struct.calcsize(format)
         for i in range(0, self.numquestions):
             name = self.readName()
-            info = struct.unpack(format,
-                                 self.data[self.offset:self.offset + length])
+            info = struct.unpack(
+                format, self.data[self.offset : self.offset + length]
+            )
             self.offset += length
 
             try:
@@ -526,8 +563,9 @@
         """Reads an integer from the packet"""
         format = '!I'
         length = struct.calcsize(format)
-        info = struct.unpack(format,
-                             self.data[self.offset:self.offset + length])
+        info = struct.unpack(
+            format, self.data[self.offset : self.offset + length]
+        )
         self.offset += length
         return info[0]
 
@@ -541,8 +579,9 @@
         """Reads a string of a given length from the packet"""
         format = '!%ds' % len
         length = struct.calcsize(format)
-        info = struct.unpack(format,
-                             self.data[self.offset:self.offset + length])
+        info = struct.unpack(
+            format, self.data[self.offset : self.offset + length]
+        )
         self.offset += length
         return info[0]
 
@@ -550,8 +589,9 @@
         """Reads an unsigned short from the packet"""
         format = '!H'
         length = struct.calcsize(format)
-        info = struct.unpack(format,
-                             self.data[self.offset:self.offset + length])
+        info = struct.unpack(
+            format, self.data[self.offset : self.offset + length]
+        )
         self.offset += length
         return info[0]
 
@@ -562,33 +602,48 @@
         n = self.numanswers + self.numauthorities + self.numadditionals
         for i in range(0, n):
             domain = self.readName()
-            info = struct.unpack(format,
-                                 self.data[self.offset:self.offset + length])
+            info = struct.unpack(
+                format, self.data[self.offset : self.offset + length]
+            )
             self.offset += length
 
             rec = None
             if info[0] == _TYPE_A:
-                rec = DNSAddress(domain, info[0], info[1], info[2],
-                                 self.readString(4))
+                rec = DNSAddress(
+                    domain, info[0], info[1], info[2], self.readString(4)
+                )
             elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
-                rec = DNSPointer(domain, info[0], info[1], info[2],
-                                 self.readName())
+                rec = DNSPointer(
+                    domain, info[0], info[1], info[2], self.readName()
+                )
             elif info[0] == _TYPE_TXT:
-                rec = DNSText(domain, info[0], info[1], info[2],
-                              self.readString(info[3]))
+                rec = DNSText(
+                    domain, info[0], info[1], info[2], self.readString(info[3])
+                )
             elif info[0] == _TYPE_SRV:
-                rec = DNSService(domain, info[0], info[1], info[2],
-                                 self.readUnsignedShort(),
-                                 self.readUnsignedShort(),
-                                 self.readUnsignedShort(),
-                                 self.readName())
+                rec = DNSService(
+                    domain,
+                    info[0],
+                    info[1],
+                    info[2],
+                    self.readUnsignedShort(),
+                    self.readUnsignedShort(),
+                    self.readUnsignedShort(),
+                    self.readName(),
+                )
             elif info[0] == _TYPE_HINFO:
-                rec = DNSHinfo(domain, info[0], info[1], info[2],
-                               self.readCharacterString(),
-                               self.readCharacterString())
+                rec = DNSHinfo(
+                    domain,
+                    info[0],
+                    info[1],
+                    info[2],
+                    self.readCharacterString(),
+                    self.readCharacterString(),
+                )
             elif info[0] == _TYPE_AAAA:
-                rec = DNSAddress(domain, info[0], info[1], info[2],
-                                 self.readString(16))
+                rec = DNSAddress(
+                    domain, info[0], info[1], info[2], self.readString(16)
+                )
             else:
                 # Try to ignore types we don't know about
                 # this may mean the rest of the name is
@@ -596,8 +651,8 @@
                 # so this is left for debugging.  New types
                 # encountered need to be parsed properly.
                 #
-                #print "UNKNOWN TYPE = " + str(info[0])
-                #raise BadTypeInNameException
+                # print "UNKNOWN TYPE = " + str(info[0])
+                # raise BadTypeInNameException
                 self.offset += info[3]
 
             if rec is not None:
@@ -613,7 +668,7 @@
 
     def readUTF(self, offset, len):
         """Reads a UTF-8 string of a given length from the packet"""
-        return self.data[offset:offset + len].decode('utf-8')
+        return self.data[offset : offset + len].decode('utf-8')
 
     def readName(self):
         """Reads a domain name from the packet"""
@@ -623,7 +678,7 @@
         first = off
 
         while True:
-            len = ord(self.data[off:off + 1])
+            len = ord(self.data[off : off + 1])
             off += 1
             if len == 0:
                 break
@@ -634,7 +689,7 @@
             elif t == 0xC0:
                 if next < 0:
                     next = off + 1
-                off = ((len & 0x3F) << 8) | ord(self.data[off:off + 1])
+                off = ((len & 0x3F) << 8) | ord(self.data[off : off + 1])
                 if off >= first:
                     raise BadDomainNameCircular(off)
                 first = off
@@ -781,7 +836,7 @@
         self.size -= 2
 
         length = len(''.join(self.data[index:]))
-        self.insertShort(index, length) # Here is the short we adjusted for
+        self.insertShort(index, length)  # Here is the short we adjusted for
 
     def packet(self):
         """Returns a string containing the packet's bytes
@@ -878,7 +933,7 @@
     def __init__(self, zeroconf):
         threading.Thread.__init__(self)
         self.zeroconf = zeroconf
-        self.readers = {} # maps socket to reader
+        self.readers = {}  # maps socket to reader
         self.timeout = 5
         self.condition = threading.Condition()
         self.start()
@@ -928,6 +983,7 @@
         self.condition.notify()
         self.condition.release()
 
+
 class Listener(object):
     """A Listener is used by this module to listen on the multicast
     group to which DNS messages are sent, allowing the implementation
@@ -1008,8 +1064,9 @@
 
         self.done = 0
 
-        self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR,
-                                                    _CLASS_IN))
+        self.zeroconf.addListener(
+            self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)
+        )
         self.start()
 
     def updateRecord(self, zeroconf, now, record):
@@ -1024,15 +1081,17 @@
                     oldrecord.resetTTL(record)
                 else:
                     del self.services[record.alias.lower()]
-                    callback = (lambda x:
-                        self.listener.removeService(x, self.type, record.alias))
+                    callback = lambda x: self.listener.removeService(
+                        x, self.type, record.alias
+                    )
                     self.list.append(callback)
                     return
             except Exception:
                 if not expired:
                     self.services[record.alias.lower()] = record
-                    callback = (lambda x:
-                        self.listener.addService(x, self.type, record.alias))
+                    callback = lambda x: self.listener.addService(
+                        x, self.type, record.alias
+                    )
                     self.list.append(callback)
 
             expires = record.getExpirationTime(75)
@@ -1073,8 +1132,17 @@
 class ServiceInfo(object):
     """Service information"""
 
-    def __init__(self, type, name, address=None, port=None, weight=0,
-                 priority=0, properties=None, server=None):
+    def __init__(
+        self,
+        type,
+        name,
+        address=None,
+        port=None,
+        weight=0,
+        priority=0,
+        properties=None,
+        server=None,
+    ):
         """Create a service description.
 
         type: fully qualified service type name
@@ -1122,8 +1190,9 @@
                     suffix = ''
                 list.append('='.join((key, suffix)))
             for item in list:
-                result = ''.join((result, struct.pack('!c', chr(len(item))),
-                                  item))
+                result = ''.join(
+                    (result, struct.pack('!c', chr(len(item))), item)
+                )
             self.text = result
         else:
             self.text = properties
@@ -1139,7 +1208,7 @@
             while index < end:
                 length = ord(text[index])
                 index += 1
-                strs.append(text[index:index + length])
+                strs.append(text[index : index + length])
                 index += length
 
             for s in strs:
@@ -1150,7 +1219,7 @@
                     value = 0
                 else:
                     key = s[:eindex]
-                    value = s[eindex + 1:]
+                    value = s[eindex + 1 :]
                     if value == 'true':
                         value = 1
                     elif value == 'false' or not value:
@@ -1172,7 +1241,7 @@
     def getName(self):
         """Name accessor"""
         if self.type is not None and self.name.endswith("." + self.type):
-            return self.name[:len(self.name) - len(self.type) - 1]
+            return self.name[: len(self.name) - len(self.type) - 1]
         return self.name
 
     def getAddress(self):
@@ -1207,7 +1276,7 @@
         """Updates service information from a DNS record"""
         if record is not None and not record.isExpired(now):
             if record.type == _TYPE_A:
-                #if record.name == self.name:
+                # if record.name == self.name:
                 if record.name == self.server:
                     self.address = record.address
             elif record.type == _TYPE_SRV:
@@ -1216,10 +1285,14 @@
                     self.port = record.port
                     self.weight = record.weight
                     self.priority = record.priority
-                    #self.address = None
-                    self.updateRecord(zeroconf, now,
-                                      zeroconf.cache.getByDetails(self.server,
-                                      _TYPE_A, _CLASS_IN))
+                    # self.address = None
+                    self.updateRecord(
+                        zeroconf,
+                        now,
+                        zeroconf.cache.getByDetails(
+                            self.server, _TYPE_A, _CLASS_IN
+                        ),
+                    )
             elif record.type == _TYPE_TXT:
                 if record.name == self.name:
                     self.setText(record.text)
@@ -1233,34 +1306,44 @@
         next = now + delay
         last = now + timeout
         try:
-            zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY,
-                                                   _CLASS_IN))
-            while (self.server is None or self.address is None or
-                   self.text is None):
+            zeroconf.addListener(
+                self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)
+            )
+            while (
+                self.server is None or self.address is None or self.text is None
+            ):
                 if last <= now:
                     return 0
                 if next <= now:
                     out = DNSOutgoing(_FLAGS_QR_QUERY)
-                    out.addQuestion(DNSQuestion(self.name, _TYPE_SRV,
-                                                _CLASS_IN))
+                    out.addQuestion(
+                        DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)
+                    )
                     out.addAnswerAtTime(
-                        zeroconf.cache.getByDetails(self.name,
-                                                    _TYPE_SRV,
-                                                    _CLASS_IN),
-                                        now)
-                    out.addQuestion(DNSQuestion(self.name, _TYPE_TXT,
-                                                _CLASS_IN))
+                        zeroconf.cache.getByDetails(
+                            self.name, _TYPE_SRV, _CLASS_IN
+                        ),
+                        now,
+                    )
+                    out.addQuestion(
+                        DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)
+                    )
                     out.addAnswerAtTime(
-                        zeroconf.cache.getByDetails(self.name, _TYPE_TXT,
-                                                    _CLASS_IN),
-                                        now)
+                        zeroconf.cache.getByDetails(
+                            self.name, _TYPE_TXT, _CLASS_IN
+                        ),
+                        now,
+                    )
                     if self.server is not None:
                         out.addQuestion(
-                            DNSQuestion(self.server, _TYPE_A, _CLASS_IN))
+                            DNSQuestion(self.server, _TYPE_A, _CLASS_IN)
+                        )
                         out.addAnswerAtTime(
-                            zeroconf.cache.getByDetails(self.server, _TYPE_A,
-                                                        _CLASS_IN),
-                                            now)
+                            zeroconf.cache.getByDetails(
+                                self.server, _TYPE_A, _CLASS_IN
+                            ),
+                            now,
+                        )
                     zeroconf.send(out)
                     next = now + delay
                     delay = delay * 2
@@ -1285,8 +1368,11 @@
 
     def __repr__(self):
         """String representation"""
-        result = ("service[%s,%s:%s," %
-            (self.name, socket.inet_ntoa(self.getAddress()), self.port))
+        result = "service[%s,%s:%s," % (
+            self.name,
+            socket.inet_ntoa(self.getAddress()),
+            self.port,
+        )
         if self.text is None:
             result += "None"
         else:
@@ -1303,6 +1389,7 @@
 
     Supports registration, unregistration, queries and browsing.
     """
+
     def __init__(self, bindaddress=None):
         """Creates an instance of the Zeroconf class, establishing
         multicast communications, listening and reaping threads."""
@@ -1335,8 +1422,11 @@
             # Some versions of linux raise an exception even though
             # SO_REUSEADDR and SO_REUSEPORT have been set, so ignore it
             pass
-        self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP,
-            socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'))
+        self.socket.setsockopt(
+            socket.SOL_IP,
+            socket.IP_ADD_MEMBERSHIP,
+            socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'),
+        )
 
         self.listeners = []
         self.browsers = []
@@ -1413,20 +1503,32 @@
                 now = currentTimeMillis()
                 continue
             out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
-            out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
-                _CLASS_IN, ttl, info.name), 0)
+            out.addAnswerAtTime(
+                DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0
+            )
             out.addAnswerAtTime(
                 DNSService(
-                    info.name, _TYPE_SRV,
-                    _CLASS_IN, ttl, info.priority, info.weight, info.port,
-                    info.server),
-                0)
+                    info.name,
+                    _TYPE_SRV,
+                    _CLASS_IN,
+                    ttl,
+                    info.priority,
+                    info.weight,
+                    info.port,
+                    info.server,
+                ),
+                0,
+            )
             out.addAnswerAtTime(
-                DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text),
-                0)
+                DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0
+            )
             if info.address:
-                out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
-                    _CLASS_IN, ttl, info.address), 0)
+                out.addAnswerAtTime(
+                    DNSAddress(
+                        info.server, _TYPE_A, _CLASS_IN, ttl, info.address
+                    ),
+                    0,
+                )
             self.send(out)
             i += 1
             nexttime += _REGISTER_TIME
@@ -1451,17 +1553,31 @@
                 continue
             out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
             out.addAnswerAtTime(
-                DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
+                DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0
+            )
             out.addAnswerAtTime(
-                DNSService(info.name, _TYPE_SRV,
-                           _CLASS_IN, 0, info.priority, info.weight, info.port,
-                           info.name),
-                0)
-            out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT,
-                _CLASS_IN, 0, info.text), 0)
+                DNSService(
+                    info.name,
+                    _TYPE_SRV,
+                    _CLASS_IN,
+                    0,
+                    info.priority,
+                    info.weight,
+                    info.port,
+                    info.name,
+                ),
+                0,
+            )
+            out.addAnswerAtTime(
+                DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0
+            )
             if info.address:
-                out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
-                    _CLASS_IN, 0, info.address), 0)
+                out.addAnswerAtTime(
+                    DNSAddress(
+                        info.server, _TYPE_A, _CLASS_IN, 0, info.address
+                    ),
+                    0,
+                )
             self.send(out)
             i += 1
             nexttime += _UNREGISTER_TIME
@@ -1479,18 +1595,36 @@
                     continue
                 out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                 for info in self.services.values():
-                    out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR,
-                        _CLASS_IN, 0, info.name), 0)
+                    out.addAnswerAtTime(
+                        DNSPointer(
+                            info.type, _TYPE_PTR, _CLASS_IN, 0, info.name
+                        ),
+                        0,
+                    )
                     out.addAnswerAtTime(
-                        DNSService(info.name, _TYPE_SRV,
-                                   _CLASS_IN, 0, info.priority, info.weight,
-                                   info.port, info.server),
-                        0)
-                    out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT,
-                        _CLASS_IN, 0, info.text), 0)
+                        DNSService(
+                            info.name,
+                            _TYPE_SRV,
+                            _CLASS_IN,
+                            0,
+                            info.priority,
+                            info.weight,
+                            info.port,
+                            info.server,
+                        ),
+                        0,
+                    )
+                    out.addAnswerAtTime(
+                        DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text),
+                        0,
+                    )
                     if info.address:
-                        out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A,
-                           _CLASS_IN, 0, info.address), 0)
+                        out.addAnswerAtTime(
+                            DNSAddress(
+                                info.server, _TYPE_A, _CLASS_IN, 0, info.address
+                            ),
+                            0,
+                        )
                 self.send(out)
                 i += 1
                 nexttime += _UNREGISTER_TIME
@@ -1503,11 +1637,18 @@
         i = 0
         while i < 3:
             for record in self.cache.entriesWithName(info.type):
-                if (record.type == _TYPE_PTR and not record.isExpired(now) and
-                    record.alias == info.name):
-                    if (info.name.find('.') < 0):
-                        info.name = ("%w.[%s:%d].%s" %
-                            (info.name, info.address, info.port, info.type))
+                if (
+                    record.type == _TYPE_PTR
+                    and not record.isExpired(now)
+                    and record.alias == info.name
+                ):
+                    if info.name.find('.') < 0:
+                        info.name = "%w.[%s:%d].%s" % (
+                            info.name,
+                            info.address,
+                            info.port,
+                            info.type,
+                        )
                         self.checkService(info)
                         return
                     raise NonUniqueNameException
@@ -1518,8 +1659,9 @@
             out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
             self.debug = out
             out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
-            out.addAuthoritativeAnswer(DNSPointer(info.type, _TYPE_PTR,
-                _CLASS_IN, _DNS_TTL, info.name))
+            out.addAuthoritativeAnswer(
+                DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)
+            )
             self.send(out)
             i += 1
             nexttime += _CHECK_TIME
@@ -1588,17 +1730,30 @@
                     for stype in self.servicetypes.keys():
                         if out is None:
                             out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
-                        out.addAnswer(msg,
-                                      DNSPointer(
-                                          "_services._dns-sd._udp.local.",
-                                           _TYPE_PTR, _CLASS_IN,
-                                           _DNS_TTL, stype))
+                        out.addAnswer(
+                            msg,
+                            DNSPointer(
+                                "_services._dns-sd._udp.local.",
+                                _TYPE_PTR,
+                                _CLASS_IN,
+                                _DNS_TTL,
+                                stype,
+                            ),
+                        )
                 for service in self.services.values():
                     if question.name == service.type:
                         if out is None:
                             out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
-                        out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR,
-                            _CLASS_IN, _DNS_TTL, service.name))
+                        out.addAnswer(
+                            msg,
+                            DNSPointer(
+                                service.type,
+                                _TYPE_PTR,
+                                _CLASS_IN,
+                                _DNS_TTL,
+                                service.name,
+                            ),
+                        )
             else:
                 try:
                     if out is None:
@@ -1608,32 +1763,56 @@
                     if question.type == _TYPE_A or question.type == _TYPE_ANY:
                         for service in self.services.values():
                             if service.server == question.name.lower():
-                                out.addAnswer(msg,
-                                    DNSAddress(question.name, _TYPE_A,
-                                               _CLASS_IN | _CLASS_UNIQUE,
-                                               _DNS_TTL, service.address))
+                                out.addAnswer(
+                                    msg,
+                                    DNSAddress(
+                                        question.name,
+                                        _TYPE_A,
+                                        _CLASS_IN | _CLASS_UNIQUE,
+                                        _DNS_TTL,
+                                        service.address,
+                                    ),
+                                )
 
                     service = self.services.get(question.name.lower(), None)
                     if not service:
                         continue
 
-                    if (question.type == _TYPE_SRV or
-                        question.type == _TYPE_ANY):
-                        out.addAnswer(msg,
-                            DNSService(question.name, _TYPE_SRV,
-                                       _CLASS_IN | _CLASS_UNIQUE,
-                                       _DNS_TTL, service.priority,
-                                       service.weight, service.port,
-                                       service.server))
-                    if (question.type == _TYPE_TXT or
-                        question.type == _TYPE_ANY):
-                        out.addAnswer(msg, DNSText(question.name, _TYPE_TXT,
-                            _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text))
+                    if question.type == _TYPE_SRV or question.type == _TYPE_ANY:
+                        out.addAnswer(
+                            msg,
+                            DNSService(
+                                question.name,
+                                _TYPE_SRV,
+                                _CLASS_IN | _CLASS_UNIQUE,
+                                _DNS_TTL,
+                                service.priority,
+                                service.weight,
+                                service.port,
+                                service.server,
+                            ),
+                        )
+                    if question.type == _TYPE_TXT or question.type == _TYPE_ANY:
+                        out.addAnswer(
+                            msg,
+                            DNSText(
+                                question.name,
+                                _TYPE_TXT,
+                                _CLASS_IN | _CLASS_UNIQUE,
+                                _DNS_TTL,
+                                service.text,
+                            ),
+                        )
                     if question.type == _TYPE_SRV:
                         out.addAdditionalAnswer(
-                            DNSAddress(service.server, _TYPE_A,
-                                       _CLASS_IN | _CLASS_UNIQUE,
-                                       _DNS_TTL, service.address))
+                            DNSAddress(
+                                service.server,
+                                _TYPE_A,
+                                _CLASS_IN | _CLASS_UNIQUE,
+                                _DNS_TTL,
+                                service.address,
+                            )
+                        )
                 except Exception:
                     traceback.print_exc()
 
@@ -1644,7 +1823,7 @@
     def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
         """Sends an outgoing packet."""
         # This is a quick test to see if we can parse the packets we generate
-        #temp = DNSIncoming(out.packet())
+        # temp = DNSIncoming(out.packet())
         try:
             self.socket.sendto(out.packet(), 0, (addr, port))
         except Exception:
@@ -1659,10 +1838,14 @@
             self.notifyAll()
             self.engine.notify()
             self.unregisterAllServices()
-            self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
-                socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'))
+            self.socket.setsockopt(
+                socket.SOL_IP,
+                socket.IP_DROP_MEMBERSHIP,
+                socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(r'0.0.0.0'),
+            )
             self.socket.close()
 
+
 # Test a few module features, including service registration, service
 # query (for Zoe), and service unregistration.
 
@@ -1670,21 +1853,34 @@
     print("Multicast DNS Service Discovery for Python, version", __version__)
     r = Zeroconf()
     print("1. Testing registration of a service...")
-    desc = {'version':'0.10','a':'test value', 'b':'another value'}
-    info = ServiceInfo("_http._tcp.local.",
-                       "My Service Name._http._tcp.local.",
-        socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
+    desc = {'version': '0.10', 'a': 'test value', 'b': 'another value'}
+    info = ServiceInfo(
+        "_http._tcp.local.",
+        "My Service Name._http._tcp.local.",
+        socket.inet_aton("127.0.0.1"),
+        1234,
+        0,
+        0,
+        desc,
+    )
     print("   Registering service...")
     r.registerService(info)
     print("   Registration done.")
     print("2. Testing query of service information...")
-    print("   Getting ZOE service:",
-        str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")))
+    print(
+        "   Getting ZOE service:",
+        str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")),
+    )
     print("   Query done.")
     print("3. Testing query of own service...")
-    print("   Getting self:",
-        str(r.getServiceInfo("_http._tcp.local.",
-                             "My Service Name._http._tcp.local.")))
+    print(
+        "   Getting self:",
+        str(
+            r.getServiceInfo(
+                "_http._tcp.local.", "My Service Name._http._tcp.local."
+            )
+        ),
+    )
     print("   Query done.")
     print("4. Testing unregister of service information...")
     r.unregisterService(info)
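
The Zeroconf.py hunks above also show black's slice-spacing rule: when a slice bound is a compound expression, the colon gets a space on each side (`self.data[self.offset : self.offset + length]`), while simple bounds stay tight, and every multi-line collection literal (_CLASSES, _TYPES) gains a trailing comma. A runnable sketch of the slice rule with made-up data, assuming nothing from the module itself:

    data = b'abcdefgh'
    offset, length = 2, 3
    head = data[2:5]                          # simple bounds: no spaces
    window = data[offset : offset + length]   # compound bound: spaced colon
    assert head == window == b'cde'
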
--- a/hgext/zeroconf/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext/zeroconf/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,9 +37,7 @@
     pycompat,
     ui as uimod,
 )
-from mercurial.hgweb import (
-    server as servermod
-)
+from mercurial.hgweb import server as servermod
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -52,6 +50,7 @@
 server = None
 localip = None
 
+
 def getip():
     # finds external-facing interface without sending any packets (Linux)
     try:
@@ -83,6 +82,7 @@
 
     return dumbip
 
+
 def publish(name, desc, path, port):
     global server, localip
     if not server:
@@ -98,25 +98,32 @@
     name = r"%s-%s" % (hostname, name)
 
     # advertise to browsers
-    svc = Zeroconf.ServiceInfo('_http._tcp.local.',
-                               pycompat.bytestr(name + r'._http._tcp.local.'),
-                               server = host,
-                               port = port,
-                               properties = {'description': desc,
-                                             'path': "/" + path},
-                               address = localip, weight = 0, priority = 0)
+    svc = Zeroconf.ServiceInfo(
+        '_http._tcp.local.',
+        pycompat.bytestr(name + r'._http._tcp.local.'),
+        server=host,
+        port=port,
+        properties={'description': desc, 'path': "/" + path},
+        address=localip,
+        weight=0,
+        priority=0,
+    )
     server.registerService(svc)
 
     # advertise to Mercurial clients
-    svc = Zeroconf.ServiceInfo('_hg._tcp.local.',
-                               pycompat.bytestr(name + r'._hg._tcp.local.'),
-                               server = host,
-                               port = port,
-                               properties = {'description': desc,
-                                             'path': "/" + path},
-                               address = localip, weight = 0, priority = 0)
+    svc = Zeroconf.ServiceInfo(
+        '_hg._tcp.local.',
+        pycompat.bytestr(name + r'._hg._tcp.local.'),
+        server=host,
+        port=port,
+        properties={'description': desc, 'path': "/" + path},
+        address=localip,
+        weight=0,
+        priority=0,
+    )
     server.registerService(svc)
 
+
 def zc_create_server(create_server, ui, app):
     httpd = create_server(ui, app)
     port = httpd.port
@@ -146,17 +153,22 @@
             publish(name, desc, path, port)
     return httpd
 
+
 # listen
 
+
 class listener(object):
     def __init__(self):
         self.found = {}
+
     def removeService(self, server, type, name):
         if repr(name) in self.found:
             del self.found[repr(name)]
+
     def addService(self, server, type, name):
         self.found[repr(name)] = server.getServiceInfo(type, name)
 
+
 def getzcpaths():
     ip = getip()
     if ip.startswith(r'127.'):
@@ -167,11 +179,15 @@
     time.sleep(1)
     server.close()
     for value in l.found.values():
-        name = value.name[:value.name.index(b'.')]
-        url = r"http://%s:%s%s" % (socket.inet_ntoa(value.address), value.port,
-                                   value.properties.get(r"path", r"/"))
+        name = value.name[: value.name.index(b'.')]
+        url = r"http://%s:%s%s" % (
+            socket.inet_ntoa(value.address),
+            value.port,
+            value.properties.get(r"path", r"/"),
+        )
         yield b"zc-" + name, pycompat.bytestr(url)
 
+
 def config(orig, self, section, key, *args, **kwargs):
     if section == "paths" and key.startswith("zc-"):
         for name, path in getzcpaths():
@@ -179,12 +195,14 @@
                 return path
     return orig(self, section, key, *args, **kwargs)
 
+
 def configitems(orig, self, section, *args, **kwargs):
     repos = orig(self, section, *args, **kwargs)
     if section == "paths":
         repos += getzcpaths()
     return repos
 
+
 def configsuboptions(orig, self, section, name, *args, **kwargs):
     opt, sub = orig(self, section, name, *args, **kwargs)
     if section == "paths" and name.startswith("zc-"):
@@ -195,12 +213,14 @@
                 return zcurl, sub
     return opt, sub
 
+
 def defaultdest(orig, source):
     for name, path in getzcpaths():
         if path == source:
             return name.encode(encoding.encoding)
     return orig(source)
 
+
 def cleanupafterdispatch(orig, ui, options, cmd, cmdfunc):
     try:
         return orig(ui, options, cmd, cmdfunc)
@@ -211,6 +231,7 @@
         if server:
             server.close()
 
+
 extensions.wrapfunction(dispatch, '_runcommand', cleanupafterdispatch)
 
 extensions.wrapfunction(uimod.ui, 'config', config)
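
The zeroconf/__init__.py hunks are mostly PEP 8 keyword-argument spacing: `server = host` becomes `server=host`, since `=` in a call or default takes no surrounding spaces. A runnable sketch of the resulting call-site layout; `advertise` is an invented stand-in for Zeroconf.ServiceInfo, and only the formatting mirrors the hunks above:

    def advertise(name, server=None, port=0, weight=0, priority=0):
        return (name, server, port, weight, priority)

    print(advertise('_hg._tcp.local.', server='host.local.', port=8000))
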
--- a/hgext3rd/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/hgext3rd/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,4 +1,5 @@
 # name space package to host third party extensions
 from __future__ import absolute_import
 import pkgutil
+
 __path__ = pkgutil.extend_path(__path__, __name__)
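
The only change black makes to hgext3rd/__init__.py is a blank line separating the import block from the module body. For context, the file is the standard pkgutil namespace-package idiom; the sketch below reproduces that idiom and is only meaningful inside a package's __init__.py, not as a standalone script:

    from __future__ import absolute_import
    import pkgutil

    # Merge every directory named like this package found on sys.path
    # into one package, so separately installed third-party extensions
    # become importable under it.
    __path__ = pkgutil.extend_path(__path__, __name__)
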
--- a/i18n/check-translation.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/i18n/check-translation.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,12 +10,15 @@
 scanners = []
 checkers = []
 
+
 def scanner():
     def decorator(func):
         scanners.append(func)
         return func
+
     return decorator
 
+
 def levelchecker(level, msgidpat):
     def decorator(func):
         if msgidpat:
@@ -25,8 +28,10 @@
         checkers.append((func, level))
         func.match = match
         return func
+
     return decorator
 
+
 def match(checker, pe):
     """Examine whether POEntry "pe" is target of specified checker or not
     """
@@ -39,11 +44,14 @@
             return
     return True
 
+
 ####################
 
+
 def fatalchecker(msgidpat=None):
     return levelchecker('fatal', msgidpat)
 
+
 @fatalchecker(r'\$\$')
 def promptchoice(pe):
     """Check translation of the string given to "ui.promptchoice()"
@@ -70,7 +78,10 @@
     if [c for c, i in indices if len(c) == i + 1]:
         yield "msgstr has invalid '&' followed by none"
 
+
 deprecatedpe = None
+
+
 @scanner()
 def deprecatedsetup(pofile):
     pes = [p for p in pofile if p.msgid == '(DEPRECATED)' and p.msgstr]
@@ -78,6 +89,7 @@
         global deprecatedpe
         deprecatedpe = pes[0]
 
+
 @fatalchecker(r'\(DEPRECATED\)')
 def deprecated(pe):
     """Check for DEPRECATED
@@ -109,16 +121,20 @@
     ...     msgstr= 'something (DETACERPED, foo bar)')
     >>> match(deprecated, pe)
     """
-    if not ('(DEPRECATED)' in pe.msgstr or
-            (deprecatedpe and
-             deprecatedpe.msgstr in pe.msgstr)):
+    if not (
+        '(DEPRECATED)' in pe.msgstr
+        or (deprecatedpe and deprecatedpe.msgstr in pe.msgstr)
+    ):
         yield "msgstr inconsistently translated (DEPRECATED)"
 
+
 ####################
 
+
 def warningchecker(msgidpat=None):
     return levelchecker('warning', msgidpat)
 
+
 @warningchecker()
 def taildoublecolons(pe):
     """Check equality of tail '::'-ness between msgid and msgstr
@@ -141,6 +157,7 @@
     if pe.msgid.endswith('::') != pe.msgstr.endswith('::'):
         yield "tail '::'-ness differs between msgid and msgstr"
 
+
 @warningchecker()
 def indentation(pe):
     """Check equality of initial indentation between msgid and msgstr
@@ -159,13 +176,15 @@
     if idindent != strindent:
         yield "initial indentation width differs betweeen msgid and msgstr"
 
+
 ####################
 
+
 def check(pofile, fatal=True, warning=False):
-    targetlevel = { 'fatal': fatal, 'warning': warning }
-    targetcheckers = [(checker, level)
-                      for checker, level in checkers
-                      if targetlevel[level]]
+    targetlevel = {'fatal': fatal, 'warning': warning}
+    targetcheckers = [
+        (checker, level) for checker, level in checkers if targetlevel[level]
+    ]
     if not targetcheckers:
         return []
 
@@ -176,19 +195,22 @@
         errors = []
         for checker, level in targetcheckers:
             if match(checker, pe):
-                errors.extend((level, checker.__name__, error)
-                              for error in checker(pe))
+                errors.extend(
+                    (level, checker.__name__, error) for error in checker(pe)
+                )
         if errors:
             detected.append((pe, errors))
     return detected
 
+
 ########################################
 
 if __name__ == "__main__":
     import sys
     import optparse
 
-    optparser = optparse.OptionParser("""%prog [options] pofile ...
+    optparser = optparse.OptionParser(
+        """%prog [options] pofile ...
 
 This checks Mercurial specific translation problems in specified
 '*.po' files.
@@ -207,32 +229,44 @@
     # no-foo-check
     msgid = "....."
     msgstr = "....."
-""")
-    optparser.add_option("", "--warning",
-                         help="show also warning level problems",
-                         action="store_true")
-    optparser.add_option("", "--doctest",
-                         help="run doctest of this tool, instead of check",
-                         action="store_true")
+"""
+    )
+    optparser.add_option(
+        "",
+        "--warning",
+        help="show also warning level problems",
+        action="store_true",
+    )
+    optparser.add_option(
+        "",
+        "--doctest",
+        help="run doctest of this tool, instead of check",
+        action="store_true",
+    )
     (options, args) = optparser.parse_args()
 
     if options.doctest:
         import os
+
         if 'TERM' in os.environ:
             del os.environ['TERM']
         import doctest
+
         failures, tests = doctest.testmod()
         sys.exit(failures and 1 or 0)
 
     detected = []
     warning = options.warning
     for f in args:
-        detected.extend((f, pe, errors)
-                        for pe, errors in check(polib.pofile(f),
-                                                warning=warning))
+        detected.extend(
+            (f, pe, errors)
+            for pe, errors in check(polib.pofile(f), warning=warning)
+        )
     if detected:
         for f, pe, errors in detected:
             for level, checker, error in errors:
-                sys.stderr.write('%s:%d:%s(%s): %s\n'
-                                 % (f, pe.linenum, level, checker, error))
+                sys.stderr.write(
+                    '%s:%d:%s(%s): %s\n'
+                    % (f, pe.linenum, level, checker, error)
+                )
         sys.exit(1)
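
check-translation.py illustrates how black spaces nested functions: a blank line lands between an inner `def` and the enclosing factory's `return`, and two blank lines separate the decorated top-level definitions that follow. Its registry-decorator pattern, reduced to a runnable sketch (`levelchecker` and `checkers` echo the file's own names; `always_ok` is invented):

    checkers = []


    def levelchecker(level):
        def decorator(func):
            checkers.append((func, level))
            return func

        return decorator


    @levelchecker('warning')
    def always_ok(entry):
        return []


    print(checkers)  # one (function, level) pair registered
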
--- a/i18n/polib.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/i18n/polib.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,8 +17,18 @@
 
 __author__ = 'David Jean Louis <izimobil@gmail.com>'
 __version__ = '1.0.7'
-__all__ = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry',
-           'default_encoding', 'escape', 'unescape', 'detect_encoding', ]
+__all__ = [
+    'pofile',
+    'POFile',
+    'POEntry',
+    'mofile',
+    'MOFile',
+    'MOEntry',
+    'default_encoding',
+    'escape',
+    'unescape',
+    'detect_encoding',
+]
 
 import array
 import codecs
@@ -55,6 +65,7 @@
     def u(s):
         return unicode(s, "unicode_escape")
 
+
 else:
     PY3 = True
     text_type = str
@@ -64,6 +75,8 @@
 
     def u(s):
         return s
+
+
 # }}}
 # _pofile_or_mofile {{{
 
@@ -84,11 +97,13 @@
         f,
         encoding=enc,
         check_for_duplicates=kwargs.get('check_for_duplicates', False),
-        klass=kwargs.get('klass')
+        klass=kwargs.get('klass'),
     )
     instance = parser.parse()
     instance.wrapwidth = kwargs.get('wrapwidth', 78)
     return instance
+
+
 # }}}
 # _is_file {{{
 
@@ -107,6 +122,8 @@
         return os.path.exists(filename_or_contents)
     except (ValueError, UnicodeEncodeError):
         return False
+
+
 # }}}
 # function pofile() {{{
 
@@ -139,6 +156,8 @@
         instance).
     """
     return _pofile_or_mofile(pofile, 'pofile', **kwargs)
+
+
 # }}}
 # function mofile() {{{
 
@@ -172,6 +191,8 @@
         instance).
     """
     return _pofile_or_mofile(mofile, 'mofile', **kwargs)
+
+
 # }}}
 # function detect_encoding() {{{
 
@@ -229,6 +250,8 @@
                     return enc
         f.close()
     return default_encoding
+
+
 # }}}
 # function escape() {{{
 
@@ -238,11 +261,15 @@
     Escapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
     the given string ``st`` and returns it.
     """
-    return st.replace('\\', r'\\')\
-             .replace('\t', r'\t')\
-             .replace('\r', r'\r')\
-             .replace('\n', r'\n')\
-             .replace('\"', r'\"')
+    return (
+        st.replace('\\', r'\\')
+        .replace('\t', r'\t')
+        .replace('\r', r'\r')
+        .replace('\n', r'\n')
+        .replace('\"', r'\"')
+    )
+
+
 # }}}
 # function unescape() {{{
 
@@ -252,6 +279,7 @@
     Unescapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
     the given string ``st`` and returns it.
     """
+
     def unescape_repl(m):
         m = m.group(1)
         if m == 'n':
@@ -263,7 +291,10 @@
         if m == '\\':
             return '\\'
         return m  # handles escaped double quote
+
     return re.sub(r'\\(\\|n|t|r|")', unescape_repl, st)
+
+
 # }}}
 # class _BaseFile {{{
 
@@ -317,8 +348,9 @@
         Returns the unicode representation of the file.
         """
         ret = []
-        entries = [self.metadata_as_entry()] + \
-                  [e for e in self if not e.obsolete]
+        entries = [self.metadata_as_entry()] + [
+            e for e in self if not e.obsolete
+        ]
         for entry in entries:
             ret.append(entry.__unicode__(self.wrapwidth))
         for entry in self.obsolete_entries():
@@ -326,14 +358,17 @@
         ret = u('\n').join(ret)
 
         assert isinstance(ret, text_type)
-        #if type(ret) != text_type:
+        # if type(ret) != text_type:
         #    return unicode(ret, self.encoding)
         return ret
 
     if PY3:
+
         def __str__(self):
             return self.__unicode__()
+
     else:
+
         def __str__(self):
             """
             Returns the string representation of the file.
@@ -353,8 +388,10 @@
         ``entry``
             an instance of :class:`~polib._BaseEntry`.
         """
-        return self.find(entry.msgid, by='msgid', msgctxt=entry.msgctxt) \
+        return (
+            self.find(entry.msgid, by='msgid', msgctxt=entry.msgctxt)
             is not None
+        )
 
     def __eq__(self, other):
         return str(self) == str(other)
@@ -439,8 +476,9 @@
         if self.fpath is None and fpath:
             self.fpath = fpath
 
-    def find(self, st, by='msgid', include_obsolete_entries=False,
-             msgctxt=False):
+    def find(
+        self, st, by='msgid', include_obsolete_entries=False, msgctxt=False
+    ):
         """
         Find the entry which msgid (or property identified by the ``by``
         argument) matches the string ``st``.
@@ -490,7 +528,7 @@
             'Content-Type',
             'Content-Transfer-Encoding',
             'Language',
-            'Plural-Forms'
+            'Plural-Forms',
         ]
         ordered_data = []
         for data in data_order:
@@ -524,10 +562,11 @@
                 return -1
             else:
                 return 0
+
         # add metadata entry
         entries.sort(key=lambda o: o.msgctxt or o.msgid)
         mentry = self.metadata_as_entry()
-        #mentry.msgstr = mentry.msgstr.replace('\\n', '').lstrip()
+        # mentry.msgstr = mentry.msgstr.replace('\\n', '').lstrip()
         entries = [mentry] + entries
         entries_len = len(entries)
         ids, strs = b(''), b('')
@@ -578,8 +617,8 @@
             # start of value index
             7 * 4 + entries_len * 8,
             # size and offset of hash table, we don't use hash tables
-            0, keystart
-
+            0,
+            keystart,
         )
         if PY3 and sys.version_info.minor > 1:  # python 3.2 or superior
             output += array.array("i", offsets).tobytes()
@@ -597,6 +636,8 @@
         if isinstance(mixed, text_type):
             mixed = mixed.encode(self.encoding)
         return mixed
+
+
 # }}}
 # class POFile {{{
 
@@ -658,8 +699,11 @@
         """
         Convenience method that returns the list of untranslated entries.
         """
-        return [e for e in self if not e.translated() and not e.obsolete
-                and not 'fuzzy' in e.flags]
+        return [
+            e
+            for e in self
+            if not e.translated() and not e.obsolete and not 'fuzzy' in e.flags
+        ]
 
     def fuzzy_entries(self):
         """
@@ -703,6 +747,8 @@
         for entry in self:
             if entry.msgid not in refpot_msgids:
                 entry.obsolete = True
+
+
 # }}}
 # class MOFile {{{
 
@@ -713,8 +759,9 @@
     This class inherits the :class:`~polib._BaseFile` class and, by
     extension, the python ``list`` type.
     """
-    MAGIC = 0x950412de
-    MAGIC_SWAPPED = 0xde120495
+
+    MAGIC = 0x950412DE
+    MAGIC_SWAPPED = 0xDE120495
 
     def __init__(self, *args, **kwargs):
         """
@@ -776,6 +823,8 @@
         Convenience method to keep the same interface with POFile instances.
         """
         return []
+
+
 # }}}
 # class _BaseEntry {{{
 
@@ -831,14 +880,16 @@
         ret = []
         # write the msgctxt if any
         if self.msgctxt is not None:
-            ret += self._str_field("msgctxt", delflag, "", self.msgctxt,
-                                   wrapwidth)
+            ret += self._str_field(
+                "msgctxt", delflag, "", self.msgctxt, wrapwidth
+            )
         # write the msgid
         ret += self._str_field("msgid", delflag, "", self.msgid, wrapwidth)
         # write the msgid_plural if any
         if self.msgid_plural:
-            ret += self._str_field("msgid_plural", delflag, "",
-                                   self.msgid_plural, wrapwidth)
+            ret += self._str_field(
+                "msgid_plural", delflag, "", self.msgid_plural, wrapwidth
+            )
         if self.msgstr_plural:
             # write the msgstr_plural if any
             msgstrs = self.msgstr_plural
@@ -847,20 +898,25 @@
             for index in keys:
                 msgstr = msgstrs[index]
                 plural_index = '[%s]' % index
-                ret += self._str_field("msgstr", delflag, plural_index, msgstr,
-                                       wrapwidth)
+                ret += self._str_field(
+                    "msgstr", delflag, plural_index, msgstr, wrapwidth
+                )
         else:
             # otherwise write the msgstr
-            ret += self._str_field("msgstr", delflag, "", self.msgstr,
-                                   wrapwidth)
+            ret += self._str_field(
+                "msgstr", delflag, "", self.msgstr, wrapwidth
+            )
         ret.append('')
         ret = u('\n').join(ret)
         return ret
 
     if PY3:
+
         def __str__(self):
             return self.__unicode__()
+
     else:
+
         def __str__(self):
             """
             Returns the string representation of the entry.
@@ -870,8 +926,7 @@
     def __eq__(self, other):
         return str(self) == str(other)
 
-    def _str_field(self, fieldname, delflag, plural_index, field,
-                   wrapwidth=78):
+    def _str_field(self, fieldname, delflag, plural_index, field, wrapwidth=78):
         lines = field.splitlines(True)
         if len(lines) > 1:
             lines = [''] + lines  # start with initial empty line
@@ -888,23 +943,30 @@
             real_wrapwidth = wrapwidth - flength + specialchars_count
             if wrapwidth > 0 and len(field) > real_wrapwidth:
                 # Wrap the line but take field name into account
-                lines = [''] + [unescape(item) for item in wrap(
-                    escaped_field,
-                    wrapwidth - 2,  # 2 for quotes ""
-                    drop_whitespace=False,
-                    break_long_words=False
-                )]
+                lines = [''] + [
+                    unescape(item)
+                    for item in wrap(
+                        escaped_field,
+                        wrapwidth - 2,  # 2 for quotes ""
+                        drop_whitespace=False,
+                        break_long_words=False,
+                    )
+                ]
             else:
                 lines = [field]
         if fieldname.startswith('previous_'):
             # quick and dirty trick to get the real field name
             fieldname = fieldname[9:]
 
-        ret = ['%s%s%s "%s"' % (delflag, fieldname, plural_index,
-                                escape(lines.pop(0)))]
+        ret = [
+            '%s%s%s "%s"'
+            % (delflag, fieldname, plural_index, escape(lines.pop(0)))
+        ]
         for line in lines:
             ret.append('%s"%s"' % (delflag, escape(line)))
         return ret
+
+
 # }}}
 # class POEntry {{{
 
@@ -972,7 +1034,7 @@
                             wrapwidth,
                             initial_indent=c[1],
                             subsequent_indent=c[1],
-                            break_long_words=False
+                            break_long_words=False,
                         )
                     else:
                         ret.append('%s%s' % (c[1], comment))
@@ -991,13 +1053,16 @@
                 # what we want for filenames, so the dirty hack is to
                 # temporally replace hyphens with a char that a file cannot
                 # contain, like "*"
-                ret += [l.replace('*', '-') for l in wrap(
-                    filestr.replace('-', '*'),
-                    wrapwidth,
-                    initial_indent='#: ',
-                    subsequent_indent='#: ',
-                    break_long_words=False
-                )]
+                ret += [
+                    l.replace('*', '-')
+                    for l in wrap(
+                        filestr.replace('-', '*'),
+                        wrapwidth,
+                        initial_indent='#: ',
+                        subsequent_indent='#: ',
+                        break_long_words=False,
+                    )
+                ]
             else:
                 ret.append('#: ' + filestr)
 
@@ -1006,8 +1071,7 @@
             ret.append('#, %s' % ', '.join(self.flags))
 
         # previous context and previous msgid/msgid_plural
-        fields = ['previous_msgctxt', 'previous_msgid',
-                  'previous_msgid_plural']
+        fields = ['previous_msgctxt', 'previous_msgid', 'previous_msgid_plural']
         for f in fields:
             val = getattr(self, f)
             if val:
@@ -1017,7 +1081,7 @@
         ret = u('\n').join(ret)
 
         assert isinstance(ret, text_type)
-        #if type(ret) != types.UnicodeType:
+        # if type(ret) != types.UnicodeType:
         #    return unicode(ret, self.encoding)
         return ret
 
@@ -1131,6 +1195,8 @@
 
     def __hash__(self):
         return hash((self.msgid, self.msgstr))
+
+
 # }}}
 # class MOEntry {{{
 
@@ -1139,6 +1205,7 @@
     """
     Represents a mo file entry.
     """
+
     def __init__(self, *args, **kwargs):
         """
         Constructor, accepts the following keyword arguments,
@@ -1168,6 +1235,7 @@
     def __hash__(self):
         return hash((self.msgid, self.msgstr))
 
+
 # }}}
 # class _POFileParser {{{
 
@@ -1211,7 +1279,7 @@
         self.instance = klass(
             pofile=pofile,
             encoding=enc,
-            check_for_duplicates=kwargs.get('check_for_duplicates', False)
+            check_for_duplicates=kwargs.get('check_for_duplicates', False),
         )
         self.transitions = {}
         self.current_line = 0
@@ -1238,25 +1306,61 @@
         #     * MS: a msgstr
         #     * MX: a msgstr plural
         #     * MC: a msgid or msgstr continuation line
-        all = ['st', 'he', 'gc', 'oc', 'fl', 'ct', 'pc', 'pm', 'pp', 'tc',
-               'ms', 'mp', 'mx', 'mi']
+        all = [
+            'st',
+            'he',
+            'gc',
+            'oc',
+            'fl',
+            'ct',
+            'pc',
+            'pm',
+            'pp',
+            'tc',
+            'ms',
+            'mp',
+            'mx',
+            'mi',
+        ]
 
-        self.add('tc', ['st', 'he'],                                     'he')
-        self.add('tc', ['gc', 'oc', 'fl', 'tc', 'pc', 'pm', 'pp', 'ms',
-                        'mp', 'mx', 'mi'],                               'tc')
-        self.add('gc', all,                                              'gc')
-        self.add('oc', all,                                              'oc')
-        self.add('fl', all,                                              'fl')
-        self.add('pc', all,                                              'pc')
-        self.add('pm', all,                                              'pm')
-        self.add('pp', all,                                              'pp')
-        self.add('ct', ['st', 'he', 'gc', 'oc', 'fl', 'tc', 'pc', 'pm',
-                        'pp', 'ms', 'mx'],                               'ct')
-        self.add('mi', ['st', 'he', 'gc', 'oc', 'fl', 'ct', 'tc', 'pc',
-                 'pm', 'pp', 'ms', 'mx'],                                'mi')
-        self.add('mp', ['tc', 'gc', 'pc', 'pm', 'pp', 'mi'],             'mp')
-        self.add('ms', ['mi', 'mp', 'tc'],                               'ms')
-        self.add('mx', ['mi', 'mx', 'mp', 'tc'],                         'mx')
+        self.add('tc', ['st', 'he'], 'he')
+        self.add(
+            'tc',
+            ['gc', 'oc', 'fl', 'tc', 'pc', 'pm', 'pp', 'ms', 'mp', 'mx', 'mi'],
+            'tc',
+        )
+        self.add('gc', all, 'gc')
+        self.add('oc', all, 'oc')
+        self.add('fl', all, 'fl')
+        self.add('pc', all, 'pc')
+        self.add('pm', all, 'pm')
+        self.add('pp', all, 'pp')
+        self.add(
+            'ct',
+            ['st', 'he', 'gc', 'oc', 'fl', 'tc', 'pc', 'pm', 'pp', 'ms', 'mx'],
+            'ct',
+        )
+        self.add(
+            'mi',
+            [
+                'st',
+                'he',
+                'gc',
+                'oc',
+                'fl',
+                'ct',
+                'tc',
+                'pc',
+                'pm',
+                'pp',
+                'ms',
+                'mx',
+            ],
+            'mi',
+        )
+        self.add('mp', ['tc', 'gc', 'pc', 'pm', 'pp', 'mi'], 'mp')
+        self.add('ms', ['mi', 'mp', 'tc'], 'ms')
+        self.add('mx', ['mi', 'mx', 'mp', 'tc'], 'mx')
         self.add('mc', ['ct', 'mi', 'mp', 'ms', 'mx', 'pm', 'pp', 'pc'], 'mc')
 
     def parse(self):
@@ -1300,11 +1404,13 @@
             # Take care of keywords like
             # msgid, msgid_plural, msgctxt & msgstr.
             if tokens[0] in keywords and nb_tokens > 1:
-                line = line[len(tokens[0]):].lstrip()
+                line = line[len(tokens[0]) :].lstrip()
                 if re.search(r'([^\\]|^)"', line[1:-1]):
-                    raise IOError('Syntax error in po file %s (line %s): '
-                                  'unescaped double quote found' %
-                                  (self.instance.fpath, self.current_line))
+                    raise IOError(
+                        'Syntax error in po file %s (line %s): '
+                        'unescaped double quote found'
+                        % (self.instance.fpath, self.current_line)
+                    )
                 self.current_token = line
                 self.process(keywords[tokens[0]])
                 continue
@@ -1320,9 +1426,11 @@
             elif line[:1] == '"':
                 # we are on a continuation line
                 if re.search(r'([^\\]|^)"', line[1:-1]):
-                    raise IOError('Syntax error in po file %s (line %s): '
-                                  'unescaped double quote found' %
-                                  (self.instance.fpath, self.current_line))
+                    raise IOError(
+                        'Syntax error in po file %s (line %s): '
+                        'unescaped double quote found'
+                        % (self.instance.fpath, self.current_line)
+                    )
                 self.process('mc')
 
             elif line[:7] == 'msgstr[':
@@ -1349,8 +1457,10 @@
 
             elif tokens[0] == '#|':
                 if nb_tokens <= 1:
-                    raise IOError('Syntax error in po file %s (line %s)' %
-                                  (self.instance.fpath, self.current_line))
+                    raise IOError(
+                        'Syntax error in po file %s (line %s)'
+                        % (self.instance.fpath, self.current_line)
+                    )
 
                 # Remove the marker and any whitespace right after that.
                 line = line[2:].lstrip()
@@ -1363,30 +1473,38 @@
 
                 if nb_tokens == 2:
                     # Invalid continuation line.
-                    raise IOError('Syntax error in po file %s (line %s): '
-                                  'invalid continuation line' %
-                                  (self.instance.fpath, self.current_line))
+                    raise IOError(
+                        'Syntax error in po file %s (line %s): '
+                        'invalid continuation line'
+                        % (self.instance.fpath, self.current_line)
+                    )
 
                 # we are on a "previous translation" comment line,
                 if tokens[1] not in prev_keywords:
                     # Unknown keyword in previous translation comment.
-                    raise IOError('Syntax error in po file %s (line %s): '
-                                  'unknown keyword %s' %
-                                  (self.instance.fpath, self.current_line,
-                                   tokens[1]))
+                    raise IOError(
+                        'Syntax error in po file %s (line %s): '
+                        'unknown keyword %s'
+                        % (self.instance.fpath, self.current_line, tokens[1])
+                    )
 
                 # Remove the keyword and any whitespace
                 # between it and the starting quote.
-                line = line[len(tokens[1]):].lstrip()
+                line = line[len(tokens[1]) :].lstrip()
                 self.current_token = line
                 self.process(prev_keywords[tokens[1]])
 
             else:
-                raise IOError('Syntax error in po file %s (line %s)' %
-                              (self.instance.fpath, self.current_line))
+                raise IOError(
+                    'Syntax error in po file %s (line %s)'
+                    % (self.instance.fpath, self.current_line)
+                )
 
-        if self.current_entry and len(tokens) > 0 and \
-           not tokens[0].startswith('#'):
+        if (
+            self.current_entry
+            and len(tokens) > 0
+            and not tokens[0].startswith('#')
+        ):
             # since entries are added when another entry is found, we must add
             # the last entry here (only if there are lines). Trailing comments
             # are ignored
@@ -1449,8 +1567,9 @@
             if action():
                 self.current_state = state
         except Exception:
-            raise IOError('Syntax error in po file (line %s)' %
-                          self.current_line)
+            raise IOError(
+                'Syntax error in po file (line %s)' % self.current_line
+            )
 
     # state handlers
 
@@ -1507,8 +1626,9 @@
         if self.current_state in ['mc', 'ms', 'mx']:
             self.instance.append(self.current_entry)
             self.current_entry = POEntry(linenum=self.current_line)
-        self.current_entry.flags += [c.strip() for c in
-                                     self.current_token[3:].split(',')]
+        self.current_entry.flags += [
+            c.strip() for c in self.current_token[3:].split(',')
+        ]
         return True
 
     def handle_pp(self):
@@ -1516,8 +1636,9 @@
         if self.current_state in ['mc', 'ms', 'mx']:
             self.instance.append(self.current_entry)
             self.current_entry = POEntry(linenum=self.current_line)
-        self.current_entry.previous_msgid_plural = \
-            unescape(self.current_token[1:-1])
+        self.current_entry.previous_msgid_plural = unescape(
+            self.current_token[1:-1]
+        )
         return True
 
     def handle_pm(self):
@@ -1525,8 +1646,7 @@
         if self.current_state in ['mc', 'ms', 'mx']:
             self.instance.append(self.current_entry)
             self.current_entry = POEntry(linenum=self.current_line)
-        self.current_entry.previous_msgid = \
-            unescape(self.current_token[1:-1])
+        self.current_entry.previous_msgid = unescape(self.current_token[1:-1])
         return True
 
     def handle_pc(self):
@@ -1534,8 +1654,7 @@
         if self.current_state in ['mc', 'ms', 'mx']:
             self.instance.append(self.current_entry)
             self.current_entry = POEntry(linenum=self.current_line)
-        self.current_entry.previous_msgctxt = \
-            unescape(self.current_token[1:-1])
+        self.current_entry.previous_msgctxt = unescape(self.current_token[1:-1])
         return True
 
     def handle_ct(self):
@@ -1568,7 +1687,7 @@
     def handle_mx(self):
         """Handle a msgstr plural."""
         index = self.current_token[7]
-        value = self.current_token[self.current_token.find('"') + 1:-1]
+        value = self.current_token[self.current_token.find('"') + 1 : -1]
         self.current_entry.msgstr_plural[int(index)] = unescape(value)
         self.msgstr_index = int(index)
         return True
@@ -1594,6 +1713,8 @@
             self.current_entry.previous_msgctxt += token
         # don't change the current state
         return False
+
+
 # }}}
 # class _MOFileParser {{{
 
@@ -1628,7 +1749,7 @@
         self.instance = klass(
             fpath=mofile,
             encoding=kwargs.get('encoding', default_encoding),
-            check_for_duplicates=kwargs.get('check_for_duplicates', False)
+            check_for_duplicates=kwargs.get('check_for_duplicates', False),
         )
 
     def __del__(self):
@@ -1699,8 +1820,9 @@
                 entry = self._build_entry(
                     msgid=msgid_tokens[0],
                     msgid_plural=msgid_tokens[1],
-                    msgstr_plural=dict((k, v) for k, v in
-                                       enumerate(msgstr.split(b('\0'))))
+                    msgstr_plural=dict(
+                        (k, v) for k, v in enumerate(msgstr.split(b('\0')))
+                    ),
                 )
             else:
                 entry = self._build_entry(msgid=msgid, msgstr=msgstr)
@@ -1709,8 +1831,9 @@
         self.fhandle.close()
         return self.instance
 
-    def _build_entry(self, msgid, msgstr=None, msgid_plural=None,
-                     msgstr_plural=None):
+    def _build_entry(
+        self, msgid, msgstr=None, msgid_plural=None, msgstr_plural=None
+    ):
         msgctxt_msgid = msgid.split(b('\x04'))
         encoding = self.instance.encoding
         if len(msgctxt_msgid) > 1:
@@ -1740,6 +1863,8 @@
         if len(tup) == 1:
             return tup[0]
         return tup
+
+
 # }}}
 # class TextWrapper {{{
 
@@ -1749,6 +1874,7 @@
     Subclass of textwrap.TextWrapper that backports the
     drop_whitespace option.
     """
+
     def __init__(self, *args, **kwargs):
         drop_whitespace = kwargs.pop('drop_whitespace', True)
         textwrap.TextWrapper.__init__(self, *args, **kwargs)
@@ -1823,6 +1949,8 @@
                 lines.append(indent + ''.join(cur_line))
 
         return lines
+
+
 # }}}
 # function wrap() {{{
 
@@ -1835,4 +1963,5 @@
         return TextWrapper(width=width, **kwargs).wrap(text)
     return textwrap.wrap(text, width=width, **kwargs)
 
+
 # }}}
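
The polib hunks above reformat a matched pair of helpers: escape() folds a string's backslashes, tabs, carriage returns, newlines and double quotes into PO-file escapes, and unescape() reverses them. A minimal standalone sketch of that round trip, assuming only the stdlib re module (the function bodies follow the reformatted hunks; the final assertion is an illustrative check, not part of polib):

import re

def escape(st):
    # the backslash must be replaced first, or it would re-escape the
    # backslashes introduced by the later replacements
    return (
        st.replace('\\', r'\\')
        .replace('\t', r'\t')
        .replace('\r', r'\r')
        .replace('\n', r'\n')
        .replace('\"', r'\"')
    )

def unescape(st):
    def unescape_repl(m):
        m = m.group(1)
        if m == 'n':
            return '\n'
        if m == 't':
            return '\t'
        if m == 'r':
            return '\r'
        if m == '\\':
            return '\\'
        return m  # handles escaped double quote
    return re.sub(r'\\(\\|n|t|r|")', unescape_repl, st)

assert unescape(escape('a\tb\n"c"')) == 'a\tb\n"c"'
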
--- a/mercurial/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,6 +11,7 @@
 
 # Allow 'from mercurial import demandimport' to keep working.
 import hgdemandimport
+
 demandimport = hgdemandimport
 
 __all__ = []
@@ -27,6 +28,7 @@
 
     class hgpathentryfinder(importlib.abc.MetaPathFinder):
         """A sys.meta_path finder that uses a custom module loader."""
+
         def find_spec(self, fullname, path, target=None):
             # Only handle Mercurial-related modules.
             if not fullname.startswith(('mercurial.', 'hgext.')):
@@ -78,7 +80,7 @@
             loader = hgloader(spec.name, spec.origin)
             # Can't use util.safehasattr here because that would require
             # importing util, and we're in import code.
-            if hasattr(spec.loader, 'loader'): # hasattr-py3-only
+            if hasattr(spec.loader, 'loader'):  # hasattr-py3-only
                 # This is a nested loader (maybe a lazy loader?)
                 spec.loader.loader = loader
             else:
@@ -184,9 +186,13 @@
 
             # Insert compatibility imports at "from __future__ import" line.
             # No '\n' should be added to preserve line numbers.
-            if (t.type == token.NAME and t.string == 'import' and
-                all(u.type == token.NAME for u in tokens[i - 2:i]) and
-                [u.string for u in tokens[i - 2:i]] == ['from', '__future__']):
+            if (
+                t.type == token.NAME
+                and t.string == 'import'
+                and all(u.type == token.NAME for u in tokens[i - 2 : i])
+                and [u.string for u in tokens[i - 2 : i]]
+                == ['from', '__future__']
+            ):
                 futureimpline = True
             if t.type == token.NEWLINE and futureimpline:
                 futureimpline = False
@@ -194,14 +200,17 @@
                     yield t
                     continue
                 r, c = t.start
-                l = (b'; from mercurial.pycompat import '
-                     b'delattr, getattr, hasattr, setattr, '
-                     b'open, unicode\n')
+                l = (
+                    b'; from mercurial.pycompat import '
+                    b'delattr, getattr, hasattr, setattr, '
+                    b'open, unicode\n'
+                )
                 for u in tokenize.tokenize(io.BytesIO(l).readline):
                     if u.type in (tokenize.ENCODING, token.ENDMARKER):
                         continue
                     yield u._replace(
-                        start=(r, c + u.start[1]), end=(r, c + u.end[1]))
+                        start=(r, c + u.start[1]), end=(r, c + u.end[1])
+                    )
                 continue
 
             # This looks like a function call.
@@ -209,8 +218,12 @@
                 fn = t.string
 
                 # *attr() builtins don't accept byte strings to 2nd argument.
-                if (fn in ('getattr', 'setattr', 'hasattr', 'safehasattr') and
-                        not _isop(i - 1, '.')):
+                if fn in (
+                    'getattr',
+                    'setattr',
+                    'hasattr',
+                    'safehasattr',
+                ) and not _isop(i - 1, '.'):
                     arg1idx = _findargnofcall(1)
                     if arg1idx is not None:
                         _ensureunicode(arg1idx)
@@ -225,9 +238,10 @@
 
                 # It changes iteritems/values to items/values as they are not
                 # present in Python 3 world.
-                elif (fn in ('iteritems', 'itervalues') and
-                      not (tokens[i - 1].type == token.NAME and
-                           tokens[i - 1].string == 'def')):
+                elif fn in ('iteritems', 'itervalues') and not (
+                    tokens[i - 1].type == token.NAME
+                    and tokens[i - 1].string == 'def'
+                ):
                     yield t._replace(string=fn[4:])
                     continue
 
@@ -269,6 +283,7 @@
         The added header has the form ``HG<VERSION>``. That is a literal
         ``HG`` with 2 binary bytes indicating the transformation version.
         """
+
         def get_data(self, path):
             data = super(hgloader, self).get_data(path)
 
--- a/mercurial/ancestor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/ancestor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
 
 parsers = policy.importmod(r'parsers')
 
+
 def commonancestorsheads(pfunc, *nodes):
     """Returns a set with the heads of all common ancestors of all nodes,
     heads(::nodes[0] and ::nodes[1] and ...) .
@@ -74,6 +75,7 @@
                 seen[p] = sv
     return gca
 
+
 def ancestors(pfunc, *orignodes):
     """
     Returns the common ancestors of a and b that are furthest from a
@@ -81,6 +83,7 @@
 
     pfunc must return a list of parent vertices for a given vertex.
     """
+
     def deepest(nodes):
         interesting = {}
         count = max(nodes) + 1
@@ -143,12 +146,14 @@
         return gca
     return deepest(gca)
 
+
 class incrementalmissingancestors(object):
     '''persistent state used to calculate missing ancestors incrementally
 
     Although similar in spirit to lazyancestors below, this is a separate class
     because trying to support contains and missingancestors operations with the
     same internal data structures adds needless complexity.'''
+
     def __init__(self, pfunc, bases):
         self.bases = set(bases)
         if not self.bases:
@@ -266,6 +271,7 @@
         missing.reverse()
         return missing
 
+
 # Extracted from lazyancestors.__iter__ to avoid a reference cycle
 def _lazyancestorsiter(parentrevs, initrevs, stoprev, inclusive):
     seen = {nullrev}
@@ -310,6 +316,7 @@
             heappush(visit, -p2)
             see(p2)
 
+
 class lazyancestors(object):
     def __init__(self, pfunc, revs, stoprev=0, inclusive=False):
         """Create a new object generating ancestors for the given revs. Does
@@ -329,10 +336,9 @@
         self._inclusive = inclusive
 
         self._containsseen = set()
-        self._containsiter = _lazyancestorsiter(self._parentrevs,
-                                                self._initrevs,
-                                                self._stoprev,
-                                                self._inclusive)
+        self._containsiter = _lazyancestorsiter(
+            self._parentrevs, self._initrevs, self._stoprev, self._inclusive
+        )
 
     def __nonzero__(self):
         """False if the set is empty, True otherwise."""
@@ -355,8 +361,9 @@
 
         If inclusive is True, the source revisions are also yielded. The
         reverse revision number order is still enforced."""
-        return _lazyancestorsiter(self._parentrevs, self._initrevs,
-                                  self._stoprev, self._inclusive)
+        return _lazyancestorsiter(
+            self._parentrevs, self._initrevs, self._stoprev, self._inclusive
+        )
 
     def __contains__(self, target):
         """Test whether target is an ancestor of self._initrevs."""
@@ -387,8 +394,8 @@
             self._containsiter = None
             return False
 
+
 class rustlazyancestors(object):
-
     def __init__(self, index, revs, stoprev=0, inclusive=False):
         self._index = index
         self._stoprev = stoprev
@@ -400,7 +407,8 @@
         self._initrevs = initrevs = list(revs)
 
         self._containsiter = parsers.rustlazyancestors(
-            index, initrevs, stoprev, inclusive)
+            index, initrevs, stoprev, inclusive
+        )
 
     def __nonzero__(self):
         """False if the set is empty, True otherwise.
@@ -415,10 +423,9 @@
             return False
 
     def __iter__(self):
-        return parsers.rustlazyancestors(self._index,
-                                         self._initrevs,
-                                         self._stoprev,
-                                         self._inclusive)
+        return parsers.rustlazyancestors(
+            self._index, self._initrevs, self._stoprev, self._inclusive
+        )
 
     def __contains__(self, target):
         return target in self._containsiter
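
The ancestor.py hunks above reformat _lazyancestorsiter, which walks ancestors in reverse revision order by pushing negated revision numbers onto a min-heap, so heapq behaves as a max-heap. A stripped-down sketch of that traversal, ignoring stoprev and using a toy parentrevs function as a stand-in for a real changelog:

from heapq import heappush, heappop

nullrev = -1

def ancestors_desc(parentrevs, initrevs, inclusive=False):
    seen = {nullrev}
    visit = []  # holds -rev, so the smallest entry is the largest rev
    start = initrevs if inclusive else [
        p for r in initrevs for p in parentrevs(r)
    ]
    for r in start:
        if r not in seen:
            seen.add(r)
            heappush(visit, -r)
    while visit:
        rev = -heappop(visit)
        yield rev
        for p in parentrevs(rev):
            if p not in seen:
                seen.add(p)
                heappush(visit, -p)

# toy linear history: the only parent of rev r is r - 1
print(list(ancestors_desc(lambda r: (r - 1,), [3], inclusive=True)))
# -> [3, 2, 1, 0]
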
--- a/mercurial/archival.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/archival.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,9 +16,7 @@
 import zlib
 
 from .i18n import _
-from .node import (
-    nullrev,
-)
+from .node import nullrev
 
 from . import (
     error,
@@ -29,11 +27,13 @@
     util,
     vfs as vfsmod,
 )
+
 stringio = util.stringio
 
 # from unzip source code:
 _UNX_IFREG = 0x8000
-_UNX_IFLNK = 0xa000
+_UNX_IFLNK = 0xA000
+
 
 def tidyprefix(dest, kind, prefix):
     '''choose prefix to use for names in archive.  make sure prefix is
@@ -48,7 +48,7 @@
         lower = prefix.lower()
         for sfx in exts.get(kind, []):
             if lower.endswith(sfx):
-                prefix = prefix[:-len(sfx)]
+                prefix = prefix[: -len(sfx)]
                 break
     lpfx = os.path.normpath(util.localpath(prefix))
     prefix = util.pconvert(lpfx)
@@ -62,13 +62,15 @@
         raise error.Abort(_('archive prefix contains illegal components'))
     return prefix
 
+
 exts = {
     'tar': ['.tar'],
     'tbz2': ['.tbz2', '.tar.bz2'],
     'tgz': ['.tgz', '.tar.gz'],
     'zip': ['.zip'],
-    'txz': ['.txz', '.tar.xz']
-    }
+    'txz': ['.txz', '.tar.xz'],
+}
+
 
 def guesskind(dest):
     for kind, extensions in exts.iteritems():
@@ -76,12 +78,14 @@
             return kind
     return None
 
+
 def _rootctx(repo):
     # repo[0] may be hidden
     for rev in repo:
         return repo[rev]
     return repo[nullrev]
 
+
 # {tags} on ctx includes local tags and 'tip', with no current way to limit
 # that to global tags.  Therefore, use {latesttag} as a substitute when
 # the distance is 0, since that will be the list of global tags on ctx.
@@ -94,15 +98,19 @@
                join(latesttag % "latesttag: {tag}", "\n"),
                "latesttagdistance: {latesttagdistance}",
                "changessincelatesttag: {changessincelatesttag}"))}
-'''[1:]  # drop leading '\n'
+'''[
+    1:
+]  # drop leading '\n'
+
 
 def buildmetadata(ctx):
     '''build content of .hg_archival.txt'''
     repo = ctx.repo()
 
     opts = {
-        'template': repo.ui.config('experimental', 'archivemetatemplate',
-                                   _defaultmetatemplate)
+        'template': repo.ui.config(
+            'experimental', 'archivemetatemplate', _defaultmetatemplate
+        )
     }
 
     out = util.stringio()
@@ -121,12 +129,12 @@
 
     return out.getvalue()
 
+
 class tarit(object):
     '''write archive to tar file or stream.  can write uncompressed,
     or compress with gzip or bzip2.'''
 
     class GzipFileWithTime(gzip.GzipFile):
-
         def __init__(self, *args, **kw):
             timestamp = None
             if r'timestamp' in kw:
@@ -138,8 +146,8 @@
             gzip.GzipFile.__init__(self, *args, **kw)
 
         def _write_gzip_header(self):
-            self.fileobj.write('\037\213')             # magic header
-            self.fileobj.write('\010')                 # compression method
+            self.fileobj.write('\037\213')  # magic header
+            self.fileobj.write('\010')  # compression method
             fname = self.name
             if fname and fname.endswith('.gz'):
                 fname = fname[:-3]
@@ -162,16 +170,19 @@
                 mode = mode[0:1]
                 if not fileobj:
                     fileobj = open(name, mode + 'b')
-                gzfileobj = self.GzipFileWithTime(name,
-                                                  pycompat.sysstr(mode + 'b'),
-                                                  zlib.Z_BEST_COMPRESSION,
-                                                  fileobj, timestamp=mtime)
+                gzfileobj = self.GzipFileWithTime(
+                    name,
+                    pycompat.sysstr(mode + 'b'),
+                    zlib.Z_BEST_COMPRESSION,
+                    fileobj,
+                    timestamp=mtime,
+                )
                 self.fileobj = gzfileobj
                 return tarfile.TarFile.taropen(
-                    name, pycompat.sysstr(mode), gzfileobj)
+                    name, pycompat.sysstr(mode), gzfileobj
+                )
             else:
-                return tarfile.open(
-                    name, pycompat.sysstr(mode + kind), fileobj)
+                return tarfile.open(name, pycompat.sysstr(mode + kind), fileobj)
 
         if isinstance(dest, bytes):
             self.z = taropen('w:', name=dest)
@@ -199,6 +210,7 @@
         if self.fileobj:
             self.fileobj.close()
 
+
 class zipit(object):
     '''write archive to zip file or stream.  can write uncompressed,
     or compressed with deflate.'''
@@ -206,13 +218,13 @@
     def __init__(self, dest, mtime, compress=True):
         if isinstance(dest, bytes):
             dest = pycompat.fsdecode(dest)
-        self.z = zipfile.ZipFile(dest, r'w',
-                                 compress and zipfile.ZIP_DEFLATED or
-                                 zipfile.ZIP_STORED)
+        self.z = zipfile.ZipFile(
+            dest, r'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED
+        )
 
         # Python's zipfile module emits deprecation warnings if we try
         # to store files with a date before 1980.
-        epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
+        epoch = 315532800  # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0))
         if mtime < epoch:
             mtime = epoch
 
@@ -233,16 +245,19 @@
         # add "extended-timestamp" extra block, because zip archives
         # without this will be extracted with unexpected timestamp,
         # if TZ is not configured as GMT
-        i.extra += struct.pack('<hhBl',
-                               0x5455,     # block type: "extended-timestamp"
-                               1 + 4,      # size of this block
-                               1,          # "modification time is present"
-                               int(self.mtime)) # last modification (UTC)
+        i.extra += struct.pack(
+            '<hhBl',
+            0x5455,  # block type: "extended-timestamp"
+            1 + 4,  # size of this block
+            1,  # "modification time is present"
+            int(self.mtime),
+        )  # last modification (UTC)
         self.z.writestr(i, data)
 
     def done(self):
         self.z.close()
 
+
 class fileit(object):
     '''write archive as files in directory.'''
 
@@ -266,6 +281,7 @@
     def done(self):
         pass
 
+
 archivers = {
     'files': fileit,
     'tar': tarit,
@@ -274,10 +290,20 @@
     'txz': lambda name, mtime: tarit(name, mtime, 'xz'),
     'uzip': lambda name, mtime: zipit(name, mtime, False),
     'zip': zipit,
-    }
+}
+
 
-def archive(repo, dest, node, kind, decode=True, match=None,
-            prefix='', mtime=None, subrepos=False):
+def archive(
+    repo,
+    dest,
+    node,
+    kind,
+    decode=True,
+    match=None,
+    prefix='',
+    mtime=None,
+    subrepos=False,
+):
     '''create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
@@ -330,10 +356,12 @@
     total = len(files)
     if total:
         files.sort()
-        scmutil.prefetchfiles(repo, [ctx.rev()],
-                              scmutil.matchfiles(repo, files))
-        progress = repo.ui.makeprogress(_('archiving'), unit=_('files'),
-                                        total=total)
+        scmutil.prefetchfiles(
+            repo, [ctx.rev()], scmutil.matchfiles(repo, files)
+        )
+        progress = repo.ui.makeprogress(
+            _('archiving'), unit=_('files'), total=total
+        )
         progress.update(0)
         for f in files:
             ff = ctx.flags(f)
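
The zipit.addfile hunk above packs an "extended-timestamp" extra block so extracted files keep a sane UTC mtime regardless of the extractor's TZ setting. The layout is easy to see with struct on its own (the timestamp below is illustrative):

import struct
import time

mtime = int(time.time())  # any UTC timestamp; zipit clamps to >= 1980
extra = struct.pack(
    '<hhBl',
    0x5455,  # block type: "extended-timestamp"
    1 + 4,   # payload size: flag byte plus 4-byte mtime
    1,       # flags: "modification time is present"
    mtime,   # last modification (UTC)
)
assert len(extra) == 2 + 2 + 1 + 4  # all fields little-endian, unpadded
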
--- a/mercurial/bookmarks.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/bookmarks.py	Sun Oct 06 09:45:02 2019 -0400
@@ -35,12 +35,15 @@
 
 BOOKMARKS_IN_STORE_REQUIREMENT = 'bookmarksinstore'
 
+
 def bookmarksinstore(repo):
     return BOOKMARKS_IN_STORE_REQUIREMENT in repo.requirements
 
+
 def bookmarksvfs(repo):
     return repo.svfs if bookmarksinstore(repo) else repo.vfs
 
+
 def _getbkfile(repo):
     """Hook so that extensions that mess with the store can hook bm storage.
 
@@ -51,6 +54,7 @@
     fp, pending = txnutil.trypending(repo.root, bookmarksvfs(repo), 'bookmarks')
     return fp
 
+
 class bmstore(object):
     r"""Storage for bookmarks.
 
@@ -72,7 +76,7 @@
         self._clean = True
         self._aclean = True
         nm = repo.changelog.nodemap
-        tonode = bin # force local lookup
+        tonode = bin  # force local lookup
         try:
             with _getbkfile(repo) as bkfile:
                 for line in bkfile:
@@ -102,8 +106,10 @@
                         bookmarkspath = '.hg/bookmarks'
                         if bookmarksinstore(repo):
                             bookmarkspath = '.hg/store/bookmarks'
-                        repo.ui.warn(_('malformed line in %s: %r\n')
-                                     % (bookmarkspath, pycompat.bytestr(line)))
+                        repo.ui.warn(
+                            _('malformed line in %s: %r\n')
+                            % (bookmarkspath, pycompat.bytestr(line))
+                        )
         except IOError as inst:
             if inst.errno != errno.ENOENT:
                 raise
@@ -200,8 +206,9 @@
 
         The transaction is then responsible for updating the file content."""
         location = '' if bookmarksinstore(self._repo) else 'plain'
-        tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
-                            location=location)
+        tr.addfilegenerator(
+            'bookmarks', ('bookmarks',), self._write, location=location
+        )
         tr.hookargs['bookmark_moved'] = '1'
 
     def _writerepo(self, repo):
@@ -226,8 +233,9 @@
             return
         with self._repo.wlock():
             if self._active is not None:
-                with self._repo.vfs('bookmarks.current', 'w', atomictemp=True,
-                                   checkambig=True) as f:
+                with self._repo.vfs(
+                    'bookmarks.current', 'w', atomictemp=True, checkambig=True
+                ) as f:
                     f.write(encoding.fromlocal(self._active))
             else:
                 self._repo.vfs.tryunlink('bookmarks.current')
@@ -268,8 +276,11 @@
                 rev = self._repo[target].rev()
                 anc = self._repo.changelog.ancestors([rev])
                 bmctx = self._repo[self[mark]]
-                divs = [self._refmap[b] for b in self._refmap
-                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]
+                divs = [
+                    self._refmap[b]
+                    for b in self._refmap
+                    if b.split('@', 1)[0] == mark.split('@', 1)[0]
+                ]
 
                 # allow resolving a single divergent bookmark even if moving
                 # the bookmark across branches when a revision is specified
@@ -277,20 +288,26 @@
                 if bmctx.rev() not in anc and target in divs:
                     return divergent2delete(self._repo, [target], mark)
 
-                deletefrom = [b for b in divs
-                              if self._repo[b].rev() in anc or b == target]
+                deletefrom = [
+                    b for b in divs if self._repo[b].rev() in anc or b == target
+                ]
                 delbms = divergent2delete(self._repo, deletefrom, mark)
                 if validdest(self._repo, bmctx, self._repo[target]):
                     self._repo.ui.status(
-                        _("moving bookmark '%s' forward from %s\n") %
-                        (mark, short(bmctx.node())))
+                        _("moving bookmark '%s' forward from %s\n")
+                        % (mark, short(bmctx.node()))
+                    )
                     return delbms
-            raise error.Abort(_("bookmark '%s' already exists "
-                                "(use -f to force)") % mark)
-        if ((mark in self._repo.branchmap() or
-             mark == self._repo.dirstate.branch()) and not force):
             raise error.Abort(
-                _("a bookmark cannot have the name of an existing branch"))
+                _("bookmark '%s' already exists " "(use -f to force)") % mark
+            )
+        if (
+            mark in self._repo.branchmap()
+            or mark == self._repo.dirstate.branch()
+        ) and not force:
+            raise error.Abort(
+                _("a bookmark cannot have the name of an existing branch")
+            )
         if len(mark) > 3 and not force:
             try:
                 shadowhash = scmutil.isrevsymbol(self._repo, mark)
@@ -298,12 +315,16 @@
                 shadowhash = False
             if shadowhash:
                 self._repo.ui.warn(
-                    _("bookmark %s matches a changeset hash\n"
-                      "(did you leave a -r out of an 'hg bookmark' "
-                      "command?)\n")
-                    % mark)
+                    _(
+                        "bookmark %s matches a changeset hash\n"
+                        "(did you leave a -r out of an 'hg bookmark' "
+                        "command?)\n"
+                    )
+                    % mark
+                )
         return []
 
+
 def _readactive(repo, marks):
     """
     Get the active bookmark. We can have an active bookmark that updates
@@ -318,6 +339,7 @@
         mark = None
     return mark
 
+
 def activate(repo, mark):
     """
     Set the given bookmark to be 'active', meaning that this bookmark will
@@ -327,6 +349,7 @@
     repo._bookmarks.active = mark
     repo._bookmarks._writeactive()
 
+
 def deactivate(repo):
     """
     Unset the active bookmark in this repository.
@@ -334,6 +357,7 @@
     repo._bookmarks.active = None
     repo._bookmarks._writeactive()
 
+
 def isactivewdirparent(repo):
     """
     Tell whether the 'active' bookmark (the one that follows new commits)
@@ -346,7 +370,8 @@
     mark = repo._activebookmark
     marks = repo._bookmarks
     parents = [p.node() for p in repo[None].parents()]
-    return (mark in marks and marks[mark] in parents)
+    return mark in marks and marks[mark] in parents
+
 
 def divergent2delete(repo, deletefrom, bm):
     """find divergent versions of bm on nodes in deletefrom.
@@ -364,6 +389,7 @@
                 todelete.append(mark)
     return todelete
 
+
 def headsforactive(repo):
     """Given a repo with an active bookmark, return divergent bookmark nodes.
 
@@ -378,7 +404,8 @@
     """
     if not repo._activebookmark:
         raise ValueError(
-            'headsforactive() only makes sense with an active bookmark')
+            'headsforactive() only makes sense with an active bookmark'
+        )
     name = repo._activebookmark.split('@', 1)[0]
     heads = []
     for mark, n in repo._bookmarks.iteritems():
@@ -386,6 +413,7 @@
             heads.append(n)
     return heads
 
+
 def calculateupdate(ui, repo):
     '''Return a tuple (activemark, movemarkfrom) indicating the active bookmark
     and where to move the active bookmark from, if needed.'''
@@ -398,6 +426,7 @@
         checkout = activemark
     return (checkout, movemarkfrom)
 
+
 def update(repo, parents, node):
     deletefrom = parents
     marks = repo._bookmarks
@@ -408,8 +437,11 @@
     bmchanges = []
     if marks[active] in parents:
         new = repo[node]
-        divs = [repo[marks[b]] for b in marks
-                if b.split('@', 1)[0] == active.split('@', 1)[0]]
+        divs = [
+            repo[marks[b]]
+            for b in marks
+            if b.split('@', 1)[0] == active.split('@', 1)[0]
+        ]
         anc = repo.changelog.ancestors([new.rev()])
         deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
         if validdest(repo, repo[marks[active]], new):
@@ -423,6 +455,7 @@
             marks.applychanges(repo, tr, bmchanges)
     return bool(bmchanges)
 
+
 def listbinbookmarks(repo):
     # We may try to list bookmarks on a repo type that does not
     # support it (e.g., statichttprepository).
@@ -434,12 +467,14 @@
         if hasnode(v) and ('@' not in k or k.endswith('@')):
             yield k, v
 
+
 def listbookmarks(repo):
     d = {}
     for book, node in listbinbookmarks(repo):
         d[book] = hex(node)
     return d
 
+
 def pushbookmark(repo, key, old, new):
     if bookmarksinstore(repo):
         wlock = util.nullcontextmanager()
@@ -459,6 +494,7 @@
         marks.applychanges(repo, tr, changes)
         return True
 
+
 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
     '''Compare bookmarks between srcmarks and dstmarks
 
@@ -534,6 +570,7 @@
 
     return results
 
+
 def _diverge(ui, b, path, localmarks, remotenode):
     '''Return appropriate diverged bookmark for specified ``path``
 
@@ -563,14 +600,17 @@
 
     return None
 
+
 def unhexlifybookmarks(marks):
     binremotemarks = {}
     for name, node in marks.items():
         binremotemarks[name] = bin(node)
     return binremotemarks
 
+
 _binaryentry = struct.Struct('>20sH')
 
+
 def binaryencode(bookmarks):
     """encode a '(bookmark, node)' iterable into a binary stream
 
@@ -586,12 +626,13 @@
     """
     binarydata = []
     for book, node in bookmarks:
-        if not node: # None or ''
+        if not node:  # None or ''
             node = wdirid
         binarydata.append(_binaryentry.pack(node, len(book)))
         binarydata.append(book)
     return ''.join(binarydata)
 
+
 def binarydecode(stream):
     """decode a binary stream into an '(bookmark, node)' iterable
 
@@ -623,10 +664,19 @@
         books.append((bookmark, node))
     return books
 
+
 def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
     ui.debug("checking for updated bookmarks\n")
     localmarks = repo._bookmarks
-    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same
+    (
+        addsrc,
+        adddst,
+        advsrc,
+        advdst,
+        diverge,
+        differ,
+        invalid,
+        same,
     ) = comparebookmarks(repo, remotemarks, localmarks)
 
     status = ui.status
@@ -637,44 +687,56 @@
     explicit = set(explicit)
     changed = []
     for b, scid, dcid in addsrc:
-        if scid in repo: # add remote bookmarks for changes we already have
-            changed.append((b, scid, status,
-                            _("adding remote bookmark %s\n") % (b)))
+        if scid in repo:  # add remote bookmarks for changes we already have
+            changed.append(
+                (b, scid, status, _("adding remote bookmark %s\n") % b)
+            )
         elif b in explicit:
             explicit.remove(b)
-            ui.warn(_("remote bookmark %s points to locally missing %s\n")
-                    % (b, hex(scid)[:12]))
+            ui.warn(
+                _("remote bookmark %s points to locally missing %s\n")
+                % (b, hex(scid)[:12])
+            )
 
     for b, scid, dcid in advsrc:
-        changed.append((b, scid, status,
-                        _("updating bookmark %s\n") % (b)))
+        changed.append((b, scid, status, _("updating bookmark %s\n") % b))
     # remove normal movement from explicit set
     explicit.difference_update(d[0] for d in changed)
 
     for b, scid, dcid in diverge:
         if b in explicit:
             explicit.discard(b)
-            changed.append((b, scid, status,
-                            _("importing bookmark %s\n") % (b)))
+            changed.append((b, scid, status, _("importing bookmark %s\n") % b))
         else:
             db = _diverge(ui, b, path, localmarks, scid)
             if db:
-                changed.append((db, scid, warn,
-                                _("divergent bookmark %s stored as %s\n") %
-                                (b, db)))
+                changed.append(
+                    (
+                        db,
+                        scid,
+                        warn,
+                        _("divergent bookmark %s stored as %s\n") % (b, db),
+                    )
+                )
             else:
-                warn(_("warning: failed to assign numbered name "
-                       "to divergent bookmark %s\n") % (b))
+                warn(
+                    _(
+                        "warning: failed to assign numbered name "
+                        "to divergent bookmark %s\n"
+                    )
+                    % b
+                )
     for b, scid, dcid in adddst + advdst:
         if b in explicit:
             explicit.discard(b)
-            changed.append((b, scid, status,
-                            _("importing bookmark %s\n") % (b)))
+            changed.append((b, scid, status, _("importing bookmark %s\n") % b))
     for b, scid, dcid in differ:
         if b in explicit:
             explicit.remove(b)
-            ui.warn(_("remote bookmark %s points to locally missing %s\n")
-                    % (b, hex(scid)[:12]))
+            ui.warn(
+                _("remote bookmark %s points to locally missing %s\n")
+                % (b, hex(scid)[:12])
+            )
 
     if changed:
         tr = trfunc()
@@ -684,15 +746,16 @@
             writer(msg)
         localmarks.applychanges(repo, tr, changes)
 
+
 def incoming(ui, repo, peer):
     '''Show bookmarks incoming from other to repo
     '''
     ui.status(_("searching for changed bookmarks\n"))
 
     with peer.commandexecutor() as e:
-        remotemarks = unhexlifybookmarks(e.callcommand('listkeys', {
-            'namespace': 'bookmarks',
-        }).result())
+        remotemarks = unhexlifybookmarks(
+            e.callcommand('listkeys', {'namespace': 'bookmarks',}).result()
+        )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
     addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
@@ -703,11 +766,15 @@
     else:
         getid = lambda id: id[:12]
     if ui.verbose:
+
         def add(b, id, st):
             incomings.append("   %-25s %s %s\n" % (b, getid(id), st))
+
     else:
+
         def add(b, id, st):
             incomings.append("   %-25s %s\n" % (b, getid(id)))
+
     for b, scid, dcid in addsrc:
         # i18n: "added" refers to a bookmark
         add(b, hex(scid), _('added'))
@@ -730,6 +797,7 @@
 
     return 0
 
+
 def outgoing(ui, repo, other):
     '''Show bookmarks outgoing from repo to other
     '''
@@ -745,11 +813,15 @@
     else:
         getid = lambda id: id[:12]
     if ui.verbose:
+
         def add(b, id, st):
             outgoings.append("   %-25s %s %s\n" % (b, getid(id), st))
+
     else:
+
         def add(b, id, st):
             outgoings.append("   %-25s %s\n" % (b, getid(id)))
+
     for b, scid, dcid in addsrc:
         # i18n: "added refers to a bookmark
         add(b, hex(scid), _('added'))
@@ -775,20 +847,22 @@
 
     return 0
 
+
 def summary(repo, peer):
     '''Compare bookmarks between repo and other for "hg summary" output
 
     This returns "(# of incoming, # of outgoing)" tuple.
     '''
     with peer.commandexecutor() as e:
-        remotemarks = unhexlifybookmarks(e.callcommand('listkeys', {
-            'namespace': 'bookmarks',
-        }).result())
+        remotemarks = unhexlifybookmarks(
+            e.callcommand('listkeys', {'namespace': 'bookmarks',}).result()
+        )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
     addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r
     return (len(addsrc), len(adddst))
 
+
 def validdest(repo, old, new):
     """Is the new bookmark destination a valid update from the old one"""
     repo = repo.unfiltered()
@@ -805,6 +879,7 @@
         # still an independent clause as it is lazier (and therefore faster)
         return old.isancestorof(new)
 
+
 def checkformat(repo, mark):
     """return a valid version of a potential bookmark name
 
@@ -812,11 +887,13 @@
     """
     mark = mark.strip()
     if not mark:
-        raise error.Abort(_("bookmark names cannot consist entirely of "
-                            "whitespace"))
+        raise error.Abort(
+            _("bookmark names cannot consist entirely of " "whitespace")
+        )
     scmutil.checknewlabel(repo, mark, 'bookmark')
     return mark
 
+
 def delete(repo, tr, names):
     """remove a mark from the bookmark store
 
@@ -832,6 +909,7 @@
         changes.append((mark, None))
     marks.applychanges(repo, tr, changes)
 
+
 def rename(repo, tr, old, new, force=False, inactive=False):
     """rename a bookmark from old to new
 
@@ -854,6 +932,7 @@
     if repo._activebookmark == old and not inactive:
         activate(repo, mark)
 
+
 def addbookmarks(repo, tr, names, rev=None, force=False, inactive=False):
     """add a list of bookmarks
 
@@ -905,6 +984,7 @@
     elif cur != tgt and newact == repo._activebookmark:
         deactivate(repo)
 
+
 def _printbookmarks(ui, repo, fm, bmarks):
     """private method to print bookmarks
 
@@ -921,11 +1001,18 @@
             fm.plain(' %s ' % prefix, label=label)
         fm.write('bookmark', '%s', bmark, label=label)
         pad = " " * (25 - encoding.colwidth(bmark))
-        fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
-                     repo.changelog.rev(n), hexfn(n), label=label)
+        fm.condwrite(
+            not ui.quiet,
+            'rev node',
+            pad + ' %d:%s',
+            repo.changelog.rev(n),
+            hexfn(n),
+            label=label,
+        )
         fm.data(active=(activebookmarklabel in label))
         fm.plain('\n')
 
+
 def printbookmarks(ui, repo, fm, names=None):
     """print bookmarks by the given formatter
 
@@ -933,7 +1020,7 @@
     """
     marks = repo._bookmarks
     bmarks = {}
-    for bmark in (names or marks):
+    for bmark in names or marks:
         if bmark not in marks:
             raise error.Abort(_("bookmark '%s' does not exist") % bmark)
         active = repo._activebookmark
@@ -945,11 +1032,10 @@
         bmarks[bmark] = (marks[bmark], prefix, label)
     _printbookmarks(ui, repo, fm, bmarks)
 
+
 def preparehookargs(name, old, new):
     if new is None:
         new = ''
     if old is None:
         old = ''
-    return {'bookmark': name,
-            'node': hex(new),
-            'oldnode': hex(old)}
+    return {'bookmark': name, 'node': hex(new), 'oldnode': hex(old)}
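
binaryencode/binarydecode above share one record layout, captured by _binaryentry = struct.Struct('>20sH'): a 20-byte node, a big-endian 16-bit name length, then the name itself. A self-contained round-trip sketch of that framing (the error handling and the wdirid placeholder from the real code are omitted, and the sample node is made up):

import struct

_binaryentry = struct.Struct('>20sH')  # 20-byte node + uint16 name length

def encode(bookmarks):
    chunks = []
    for book, node in bookmarks:
        chunks.append(_binaryentry.pack(node, len(book)))
        chunks.append(book)
    return b''.join(chunks)

def decode(data):
    books = []
    offset = 0
    while offset < len(data):
        node, length = _binaryentry.unpack_from(data, offset)
        offset += _binaryentry.size
        books.append((data[offset:offset + length], node))
        offset += length
    return books

node = b'\x11' * 20
assert decode(encode([(b'stable', node)])) == [(b'stable', node)]
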
--- a/mercurial/branchmap.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/branchmap.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,7 +27,7 @@
     stringutil,
 )
 
-subsettable = repoviewutil. subsettable
+subsettable = repoviewutil.subsettable
 
 calcsize = struct.calcsize
 pack_into = struct.pack_into
@@ -36,6 +36,7 @@
 
 class BranchMapCache(object):
     """mapping of filtered views of repo with their branchcache"""
+
     def __init__(self):
         self._per_filter = {}
 
@@ -101,8 +102,11 @@
         if rbheads:
             rtiprev = max((int(clrev(node)) for node in rbheads))
             cache = branchcache(
-                remotebranchmap, repo[rtiprev].node(), rtiprev,
-                closednodes=closed)
+                remotebranchmap,
+                repo[rtiprev].node(),
+                rtiprev,
+                closednodes=closed,
+            )
 
             # Try to stick it as low as possible
             # filter above served are unlikely to be fetch from a clone
@@ -116,17 +120,20 @@
     def clear(self):
         self._per_filter.clear()
 
+
 def _unknownnode(node):
     """ raises ValueError when branchcache found a node which does not exists
     """
     raise ValueError(r'node %s does not exist' % pycompat.sysstr(hex(node)))
 
+
 def _branchcachedesc(repo):
     if repo.filtername is not None:
         return 'branch cache (%s)' % repo.filtername
     else:
         return 'branch cache'
 
+
 class branchcache(object):
     """A dict like object that hold branches heads cache.
 
@@ -149,8 +156,15 @@
     branch head closes a branch or not.
     """
 
-    def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev,
-                 filteredhash=None, closednodes=None, hasnode=None):
+    def __init__(
+        self,
+        entries=(),
+        tipnode=nullid,
+        tiprev=nullrev,
+        filteredhash=None,
+        closednodes=None,
+        hasnode=None,
+    ):
         """ hasnode is a function which can be used to verify whether changelog
         has a given node or not. If it's not provided, we assume that every node
         we have exists in changelog """
@@ -238,8 +252,12 @@
             hasnode = repo.changelog.hasnode
             if len(cachekey) > 2:
                 filteredhash = bin(cachekey[2])
-            bcache = cls(tipnode=last, tiprev=lrev, filteredhash=filteredhash,
-                         hasnode=hasnode)
+            bcache = cls(
+                tipnode=last,
+                tiprev=lrev,
+                filteredhash=filteredhash,
+                hasnode=hasnode,
+            )
             if not bcache.validfor(repo):
                 # invalidate the cache
                 raise ValueError(r'tip differs')
@@ -250,8 +268,9 @@
         except Exception as inst:
             if repo.ui.debugflag:
                 msg = 'invalid %s: %s\n'
-                repo.ui.debug(msg % (_branchcachedesc(repo),
-                                     pycompat.bytestr(inst)))
+                repo.ui.debug(
+                    msg % (_branchcachedesc(repo), pycompat.bytestr(inst))
+                )
             bcache = None
 
         finally:
@@ -290,9 +309,9 @@
         - False when cached tipnode is unknown or if we detect a strip.
         - True when cache is up to date or a subset of current repo."""
         try:
-            return ((self.tipnode == repo.changelog.node(self.tiprev))
-                    and (self.filteredhash ==
-                         scmutil.filteredhash(repo, self.tiprev)))
+            return (self.tipnode == repo.changelog.node(self.tiprev)) and (
+                self.filteredhash == scmutil.filteredhash(repo, self.tiprev)
+            )
         except IndexError:
             return False
 
@@ -336,8 +355,12 @@
     def copy(self):
         """return an deep copy of the branchcache object"""
         return type(self)(
-            self._entries, self.tipnode, self.tiprev, self.filteredhash,
-            self._closednodes)
+            self._entries,
+            self.tipnode,
+            self.tiprev,
+            self.filteredhash,
+            self._closednodes,
+        )
 
     def write(self, repo):
         try:
@@ -357,12 +380,19 @@
                         state = 'o'
                     f.write("%s %s %s\n" % (hex(node), state, label))
             f.close()
-            repo.ui.log('branchcache', 'wrote %s with %d labels and %d nodes\n',
-                        _branchcachedesc(repo), len(self._entries), nodecount)
+            repo.ui.log(
+                'branchcache',
+                'wrote %s with %d labels and %d nodes\n',
+                _branchcachedesc(repo),
+                len(self._entries),
+                nodecount,
+            )
         except (IOError, OSError, error.Abort) as inst:
             # Abort may be raised by read only opener, so log and continue
-            repo.ui.debug("couldn't write branch cache: %s\n" %
-                          stringutil.forcebytestr(inst))
+            repo.ui.debug(
+                "couldn't write branch cache: %s\n"
+                % stringutil.forcebytestr(inst)
+            )
 
     def update(self, repo, revgen):
         """Given a branchhead cache, self, that may have extra nodes or be
@@ -429,14 +459,19 @@
         self.filteredhash = scmutil.filteredhash(repo, self.tiprev)
 
         duration = util.timer() - starttime
-        repo.ui.log('branchcache', 'updated %s in %.4f seconds\n',
-                    _branchcachedesc(repo), duration)
+        repo.ui.log(
+            'branchcache',
+            'updated %s in %.4f seconds\n',
+            _branchcachedesc(repo),
+            duration,
+        )
 
         self.write(repo)
 
 
 class remotebranchcache(branchcache):
     """Branchmap info for a remote connection, should not write locally"""
+
     def write(self, repo):
         pass
 
@@ -450,9 +485,10 @@
 _rbcrecfmt = '>4sI'
 _rbcrecsize = calcsize(_rbcrecfmt)
 _rbcnodelen = 4
-_rbcbranchidxmask = 0x7fffffff
+_rbcbranchidxmask = 0x7FFFFFFF
 _rbccloseflag = 0x80000000
 
+
 class revbranchcache(object):
     """Persistent cache, mapping from revision number to branch name and close.
     This is a low level cache, independent of filtering.
@@ -479,15 +515,16 @@
     def __init__(self, repo, readonly=True):
         assert repo.filtername is None
         self._repo = repo
-        self._names = [] # branch names in local encoding with static index
+        self._names = []  # branch names in local encoding with static index
         self._rbcrevs = bytearray()
-        self._rbcsnameslen = 0 # length of names read at _rbcsnameslen
+        self._rbcsnameslen = 0  # length of names read at _rbcsnameslen
         try:
             bndata = repo.cachevfs.read(_rbcnames)
-            self._rbcsnameslen = len(bndata) # for verification before writing
+            self._rbcsnameslen = len(bndata)  # for verification before writing
             if bndata:
-                self._names = [encoding.tolocal(bn)
-                               for bn in bndata.split('\0')]
+                self._names = [
+                    encoding.tolocal(bn) for bn in bndata.split('\0')
+                ]
         except (IOError, OSError):
             if readonly:
                 # don't try to use cache - fall back to the slow path
@@ -498,15 +535,18 @@
                 data = repo.cachevfs.read(_rbcrevs)
                 self._rbcrevs[:] = data
             except (IOError, OSError) as inst:
-                repo.ui.debug("couldn't read revision branch cache: %s\n" %
-                              stringutil.forcebytestr(inst))
+                repo.ui.debug(
+                    "couldn't read revision branch cache: %s\n"
+                    % stringutil.forcebytestr(inst)
+                )
         # remember number of good records on disk
-        self._rbcrevslen = min(len(self._rbcrevs) // _rbcrecsize,
-                               len(repo.changelog))
+        self._rbcrevslen = min(
+            len(self._rbcrevs) // _rbcrecsize, len(repo.changelog)
+        )
         if self._rbcrevslen == 0:
             self._names = []
-        self._rbcnamescount = len(self._names) # number of names read at
-                                               # _rbcsnameslen
+        self._rbcnamescount = len(self._names)  # number of names read at
+        # _rbcsnameslen
 
     def _clear(self):
         self._rbcsnameslen = 0
@@ -537,7 +577,8 @@
         # fast path: extract data from cache, use it if node is matching
         reponode = changelog.node(rev)[:_rbcnodelen]
         cachenode, branchidx = unpack_from(
-            _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx)
+            _rbcrecfmt, util.buffer(self._rbcrevs), rbcrevidx
+        )
         close = bool(branchidx & _rbccloseflag)
         if close:
             branchidx &= _rbcbranchidxmask
@@ -548,13 +589,17 @@
                 return self._names[branchidx], close
             except IndexError:
                 # recover from invalid reference to unknown branch
-                self._repo.ui.debug("referenced branch names not found"
-                    " - rebuilding revision branch cache from scratch\n")
+                self._repo.ui.debug(
+                    "referenced branch names not found"
+                    " - rebuilding revision branch cache from scratch\n"
+                )
                 self._clear()
         else:
             # rev/node map has changed, invalidate the cache from here up
-            self._repo.ui.debug("history modification detected - truncating "
-                "revision branch cache to revision %d\n" % rev)
+            self._repo.ui.debug(
+                "history modification detected - truncating "
+                "revision branch cache to revision %d\n" % rev
+            )
             truncate = rbcrevidx + _rbcrecsize
             del self._rbcrevs[truncate:]
             self._rbcrevslen = min(self._rbcrevslen, truncate)
@@ -604,9 +649,10 @@
             return
         rbcrevidx = rev * _rbcrecsize
         if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
-            self._rbcrevs.extend('\0' *
-                                 (len(self._repo.changelog) * _rbcrecsize -
-                                  len(self._rbcrevs)))
+            self._rbcrevs.extend(
+                '\0'
+                * (len(self._repo.changelog) * _rbcrecsize - len(self._rbcrevs))
+            )
         pack_into(_rbcrecfmt, self._rbcrevs, rbcrevidx, node, branchidx)
         self._rbcrevslen = min(self._rbcrevslen, rev)
 
@@ -635,8 +681,10 @@
                 self._writerevs(repo, start)
 
         except (IOError, OSError, error.Abort, error.LockError) as inst:
-            repo.ui.debug("couldn't write revision branch cache%s: %s\n"
-                          % (step, stringutil.forcebytestr(inst)))
+            repo.ui.debug(
+                "couldn't write revision branch cache%s: %s\n"
+                % (step, stringutil.forcebytestr(inst))
+            )
         finally:
             if wlock is not None:
                 wlock.release()
@@ -656,8 +704,12 @@
             # before rewriting names, make sure references are removed
             repo.cachevfs.unlinkpath(_rbcrevs, ignoremissing=True)
             f = repo.cachevfs.open(_rbcnames, 'wb')
-        f.write('\0'.join(encoding.fromlocal(b)
-                          for b in self._names[self._rbcnamescount:]))
+        f.write(
+            '\0'.join(
+                encoding.fromlocal(b)
+                for b in self._names[self._rbcnamescount :]
+            )
+        )
         self._rbcsnameslen = f.tell()
         f.close()
         self._rbcnamescount = len(self._names)
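
The revision branch cache serialized above packs one fixed-size record per
revision: a four-byte node prefix plus a 32-bit branch-name index whose high
bit (_rbccloseflag) marks a branch-closing revision. A self-contained sketch
of that record encoding, reusing the constants defined in branchmap.py
(encoderecord/decoderecord are illustrative helpers, not module API):

    import struct

    _rbcrecfmt = '>4sI'
    _rbcbranchidxmask = 0x7FFFFFFF
    _rbccloseflag = 0x80000000

    def encoderecord(nodeprefix, branchidx, closed):
        # set the high bit when this revision closes its branch
        if closed:
            branchidx |= _rbccloseflag
        return struct.pack(_rbcrecfmt, nodeprefix, branchidx)

    def decoderecord(record):
        nodeprefix, branchidx = struct.unpack(_rbcrecfmt, record)
        closed = bool(branchidx & _rbccloseflag)
        return nodeprefix, branchidx & _rbcbranchidxmask, closed

    rec = encoderecord(b'\xde\xad\xbe\xef', 5, True)
    assert decoderecord(rec) == (b'\xde\xad\xbe\xef', 5, True)
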
--- a/mercurial/bundle2.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/bundle2.py	Sun Oct 06 09:45:02 2019 -0400
@@ -171,9 +171,7 @@
     url,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 urlerr = util.urlerr
 urlreq = util.urlreq
@@ -192,31 +190,37 @@
 
 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]')
 
+
 def outdebug(ui, message):
     """debug regarding output stream (bundling)"""
     if ui.configbool('devel', 'bundle2.debug'):
         ui.debug('bundle2-output: %s\n' % message)
 
+
 def indebug(ui, message):
     """debug on input stream (unbundling)"""
     if ui.configbool('devel', 'bundle2.debug'):
         ui.debug('bundle2-input: %s\n' % message)
 
+
 def validateparttype(parttype):
     """raise ValueError if a parttype contains invalid character"""
     if _parttypeforbidden.search(parttype):
         raise ValueError(parttype)
 
+
 def _makefpartparamsizes(nbparams):
     """return a struct format to read part parameter sizes
 
     The number of parameters is variable, so we need to build that format
     dynamically.
     """
-    return '>'+('BB'*nbparams)
+    return '>' + ('BB' * nbparams)
+
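
For instance, a part carrying two parameters yields the format '>BBBB':
big-endian, one unsigned byte for each (name size, value size) pair. A quick
sketch of what _makefpartparamsizes produces:

    import struct

    fmt = '>' + ('BB' * 2)          # what _makefpartparamsizes(2) returns
    assert fmt == '>BBBB'
    sizes = struct.unpack(fmt, b'\x03\x05\x02\x04')
    # -> (3, 5, 2, 4): the name/value sizes for two parameters
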
 
 parthandlermapping = {}
 
+
 def parthandler(parttype, params=()):
     """decorator that register a function as a bundle2 part handler
 
@@ -228,14 +232,17 @@
             ...
     """
     validateparttype(parttype)
+
     def _decorator(func):
-        lparttype = parttype.lower() # enforce lower case matching.
+        lparttype = parttype.lower()  # enforce lower case matching.
         assert lparttype not in parthandlermapping
         parthandlermapping[lparttype] = func
         func.params = frozenset(params)
         return func
+
     return _decorator
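
parthandler is a plain registry decorator: it lowercases the part type,
asserts uniqueness, and records the handler in parthandlermapping for
_gethandler to look up later. A standalone sketch of the same pattern (the
toy part type and registry name are illustrative, not real bundle2 parts):

    handlers = {}

    def register(parttype):
        def _decorator(func):
            lparttype = parttype.lower()  # enforce lower case matching
            assert lparttype not in handlers
            handlers[lparttype] = func
            return func
        return _decorator

    @register('demo:part')
    def handledemo(op, inpart):
        return 'handled'

    assert handlers['demo:part'](None, None) == 'handled'
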
 
+
 class unbundlerecords(object):
     """keep record of what happens during and unbundle
 
@@ -283,6 +290,7 @@
 
     __bool__ = __nonzero__
 
+
 class bundleoperation(object):
     """an object that represents a single bundling process
 
@@ -328,13 +336,17 @@
 
     def addhookargs(self, hookargs):
         if self.hookargs is None:
-            raise error.ProgrammingError('attempted to add hookargs to '
-                                         'operation after transaction started')
+            raise error.ProgrammingError(
+                'attempted to add hookargs to '
+                'operation after transaction started'
+            )
         self.hookargs.update(hookargs)
 
+
 class TransactionUnavailable(RuntimeError):
     pass
 
+
 def _notransaction():
     """default method to get a transaction while processing a bundle
 
@@ -342,6 +354,7 @@
     to be created"""
     raise TransactionUnavailable()
 
+
 def applybundle(repo, unbundler, tr, source, url=None, **kwargs):
     # transform me into unbundler.apply() as soon as the freeze is lifted
     if isinstance(unbundler, unbundle20):
@@ -357,6 +370,7 @@
         _processchangegroup(op, unbundler, tr, source, url, **kwargs)
         return op
 
+
 class partiterator(object):
     def __init__(self, repo, op, unbundler):
         self.repo = repo
@@ -375,6 +389,7 @@
                 yield p
                 p.consume()
                 self.current = None
+
         self.iterator = func()
         return self.iterator
 
@@ -422,8 +437,10 @@
             if seekerror:
                 raise exc
 
-        self.repo.ui.debug('bundle2-input-bundle: %i parts total\n' %
-                           self.count)
+        self.repo.ui.debug(
+            'bundle2-input-bundle: %i parts total\n' % self.count
+        )
+
 
 def processbundle(repo, unbundler, transactiongetter=None, op=None, source=''):
     """This function process a bundle, apply effect to/from a repo
@@ -461,20 +478,21 @@
 
     return op
 
+
 def processparts(repo, op, unbundler):
     with partiterator(repo, op, unbundler) as parts:
         for part in parts:
             _processpart(op, part)
 
+
 def _processchangegroup(op, cg, tr, source, url, **kwargs):
     ret = cg.apply(op.repo, tr, source, url, **kwargs)
-    op.records.add('changegroup', {
-        'return': ret,
-    })
+    op.records.add('changegroup', {'return': ret,})
     return ret
 
+
 def _gethandler(op, part):
-    status = 'unknown' # used by debug output
+    status = 'unknown'  # used by debug output
     try:
         handler = parthandlermapping.get(part.type)
         if handler is None:
@@ -486,14 +504,15 @@
             unknownparams = list(unknownparams)
             unknownparams.sort()
             status = 'unsupported-params (%s)' % ', '.join(unknownparams)
-            raise error.BundleUnknownFeatureError(parttype=part.type,
-                                                  params=unknownparams)
+            raise error.BundleUnknownFeatureError(
+                parttype=part.type, params=unknownparams
+            )
         status = 'supported'
     except error.BundleUnknownFeatureError as exc:
-        if part.mandatory: # mandatory parts
+        if part.mandatory:  # mandatory parts
             raise
         indebug(op.ui, 'ignoring unsupported advisory part %s' % exc)
-        return # skip to part processing
+        return  # skip to part processing
     finally:
         if op.ui.debugflag:
             msg = ['bundle2-input-part: "%s"' % part.type]
@@ -513,6 +532,7 @@
 
     return handler
 
+
 def _processpart(op, part):
     """process a single part from a bundle
 
@@ -536,10 +556,11 @@
         if output is not None:
             output = op.ui.popbuffer()
         if output:
-            outpart = op.reply.newpart('output', data=output,
-                                       mandatory=False)
+            outpart = op.reply.newpart('output', data=output, mandatory=False)
             outpart.addparam(
-                'in-reply-to', pycompat.bytestr(part.id), mandatory=False)
+                'in-reply-to', pycompat.bytestr(part.id), mandatory=False
+            )
+
 
 def decodecaps(blob):
     """decode a bundle2 caps bytes blob into a dictionary
@@ -564,6 +585,7 @@
         caps[key] = vals
     return caps
 
+
 def encodecaps(caps):
     """encode a bundle2 caps dictionary into a bytes blob"""
     chunks = []
@@ -576,11 +598,12 @@
         chunks.append(ca)
     return '\n'.join(chunks)
 
+
 bundletypes = {
-    "": ("", 'UN'),       # only when using unbundle on ssh and old http servers
-                          # since the unification ssh accepts a header but there
-                          # is no capability signaling it.
-    "HG20": (), # special-cased below
+    "": ("", 'UN'),  # only when using unbundle on ssh and old http servers
+    # since the unification ssh accepts a header but there
+    # is no capability signaling it.
+    "HG20": (),  # special-cased below
     "HG10UN": ("HG10UN", 'UN'),
     "HG10BZ": ("HG10", 'BZ'),
     "HG10GZ": ("HG10GZ", 'GZ'),
@@ -589,6 +612,7 @@
 # hgweb uses this list to communicate its preferred type
 bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']
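
For the legacy formats, this mapping is all writebundle needs: a raw header
string plus the name of a compression engine. For example, taking the values
straight from bundletypes:

    header, comp = bundletypes['HG10BZ']
    assert (header, comp) == ('HG10', 'BZ')
    # writebundle emits `header`, then streams cg.getchunks() through
    # util.compengines.forbundletype(comp)
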
 
+
 class bundle20(object):
     """represent an outgoing bundle2 container
 
@@ -630,8 +654,9 @@
         if not name:
             raise error.ProgrammingError(b'empty parameter name')
         if name[0:1] not in pycompat.bytestr(string.ascii_letters):
-            raise error.ProgrammingError(b'non letter first character: %s'
-                                         % name)
+            raise error.ProgrammingError(
+                b'non letter first character: %s' % name
+            )
         self._params.append((name, value))
 
     def addpart(self, part):
@@ -639,7 +664,7 @@
 
         Parts contain the actual applicative payload."""
         assert part.id is None
-        part.id = len(self._parts) # very cheap counter
+        part.id = len(self._parts)  # very cheap counter
         self._parts.append(part)
 
     def newpart(self, typeid, *args, **kwargs):
@@ -670,8 +695,9 @@
         yield _pack(_fstreamparamsize, len(param))
         if param:
             yield param
-        for chunk in self._compengine.compressstream(self._getcorechunk(),
-                                                     self._compopts):
+        for chunk in self._compengine.compressstream(
+            self._getcorechunk(), self._compopts
+        ):
             yield chunk
 
     def _paramchunk(self):
@@ -697,7 +723,6 @@
         outdebug(self.ui, 'end of bundle')
         yield _pack(_fpartheadersize, 0)
 
-
     def salvageoutput(self):
         """return a list with a copy of all output parts in the bundle
 
@@ -737,6 +762,7 @@
         Do not use it to implement higher-level logic or methods."""
         return changegroup.readexactly(self._fp, size)
 
+
 def getunbundler(ui, fp, magicstring=None):
     """return a valid unbundler object for a given magicstring"""
     if magicstring is None:
@@ -745,7 +771,8 @@
     if magic != 'HG':
         ui.debug(
             "error: invalid magic: %r (version %r), should be 'HG'\n"
-            % (magic, version))
+            % (magic, version)
+        )
         raise error.Abort(_('not a Mercurial bundle'))
     unbundlerclass = formatmap.get(version)
     if unbundlerclass is None:
@@ -754,6 +781,7 @@
     indebug(ui, 'start processing of %s stream' % magicstring)
     return unbundler
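
getunbundler needs only the first four bytes of the stream to pick an
implementation: the 'HG' magic plus a two-character version that keys
formatmap. A sketch of that probe (the header value is illustrative):

    header = 'HG20'          # the four-byte magic string read from the stream
    magic, version = header[0:2], header[2:4]
    assert magic == 'HG'
    unbundlerclass = formatmap.get(version)  # -> unbundle20
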
 
+
 class unbundle20(unpackermixin):
     """interpret a bundle2 stream
 
@@ -776,8 +804,9 @@
         params = {}
         paramssize = self._unpack(_fstreamparamsize)[0]
         if paramssize < 0:
-            raise error.BundleValueError('negative bundle param size: %i'
-                                         % paramssize)
+            raise error.BundleValueError(
+                'negative bundle param size: %i' % paramssize
+            )
         if paramssize:
             params = self._readexact(paramssize)
             params = self._processallparams(params)
@@ -795,7 +824,6 @@
             params[p[0]] = p[1]
         return params
 
-
     def _processparam(self, name, value):
         """process a parameter, applying its effect if needed
 
@@ -832,8 +860,9 @@
         assert 'params' not in vars(self)
         paramssize = self._unpack(_fstreamparamsize)[0]
         if paramssize < 0:
-            raise error.BundleValueError('negative bundle param size: %i'
-                                         % paramssize)
+            raise error.BundleValueError(
+                'negative bundle param size: %i' % paramssize
+            )
         if paramssize:
             params = self._readexact(paramssize)
             self._processallparams(params)
@@ -868,7 +897,6 @@
                 raise error.BundleValueError('negative chunk size: %i' % size)
             yield self._readexact(size)
 
-
     def iterparts(self, seekable=False):
         """yield all parts contained in the stream"""
         cls = seekableunbundlepart if seekable else unbundlepart
@@ -894,15 +922,16 @@
         returns None if empty"""
         headersize = self._unpack(_fpartheadersize)[0]
         if headersize < 0:
-            raise error.BundleValueError('negative part header size: %i'
-                                         % headersize)
+            raise error.BundleValueError(
+                'negative part header size: %i' % headersize
+            )
         indebug(self.ui, 'part header size: %i' % headersize)
         if headersize:
             return self._readexact(headersize)
         return None
 
     def compressed(self):
-        self.params # load params
+        self.params  # load params
         return self._compressed
 
     def close(self):
@@ -910,28 +939,33 @@
         if util.safehasattr(self._fp, 'close'):
             return self._fp.close()
 
+
 formatmap = {'20': unbundle20}
 
 b2streamparamsmap = {}
 
+
 def b2streamparamhandler(name):
     """register a handler for a stream level parameter"""
+
     def decorator(func):
         assert name not in formatmap
         b2streamparamsmap[name] = func
         return func
+
     return decorator
 
+
 @b2streamparamhandler('compression')
 def processcompression(unbundler, param, value):
     """read compression parameter and install payload decompression"""
     if value not in util.compengines.supportedbundletypes:
-        raise error.BundleUnknownFeatureError(params=(param,),
-                                              values=(value,))
+        raise error.BundleUnknownFeatureError(params=(param,), values=(value,))
     unbundler._compengine = util.compengines.forbundletype(value)
     if value is not None:
         unbundler._compressed = True
 
+
 class bundlepart(object):
     """A bundle2 part contains application level payload
 
@@ -948,8 +982,14 @@
     Neither data nor parameters can be modified after the generation has begun.
     """
 
-    def __init__(self, parttype, mandatoryparams=(), advisoryparams=(),
-                 data='', mandatory=True):
+    def __init__(
+        self,
+        parttype,
+        mandatoryparams=(),
+        advisoryparams=(),
+        data='',
+        mandatory=True,
+    ):
         validateparttype(parttype)
         self.id = None
         self.type = parttype
@@ -971,8 +1011,13 @@
 
     def __repr__(self):
         cls = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
-        return ('<%s object at %x; id: %s; type: %s; mandatory: %s>'
-                % (cls, id(self), self.id, self.type, self.mandatory))
+        return '<%s object at %x; id: %s; type: %s; mandatory: %s>' % (
+            cls,
+            id(self),
+            self.id,
+            self.type,
+            self.mandatory,
+        )
 
     def copy(self):
         """return a copy of the part
@@ -980,8 +1025,13 @@
         The new part has the very same content but no partid assigned yet.
         Parts with generated data cannot be copied."""
         assert not util.safehasattr(self.data, 'next')
-        return self.__class__(self.type, self._mandatoryparams,
-                              self._advisoryparams, self._data, self.mandatory)
+        return self.__class__(
+            self.type,
+            self._mandatoryparams,
+            self._advisoryparams,
+            self._data,
+            self.mandatory,
+        )
 
     # methods used to defines the part content
     @property
@@ -1043,8 +1093,9 @@
                 msg.append(')')
             if not self.data:
                 msg.append(' empty payload')
-            elif (util.safehasattr(self.data, 'next')
-                  or util.safehasattr(self.data, '__next__')):
+            elif util.safehasattr(self.data, 'next') or util.safehasattr(
+                self.data, '__next__'
+            ):
                 msg.append(' streamed payload')
             else:
                 msg.append(' %i bytes payload' % len(self.data))
@@ -1058,9 +1109,11 @@
             parttype = self.type.lower()
         outdebug(ui, 'part %s: "%s"' % (pycompat.bytestr(self.id), parttype))
         ## parttype
-        header = [_pack(_fparttypesize, len(parttype)),
-                  parttype, _pack(_fpartid, self.id),
-                 ]
+        header = [
+            _pack(_fparttypesize, len(parttype)),
+            parttype,
+            _pack(_fpartid, self.id),
+        ]
         ## parameters
         # count
         manpar = self.mandatoryparams
@@ -1087,8 +1140,10 @@
         try:
             headerchunk = ''.join(header)
         except TypeError:
-            raise TypeError(r'Found a non-bytes trying to '
-                            r'build bundle part header: %r' % header)
+            raise TypeError(
+                r'Found a non-bytes trying to '
+                r'build bundle part header: %r' % header
+            )
         outdebug(ui, 'header chunk size: %i' % len(headerchunk))
         yield _pack(_fpartheadersize, len(headerchunk))
         yield headerchunk
@@ -1107,12 +1162,14 @@
         except BaseException as exc:
             bexc = stringutil.forcebytestr(exc)
             # backup exception data for later
-            ui.debug('bundle2-input-stream-interrupt: encoding exception %s'
-                     % bexc)
+            ui.debug(
+                'bundle2-input-stream-interrupt: encoding exception %s' % bexc
+            )
             tb = sys.exc_info()[2]
             msg = 'unexpected error: %s' % bexc
-            interpart = bundlepart('error:abort', [('message', msg)],
-                                   mandatory=False)
+            interpart = bundlepart(
+                'error:abort', [('message', msg)], mandatory=False
+            )
             interpart.id = 0
             yield _pack(_fpayloadsize, -1)
             for chunk in interpart.getchunks(ui=ui):
@@ -1132,8 +1189,9 @@
         Exists to handle the different methods to provide data to a part."""
         # we only support fixed size data now.
         # This will be improved in the future.
-        if (util.safehasattr(self.data, 'next')
-            or util.safehasattr(self.data, '__next__')):
+        if util.safehasattr(self.data, 'next') or util.safehasattr(
+            self.data, '__next__'
+        ):
             buff = util.chunkbuffer(self.data)
             chunk = buff.read(preferedchunksize)
             while chunk:
@@ -1145,6 +1203,7 @@
 
 flaginterrupt = -1
 
+
 class interrupthandler(unpackermixin):
     """read one part and process it with restricted capability
 
@@ -1163,8 +1222,9 @@
         returns None if empty"""
         headersize = self._unpack(_fpartheadersize)[0]
         if headersize < 0:
-            raise error.BundleValueError('negative part header size: %i'
-                                         % headersize)
+            raise error.BundleValueError(
+                'negative part header size: %i' % headersize
+            )
         indebug(self.ui, 'part header size: %i\n' % headersize)
         if headersize:
             return self._readexact(headersize)
@@ -1172,8 +1232,9 @@
 
     def __call__(self):
 
-        self.ui.debug('bundle2-input-stream-interrupt:'
-                      ' opening out of band context\n')
+        self.ui.debug(
+            'bundle2-input-stream-interrupt:' ' opening out of band context\n'
+        )
         indebug(self.ui, 'bundle2 stream interruption, looking for a part.')
         headerblock = self._readpartheader()
         if headerblock is None:
@@ -1190,8 +1251,10 @@
         finally:
             if not hardabort:
                 part.consume()
-        self.ui.debug('bundle2-input-stream-interrupt:'
-                      ' closing out of band context\n')
+        self.ui.debug(
+            'bundle2-input-stream-interrupt:' ' closing out of band context\n'
+        )
+
 
 class interruptoperation(object):
     """A limited operation to be use by part handler during interruption
@@ -1211,6 +1274,7 @@
     def gettransaction(self):
         raise TransactionUnavailable('no repo access from stream interruption')
 
+
 def decodepayloadchunks(ui, fh):
     """Reads bundle2 part payload data into chunks.
 
@@ -1235,9 +1299,13 @@
         if chunksize >= 0:
             s = read(chunksize)
             if len(s) < chunksize:
-                raise error.Abort(_('stream ended unexpectedly '
-                                    ' (got %d bytes, expected %d)') %
-                                  (len(s), chunksize))
+                raise error.Abort(
+                    _(
+                        'stream ended unexpectedly '
+                        ' (got %d bytes, expected %d)'
+                    )
+                    % (len(s), chunksize)
+                )
 
             yield s
         elif chunksize == flaginterrupt:
@@ -1246,13 +1314,15 @@
             interrupthandler(ui, fh)()
         else:
             raise error.BundleValueError(
-                'negative payload chunk size: %s' % chunksize)
+                'negative payload chunk size: %s' % chunksize
+            )
 
         s = read(headersize)
         if len(s) < headersize:
-            raise error.Abort(_('stream ended unexpectedly '
-                                ' (got %d bytes, expected %d)') %
-                              (len(s), chunksize))
+            raise error.Abort(
+                _('stream ended unexpectedly ' ' (got %d bytes, expected %d)')
+                % (len(s), chunksize)
+            )
 
         chunksize = unpack(s)[0]
 
@@ -1260,13 +1330,15 @@
         if dolog:
             debug('bundle2-input: payload chunk size: %i\n' % chunksize)
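
Payload chunks are framed with a signed 32-bit length (_fpayloadsize): a
positive size prefixes data, flaginterrupt (-1) hands control to the
interrupt handler, and a zero size ends the payload. A toy encoder for that
framing, under those assumptions (encodepayload is an illustrative helper,
not module API):

    import struct

    _fpayloadsize = '>i'  # signed, so -1 can signal an interruption

    def encodepayload(chunks):
        out = []
        for chunk in chunks:
            out.append(struct.pack(_fpayloadsize, len(chunk)))
            out.append(chunk)
        out.append(struct.pack(_fpayloadsize, 0))  # empty frame ends payload
        return b''.join(out)

    assert encodepayload([b'abc']) == b'\x00\x00\x00\x03abc\x00\x00\x00\x00'
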
 
+
 class unbundlepart(unpackermixin):
     """a bundle part read from a bundle"""
 
     def __init__(self, ui, header, fp):
         super(unbundlepart, self).__init__(fp)
-        self._seekable = (util.safehasattr(fp, 'seek') and
-                          util.safehasattr(fp, 'tell'))
+        self._seekable = util.safehasattr(fp, 'seek') and util.safehasattr(
+            fp, 'tell'
+        )
         self.ui = ui
         # unbundle state attr
         self._headerdata = header
@@ -1287,7 +1359,7 @@
     def _fromheader(self, size):
         """return the next <size> byte from the header"""
         offset = self._headeroffset
-        data = self._headerdata[offset:(offset + size)]
+        data = self._headerdata[offset : (offset + size)]
         self._headeroffset = offset + size
         return data
 
@@ -1302,7 +1374,7 @@
         """internal function to setup all logic related parameters"""
         # make it read only to prevent people touching it by mistake.
         self.mandatoryparams = tuple(mandatoryparams)
-        self.advisoryparams  = tuple(advisoryparams)
+        self.advisoryparams = tuple(advisoryparams)
         # user friendly UI
         self.params = util.sortdict(self.mandatoryparams)
         self.params.update(self.advisoryparams)
@@ -1316,7 +1388,7 @@
         self.id = self._unpackheader(_fpartid)[0]
         indebug(self.ui, 'part id: "%s"' % pycompat.bytestr(self.id))
         # extract mandatory bit from type
-        self.mandatory = (self.type != self.type.lower())
+        self.mandatory = self.type != self.type.lower()
         self.type = self.type.lower()
         ## reading parameters
         # param count
@@ -1372,11 +1444,13 @@
         self._pos += len(data)
         if size is None or len(data) < size:
             if not self.consumed and self._pos:
-                self.ui.debug('bundle2-input-part: total payload size %i\n'
-                              % self._pos)
+                self.ui.debug(
+                    'bundle2-input-part: total payload size %i\n' % self._pos
+                )
             self.consumed = True
         return data
 
+
 class seekableunbundlepart(unbundlepart):
     """A bundle2 part in a bundle that is seekable.
 
@@ -1394,6 +1468,7 @@
     to the number of chunks within the payload (which almost certainly
     increases in proportion with the size of the part).
     """
+
     def __init__(self, ui, header, fp):
         # (payload, file) offsets for chunk starts.
         self._chunkindex = []
@@ -1407,7 +1482,8 @@
             self._chunkindex.append((0, self._tellfp()))
         else:
             assert chunknum < len(self._chunkindex), (
-                   'Unknown chunk %d' % chunknum)
+                'Unknown chunk %d' % chunknum
+            )
             self._seekfp(self._chunkindex[chunknum][1])
 
         pos = self._chunkindex[chunknum][0]
@@ -1495,21 +1571,23 @@
                     raise
         return None
 
+
 # These are only the static capabilities.
 # Check the 'getrepocaps' function for the rest.
-capabilities = {'HG20': (),
-                'bookmarks': (),
-                'error': ('abort', 'unsupportedcontent', 'pushraced',
-                          'pushkey'),
-                'listkeys': (),
-                'pushkey': (),
-                'digests': tuple(sorted(util.DIGESTS.keys())),
-                'remote-changegroup': ('http', 'https'),
-                'hgtagsfnodes': (),
-                'rev-branch-cache': (),
-                'phases': ('heads',),
-                'stream': ('v2',),
-               }
+capabilities = {
+    'HG20': (),
+    'bookmarks': (),
+    'error': ('abort', 'unsupportedcontent', 'pushraced', 'pushkey'),
+    'listkeys': (),
+    'pushkey': (),
+    'digests': tuple(sorted(util.DIGESTS.keys())),
+    'remote-changegroup': ('http', 'https'),
+    'hgtagsfnodes': (),
+    'rev-branch-cache': (),
+    'phases': ('heads',),
+    'stream': ('v2',),
+}
+
 
 def getrepocaps(repo, allowpushback=False, role=None):
     """return the bundle2 capabilities for a given repo
@@ -1524,8 +1602,9 @@
         raise error.ProgrammingError('role argument must be client or server')
 
     caps = capabilities.copy()
-    caps['changegroup'] = tuple(sorted(
-        changegroup.supportedincomingversions(repo)))
+    caps['changegroup'] = tuple(
+        sorted(changegroup.supportedincomingversions(repo))
+    )
     if obsolete.isenabled(repo, obsolete.exchangeopt):
         supportedformat = tuple('V%i' % v for v in obsolete.formats)
         caps['obsmarkers'] = supportedformat
@@ -1539,8 +1618,9 @@
 
     # Don't advertise stream clone support in server mode if not configured.
     if role == 'server':
-        streamsupported = repo.ui.configbool('server', 'uncompressed',
-                                             untrusted=True)
+        streamsupported = repo.ui.configbool(
+            'server', 'uncompressed', untrusted=True
+        )
         featuresupported = repo.ui.configbool('server', 'bundle2.stream')
 
         if not streamsupported or not featuresupported:
@@ -1550,6 +1630,7 @@
 
     return caps
 
+
 def bundle2caps(remote):
     """return the bundle capabilities of a peer as dict"""
     raw = remote.capable('bundle2')
@@ -1558,18 +1639,37 @@
     capsblob = urlreq.unquote(remote.capable('bundle2'))
     return decodecaps(capsblob)
 
+
 def obsmarkersversion(caps):
     """extract the list of supported obsmarkers versions from a bundle2caps dict
     """
     obscaps = caps.get('obsmarkers', ())
     return [int(c[1:]) for c in obscaps if c.startswith('V')]
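
obsmarkersversion simply strips the 'V' prefix from each advertised format
name; for instance:

    assert obsmarkersversion({'obsmarkers': ('V0', 'V1')}) == [0, 1]
    assert obsmarkersversion({}) == []
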
 
-def writenewbundle(ui, repo, source, filename, bundletype, outgoing, opts,
-                   vfs=None, compression=None, compopts=None):
+
+def writenewbundle(
+    ui,
+    repo,
+    source,
+    filename,
+    bundletype,
+    outgoing,
+    opts,
+    vfs=None,
+    compression=None,
+    compopts=None,
+):
     if bundletype.startswith('HG10'):
         cg = changegroup.makechangegroup(repo, outgoing, '01', source)
-        return writebundle(ui, cg, filename, bundletype, vfs=vfs,
-                           compression=compression, compopts=compopts)
+        return writebundle(
+            ui,
+            cg,
+            filename,
+            bundletype,
+            vfs=vfs,
+            compression=compression,
+            compopts=compopts,
+        )
     elif not bundletype.startswith('HG20'):
         raise error.ProgrammingError('unknown bundle type: %s' % bundletype)
 
@@ -1583,6 +1683,7 @@
 
     return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
 
+
 def _addpartsfromopts(ui, repo, bundler, source, outgoing, opts):
     # We should eventually reconcile this logic with the one behind
     # 'exchange.getbundle2partsgenerator'.
@@ -1601,10 +1702,12 @@
         part = bundler.newpart('changegroup', data=cg.getchunks())
         part.addparam('version', cg.version)
         if 'clcount' in cg.extras:
-            part.addparam('nbchanges', '%d' % cg.extras['clcount'],
-                          mandatory=False)
-        if opts.get('phases') and repo.revs('%ln and secret()',
-                                            outgoing.missingheads):
+            part.addparam(
+                'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
+            )
+        if opts.get('phases') and repo.revs(
+            '%ln and secret()', outgoing.missingheads
+        ):
             part.addparam('targetphase', '%d' % phases.secret, mandatory=False)
 
     if opts.get('streamv2', False):
@@ -1625,6 +1728,7 @@
         phasedata = phases.binaryencode(headsbyphase)
         bundler.newpart('phase-heads', data=phasedata)
 
+
 def addparttagsfnodescache(repo, bundler, outgoing):
     # we include the tags fnode cache for the bundle changeset
     # (as an optional parts)
@@ -1649,6 +1753,7 @@
     if chunks:
         bundler.newpart('hgtagsfnodes', data=''.join(chunks))
 
+
 def addpartrevbranchcache(repo, bundler, outgoing):
     # we include the rev branch cache for the bundle changeset
     # (as an optional parts)
@@ -1669,28 +1774,36 @@
             for n in sorted(closed):
                 yield n
 
-    bundler.newpart('cache:rev-branch-cache', data=generate(),
-                    mandatory=False)
+    bundler.newpart('cache:rev-branch-cache', data=generate(), mandatory=False)
+
 
 def _formatrequirementsspec(requirements):
     requirements = [req for req in requirements if req != "shared"]
     return urlreq.quote(','.join(sorted(requirements)))
 
+
 def _formatrequirementsparams(requirements):
     requirements = _formatrequirementsspec(requirements)
     params = "%s%s" % (urlreq.quote("requirements="), requirements)
     return params
 
+
 def addpartbundlestream2(bundler, repo, **kwargs):
     if not kwargs.get(r'stream', False):
         return
 
     if not streamclone.allowservergeneration(repo):
-        raise error.Abort(_('stream data requested but server does not allow '
-                            'this feature'),
-                          hint=_('well-behaved clients should not be '
-                                 'requesting stream data from servers not '
-                                 'advertising it; the client may be buggy'))
+        raise error.Abort(
+            _(
+                'stream data requested but server does not allow '
+                'this feature'
+            ),
+            hint=_(
+                'well-behaved clients should not be '
+                'requesting stream data from servers not '
+                'advertising it; the client may be buggy'
+            ),
+        )
 
     # Stream clones don't compress well. And compression undermines a
     # goal of stream clones, which is to be fast. Communicate the desire
@@ -1701,8 +1814,9 @@
     includepats = kwargs.get(r'includepats')
     excludepats = kwargs.get(r'excludepats')
 
-    narrowstream = repo.ui.configbool('experimental',
-                                      'server.stream-narrow-clones')
+    narrowstream = repo.ui.configbool(
+        'experimental', 'server.stream-narrow-clones'
+    )
 
     if (includepats or excludepats) and not narrowstream:
         raise error.Abort(_('server does not support narrow stream clones'))
@@ -1711,20 +1825,25 @@
     if repo.obsstore:
         remoteversions = obsmarkersversion(bundler.capabilities)
         if not remoteversions:
-            raise error.Abort(_('server has obsolescence markers, but client '
-                                'cannot receive them via stream clone'))
+            raise error.Abort(
+                _(
+                    'server has obsolescence markers, but client '
+                    'cannot receive them via stream clone'
+                )
+            )
         elif repo.obsstore._version in remoteversions:
             includeobsmarkers = True
 
-    filecount, bytecount, it = streamclone.generatev2(repo, includepats,
-                                                      excludepats,
-                                                      includeobsmarkers)
+    filecount, bytecount, it = streamclone.generatev2(
+        repo, includepats, excludepats, includeobsmarkers
+    )
     requirements = _formatrequirementsspec(repo.requirements)
     part = bundler.newpart('stream2', data=it)
     part.addparam('bytecount', '%d' % bytecount, mandatory=True)
     part.addparam('filecount', '%d' % filecount, mandatory=True)
     part.addparam('requirements', requirements, mandatory=True)
 
+
 def buildobsmarkerspart(bundler, markers):
     """add an obsmarker part to the bundler with <markers>
 
@@ -1741,8 +1860,10 @@
     stream = obsolete.encodemarkers(markers, True, version=version)
     return bundler.newpart('obsmarkers', data=stream)
 
-def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None,
-                compopts=None):
+
+def writebundle(
+    ui, cg, filename, bundletype, vfs=None, compression=None, compopts=None
+):
     """Write a bundle file and return its filename.
 
     Existing files will not be overwritten.
@@ -1757,34 +1878,37 @@
         part = bundle.newpart('changegroup', data=cg.getchunks())
         part.addparam('version', cg.version)
         if 'clcount' in cg.extras:
-            part.addparam('nbchanges', '%d' % cg.extras['clcount'],
-                          mandatory=False)
+            part.addparam(
+                'nbchanges', '%d' % cg.extras['clcount'], mandatory=False
+            )
         chunkiter = bundle.getchunks()
     else:
         # compression argument is only for the bundle2 case
         assert compression is None
         if cg.version != '01':
-            raise error.Abort(_('old bundle types only supports v1 '
-                                'changegroups'))
+            raise error.Abort(
+                _('old bundle types only supports v1 ' 'changegroups')
+            )
         header, comp = bundletypes[bundletype]
         if comp not in util.compengines.supportedbundletypes:
-            raise error.Abort(_('unknown stream compression type: %s')
-                              % comp)
+            raise error.Abort(_('unknown stream compression type: %s') % comp)
         compengine = util.compengines.forbundletype(comp)
+
         def chunkiter():
             yield header
             for chunk in compengine.compressstream(cg.getchunks(), compopts):
                 yield chunk
+
         chunkiter = chunkiter()
 
     # parse the changegroup data, otherwise we will block
     # in case of sshrepo because we don't know the end of the stream
     return changegroup.writechunks(ui, chunkiter, filename, vfs=vfs)
 
+
 def combinechangegroupresults(op):
     """logic to combine 0 or more addchangegroup results into one"""
-    results = [r.get('return', 0)
-               for r in op.records['changegroup']]
+    results = [r.get('return', 0) for r in op.records['changegroup']]
     changedheads = 0
     result = 1
     for ret in results:
@@ -1802,8 +1926,10 @@
         result = -1 + changedheads
     return result
 
-@parthandler('changegroup', ('version', 'nbchanges', 'treemanifest',
-                             'targetphase'))
+
+@parthandler(
+    'changegroup', ('version', 'nbchanges', 'treemanifest', 'targetphase')
+)
 def handlechangegroup(op, inpart):
     """apply a changegroup part on the repo
 
@@ -1821,33 +1947,51 @@
     nbchangesets = None
     if 'nbchanges' in inpart.params:
         nbchangesets = int(inpart.params.get('nbchanges'))
-    if ('treemanifest' in inpart.params and
-        'treemanifest' not in op.repo.requirements):
+    if (
+        'treemanifest' in inpart.params
+        and 'treemanifest' not in op.repo.requirements
+    ):
         if len(op.repo.changelog) != 0:
-            raise error.Abort(_(
-                "bundle contains tree manifests, but local repo is "
-                "non-empty and does not use tree manifests"))
+            raise error.Abort(
+                _(
+                    "bundle contains tree manifests, but local repo is "
+                    "non-empty and does not use tree manifests"
+                )
+            )
         op.repo.requirements.add('treemanifest')
         op.repo.svfs.options = localrepo.resolvestorevfsoptions(
-            op.repo.ui, op.repo.requirements, op.repo.features)
+            op.repo.ui, op.repo.requirements, op.repo.features
+        )
         op.repo._writerequirements()
     extrakwargs = {}
     targetphase = inpart.params.get('targetphase')
     if targetphase is not None:
         extrakwargs[r'targetphase'] = int(targetphase)
-    ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2',
-                              expectedtotal=nbchangesets, **extrakwargs)
+    ret = _processchangegroup(
+        op,
+        cg,
+        tr,
+        'bundle2',
+        'bundle2',
+        expectedtotal=nbchangesets,
+        **extrakwargs
+    )
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
         part = op.reply.newpart('reply:changegroup', mandatory=False)
         part.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
+            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+        )
         part.addparam('return', '%i' % ret, mandatory=False)
     assert not inpart.read()
 
-_remotechangegroupparams = tuple(['url', 'size', 'digests'] +
-    ['digest:%s' % k for k in util.DIGESTS.keys()])
+
+_remotechangegroupparams = tuple(
+    ['url', 'size', 'digests'] + ['digest:%s' % k for k in util.DIGESTS.keys()]
+)
+
+
 @parthandler('remote-changegroup', _remotechangegroupparams)
 def handleremotechangegroup(op, inpart):
     """apply a bundle10 on the repo, given an url and validation information
@@ -1871,14 +2015,16 @@
         raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url')
     parsed_url = util.url(raw_url)
     if parsed_url.scheme not in capabilities['remote-changegroup']:
-        raise error.Abort(_('remote-changegroup does not support %s urls') %
-            parsed_url.scheme)
+        raise error.Abort(
+            _('remote-changegroup does not support %s urls') % parsed_url.scheme
+        )
 
     try:
         size = int(inpart.params['size'])
     except ValueError:
-        raise error.Abort(_('remote-changegroup: invalid value for param "%s"')
-            % 'size')
+        raise error.Abort(
+            _('remote-changegroup: invalid value for param "%s"') % 'size'
+        )
     except KeyError:
         raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size')
 
@@ -1888,39 +2034,47 @@
         try:
             value = inpart.params[param]
         except KeyError:
-            raise error.Abort(_('remote-changegroup: missing "%s" param') %
-                param)
+            raise error.Abort(
+                _('remote-changegroup: missing "%s" param') % param
+            )
         digests[typ] = value
 
     real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests)
 
     tr = op.gettransaction()
     from . import exchange
+
     cg = exchange.readbundle(op.repo.ui, real_part, raw_url)
     if not isinstance(cg, changegroup.cg1unpacker):
-        raise error.Abort(_('%s: not a bundle version 1.0') %
-            util.hidepassword(raw_url))
+        raise error.Abort(
+            _('%s: not a bundle version 1.0') % util.hidepassword(raw_url)
+        )
     ret = _processchangegroup(op, cg, tr, 'bundle2', 'bundle2')
     if op.reply is not None:
         # This is definitely not the final form of this
         # return. But one needs to start somewhere.
         part = op.reply.newpart('reply:changegroup')
         part.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
+            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+        )
         part.addparam('return', '%i' % ret, mandatory=False)
     try:
         real_part.validate()
     except error.Abort as e:
-        raise error.Abort(_('bundle at %s is corrupted:\n%s') %
-                          (util.hidepassword(raw_url), bytes(e)))
+        raise error.Abort(
+            _('bundle at %s is corrupted:\n%s')
+            % (util.hidepassword(raw_url), bytes(e))
+        )
     assert not inpart.read()
 
+
 @parthandler('reply:changegroup', ('return', 'in-reply-to'))
 def handlereplychangegroup(op, inpart):
     ret = int(inpart.params['return'])
     replyto = int(inpart.params['in-reply-to'])
     op.records.add('changegroup', {'return': ret}, replyto)
 
+
 @parthandler('check:bookmarks')
 def handlecheckbookmarks(op, inpart):
     """check location of bookmarks
@@ -1931,12 +2085,18 @@
     """
     bookdata = bookmarks.binarydecode(inpart)
 
-    msgstandard = ('remote repository changed while pushing - please try again '
-                   '(bookmark "%s" move from %s to %s)')
-    msgmissing = ('remote repository changed while pushing - please try again '
-                  '(bookmark "%s" is missing, expected %s)')
-    msgexist = ('remote repository changed while pushing - please try again '
-                '(bookmark "%s" set on %s, expected missing)')
+    msgstandard = (
+        'remote repository changed while pushing - please try again '
+        '(bookmark "%s" move from %s to %s)'
+    )
+    msgmissing = (
+        'remote repository changed while pushing - please try again '
+        '(bookmark "%s" is missing, expected %s)'
+    )
+    msgexist = (
+        'remote repository changed while pushing - please try again '
+        '(bookmark "%s" set on %s, expected missing)'
+    )
     for book, node in bookdata:
         currentnode = op.repo._bookmarks.get(book)
         if currentnode != node:
@@ -1945,10 +2105,14 @@
             elif currentnode is None:
                 finalmsg = msgmissing % (book, nodemod.short(node))
             else:
-                finalmsg = msgstandard % (book, nodemod.short(node),
-                                          nodemod.short(currentnode))
+                finalmsg = msgstandard % (
+                    book,
+                    nodemod.short(node),
+                    nodemod.short(currentnode),
+                )
             raise error.PushRaced(finalmsg)
 
+
 @parthandler('check:heads')
 def handlecheckheads(op, inpart):
     """check that head of the repo did not change
@@ -1965,8 +2129,10 @@
     if op.ui.configbool('experimental', 'bundle2lazylocking'):
         op.gettransaction()
     if sorted(heads) != sorted(op.repo.heads()):
-        raise error.PushRaced('remote repository changed while pushing - '
-                              'please try again')
+        raise error.PushRaced(
+            'remote repository changed while pushing - ' 'please try again'
+        )
+
 
 @parthandler('check:updated-heads')
 def handlecheckupdatedheads(op, inpart):
@@ -1994,8 +2160,10 @@
 
     for h in heads:
         if h not in currentheads:
-            raise error.PushRaced('remote repository changed while pushing - '
-                                  'please try again')
+            raise error.PushRaced(
+                'remote repository changed while pushing - ' 'please try again'
+            )
+
 
 @parthandler('check:phases')
 def handlecheckphases(op, inpart):
@@ -2007,23 +2175,29 @@
     unfi = op.repo.unfiltered()
     cl = unfi.changelog
     phasecache = unfi._phasecache
-    msg = ('remote repository changed while pushing - please try again '
-           '(%s is %s expected %s)')
+    msg = (
+        'remote repository changed while pushing - please try again '
+        '(%s is %s expected %s)'
+    )
     for expectedphase, nodes in enumerate(phasetonodes):
         for n in nodes:
             actualphase = phasecache.phase(unfi, cl.rev(n))
             if actualphase != expectedphase:
-                finalmsg = msg % (nodemod.short(n),
-                                  phases.phasenames[actualphase],
-                                  phases.phasenames[expectedphase])
+                finalmsg = msg % (
+                    nodemod.short(n),
+                    phases.phasenames[actualphase],
+                    phases.phasenames[expectedphase],
+                )
                 raise error.PushRaced(finalmsg)
 
+
 @parthandler('output')
 def handleoutput(op, inpart):
     """forward output captured on the server to the client"""
     for line in inpart.read().splitlines():
         op.ui.status(_('remote: %s\n') % line)
 
+
 @parthandler('replycaps')
 def handlereplycaps(op, inpart):
     """Notify that a reply bundle should be created
@@ -2033,17 +2207,22 @@
     if op.reply is None:
         op.reply = bundle20(op.ui, caps)
 
+
 class AbortFromPart(error.Abort):
     """Sub-class of Abort that denotes an error from a bundle2 part."""
 
+
 @parthandler('error:abort', ('message', 'hint'))
 def handleerrorabort(op, inpart):
     """Used to transmit abort error over the wire"""
-    raise AbortFromPart(inpart.params['message'],
-                        hint=inpart.params.get('hint'))
-
-@parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret',
-                               'in-reply-to'))
+    raise AbortFromPart(
+        inpart.params['message'], hint=inpart.params.get('hint')
+    )
+
+
+@parthandler(
+    'error:pushkey', ('namespace', 'key', 'new', 'old', 'ret', 'in-reply-to')
+)
 def handleerrorpushkey(op, inpart):
     """Used to transmit failure of a mandatory pushkey over the wire"""
     kwargs = {}
@@ -2051,8 +2230,10 @@
         value = inpart.params.get(name)
         if value is not None:
             kwargs[name] = value
-    raise error.PushkeyFailed(inpart.params['in-reply-to'],
-                              **pycompat.strkwargs(kwargs))
+    raise error.PushkeyFailed(
+        inpart.params['in-reply-to'], **pycompat.strkwargs(kwargs)
+    )
+
 
 @parthandler('error:unsupportedcontent', ('parttype', 'params'))
 def handleerrorunsupportedcontent(op, inpart):
@@ -2067,11 +2248,13 @@
 
     raise error.BundleUnknownFeatureError(**pycompat.strkwargs(kwargs))
 
+
 @parthandler('error:pushraced', ('message',))
 def handleerrorpushraced(op, inpart):
     """Used to transmit push race error over the wire"""
     raise error.ResponseError(_('push failed:'), inpart.params['message'])
 
+
 @parthandler('listkeys', ('namespace',))
 def handlelistkeys(op, inpart):
     """retrieve pushkey namespace content stored in a bundle2"""
@@ -2079,6 +2262,7 @@
     r = pushkey.decodekeys(inpart.read())
     op.records.add('listkeys', (namespace, r))
 
+
 @parthandler('pushkey', ('namespace', 'key', 'old', 'new'))
 def handlepushkey(op, inpart):
     """process a pushkey request"""
@@ -2092,23 +2276,23 @@
     if op.ui.configbool('experimental', 'bundle2lazylocking'):
         op.gettransaction()
     ret = op.repo.pushkey(namespace, key, old, new)
-    record = {'namespace': namespace,
-              'key': key,
-              'old': old,
-              'new': new}
+    record = {'namespace': namespace, 'key': key, 'old': old, 'new': new}
     op.records.add('pushkey', record)
     if op.reply is not None:
         rpart = op.reply.newpart('reply:pushkey')
         rpart.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
+            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+        )
         rpart.addparam('return', '%i' % ret, mandatory=False)
     if inpart.mandatory and not ret:
         kwargs = {}
         for key in ('namespace', 'key', 'new', 'old', 'ret'):
             if key in inpart.params:
                 kwargs[key] = inpart.params[key]
-        raise error.PushkeyFailed(partid='%d' % inpart.id,
-                                  **pycompat.strkwargs(kwargs))
+        raise error.PushkeyFailed(
+            partid='%d' % inpart.id, **pycompat.strkwargs(kwargs)
+        )
+
 
 @parthandler('bookmarks')
 def handlebookmark(op, inpart):
@@ -2147,15 +2331,18 @@
                 allhooks.append(hookargs)
 
             for hookargs in allhooks:
-                op.repo.hook('prepushkey', throw=True,
-                             **pycompat.strkwargs(hookargs))
+                op.repo.hook(
+                    'prepushkey', throw=True, **pycompat.strkwargs(hookargs)
+                )
 
         bookstore.applychanges(op.repo, op.gettransaction(), changes)
 
         if pushkeycompat:
+
             def runhook():
                 for hookargs in allhooks:
                     op.repo.hook('pushkey', **pycompat.strkwargs(hookargs))
+
             op.repo._afterlock(runhook)
 
     elif bookmarksmode == 'records':
@@ -2165,12 +2352,14 @@
     else:
         raise error.ProgrammingError('unknown bookmark mode: %s' % bookmarksmode)
 
+
 @parthandler('phase-heads')
 def handlephases(op, inpart):
     """apply phases from bundle part to repo"""
     headsbyphase = phases.binarydecode(inpart)
     phases.updatephases(op.repo.unfiltered(), op.gettransaction, headsbyphase)
 
+
 @parthandler('reply:pushkey', ('return', 'in-reply-to'))
 def handlepushkeyreply(op, inpart):
     """retrieve the result of a pushkey request"""
@@ -2178,14 +2367,14 @@
     partid = int(inpart.params['in-reply-to'])
     op.records.add('pushkey', {'return': ret}, partid)
 
+
 @parthandler('obsmarkers')
 def handleobsmarker(op, inpart):
     """add a stream of obsmarkers to the repo"""
     tr = op.gettransaction()
     markerdata = inpart.read()
     if op.ui.config('experimental', 'obsmarkers-exchange-debug'):
-        op.ui.write(('obsmarker-exchange: %i bytes received\n')
-                    % len(markerdata))
+        op.ui.write('obsmarker-exchange: %i bytes received\n' % len(markerdata))
     # The mergemarkers call will crash if marker creation is not enabled.
     # we want to avoid this if the part is advisory.
     if not inpart.mandatory and op.repo.obsstore.readonly:
@@ -2197,7 +2386,8 @@
     if op.reply is not None:
         rpart = op.reply.newpart('reply:obsmarkers')
         rpart.addparam(
-            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False)
+            'in-reply-to', pycompat.bytestr(inpart.id), mandatory=False
+        )
         rpart.addparam('new', '%i' % new, mandatory=False)
 
 
@@ -2208,6 +2398,7 @@
     partid = int(inpart.params['in-reply-to'])
     op.records.add('obsmarkers', {'new': ret}, partid)
 
+
 @parthandler('hgtagsfnodes')
 def handlehgtagsfnodes(op, inpart):
     """Applies .hgtags fnodes cache entries to the local repo.
@@ -2232,8 +2423,10 @@
     cache.write()
     op.ui.debug('applied %i hgtags fnodes cache entries\n' % count)
 
+
 rbcstruct = struct.Struct('>III')
 
+
 @parthandler('cache:rev-branch-cache')
 def handlerbc(op, inpart):
     """receive a rev-branch-cache payload and update the local cache
@@ -2266,6 +2459,7 @@
         rawheader = inpart.read(rbcstruct.size)
     cache.write()
 
+
 @parthandler('pushvars')
 def bundle2getvars(op, part):
     '''unbundle a bundle2 containing shellvars on the server'''
@@ -2280,6 +2474,7 @@
             hookargs[key] = value
         op.addhookargs(hookargs)
 
+
 @parthandler('stream2', ('requirements', 'filecount', 'bytecount'))
 def handlestreamv2bundle(op, part):
 
@@ -2293,11 +2488,12 @@
         raise error.Abort(msg)
 
     repo.ui.debug('applying stream bundle\n')
-    streamclone.applybundlev2(repo, part, filecount, bytecount,
-                              requirements)
-
-def widen_bundle(bundler, repo, oldmatcher, newmatcher, common,
-                 known, cgversion, ellipses):
+    streamclone.applybundlev2(repo, part, filecount, bytecount, requirements)
+
+
+def widen_bundle(
+    bundler, repo, oldmatcher, newmatcher, common, known, cgversion, ellipses
+):
     """generates bundle2 for widening a narrow clone
 
     bundler is the bundle to which data should be added
@@ -2318,12 +2514,20 @@
     if commonnodes:
         # XXX: we should only send the filelogs (and treemanifest). user
         # already has the changelog and manifest
-        packer = changegroup.getbundler(cgversion, repo,
-                                        oldmatcher=oldmatcher,
-                                        matcher=newmatcher,
-                                        fullnodes=commonnodes)
-        cgdata = packer.generate({nodemod.nullid}, list(commonnodes),
-                                 False, 'narrow_widen', changelog=False)
+        packer = changegroup.getbundler(
+            cgversion,
+            repo,
+            oldmatcher=oldmatcher,
+            matcher=newmatcher,
+            fullnodes=commonnodes,
+        )
+        cgdata = packer.generate(
+            {nodemod.nullid},
+            list(commonnodes),
+            False,
+            'narrow_widen',
+            changelog=False,
+        )
 
         part = bundler.newpart('changegroup', data=cgdata)
         part.addparam('version', cgversion)
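
A minimal sketch of reproducing the one-argument-per-line call style in the widen_bundle() hunks above, assuming the black 19.x Python API (format_str and FileMode; string_normalization=False preserves quote characters) and an 80-column limit:

import black

SRC = (
    "cgdata = packer.generate({nodemod.nullid}, list(commonnodes), False,\n"
    "                         'narrow_widen', changelog=False)\n"
)

# A call that cannot fit on one line is exploded to one argument per
# line with a trailing comma, which is the shape of the hunks above.
mode = black.FileMode(line_length=80, string_normalization=False)
print(black.format_str(SRC, mode=mode))
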
--- a/mercurial/bundlerepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/bundlerepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,10 +17,7 @@
 import shutil
 
 from .i18n import _
-from .node import (
-    nullid,
-    nullrev
-)
+from .node import nullid, nullrev
 
 from . import (
     bundle2,
@@ -44,6 +41,7 @@
     vfs as vfsmod,
 )
 
+
 class bundlerevlog(revlog.revlog):
     def __init__(self, opener, indexfile, cgunpacker, linkmapper):
         # How it works:
@@ -58,7 +56,7 @@
         self.bundle = cgunpacker
         n = len(self)
         self.repotiprev = n - 1
-        self.bundlerevs = set() # used by 'bundle()' revset expression
+        self.bundlerevs = set()  # used by 'bundle()' revset expression
         for deltadata in cgunpacker.deltaiter():
             node, p1, p2, cs, deltabase, delta, flags = deltadata
 
@@ -73,17 +71,27 @@
 
             for p in (p1, p2):
                 if p not in self.nodemap:
-                    raise error.LookupError(p, self.indexfile,
-                                            _("unknown parent"))
+                    raise error.LookupError(
+                        p, self.indexfile, _("unknown parent")
+                    )
 
             if deltabase not in self.nodemap:
-                raise LookupError(deltabase, self.indexfile,
-                                  _('unknown delta base'))
+                raise LookupError(
+                    deltabase, self.indexfile, _('unknown delta base')
+                )
 
             baserev = self.rev(deltabase)
             # start, size, full unc. size, base (unused), link, p1, p2, node
-            e = (revlog.offset_type(start, flags), size, -1, baserev, link,
-                 self.rev(p1), self.rev(p2), node)
+            e = (
+                revlog.offset_type(start, flags),
+                size,
+                -1,
+                baserev,
+                link,
+                self.rev(p1),
+                self.rev(p2),
+                node,
+            )
             self.index.append(e)
             self.nodemap[node] = n
             self.bundlerevs.add(n)
@@ -108,8 +116,7 @@
         elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
             return revlog.revlog.revdiff(self, rev1, rev2)
 
-        return mdiff.textdiff(self.rawdata(rev1),
-                              self.rawdata(rev2))
+        return mdiff.textdiff(self.rawdata(rev1), self.rawdata(rev2))
 
     def _rawtext(self, node, rev, _df=None):
         if rev is None:
@@ -128,9 +135,9 @@
         if iterrev == nullrev:
             rawtext = ''
         elif rawtext is None:
-            r = super(bundlerevlog, self)._rawtext(self.node(iterrev),
-                                                   iterrev,
-                                                   _df=_df)
+            r = super(bundlerevlog, self)._rawtext(
+                self.node(iterrev), iterrev, _df=_df
+            )
             __, rawtext, validated = r
         if chain:
             validated = False
@@ -151,19 +158,24 @@
     def checksize(self):
         raise NotImplementedError
 
+
 class bundlechangelog(bundlerevlog, changelog.changelog):
     def __init__(self, opener, cgunpacker):
         changelog.changelog.__init__(self, opener)
         linkmapper = lambda x: x
-        bundlerevlog.__init__(self, opener, self.indexfile, cgunpacker,
-                              linkmapper)
+        bundlerevlog.__init__(
+            self, opener, self.indexfile, cgunpacker, linkmapper
+        )
+
 
 class bundlemanifest(bundlerevlog, manifest.manifestrevlog):
-    def __init__(self, opener, cgunpacker, linkmapper, dirlogstarts=None,
-                 dir=''):
+    def __init__(
+        self, opener, cgunpacker, linkmapper, dirlogstarts=None, dir=''
+    ):
         manifest.manifestrevlog.__init__(self, opener, tree=dir)
-        bundlerevlog.__init__(self, opener, self.indexfile, cgunpacker,
-                              linkmapper)
+        bundlerevlog.__init__(
+            self, opener, self.indexfile, cgunpacker, linkmapper
+        )
         if dirlogstarts is None:
             dirlogstarts = {}
             if self.bundle.version == "03":
@@ -175,20 +187,28 @@
         if d in self._dirlogstarts:
             self.bundle.seek(self._dirlogstarts[d])
             return bundlemanifest(
-                self.opener, self.bundle, self._linkmapper,
-                self._dirlogstarts, dir=d)
+                self.opener,
+                self.bundle,
+                self._linkmapper,
+                self._dirlogstarts,
+                dir=d,
+            )
         return super(bundlemanifest, self).dirlog(d)
 
+
 class bundlefilelog(filelog.filelog):
     def __init__(self, opener, path, cgunpacker, linkmapper):
         filelog.filelog.__init__(self, opener, path)
-        self._revlog = bundlerevlog(opener, self.indexfile,
-                                    cgunpacker, linkmapper)
+        self._revlog = bundlerevlog(
+            opener, self.indexfile, cgunpacker, linkmapper
+        )
+
 
 class bundlepeer(localrepo.localpeer):
     def canpush(self):
         return False
 
+
 class bundlephasecache(phases.phasecache):
     def __init__(self, *args, **kwargs):
         super(bundlephasecache, self).__init__(*args, **kwargs)
@@ -206,6 +226,7 @@
         self.invalidate()
         self.dirty = True
 
+
 def _getfilestarts(cgunpacker):
     filespos = {}
     for chunkdata in iter(cgunpacker.filelogheader, {}):
@@ -215,6 +236,7 @@
             pass
     return filespos
 
+
 class bundlerepository(object):
     """A repository instance that is a union of a local repo and a bundle.
 
@@ -227,6 +249,7 @@
     Instances constructed directly are not usable as repository objects.
     Use instance() or makebundlerepository() to create instances.
     """
+
     def __init__(self, bundlepath, url, tempparent):
         self._tempparent = tempparent
         self._url = url
@@ -245,8 +268,9 @@
             for part in bundle.iterparts(seekable=True):
                 if part.type == 'changegroup':
                     if cgpart:
-                        raise NotImplementedError("can't process "
-                                                  "multiple changegroups")
+                        raise NotImplementedError(
+                            "can't process multiple changegroups"
+                        )
                     cgpart = part
 
                 self._handlebundle2part(bundle, part)
@@ -263,22 +287,26 @@
 
         elif isinstance(bundle, changegroup.cg1unpacker):
             if bundle.compressed():
-                f = self._writetempbundle(bundle.read, '.hg10un',
-                                          header='HG10UN')
+                f = self._writetempbundle(
+                    bundle.read, '.hg10un', header='HG10UN'
+                )
                 bundle = exchange.readbundle(self.ui, f, bundlepath, self.vfs)
 
             self._bundlefile = bundle
             self._cgunpacker = bundle
         else:
-            raise error.Abort(_('bundle type %s cannot be read') %
-                              type(bundle))
+            raise error.Abort(_('bundle type %s cannot be read') % type(bundle))
 
         # dict with the mapping 'filename' -> position in the changegroup.
         self._cgfilespos = {}
 
         self.firstnewrev = self.changelog.repotiprev + 1
-        phases.retractboundary(self, None, phases.draft,
-                               [ctx.node() for ctx in self[self.firstnewrev:]])
+        phases.retractboundary(
+            self,
+            None,
+            phases.draft,
+            [ctx.node() for ctx in self[self.firstnewrev :]],
+        )
 
     def _handlebundle2part(self, bundle, part):
         if part.type != 'changegroup':
@@ -298,14 +326,13 @@
     def _writetempbundle(self, readfn, suffix, header=''):
         """Write a temporary file to disk
         """
-        fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-",
-                                        suffix=suffix)
+        fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-", suffix=suffix)
         self.tempfile = temp
 
         with os.fdopen(fdtemp, r'wb') as fptemp:
             fptemp.write(header)
             while True:
-                chunk = readfn(2**18)
+                chunk = readfn(2 ** 18)
                 if not chunk:
                     break
                 fptemp.write(chunk)
@@ -338,8 +365,9 @@
         rootstore = bundlemanifest(self.svfs, self._cgunpacker, linkmapper)
         self.filestart = self._cgunpacker.tell()
 
-        return manifest.manifestlog(self.svfs, self, rootstore,
-                                    self.narrowmatch())
+        return manifest.manifestlog(
+            self.svfs, self, rootstore, self.narrowmatch()
+        )
 
     def _consumemanifest(self):
         """Consumes the manifest portion of the bundle, setting filestart so the
@@ -398,7 +426,7 @@
         return bundlepeer(self)
 
     def getcwd(self):
-        return encoding.getcwd() # always outside the repo
+        return encoding.getcwd()  # always outside the repo
 
     # Check if parents exist in localrepo before setting
     def setparents(self, p1, p2=nullid):
@@ -411,6 +439,7 @@
             self.ui.warn(msg % nodemod.hex(p2))
         return super(bundlerepository, self).setparents(p1, p2)
 
+
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
         raise error.Abort(_('cannot create new bundle repository'))
@@ -430,7 +459,7 @@
         else:
             cwd = pathutil.normasprefix(cwd)
             if parentpath.startswith(cwd):
-                parentpath = parentpath[len(cwd):]
+                parentpath = parentpath[len(cwd) :]
     u = util.url(path)
     path = u.localpath()
     if u.scheme == 'bundle':
@@ -444,6 +473,7 @@
 
     return makebundlerepository(ui, repopath, bundlename)
 
+
 def makebundlerepository(ui, repopath, bundlepath):
     """Make a bundle repository object based on repo and bundle paths."""
     if repopath:
@@ -481,6 +511,7 @@
 
     return repo
 
+
 class bundletransactionmanager(object):
     def transaction(self):
         return None
@@ -491,8 +522,10 @@
     def release(self):
         raise NotImplementedError
 
-def getremotechanges(ui, repo, peer, onlyheads=None, bundlename=None,
-                     force=False):
+
+def getremotechanges(
+    ui, repo, peer, onlyheads=None, bundlename=None, force=False
+):
     '''obtains a bundle of changes incoming from peer
 
     "onlyheads" restricts the returned changes to those reachable from the
@@ -512,8 +545,7 @@
       the changes; it closes both the original "peer" and the one returned
       here.
     '''
-    tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads,
-                                       force=force)
+    tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
     common, incoming, rheads = tmp
     if not incoming:
         try:
@@ -535,62 +567,72 @@
         # developer config: devel.legacy.exchange
         legexc = ui.configlist('devel', 'legacy.exchange')
         forcebundle1 = 'bundle2' not in legexc and 'bundle1' in legexc
-        canbundle2 = (not forcebundle1
-                      and peer.capable('getbundle')
-                      and peer.capable('bundle2'))
+        canbundle2 = (
+            not forcebundle1
+            and peer.capable('getbundle')
+            and peer.capable('bundle2')
+        )
         if canbundle2:
             with peer.commandexecutor() as e:
-                b2 = e.callcommand('getbundle', {
-                    'source': 'incoming',
-                    'common': common,
-                    'heads': rheads,
-                    'bundlecaps': exchange.caps20to10(repo, role='client'),
-                    'cg': True,
-                }).result()
+                b2 = e.callcommand(
+                    'getbundle',
+                    {
+                        'source': 'incoming',
+                        'common': common,
+                        'heads': rheads,
+                        'bundlecaps': exchange.caps20to10(repo, role='client'),
+                        'cg': True,
+                    },
+                ).result()
 
-                fname = bundle = changegroup.writechunks(ui,
-                                                         b2._forwardchunks(),
-                                                         bundlename)
+                fname = bundle = changegroup.writechunks(
+                    ui, b2._forwardchunks(), bundlename
+                )
         else:
             if peer.capable('getbundle'):
                 with peer.commandexecutor() as e:
-                    cg = e.callcommand('getbundle', {
-                        'source': 'incoming',
-                        'common': common,
-                        'heads': rheads,
-                    }).result()
+                    cg = e.callcommand(
+                        'getbundle',
+                        {
+                            'source': 'incoming',
+                            'common': common,
+                            'heads': rheads,
+                        },
+                    ).result()
             elif onlyheads is None and not peer.capable('changegroupsubset'):
                 # compat with older servers when pulling all remote heads
 
                 with peer.commandexecutor() as e:
-                    cg = e.callcommand('changegroup', {
-                        'nodes': incoming,
-                        'source': 'incoming',
-                    }).result()
+                    cg = e.callcommand(
+                        'changegroup',
+                        {'nodes': incoming, 'source': 'incoming'},
+                    ).result()
 
                 rheads = None
             else:
                 with peer.commandexecutor() as e:
-                    cg = e.callcommand('changegroupsubset', {
-                        'bases': incoming,
-                        'heads': rheads,
-                        'source': 'incoming',
-                    }).result()
+                    cg = e.callcommand(
+                        'changegroupsubset',
+                        {
+                            'bases': incoming,
+                            'heads': rheads,
+                            'source': 'incoming',
+                        },
+                    ).result()
 
             if localrepo:
                 bundletype = "HG10BZ"
             else:
                 bundletype = "HG10UN"
-            fname = bundle = bundle2.writebundle(ui, cg, bundlename,
-                                                     bundletype)
+            fname = bundle = bundle2.writebundle(ui, cg, bundlename, bundletype)
         # keep written bundle?
         if bundlename:
             bundle = None
         if not localrepo:
             # use the created uncompressed bundlerepo
-            localrepo = bundlerepo = makebundlerepository(repo.baseui,
-                                                          repo.root,
-                                                          fname)
+            localrepo = bundlerepo = makebundlerepository(
+                repo.baseui, repo.root, fname
+            )
 
             # this repo contains local and peer now, so filter out local again
             common = repo.heads()
@@ -603,12 +645,12 @@
     csets = localrepo.changelog.findmissing(common, rheads)
 
     if bundlerepo:
-        reponodes = [ctx.node() for ctx in bundlerepo[bundlerepo.firstnewrev:]]
+        reponodes = [ctx.node() for ctx in bundlerepo[bundlerepo.firstnewrev :]]
 
         with peer.commandexecutor() as e:
-            remotephases = e.callcommand('listkeys', {
-                'namespace': 'phases',
-            }).result()
+            remotephases = e.callcommand(
+                'listkeys', {'namespace': 'phases'}
+            ).result()
 
         pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
         pullop.trmanager = bundletransactionmanager()
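
The spacing that appears above in slices over computed bounds (self[self.firstnewrev :], parentpath[len(cwd) :]) and around the power operator (2 ** 18) is purely cosmetic. A quick self-contained check, assuming CPython, that the spellings are equivalent:

# Both spellings of a slice with a non-trivial bound compile to the
# same bytecode, so the reformatting cannot change behavior.
spaced = compile("x[len(p) :]", "<spaced>", "eval")
tight = compile("x[len(p):]", "<tight>", "eval")
assert spaced.co_code == tight.co_code

assert 2 ** 18 == 2**18 == 262144
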
--- a/mercurial/cffi/bdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/bdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,6 +15,7 @@
 ffi = _bdiff.ffi
 lib = _bdiff.lib
 
+
 def blocks(sa, sb):
     a = ffi.new("struct bdiff_line**")
     b = ffi.new("struct bdiff_line**")
@@ -42,6 +43,7 @@
         lib.bdiff_freehunks(l.next)
     return rl
 
+
 def bdiff(sa, sb):
     a = ffi.new("struct bdiff_line**")
     b = ffi.new("struct bdiff_line**")
@@ -62,8 +64,14 @@
         while h:
             if h.a1 != la or h.b1 != lb:
                 lgt = (b[0] + h.b1).l - (b[0] + lb).l
-                rl.append(struct.pack(">lll", (a[0] + la).l - a[0].l,
-                                      (a[0] + h.a1).l - a[0].l, lgt))
+                rl.append(
+                    struct.pack(
+                        ">lll",
+                        (a[0] + la).l - a[0].l,
+                        (a[0] + h.a1).l - a[0].l,
+                        lgt,
+                    )
+                )
                 rl.append(str(ffi.buffer((b[0] + lb).l, lgt)))
             la = h.a2
             lb = h.b2
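
As I read the loop in bdiff() above, each emitted hunk is three big-endian 32-bit integers (start and end byte offsets into the old text, then the length of the replacement) followed by the replacement bytes themselves. A hedged sketch of that framing; encode_hunk() is a hypothetical helper, not part of the module:

import struct

def encode_hunk(start, end, replacement):
    # ">lll" matches the struct.pack() call in bdiff() above.
    return struct.pack(">lll", start, end, len(replacement)) + replacement

blob = encode_hunk(0, 5, b"hello")
assert struct.unpack(">lll", blob[:12]) == (0, 5, 5)
assert blob[12:] == b"hello"
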
--- a/mercurial/cffi/bdiffbuild.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/bdiffbuild.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,11 +4,14 @@
 import os
 
 ffi = cffi.FFI()
-with open(os.path.join(os.path.join(os.path.dirname(__file__), '..'),
-                       'bdiff.c')) as f:
-    ffi.set_source("mercurial.cffi._bdiff",
-                   f.read(), include_dirs=['mercurial'])
-ffi.cdef("""
+with open(
+    os.path.join(os.path.join(os.path.dirname(__file__), '..'), 'bdiff.c')
+) as f:
+    ffi.set_source(
+        "mercurial.cffi._bdiff", f.read(), include_dirs=['mercurial']
+    )
+ffi.cdef(
+    """
 struct bdiff_line {
     int hash, n, e;
     ssize_t len;
@@ -26,7 +29,8 @@
     struct bdiff_hunk *base);
 void bdiff_freehunks(struct bdiff_hunk *l);
 void free(void*);
-""")
+"""
+)
 
 if __name__ == '__main__':
     ffi.compile()
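
bdiffbuild.py follows cffi's out-of-line build pattern: set_source() names the extension module and supplies the real C source, cdef() declares the slice of the C API visible from Python, and compile() builds the module. A self-contained sketch with a hypothetical module name and function:

import cffi

ffi = cffi.FFI()
ffi.set_source(
    "example._square",  # hypothetical extension module name
    "int square(int x) { return x * x; }",
)
ffi.cdef("int square(int x);")

if __name__ == '__main__':
    ffi.compile()
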
--- a/mercurial/cffi/mpatch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/mpatch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,6 +14,7 @@
 ffi = _mpatch.ffi
 lib = _mpatch.lib
 
+
 @ffi.def_extern()
 def cffi_get_next_item(arg, pos):
     all, bins = ffi.from_handle(arg)
@@ -25,14 +26,14 @@
         return ffi.NULL
     return container[0]
 
+
 def patches(text, bins):
     lgt = len(bins)
     all = []
     if not lgt:
         return text
     arg = (all, bins)
-    patch = lib.mpatch_fold(ffi.new_handle(arg),
-                            lib.cffi_get_next_item, 0, lgt)
+    patch = lib.mpatch_fold(ffi.new_handle(arg), lib.cffi_get_next_item, 0, lgt)
     if not patch:
         raise mpatchError("cannot decode chunk")
     outlen = lib.mpatch_calcsize(len(text), patch)
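
The ffi.new_handle()/ffi.from_handle() pair above is how mpatch_fold() threads Python state through a C callback: new_handle() wraps a Python object in an opaque pointer-sized cdata (the object stays alive while the handle is referenced), and from_handle() recovers it inside cffi_get_next_item(). A minimal round-trip:

import cffi

ffi = cffi.FFI()
state = ([], ['chunk1', 'chunk2'])  # stand-in for the (all, bins) pair above
handle = ffi.new_handle(state)      # opaque void*-compatible cdata
assert ffi.from_handle(handle) is state
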
--- a/mercurial/cffi/mpatchbuild.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/mpatchbuild.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,12 +4,15 @@
 import os
 
 ffi = cffi.FFI()
-mpatch_c = os.path.join(os.path.join(os.path.dirname(__file__), '..',
-                                     'mpatch.c'))
+mpatch_c = os.path.join(
+    os.path.join(os.path.dirname(__file__), '..', 'mpatch.c')
+)
 with open(mpatch_c) as f:
-    ffi.set_source("mercurial.cffi._mpatch", f.read(),
-                   include_dirs=["mercurial"])
-ffi.cdef("""
+    ffi.set_source(
+        "mercurial.cffi._mpatch", f.read(), include_dirs=["mercurial"]
+    )
+ffi.cdef(
+    """
 
 struct mpatch_frag {
        int start, end, len;
@@ -30,7 +33,8 @@
 struct mpatch_flist *mpatch_fold(void *bins,
                        struct mpatch_flist* (*get_next_item)(void*, ssize_t),
                        ssize_t start, ssize_t end);
-""")
+"""
+)
 
 if __name__ == '__main__':
     ffi.compile()
--- a/mercurial/cffi/osutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/osutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,9 +12,7 @@
 
 from ..pure.osutil import *
 
-from .. import (
-    pycompat,
-)
+from .. import pycompat
 
 if pycompat.isdarwin:
     from . import _osutil
@@ -26,7 +24,7 @@
     # tweakable number, only affects performance, which chunks
     # of bytes do we get back from getattrlistbulk
 
-    attrkinds = [None] * 20 # we need the max no for enum VXXX, 20 is plenty
+    attrkinds = [None] * 20  # we need the max no for enum VXXX, 20 is plenty
 
     attrkinds[lib.VREG] = statmod.S_IFREG
     attrkinds[lib.VDIR] = statmod.S_IFDIR
@@ -60,8 +58,11 @@
                 ofs = cur.name_info.attr_dataoffset
                 str_lgt = cur.name_info.attr_length
                 base_ofs = ffi.offsetof('val_attrs_t', 'name_info')
-                name = str(ffi.buffer(ffi.cast("char*", cur) + base_ofs + ofs,
-                           str_lgt - 1))
+                name = str(
+                    ffi.buffer(
+                        ffi.cast("char*", cur) + base_ofs + ofs, str_lgt - 1
+                    )
+                )
                 tp = attrkinds[cur.obj_type]
                 if name == "." or name == "..":
                     continue
@@ -69,23 +70,35 @@
                     return []
                 if stat:
                     mtime = cur.mtime.tv_sec
-                    mode = (cur.accessmask & ~lib.S_IFMT)| tp
-                    ret.append((name, tp, stat_res(st_mode=mode, st_mtime=mtime,
-                                st_size=cur.datalength)))
+                    mode = (cur.accessmask & ~lib.S_IFMT) | tp
+                    ret.append(
+                        (
+                            name,
+                            tp,
+                            stat_res(
+                                st_mode=mode,
+                                st_mtime=mtime,
+                                st_size=cur.datalength,
+                            ),
+                        )
+                    )
                 else:
                     ret.append((name, tp))
-                cur = ffi.cast("val_attrs_t*", int(ffi.cast("intptr_t", cur))
-                    + lgt)
+                cur = ffi.cast(
+                    "val_attrs_t*", int(ffi.cast("intptr_t", cur)) + lgt
+                )
         return ret
 
     def listdir(path, stat=False, skip=None):
         req = ffi.new("struct attrlist*")
         req.bitmapcount = lib.ATTR_BIT_MAP_COUNT
-        req.commonattr = (lib.ATTR_CMN_RETURNED_ATTRS |
-                          lib.ATTR_CMN_NAME |
-                          lib.ATTR_CMN_OBJTYPE |
-                          lib.ATTR_CMN_ACCESSMASK |
-                          lib.ATTR_CMN_MODTIME)
+        req.commonattr = (
+            lib.ATTR_CMN_RETURNED_ATTRS
+            | lib.ATTR_CMN_NAME
+            | lib.ATTR_CMN_OBJTYPE
+            | lib.ATTR_CMN_ACCESSMASK
+            | lib.ATTR_CMN_MODTIME
+        )
         req.fileattr = lib.ATTR_FILE_DATALENGTH
         dfd = lib.open(path, lib.O_RDONLY, 0)
         if dfd == -1:
@@ -97,6 +110,6 @@
             try:
                 lib.close(dfd)
             except BaseException:
-                pass # we ignore all the errors from closing, not
+                pass  # we ignore all the errors from closing, not
                 # much we can do about that
         return ret
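
The stat branch above builds a full st_mode by masking the file-type bits out of the raw access mask and or-ing in the type decoded from obj_type. A small sketch with made-up values:

import stat as statmod

accessmask = 0o100644  # hypothetical raw mask from getattrlistbulk
tp = statmod.S_IFREG   # what attrkinds[lib.VREG] resolves to above
mode = (accessmask & ~statmod.S_IFMT) | tp
assert statmod.S_ISREG(mode) and statmod.S_IMODE(mode) == 0o644
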
--- a/mercurial/cffi/osutilbuild.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cffi/osutilbuild.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,7 +3,9 @@
 import cffi
 
 ffi = cffi.FFI()
-ffi.set_source("mercurial.cffi._osutil", """
+ffi.set_source(
+    "mercurial.cffi._osutil",
+    """
 #include <sys/attr.h>
 #include <sys/vnode.h>
 #include <unistd.h>
@@ -19,8 +21,11 @@
     uint32_t          accessmask;
     off_t             datalength;
 } __attribute__((aligned(4), packed)) val_attrs_t;
-""", include_dirs=['mercurial'])
-ffi.cdef('''
+""",
+    include_dirs=['mercurial'],
+)
+ffi.cdef(
+    '''
 
 typedef uint32_t attrgroup_t;
 
@@ -96,7 +101,8 @@
 int close(int);
 
 #define O_RDONLY ...
-''')
+'''
+)
 
 if __name__ == '__main__':
     ffi.compile()
--- a/mercurial/changegroup.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/changegroup.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,9 +28,7 @@
     util,
 )
 
-from .interfaces import (
-    repository,
-)
+from .interfaces import repository
 
 _CHANGEGROUPV1_DELTA_HEADER = struct.Struct("20s20s20s20s")
 _CHANGEGROUPV2_DELTA_HEADER = struct.Struct("20s20s20s20s20s")
@@ -40,6 +38,7 @@
 
 readexactly = util.readexactly
 
+
 def getchunk(stream):
     """return the next chunk from stream as a string"""
     d = readexactly(stream, 4)
@@ -50,18 +49,22 @@
         return ""
     return readexactly(stream, l - 4)
 
+
 def chunkheader(length):
     """return a changegroup chunk header (string)"""
     return struct.pack(">l", length + 4)
 
+
 def closechunk():
     """return a changegroup chunk header (string) for a zero-length chunk"""
     return struct.pack(">l", 0)
 
+
 def _fileheader(path):
     """Obtain a changegroup chunk header for a named path."""
     return chunkheader(len(path)) + path
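
getchunk(), chunkheader(), and closechunk() above define the changegroup chunk framing: every chunk carries a 4-byte big-endian length prefix that counts the prefix itself, and a zero length terminates a group. A hedged sketch of one frame; frame() is a hypothetical helper:

import struct

def frame(payload):
    # Mirrors chunkheader(): the length includes its own 4 bytes.
    return struct.pack(">l", len(payload) + 4) + payload

chunk = frame(b"data")
(length,) = struct.unpack(">l", chunk[:4])
assert length == 8 and chunk[4:length] == b"data"
assert struct.pack(">l", 0) == b"\x00\x00\x00\x00"  # closechunk()
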
 
+
 def writechunks(ui, chunks, filename, vfs=None):
     """Write chunks to a file and return its filename.
 
@@ -96,6 +99,7 @@
             else:
                 os.unlink(cleanup)
 
+
 class cg1unpacker(object):
     """Unpacker for cg1 changegroup streams.
 
@@ -114,17 +118,17 @@
     A few other public methods exist. Those are used only for
     bundlerepo and some debug commands - their use is discouraged.
     """
+
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
     deltaheadersize = deltaheader.size
     version = '01'
-    _grouplistcount = 1 # One list of files after the manifests
+    _grouplistcount = 1  # One list of files after the manifests
 
     def __init__(self, fh, alg, extras=None):
         if alg is None:
             alg = 'UN'
         if alg not in util.compengines.supportedbundletypes:
-            raise error.Abort(_('unknown stream compression type: %s')
-                             % alg)
+            raise error.Abort(_('unknown stream compression type: %s') % alg)
         if alg == 'BZ':
             alg = '_truncatedBZ'
 
@@ -138,12 +142,16 @@
     # be used by bundlerepo, but it's a little hard to tell.
     def compressed(self):
         return self._type is not None and self._type != 'UN'
+
     def read(self, l):
         return self._stream.read(l)
+
     def seek(self, pos):
         return self._stream.seek(pos)
+
     def tell(self):
         return self._stream.tell()
+
     def close(self):
         return self._stream.close()
 
@@ -233,7 +241,7 @@
                 yield chunkheader(len(chunk))
                 pos = 0
                 while pos < len(chunk):
-                    next = pos + 2**20
+                    next = pos + 2 ** 20
                     yield chunk[pos:next]
                     pos = next
             yield closechunk()
@@ -250,8 +258,15 @@
         prog.complete()
         self.callback = None
 
-    def apply(self, repo, tr, srctype, url, targetphase=phases.draft,
-              expectedtotal=None):
+    def apply(
+        self,
+        repo,
+        tr,
+        srctype,
+        url,
+        targetphase=phases.draft,
+        expectedtotal=None,
+    ):
         """Add the changegroup returned by source.read() to this repo.
         srctype is a string like 'push', 'pull', or 'unbundle'.  url is
         the URL of the repo where this changegroup is coming from.
@@ -263,6 +278,7 @@
         - number of heads stays the same: 1
         """
         repo = repo.unfiltered()
+
         def csmap(x):
             repo.ui.debug("add changeset %s\n" % short(x))
             return len(cl)
@@ -279,8 +295,9 @@
             # in this function.
             srctype = tr.hookargs.setdefault('source', srctype)
             tr.hookargs.setdefault('url', url)
-            repo.hook('prechangegroup',
-                      throw=True, **pycompat.strkwargs(tr.hookargs))
+            repo.hook(
+                'prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs)
+            )
 
             # write changelog data to temp files so concurrent readers
             # will not see an inconsistent view
@@ -292,11 +309,13 @@
             # pull off the changeset group
             repo.ui.status(_("adding changesets\n"))
             clstart = len(cl)
-            progress = repo.ui.makeprogress(_('changesets'), unit=_('chunks'),
-                                            total=expectedtotal)
+            progress = repo.ui.makeprogress(
+                _('changesets'), unit=_('chunks'), total=expectedtotal
+            )
             self.callback = progress.increment
 
             efiles = set()
+
             def onchangelog(cl, node):
                 efiles.update(cl.readfiles(node))
 
@@ -306,8 +325,10 @@
             efiles = len(efiles)
 
             if not cgnodes:
-                repo.ui.develwarn('applied empty changelog from changegroup',
-                                  config='warn-empty-changegroup')
+                repo.ui.develwarn(
+                    'applied empty changelog from changegroup',
+                    config='warn-empty-changegroup',
+                )
             clend = len(cl)
             changesets = clend - clstart
             progress.complete()
@@ -317,8 +338,9 @@
             repo.ui.status(_("adding manifests\n"))
             # We know that we'll never have more manifests than we had
             # changesets.
-            progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
-                                            total=changesets)
+            progress = repo.ui.makeprogress(
+                _('manifests'), unit=_('chunks'), total=changesets
+            )
             self._unpackmanifests(repo, revmap, trp, progress)
 
             needfiles = {}
@@ -336,7 +358,8 @@
             # process the files
             repo.ui.status(_("adding file changes\n"))
             newrevs, newfiles = _addchangegroupfiles(
-                repo, self, revmap, trp, efiles, needfiles)
+                repo, self, revmap, trp, efiles, needfiles
+            )
 
             # making sure the value exists
             tr.changes.setdefault('changegroup-count-changesets', 0)
@@ -380,8 +403,11 @@
                     hookargs = dict(tr.hookargs)
                     hookargs['node'] = hex(cl.node(clstart))
                     hookargs['node_last'] = hex(cl.node(clend - 1))
-                repo.hook('pretxnchangegroup',
-                          throw=True, **pycompat.strkwargs(hookargs))
+                repo.hook(
+                    'pretxnchangegroup',
+                    throw=True,
+                    **pycompat.strkwargs(hookargs)
+                )
 
             added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
             phaseall = None
@@ -424,15 +450,18 @@
                         del args['node_last']
                         repo.hook("incoming", **pycompat.strkwargs(args))
 
-                    newheads = [h for h in repo.heads()
-                                if h not in oldheads]
-                    repo.ui.log("incoming",
-                                "%d incoming changes - new heads: %s\n",
-                                len(added),
-                                ', '.join([hex(c[:6]) for c in newheads]))
+                    newheads = [h for h in repo.heads() if h not in oldheads]
+                    repo.ui.log(
+                        "incoming",
+                        "%d incoming changes - new heads: %s\n",
+                        len(added),
+                        ', '.join([hex(c[:6]) for c in newheads]),
+                    )
 
-                tr.addpostclose('changegroup-runhooks-%020i' % clstart,
-                                lambda tr: repo._afterlock(runhooks))
+                tr.addpostclose(
+                    'changegroup-runhooks-%020i' % clstart,
+                    lambda tr: repo._afterlock(runhooks),
+                )
         finally:
             repo.ui.flush()
         # never return 0 here:
@@ -454,6 +483,7 @@
             yield chunkdata
             chain = chunkdata[0]
 
+
 class cg2unpacker(cg1unpacker):
     """Unpacker for cg2 streams.
 
@@ -461,6 +491,7 @@
     format is slightly different. All other features about the data
     remain the same.
     """
+
     deltaheader = _CHANGEGROUPV2_DELTA_HEADER
     deltaheadersize = deltaheader.size
     version = '02'
@@ -470,6 +501,7 @@
         flags = 0
         return node, p1, p2, deltabase, cs, flags
 
+
 class cg3unpacker(cg2unpacker):
     """Unpacker for cg3 streams.
 
@@ -477,10 +509,11 @@
     flags. It adds the revlog flags to the delta header and an empty chunk
     separating manifests and files.
     """
+
     deltaheader = _CHANGEGROUPV3_DELTA_HEADER
     deltaheadersize = deltaheader.size
     version = '03'
-    _grouplistcount = 2 # One list of manifests and one list of files
+    _grouplistcount = 2  # One list of manifests and one list of files
 
     def _deltaheader(self, headertuple, prevnode):
         node, p1, p2, deltabase, cs, flags = headertuple
@@ -496,10 +529,12 @@
             if not repo.manifestlog.getstorage(d).addgroup(deltas, revmap, trp):
                 raise error.Abort(_("received dir revlog group is empty"))
 
+
 class headerlessfixup(object):
     def __init__(self, fh, h):
         self._h = h
         self._fh = fh
+
     def read(self, n):
         if self._h:
             d, self._h = self._h[:n], self._h[n:]
@@ -508,6 +543,7 @@
             return d
         return readexactly(self._fh, n)
 
+
 def _revisiondeltatochunks(delta, headerfn):
     """Serialize a revisiondelta to changegroup chunks."""
 
@@ -524,8 +560,7 @@
         prefix = mdiff.trivialdiffheader(len(data))
     else:
         data = delta.revision
-        prefix = mdiff.replacediffheader(delta.baserevisionsize,
-                                         len(data))
+        prefix = mdiff.replacediffheader(delta.baserevisionsize, len(data))
 
     meta = headerfn(delta)
 
@@ -535,6 +570,7 @@
         yield prefix
     yield data
 
+
 def _sortnodesellipsis(store, nodes, cl, lookup):
     """Sort nodes for changegroup generation."""
     # Ellipses serving mode.
@@ -556,10 +592,20 @@
     key = lambda n: cl.rev(lookup(n))
     return sorted(nodes, key=key)
 
-def _resolvenarrowrevisioninfo(cl, store, ischangelog, rev, linkrev,
-                               linknode, clrevtolocalrev, fullclnodes,
-                               precomputedellipsis):
+
+def _resolvenarrowrevisioninfo(
+    cl,
+    store,
+    ischangelog,
+    rev,
+    linkrev,
+    linknode,
+    clrevtolocalrev,
+    fullclnodes,
+    precomputedellipsis,
+):
     linkparents = precomputedellipsis[linkrev]
+
     def local(clrev):
         """Turn a changelog revnum into a local revnum.
 
@@ -593,11 +639,11 @@
             if p in clrevtolocalrev:
                 return clrevtolocalrev[p]
             elif p in fullclnodes:
-                walk.extend([pp for pp in cl.parentrevs(p)
-                                if pp != nullrev])
+                walk.extend([pp for pp in cl.parentrevs(p) if pp != nullrev])
             elif p in precomputedellipsis:
-                walk.extend([pp for pp in precomputedellipsis[p]
-                                if pp != nullrev])
+                walk.extend(
+                    [pp for pp in precomputedellipsis[p] if pp != nullrev]
+                )
             else:
                 # In this case, we've got an ellipsis with parents
                 # outside the current bundle (likely an
@@ -618,15 +664,15 @@
                 # we crash the changegroup construction.
                 raise error.Abort(
                     'unable to resolve parent while packing %r %r'
-                    ' for changeset %r' % (store.indexfile, rev, clrev))
+                    ' for changeset %r' % (store.indexfile, rev, clrev)
+                )
 
         return nullrev
 
-    if not linkparents or (
-        store.parentrevs(rev) == (nullrev, nullrev)):
+    if not linkparents or (store.parentrevs(rev) == (nullrev, nullrev)):
         p1, p2 = nullrev, nullrev
     elif len(linkparents) == 1:
-        p1, = sorted(local(p) for p in linkparents)
+        (p1,) = sorted(local(p) for p in linkparents)
         p2 = nullrev
     else:
         p1, p2 = sorted(local(p) for p in linkparents)
@@ -635,10 +681,20 @@
 
     return p1node, p2node, linknode
 
-def deltagroup(repo, store, nodes, ischangelog, lookup, forcedeltaparentprev,
-               topic=None,
-               ellipses=False, clrevtolocalrev=None, fullclnodes=None,
-               precomputedellipsis=None):
+
+def deltagroup(
+    repo,
+    store,
+    nodes,
+    ischangelog,
+    lookup,
+    forcedeltaparentprev,
+    topic=None,
+    ellipses=False,
+    clrevtolocalrev=None,
+    fullclnodes=None,
+    precomputedellipsis=None,
+):
     """Calculate deltas for a set of revisions.
 
     Is a generator of ``revisiondelta`` instances.
@@ -698,8 +754,16 @@
                 # We could probably do this later and avoid the dict
                 # holding state. But it likely doesn't matter.
                 p1node, p2node, linknode = _resolvenarrowrevisioninfo(
-                    cl, store, ischangelog, rev, linkrev, linknode,
-                    clrevtolocalrev, fullclnodes, precomputedellipsis)
+                    cl,
+                    store,
+                    ischangelog,
+                    rev,
+                    linkrev,
+                    linknode,
+                    clrevtolocalrev,
+                    fullclnodes,
+                    precomputedellipsis,
+                )
 
                 adjustedparents[node] = (p1node, p2node)
                 linknodes[node] = linknode
@@ -712,8 +776,9 @@
     # meter for constructing the revision deltas.
     progress = None
     if topic is not None:
-        progress = repo.ui.makeprogress(topic, unit=_('chunks'),
-                                        total=len(nodes))
+        progress = repo.ui.makeprogress(
+            topic, unit=_('chunks'), total=len(nodes)
+        )
 
     configtarget = repo.ui.config('devel', 'bundle.delta')
     if configtarget not in ('', 'p1', 'full'):
@@ -733,7 +798,8 @@
         nodesorder=nodesorder,
         revisiondata=True,
         assumehaveparentrevisions=not ellipses,
-        deltamode=deltamode)
+        deltamode=deltamode,
+    )
 
     for i, revision in enumerate(revisions):
         if progress:
@@ -757,12 +823,23 @@
     if progress:
         progress.complete()
 
+
 class cgpacker(object):
-    def __init__(self, repo, oldmatcher, matcher, version,
-                 builddeltaheader, manifestsend,
-                 forcedeltaparentprev=False,
-                 bundlecaps=None, ellipses=False,
-                 shallow=False, ellipsisroots=None, fullnodes=None):
+    def __init__(
+        self,
+        repo,
+        oldmatcher,
+        matcher,
+        version,
+        builddeltaheader,
+        manifestsend,
+        forcedeltaparentprev=False,
+        bundlecaps=None,
+        ellipses=False,
+        shallow=False,
+        ellipsisroots=None,
+        fullnodes=None,
+    ):
         """Given a source repo, construct a bundler.
 
         oldmatcher is a matcher that matches on files the client already has.
@@ -823,8 +900,9 @@
         else:
             self._verbosenote = lambda s: None
 
-    def generate(self, commonrevs, clnodes, fastpathlinkrev, source,
-                 changelog=True):
+    def generate(
+        self, commonrevs, clnodes, fastpathlinkrev, source, changelog=True
+    ):
         """Yield a sequence of changegroup byte chunks.
         If changelog is False, changelog data won't be added to changegroup
         """
@@ -835,11 +913,11 @@
         self._verbosenote(_('uncompressed size of bundle content:\n'))
         size = 0
 
-        clstate, deltas = self._generatechangelog(cl, clnodes,
-                                                  generate=changelog)
+        clstate, deltas = self._generatechangelog(
+            cl, clnodes, generate=changelog
+        )
         for delta in deltas:
-            for chunk in _revisiondeltatochunks(delta,
-                                                self._builddeltaheader):
+            for chunk in _revisiondeltatochunks(delta, self._builddeltaheader):
                 size += len(chunk)
                 yield chunk
 
@@ -872,14 +950,21 @@
         # either, because we don't discover which directory nodes to
         # send along with files. This could probably be fixed.
         fastpathlinkrev = fastpathlinkrev and (
-            'treemanifest' not in repo.requirements)
+            'treemanifest' not in repo.requirements
+        )
 
         fnodes = {}  # needed file nodes
 
         size = 0
         it = self.generatemanifests(
-            commonrevs, clrevorder, fastpathlinkrev, manifests, fnodes, source,
-            clstate['clrevtomanifestrev'])
+            commonrevs,
+            clrevorder,
+            fastpathlinkrev,
+            manifests,
+            fnodes,
+            source,
+            clstate['clrevtomanifestrev'],
+        )
 
         for tree, deltas in it:
             if tree:
@@ -903,15 +988,23 @@
 
         mfdicts = None
         if self._ellipses and self._isshallow:
-            mfdicts = [(self._repo.manifestlog[n].read(), lr)
-                       for (n, lr) in manifests.iteritems()]
+            mfdicts = [
+                (self._repo.manifestlog[n].read(), lr)
+                for (n, lr) in manifests.iteritems()
+            ]
 
         manifests.clear()
         clrevs = set(cl.rev(x) for x in clnodes)
 
-        it = self.generatefiles(changedfiles, commonrevs,
-                                source, mfdicts, fastpathlinkrev,
-                                fnodes, clrevs)
+        it = self.generatefiles(
+            changedfiles,
+            commonrevs,
+            source,
+            mfdicts,
+            fastpathlinkrev,
+            fnodes,
+            clrevs,
+        )
 
         for path, deltas in it:
             h = _fileheader(path)
@@ -984,8 +1077,10 @@
                 # end up with bogus linkrevs specified for manifests and
                 # we skip some manifest nodes that we should otherwise
                 # have sent.
-                if (x in self._fullclnodes
-                    or cl.rev(x) in self._precomputedellipsis):
+                if (
+                    x in self._fullclnodes
+                    or cl.rev(x) in self._precomputedellipsis
+                ):
 
                     manifestnode = c.manifest
                     # Record the first changeset introducing this manifest
@@ -996,7 +1091,8 @@
                     # mapping changelog ellipsis parents to manifest ellipsis
                     # parents)
                     clrevtomanifestrev.setdefault(
-                        cl.rev(x), mfl.rev(manifestnode))
+                        cl.rev(x), mfl.rev(manifestnode)
+                    )
                 # We can't trust the changed files list in the changeset if the
                 # client requested a shallow clone.
                 if self._isshallow:
@@ -1013,18 +1109,31 @@
             return x
 
         gen = deltagroup(
-            self._repo, cl, nodes, True, lookupcl,
+            self._repo,
+            cl,
+            nodes,
+            True,
+            lookupcl,
             self._forcedeltaparentprev,
             ellipses=self._ellipses,
             topic=_('changesets'),
             clrevtolocalrev={},
             fullclnodes=self._fullclnodes,
-            precomputedellipsis=self._precomputedellipsis)
+            precomputedellipsis=self._precomputedellipsis,
+        )
 
         return state, gen
 
-    def generatemanifests(self, commonrevs, clrevorder, fastpathlinkrev,
-                          manifests, fnodes, source, clrevtolocalrev):
+    def generatemanifests(
+        self,
+        commonrevs,
+        clrevorder,
+        fastpathlinkrev,
+        manifests,
+        fnodes,
+        source,
+        clrevtolocalrev,
+    ):
         """Returns an iterator of changegroup chunks containing manifests.
 
         `source` is unused here, but is used by extensions like remotefilelog to
@@ -1061,7 +1170,7 @@
                 clnode = nodes[x]
                 mdata = mfl.get(tree, x).readfast(shallow=True)
                 for p, n, fl in mdata.iterentries():
-                    if fl == 't': # subdirectory manifest
+                    if fl == 't':  # subdirectory manifest
                         subtree = tree + p + '/'
                         tmfclnodes = tmfnodes.setdefault(subtree, {})
                         tmfclnode = tmfclnodes.setdefault(n, clnode)
@@ -1074,6 +1183,7 @@
                         if clrevorder[clnode] < clrevorder[fclnode]:
                             fclnodes[n] = clnode
                 return clnode
+
             return lookupmflinknode
 
         while tmfnodes:
@@ -1103,13 +1213,18 @@
             lookupfn = makelookupmflinknode(tree, nodes)
 
             deltas = deltagroup(
-                self._repo, store, prunednodes, False, lookupfn,
+                self._repo,
+                store,
+                prunednodes,
+                False,
+                lookupfn,
                 self._forcedeltaparentprev,
                 ellipses=self._ellipses,
                 topic=_('manifests'),
                 clrevtolocalrev=clrevtolocalrev,
                 fullclnodes=self._fullclnodes,
-                precomputedellipsis=self._precomputedellipsis)
+                precomputedellipsis=self._precomputedellipsis,
+            )
 
             if not self._oldmatcher.visitdir(store.tree[:-1]):
                 yield tree, deltas
@@ -1138,14 +1253,27 @@
         return [n for n in nodes if flr(frev(n)) not in commonrevs]
 
     # The 'source' parameter is useful for extensions
-    def generatefiles(self, changedfiles, commonrevs, source,
-                      mfdicts, fastpathlinkrev, fnodes, clrevs):
-        changedfiles = [f for f in changedfiles
-                        if self._matcher(f) and not self._oldmatcher(f)]
+    def generatefiles(
+        self,
+        changedfiles,
+        commonrevs,
+        source,
+        mfdicts,
+        fastpathlinkrev,
+        fnodes,
+        clrevs,
+    ):
+        changedfiles = [
+            f
+            for f in changedfiles
+            if self._matcher(f) and not self._oldmatcher(f)
+        ]
 
         if not fastpathlinkrev:
+
             def normallinknodes(unused, fname):
                 return fnodes.get(fname, {})
+
         else:
             cln = self._repo.changelog.node
 
@@ -1153,8 +1281,9 @@
                 flinkrev = store.linkrev
                 fnode = store.node
                 revs = ((r, flinkrev(r)) for r in store)
-                return dict((fnode(r), cln(lr))
-                            for r, lr in revs if lr in clrevs)
+                return dict(
+                    (fnode(r), cln(lr)) for r, lr in revs if lr in clrevs
+                )
 
         clrevtolocalrev = {}
 
@@ -1181,17 +1310,20 @@
                         elif fnode:
                             links[fnode] = lr
                 return links
+
         else:
             linknodes = normallinknodes
 
         repo = self._repo
-        progress = repo.ui.makeprogress(_('files'), unit=_('files'),
-                                        total=len(changedfiles))
+        progress = repo.ui.makeprogress(
+            _('files'), unit=_('files'), total=len(changedfiles)
+        )
         for i, fname in enumerate(sorted(changedfiles)):
             filerevlog = repo.file(fname)
             if not filerevlog:
-                raise error.Abort(_("empty or missing file data for %s") %
-                                  fname)
+                raise error.Abort(
+                    _("empty or missing file data for %s") % fname
+                )
 
             clrevtolocalrev.clear()
 
@@ -1206,8 +1338,9 @@
             # has. This avoids over-sending files relatively
             # inexpensively, so it's not a problem if we under-filter
             # here.
-            filenodes = [n for n in linkrevnodes
-                         if flr(frev(n)) not in commonrevs]
+            filenodes = [
+                n for n in linkrevnodes if flr(frev(n)) not in commonrevs
+            ]
 
             if not filenodes:
                 continue
@@ -1215,76 +1348,128 @@
             progress.update(i + 1, item=fname)
 
             deltas = deltagroup(
-                self._repo, filerevlog, filenodes, False, lookupfilelog,
+                self._repo,
+                filerevlog,
+                filenodes,
+                False,
+                lookupfilelog,
                 self._forcedeltaparentprev,
                 ellipses=self._ellipses,
                 clrevtolocalrev=clrevtolocalrev,
                 fullclnodes=self._fullclnodes,
-                precomputedellipsis=self._precomputedellipsis)
+                precomputedellipsis=self._precomputedellipsis,
+            )
 
             yield fname, deltas
 
         progress.complete()
 
-def _makecg1packer(repo, oldmatcher, matcher, bundlecaps,
-                   ellipses=False, shallow=False, ellipsisroots=None,
-                   fullnodes=None):
+
+def _makecg1packer(
+    repo,
+    oldmatcher,
+    matcher,
+    bundlecaps,
+    ellipses=False,
+    shallow=False,
+    ellipsisroots=None,
+    fullnodes=None,
+):
     builddeltaheader = lambda d: _CHANGEGROUPV1_DELTA_HEADER.pack(
-        d.node, d.p1node, d.p2node, d.linknode)
+        d.node, d.p1node, d.p2node, d.linknode
+    )
 
-    return cgpacker(repo, oldmatcher, matcher, b'01',
-                    builddeltaheader=builddeltaheader,
-                    manifestsend=b'',
-                    forcedeltaparentprev=True,
-                    bundlecaps=bundlecaps,
-                    ellipses=ellipses,
-                    shallow=shallow,
-                    ellipsisroots=ellipsisroots,
-                    fullnodes=fullnodes)
+    return cgpacker(
+        repo,
+        oldmatcher,
+        matcher,
+        b'01',
+        builddeltaheader=builddeltaheader,
+        manifestsend=b'',
+        forcedeltaparentprev=True,
+        bundlecaps=bundlecaps,
+        ellipses=ellipses,
+        shallow=shallow,
+        ellipsisroots=ellipsisroots,
+        fullnodes=fullnodes,
+    )
 
-def _makecg2packer(repo, oldmatcher, matcher, bundlecaps,
-                   ellipses=False, shallow=False, ellipsisroots=None,
-                   fullnodes=None):
+
+def _makecg2packer(
+    repo,
+    oldmatcher,
+    matcher,
+    bundlecaps,
+    ellipses=False,
+    shallow=False,
+    ellipsisroots=None,
+    fullnodes=None,
+):
     builddeltaheader = lambda d: _CHANGEGROUPV2_DELTA_HEADER.pack(
-        d.node, d.p1node, d.p2node, d.basenode, d.linknode)
+        d.node, d.p1node, d.p2node, d.basenode, d.linknode
+    )
 
-    return cgpacker(repo, oldmatcher, matcher, b'02',
-                    builddeltaheader=builddeltaheader,
-                    manifestsend=b'',
-                    bundlecaps=bundlecaps,
-                    ellipses=ellipses,
-                    shallow=shallow,
-                    ellipsisroots=ellipsisroots,
-                    fullnodes=fullnodes)
+    return cgpacker(
+        repo,
+        oldmatcher,
+        matcher,
+        b'02',
+        builddeltaheader=builddeltaheader,
+        manifestsend=b'',
+        bundlecaps=bundlecaps,
+        ellipses=ellipses,
+        shallow=shallow,
+        ellipsisroots=ellipsisroots,
+        fullnodes=fullnodes,
+    )
 
-def _makecg3packer(repo, oldmatcher, matcher, bundlecaps,
-                   ellipses=False, shallow=False, ellipsisroots=None,
-                   fullnodes=None):
-    builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
-        d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags)
 
-    return cgpacker(repo, oldmatcher, matcher, b'03',
-                    builddeltaheader=builddeltaheader,
-                    manifestsend=closechunk(),
-                    bundlecaps=bundlecaps,
-                    ellipses=ellipses,
-                    shallow=shallow,
-                    ellipsisroots=ellipsisroots,
-                    fullnodes=fullnodes)
+def _makecg3packer(
+    repo,
+    oldmatcher,
+    matcher,
+    bundlecaps,
+    ellipses=False,
+    shallow=False,
+    ellipsisroots=None,
+    fullnodes=None,
+):
+    builddeltaheader = lambda d: _CHANGEGROUPV3_DELTA_HEADER.pack(
+        d.node, d.p1node, d.p2node, d.basenode, d.linknode, d.flags
+    )
 
-_packermap = {'01': (_makecg1packer, cg1unpacker),
-             # cg2 adds support for exchanging generaldelta
-             '02': (_makecg2packer, cg2unpacker),
-             # cg3 adds support for exchanging revlog flags and treemanifests
-             '03': (_makecg3packer, cg3unpacker),
+    return cgpacker(
+        repo,
+        oldmatcher,
+        matcher,
+        b'03',
+        builddeltaheader=builddeltaheader,
+        manifestsend=closechunk(),
+        bundlecaps=bundlecaps,
+        ellipses=ellipses,
+        shallow=shallow,
+        ellipsisroots=ellipsisroots,
+        fullnodes=fullnodes,
+    )
+
+
+_packermap = {
+    '01': (_makecg1packer, cg1unpacker),
+    # cg2 adds support for exchanging generaldelta
+    '02': (_makecg2packer, cg2unpacker),
+    # cg3 adds support for exchanging revlog flags and treemanifests
+    '03': (_makecg3packer, cg3unpacker),
 }
 
+
 def allsupportedversions(repo):
     versions = set(_packermap.keys())
     needv03 = False
-    if (repo.ui.configbool('experimental', 'changegroup3') or
-        repo.ui.configbool('experimental', 'treemanifest') or
-        'treemanifest' in repo.requirements):
+    if (
+        repo.ui.configbool('experimental', 'changegroup3')
+        or repo.ui.configbool('experimental', 'treemanifest')
+        or 'treemanifest' in repo.requirements
+    ):
         # we keep version 03 because we need it to exchange treemanifest data
         #
         # we also keep version 01 and 02, because it is possible for the repo to
@@ -1297,10 +1482,12 @@
         versions.discard('03')
     return versions
 
+
 # Changegroup versions that can be applied to the repo
 def supportedincomingversions(repo):
     return allsupportedversions(repo)
 
+
 # Changegroup versions that can be created from the repo
 def supportedoutgoingversions(repo):
     versions = allsupportedversions(repo)
@@ -1325,11 +1512,13 @@
 
     return versions
 
+
 def localversion(repo):
     # Finds the best version to use for bundles that are meant to be used
     # locally, such as those from strip and shelve, and temporary bundles.
     return max(supportedoutgoingversions(repo))
 
+
 def safeversion(repo):
     # Finds the smallest version that it's safe to assume clients of the repo
     # will support. For example, all hg versions that support generaldelta also
@@ -1340,9 +1529,18 @@
     assert versions
     return min(versions)
 
-def getbundler(version, repo, bundlecaps=None, oldmatcher=None,
-               matcher=None, ellipses=False, shallow=False,
-               ellipsisroots=None, fullnodes=None):
+
+def getbundler(
+    version,
+    repo,
+    bundlecaps=None,
+    oldmatcher=None,
+    matcher=None,
+    ellipses=False,
+    shallow=False,
+    ellipsisroots=None,
+    fullnodes=None,
+):
     assert version in supportedoutgoingversions(repo)
 
     if matcher is None:
@@ -1351,26 +1549,40 @@
         oldmatcher = matchmod.never()
 
     if version == '01' and not matcher.always():
-        raise error.ProgrammingError('version 01 changegroups do not support '
-                                     'sparse file matchers')
+        raise error.ProgrammingError(
+            'version 01 changegroups do not support ' 'sparse file matchers'
+        )
 
     if ellipses and version in (b'01', b'02'):
         raise error.Abort(
-            _('ellipsis nodes require at least cg3 on client and server, '
-              'but negotiated version %s') % version)
+            _(
+                'ellipsis nodes require at least cg3 on client and server, '
+                'but negotiated version %s'
+            )
+            % version
+        )
 
     # Requested files could include files not in the local store. So
     # filter those out.
     matcher = repo.narrowmatch(matcher)
 
     fn = _packermap[version][0]
-    return fn(repo, oldmatcher, matcher, bundlecaps, ellipses=ellipses,
-              shallow=shallow, ellipsisroots=ellipsisroots,
-              fullnodes=fullnodes)
+    return fn(
+        repo,
+        oldmatcher,
+        matcher,
+        bundlecaps,
+        ellipses=ellipses,
+        shallow=shallow,
+        ellipsisroots=ellipsisroots,
+        fullnodes=fullnodes,
+    )
+
 
 def getunbundler(version, fh, alg, extras=None):
     return _packermap[version][1](fh, alg, extras=extras)
 
+
 def _changegroupinfo(repo, nodes, source):
     if repo.ui.verbose or source == 'bundle':
         repo.ui.status(_("%d changesets found\n") % len(nodes))
@@ -1379,17 +1591,36 @@
         for node in nodes:
             repo.ui.debug("%s\n" % hex(node))
 
-def makechangegroup(repo, outgoing, version, source, fastpath=False,
-                    bundlecaps=None):
-    cgstream = makestream(repo, outgoing, version, source,
-                          fastpath=fastpath, bundlecaps=bundlecaps)
-    return getunbundler(version, util.chunkbuffer(cgstream), None,
-                        {'clcount': len(outgoing.missing) })
 
-def makestream(repo, outgoing, version, source, fastpath=False,
-               bundlecaps=None, matcher=None):
-    bundler = getbundler(version, repo, bundlecaps=bundlecaps,
-                         matcher=matcher)
+def makechangegroup(
+    repo, outgoing, version, source, fastpath=False, bundlecaps=None
+):
+    cgstream = makestream(
+        repo,
+        outgoing,
+        version,
+        source,
+        fastpath=fastpath,
+        bundlecaps=bundlecaps,
+    )
+    return getunbundler(
+        version,
+        util.chunkbuffer(cgstream),
+        None,
+        {'clcount': len(outgoing.missing)},
+    )
+
+
+def makestream(
+    repo,
+    outgoing,
+    version,
+    source,
+    fastpath=False,
+    bundlecaps=None,
+    matcher=None,
+):
+    bundler = getbundler(version, repo, bundlecaps=bundlecaps, matcher=matcher)
 
     repo = repo.unfiltered()
     commonrevs = outgoing.common
@@ -1400,17 +1631,20 @@
     # be pulled by the client).
     heads.sort()
     fastpathlinkrev = fastpath or (
-            repo.filtername is None and heads == sorted(repo.heads()))
+        repo.filtername is None and heads == sorted(repo.heads())
+    )
 
     repo.hook('preoutgoing', throw=True, source=source)
     _changegroupinfo(repo, csets, source)
     return bundler.generate(commonrevs, csets, fastpathlinkrev, source)
 
+
 def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
     revisions = 0
     files = 0
-    progress = repo.ui.makeprogress(_('files'), unit=_('files'),
-                                    total=expectedfiles)
+    progress = repo.ui.makeprogress(
+        _('files'), unit=_('files'), total=expectedfiles
+    )
     for chunkdata in iter(source.filelogheader, {}):
         files += 1
         f = chunkdata["filename"]
@@ -1432,8 +1666,7 @@
                 if n in needs:
                     needs.remove(n)
                 else:
-                    raise error.Abort(
-                        _("received spurious file revlog entry"))
+                    raise error.Abort(_("received spurious file revlog entry"))
             if not needs:
                 del needfiles[f]
     progress.complete()
@@ -1445,7 +1678,8 @@
                 fl.rev(n)
             except error.LookupError:
                 raise error.Abort(
-                    _('missing file data for %s:%s - run hg verify') %
-                    (f, hex(n)))
+                    _('missing file data for %s:%s - run hg verify')
+                    % (f, hex(n))
+                )
 
     return revisions, files
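
For context, the _packermap registry above keys both directions of the wire
format by version: makestream() drives the packer factory from
_packermap[version][0], and getunbundler() hands back the matching unpacker
from _packermap[version][1]. A minimal sketch of that round trip, mirroring
makechangegroup() and assuming a repo and an outgoing set supplied by the
caller:

    from mercurial import changegroup, util

    def roundtrip(repo, outgoing, version=b'02'):
        # stream deltas with the packer registered for this version ...
        stream = changegroup.makestream(repo, outgoing, version, b'example')
        # ... and rewrap them with the matching unpacker for local application
        return changegroup.getunbundler(version, util.chunkbuffer(stream), None)
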
--- a/mercurial/changelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/changelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,7 @@
     hex,
     nullid,
 )
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 
 from . import (
     encoding,
@@ -31,6 +29,7 @@
 
 _defaultextra = {'branch': 'default'}
 
+
 def _string_escape(text):
     """
     >>> from .pycompat import bytechr as chr
@@ -46,6 +45,7 @@
     text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
     return text.replace('\0', '\\0')
 
+
 def _string_unescape(text):
     if '\\0' in text:
         # fix up \0 without getting into trouble with \\0
@@ -54,6 +54,7 @@
         text = text.replace('\n', '')
     return stringutil.unescapestr(text)
 
+
 def decodeextra(text):
     """
     >>> from .pycompat import bytechr as chr
@@ -72,14 +73,15 @@
             extra[k] = v
     return extra
 
+
 def encodeextra(d):
     # keys must be sorted to produce a deterministic changelog entry
     items = [
-        _string_escape('%s:%s' % (k, pycompat.bytestr(d[k])))
-        for k in sorted(d)
+        _string_escape('%s:%s' % (k, pycompat.bytestr(d[k]))) for k in sorted(d)
     ]
     return "\0".join(items)
 
+
 def encodecopies(files, copies):
     items = []
     for i, dst in enumerate(files):
@@ -89,6 +91,7 @@
         raise error.ProgrammingError('some copy targets missing from file list')
     return "\n".join(items)
 
+
 def decodecopies(files, data):
     try:
         copies = {}
@@ -105,6 +108,7 @@
         # used different syntax for the value.
         return None
 
+
 def encodefileindices(files, subset):
     subset = set(subset)
     indices = []
@@ -113,6 +117,7 @@
             indices.append('%d' % i)
     return '\n'.join(indices)
 
+
 def decodefileindices(files, data):
     try:
         subset = []
@@ -129,13 +134,16 @@
         # used different syntax for the value.
         return None
 
+
 def stripdesc(desc):
     """strip trailing whitespace and leading and trailing empty lines"""
     return '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
 
+
 class appender(object):
     '''the changelog index must be updated last on disk, so we use this class
     to delay writes to it'''
+
     def __init__(self, vfs, name, mode, buf):
         self.data = buf
         fp = vfs(name, mode)
@@ -146,8 +154,10 @@
 
     def end(self):
         return self._end
+
     def tell(self):
         return self.offset
+
     def flush(self):
         pass
 
@@ -182,7 +192,7 @@
             doff = self.offset - self.size
             self.data.insert(0, "".join(self.data))
             del self.data[1:]
-            s = self.data[0][doff:doff + count]
+            s = self.data[0][doff : doff + count]
             self.offset += len(s)
             ret += s
         return ret
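
A minimal sketch of the delayed-write mechanism above (illustrative only;
assumes a repository's store vfs, which is what the changelog opener is in
practice). _delayopener() below wires an appender up for the index file while
a transaction is pending:

    buf = []
    fp = appender(repo.svfs, '00changelog.i', 'a+', buf)
    fp.write('...new index entry...')  # parked in buf, not written through
    fp.seek(0)  # reads stitch the on-disk bytes together with the buffered tail
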
@@ -199,22 +209,29 @@
     def __exit__(self, *args):
         return self.fp.__exit__(*args)
 
+
 def _divertopener(opener, target):
     """build an opener that writes in 'target.a' instead of 'target'"""
+
     def _divert(name, mode='r', checkambig=False):
         if name != target:
             return opener(name, mode)
         return opener(name + ".a", mode)
+
     return _divert
 
+
 def _delayopener(opener, target, buf):
     """build an opener that stores chunks in 'buf' instead of 'target'"""
+
     def _delay(name, mode='r', checkambig=False):
         if name != target:
             return opener(name, mode)
         return appender(opener, name, mode, buf)
+
     return _delay
 
+
 @attr.s
 class _changelogrevision(object):
     # Extensions might modify _defaultextra, so let the constructor below pass
@@ -230,6 +247,7 @@
     p2copies = attr.ib(default=None)
     description = attr.ib(default='')
 
+
 class changelogrevision(object):
     """Holds results of a parsed changelog revision.
 
@@ -268,7 +286,7 @@
 
         # The list of files may be empty, which means nl3 is the first of the
         # double newline that precedes the description.
-        if text[nl3 + 1:nl3 + 2] == '\n':
+        if text[nl3 + 1 : nl3 + 2] == '\n':
             doublenl = nl3
         else:
             doublenl = text.index('\n\n', nl3 + 1)
@@ -280,23 +298,23 @@
 
     @property
     def manifest(self):
-        return bin(self._text[0:self._offsets[0]])
+        return bin(self._text[0 : self._offsets[0]])
 
     @property
     def user(self):
         off = self._offsets
-        return encoding.tolocal(self._text[off[0] + 1:off[1]])
+        return encoding.tolocal(self._text[off[0] + 1 : off[1]])
 
     @property
     def _rawdate(self):
         off = self._offsets
-        dateextra = self._text[off[1] + 1:off[2]]
+        dateextra = self._text[off[1] + 1 : off[2]]
         return dateextra.split(' ', 2)[0:2]
 
     @property
     def _rawextra(self):
         off = self._offsets
-        dateextra = self._text[off[1] + 1:off[2]]
+        dateextra = self._text[off[1] + 1 : off[2]]
         fields = dateextra.split(' ', 2)
         if len(fields) != 3:
             return None
@@ -329,7 +347,7 @@
         if off[2] == off[3]:
             return []
 
-        return self._text[off[2] + 1:off[3]].split('\n')
+        return self._text[off[2] + 1 : off[3]].split('\n')
 
     @property
     def filesadded(self):
@@ -353,7 +371,8 @@
 
     @property
     def description(self):
-        return encoding.tolocal(self._text[self._offsets[3] + 2:])
+        return encoding.tolocal(self._text[self._offsets[3] + 2 :])
+
 
 class changelog(revlog.revlog):
     def __init__(self, opener, trypending=False):
@@ -372,8 +391,14 @@
             indexfile = '00changelog.i'
 
         datafile = '00changelog.d'
-        revlog.revlog.__init__(self, opener, indexfile, datafile=datafile,
-                               checkambig=True, mmaplargeindex=True)
+        revlog.revlog.__init__(
+            self,
+            opener,
+            indexfile,
+            datafile=datafile,
+            checkambig=True,
+            mmaplargeindex=True,
+        )
 
         if self._initempty and (self.version & 0xFFFF == revlog.REVLOGV1):
             # changelogs don't benefit from generaldelta.
@@ -394,7 +419,7 @@
         self._copiesstorage = opener.options.get('copies-storage')
 
     def tiprev(self):
-        for i in pycompat.xrange(len(self) -1, -2, -1):
+        for i in pycompat.xrange(len(self) - 1, -2, -1):
             if i not in self.filteredrevs:
                 return i
 
@@ -404,8 +429,7 @@
 
     def __contains__(self, rev):
         """filtered version of revlog.__contains__"""
-        return (0 <= rev < len(self)
-                and rev not in self.filteredrevs)
+        return 0 <= rev < len(self) and rev not in self.filteredrevs
 
     def __iter__(self):
         """filtered version of revlog.__iter__"""
@@ -469,8 +493,9 @@
         """filtered version of revlog.rev"""
         r = super(changelog, self).rev(node)
         if r in self.filteredrevs:
-            raise error.FilteredLookupError(hex(node), self.indexfile,
-                                            _('filtered node'))
+            raise error.FilteredLookupError(
+                hex(node), self.indexfile, _('filtered node')
+            )
         return r
 
     def node(self, rev):
@@ -508,8 +533,9 @@
                 self.opener = _divertopener(self._realopener, self.indexfile)
             else:
                 self._delaybuf = []
-                self.opener = _delayopener(self._realopener, self.indexfile,
-                                           self._delaybuf)
+                self.opener = _delayopener(
+                    self._realopener, self.indexfile, self._delaybuf
+                )
         self._delayed = True
         tr.addpending('cl-%i' % id(self), self._writepending)
         tr.addfinalize('cl-%i' % id(self), self._finalize)
@@ -579,14 +605,7 @@
         access.
         """
         c = changelogrevision(self.revision(node))
-        return (
-            c.manifest,
-            c.user,
-            c.date,
-            c.files,
-            c.description,
-            c.extra
-        )
+        return (c.manifest, c.user, c.date, c.files, c.description, c.extra)
 
     def changelogrevision(self, nodeorrev):
         """Obtain a ``changelogrevision`` for a node or revision."""
@@ -603,9 +622,22 @@
         l = text[:last].split('\n')
         return l[3:]
 
-    def add(self, manifest, files, desc, transaction, p1, p2,
-                  user, date=None, extra=None, p1copies=None, p2copies=None,
-                  filesadded=None, filesremoved=None):
+    def add(
+        self,
+        manifest,
+        files,
+        desc,
+        transaction,
+        p1,
+        p2,
+        user,
+        date=None,
+        extra=None,
+        p1copies=None,
+        p2copies=None,
+        filesadded=None,
+        filesremoved=None,
+    ):
         # Convert to UTF-8 encoded bytestrings as the very first
         # thing: calling any method on a localstr object will turn it
         # into a str object and the cached UTF-8 string is thus lost.
@@ -618,8 +650,9 @@
         if not user:
             raise error.StorageError(_("empty username"))
         if "\n" in user:
-            raise error.StorageError(_("username %r contains a newline")
-                                     % pycompat.bytestr(user))
+            raise error.StorageError(
+                _("username %r contains a newline") % pycompat.bytestr(user)
+            )
 
         desc = stripdesc(desc)
 
@@ -632,8 +665,9 @@
             if branch in ("default", ""):
                 del extra["branch"]
             elif branch in (".", "null", "tip"):
-                raise error.StorageError(_('the name \'%s\' is reserved')
-                                         % branch)
+                raise error.StorageError(
+                    _('the name \'%s\' is reserved') % branch
+                )
         sortedfiles = sorted(files)
         if extra is not None:
             for name in ('p1copies', 'p2copies', 'filesadded', 'filesremoved'):
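
The extra-dict codec above deserves a concrete example: encodeextra() joins
escaped key:value pairs with NUL bytes, sorting the keys so a given dict
always encodes to the same bytes, and decodeextra() reverses the process. A
small illustration (with literal, assumed values):

    from mercurial.changelog import decodeextra, encodeextra

    extra = {'branch': 'default', 'close': '1'}
    blob = encodeextra(extra)          # 'branch:default\x00close:1'
    assert decodeextra(blob) == extra  # sorted keys make this round-trip
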
--- a/mercurial/chgserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/chgserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -67,24 +67,27 @@
     stringutil,
 )
 
+
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
     return node.hex(hashlib.sha1(stringutil.pprint(items)).digest())
 
+
 # sensitive config sections affecting confighash
 _configsections = [
     'alias',  # affects global state commands.table
-    'eol',    # uses setconfig('eol', ...)
+    'eol',  # uses setconfig('eol', ...)
     'extdiff',  # uisetup will register new commands
     'extensions',
 ]
 
 _configsectionitems = [
-    ('commands', 'show.aliasprefix'), # show.py reads it in extsetup
+    ('commands', 'show.aliasprefix'),  # show.py reads it in extsetup
 ]
 
 # sensitive environment variables affecting confighash
-_envre = re.compile(br'''\A(?:
+_envre = re.compile(
+    br'''\A(?:
                     CHGHG
                     |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
                     |HG(?:ENCODING|PLAIN).*
@@ -95,7 +98,10 @@
                     |PYTHON.*
                     |TERM(?:INFO)?
                     |TZ
-                    )\Z''', re.X)
+                    )\Z''',
+    re.X,
+)
+
 
 def _confighash(ui):
     """return a quick hash for detecting config/env changes
@@ -119,11 +125,15 @@
         ignored = {'HG'}
     else:
         ignored = set()
-    envitems = [(k, v) for k, v in encoding.environ.iteritems()
-                if _envre.match(k) and k not in ignored]
+    envitems = [
+        (k, v)
+        for k, v in encoding.environ.iteritems()
+        if _envre.match(k) and k not in ignored
+    ]
     envhash = _hashlist(sorted(envitems))
     return sectionhash[:6] + envhash[:6]
 
+
 def _getmtimepaths(ui):
     """get a list of paths that should be checked to detect change
 
@@ -135,6 +145,7 @@
     modules = [m for n, m in extensions.extensions(ui)]
     try:
         from . import __version__
+
         modules.append(__version__)
     except ImportError:
         pass
@@ -148,6 +159,7 @@
             pass
     return sorted(set(files))
 
+
 def _mtimehash(paths):
     """return a quick hash for detecting file changes
 
@@ -165,6 +177,7 @@
     extensions after importing them (there is imp.find_module but that faces
     race conditions). We need to calculate confighash without importing.
     """
+
     def trystat(path):
         try:
             st = os.stat(path)
@@ -172,10 +185,13 @@
         except OSError:
             # could be ENOENT, EPERM etc. not fatal in any case
             pass
+
     return _hashlist(pycompat.maplist(trystat, paths))[:12]
 
+
 class hashstate(object):
     """a structure storing confighash, mtimehash, paths used for mtimehash"""
+
     def __init__(self, confighash, mtimehash, mtimepaths):
         self.confighash = confighash
         self.mtimehash = mtimehash
@@ -187,10 +203,15 @@
             mtimepaths = _getmtimepaths(ui)
         confighash = _confighash(ui)
         mtimehash = _mtimehash(mtimepaths)
-        ui.log('cmdserver', 'confighash = %s mtimehash = %s\n',
-               confighash, mtimehash)
+        ui.log(
+            'cmdserver',
+            'confighash = %s mtimehash = %s\n',
+            confighash,
+            mtimehash,
+        )
         return hashstate(confighash, mtimehash, mtimepaths)
 
+
 def _newchgui(srcui, csystem, attachio):
     class chgui(srcui.__class__):
         def __init__(self, src=None):
@@ -206,21 +227,28 @@
             #  b. or stdout is redirected by protectfinout(),
             # because the chg client is not aware of these situations and
             # will behave differently (i.e. write to stdout).
-            if (out is not self.fout
+            if (
+                out is not self.fout
                 or not util.safehasattr(self.fout, 'fileno')
                 or self.fout.fileno() != procutil.stdout.fileno()
-                or self._finoutredirected):
+                or self._finoutredirected
+            ):
                 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
             self.flush()
             return self._csystem(cmd, procutil.shellenviron(environ), cwd)
 
         def _runpager(self, cmd, env=None):
-            self._csystem(cmd, procutil.shellenviron(env), type='pager',
-                          cmdtable={'attachio': attachio})
+            self._csystem(
+                cmd,
+                procutil.shellenviron(env),
+                type='pager',
+                cmdtable={'attachio': attachio},
+            )
             return True
 
     return chgui(srcui)
 
+
 def _loadnewui(srcui, args, cdebug):
     from . import dispatch  # avoid cycle
 
@@ -256,6 +284,7 @@
 
     return (newui, newlui)
 
+
 class channeledsystem(object):
     """Propagate ui.system() request in the following format:
 
@@ -276,6 +305,7 @@
     and executes it defined by cmdtable, or exits the loop if the command name
     is empty.
     """
+
     def __init__(self, in_, out, channel):
         self.in_ = in_
         self.out = out
@@ -291,10 +321,10 @@
 
         if type == 'system':
             length = self.in_.read(4)
-            length, = struct.unpack('>I', length)
+            (length,) = struct.unpack('>I', length)
             if length != 4:
                 raise error.Abort(_('invalid response'))
-            rc, = struct.unpack('>i', self.in_.read(4))
+            (rc,) = struct.unpack('>i', self.in_.read(4))
             return rc
         elif type == 'pager':
             while True:
@@ -308,6 +338,7 @@
         else:
             raise error.ProgrammingError('invalid S channel type: %s' % type)
 
+
 _iochannels = [
     # server.ch, ui.fp, mode
     ('cin', 'fin', r'rb'),
@@ -315,12 +346,18 @@
     ('cerr', 'ferr', r'wb'),
 ]
 
+
 class chgcmdserver(commandserver.server):
-    def __init__(self, ui, repo, fin, fout, sock, prereposetups,
-                 hashstate, baseaddress):
+    def __init__(
+        self, ui, repo, fin, fout, sock, prereposetups, hashstate, baseaddress
+    ):
         super(chgcmdserver, self).__init__(
             _newchgui(ui, channeledsystem(fin, fout, 'S'), self.attachio),
-            repo, fin, fout, prereposetups)
+            repo,
+            fin,
+            fout,
+            prereposetups,
+        )
         self.clientsock = sock
         self._ioattached = False
         self._oldios = []  # original (self.ch, ui.fp, fd) before "attachio"
@@ -512,24 +549,32 @@
         encoding.environ.update(newenv)
 
     capabilities = commandserver.server.capabilities.copy()
-    capabilities.update({'attachio': attachio,
-                         'chdir': chdir,
-                         'runcommand': runcommand,
-                         'setenv': setenv,
-                         'setumask': setumask,
-                         'setumask2': setumask2})
+    capabilities.update(
+        {
+            'attachio': attachio,
+            'chdir': chdir,
+            'runcommand': runcommand,
+            'setenv': setenv,
+            'setumask': setumask,
+            'setumask2': setumask2,
+        }
+    )
 
     if util.safehasattr(procutil, 'setprocname'):
+
         def setprocname(self):
             """Change process title"""
             name = self._readstr()
             self.ui.log('chgserver', 'setprocname: %r\n', name)
             procutil.setprocname(name)
+
         capabilities['setprocname'] = setprocname
 
+
 def _tempaddress(address):
     return '%s.%d.tmp' % (address, os.getpid())
 
+
 def _hashaddress(address, hashstr):
     # if the basename of address contains '.', use only the left part. this
     # makes it possible for the client to pass 'server.tmp$PID' and follow by
@@ -538,6 +583,7 @@
     basename = basename.split('.', 1)[0]
     return '%s-%s' % (os.path.join(dirname, basename), hashstr)
 
+
 class chgunixservicehandler(object):
     """Set of operations for chg services"""
 
@@ -594,8 +640,10 @@
     def _issocketowner(self):
         try:
             st = os.stat(self._realaddress)
-            return (st.st_ino == self._socketstat.st_ino and
-                    st[stat.ST_MTIME] == self._socketstat[stat.ST_MTIME])
+            return (
+                st.st_ino == self._socketstat.st_ino
+                and st[stat.ST_MTIME] == self._socketstat[stat.ST_MTIME]
+            )
         except OSError:
             return False
 
@@ -610,8 +658,9 @@
 
     def shouldexit(self):
         if not self._issocketowner():
-            self.ui.log(b'chgserver', b'%s is not owned, exiting.\n',
-                        self._realaddress)
+            self.ui.log(
+                b'chgserver', b'%s is not owned, exiting.\n', self._realaddress
+            )
             return True
         if time.time() - self._lastactive > self._idletimeout:
             self.ui.log(b'chgserver', b'being idle too long. exiting.\n')
@@ -622,8 +671,17 @@
         self._lastactive = time.time()
 
     def createcmdserver(self, repo, conn, fin, fout, prereposetups):
-        return chgcmdserver(self.ui, repo, fin, fout, conn, prereposetups,
-                            self._hashstate, self._baseaddress)
+        return chgcmdserver(
+            self.ui,
+            repo,
+            fin,
+            fout,
+            conn,
+            prereposetups,
+            self._hashstate,
+            self._baseaddress,
+        )
+
 
 def chgunixservice(ui, repo, opts):
     # CHGINTERNALMARK is set by chg client. It is an indication that things are
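
The two fingerprints computed by hashstate above are what make chg servers
self-invalidating: the socket address embeds the confighash, so a config or
environment change points clients at a fresh server, while the mtimehash
fingerprints config files and extension sources so a running server can tell
when it has gone stale. A sketch, assuming a configured ui object and the
fromui constructor shown above:

    from mercurial.chgserver import _hashaddress, hashstate

    state = hashstate.fromui(ui)  # confighash and mtimehash, 12 hex digits each
    # e.g. '/tmp/chg.sock' -> '/tmp/chg-<confighash>'
    address = _hashaddress('/tmp/chg.sock', state.confighash)
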
--- a/mercurial/cmdutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/cmdutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -61,69 +61,81 @@
 # templates of common command options
 
 dryrunopts = [
-    ('n', 'dry-run', None,
-     _('do not perform actions, just print output')),
+    ('n', 'dry-run', None, _('do not perform actions, just print output')),
 ]
 
 confirmopts = [
-    ('', 'confirm', None,
-     _('ask before applying actions')),
+    ('', 'confirm', None, _('ask before applying actions')),
 ]
 
 remoteopts = [
-    ('e', 'ssh', '',
-     _('specify ssh command to use'), _('CMD')),
-    ('', 'remotecmd', '',
-     _('specify hg command to run on the remote side'), _('CMD')),
-    ('', 'insecure', None,
-     _('do not verify server certificate (ignoring web.cacerts config)')),
+    ('e', 'ssh', '', _('specify ssh command to use'), _('CMD')),
+    (
+        '',
+        'remotecmd',
+        '',
+        _('specify hg command to run on the remote side'),
+        _('CMD'),
+    ),
+    (
+        '',
+        'insecure',
+        None,
+        _('do not verify server certificate (ignoring web.cacerts config)'),
+    ),
 ]
 
 walkopts = [
-    ('I', 'include', [],
-     _('include names matching the given patterns'), _('PATTERN')),
-    ('X', 'exclude', [],
-     _('exclude names matching the given patterns'), _('PATTERN')),
+    (
+        'I',
+        'include',
+        [],
+        _('include names matching the given patterns'),
+        _('PATTERN'),
+    ),
+    (
+        'X',
+        'exclude',
+        [],
+        _('exclude names matching the given patterns'),
+        _('PATTERN'),
+    ),
 ]
 
 commitopts = [
-    ('m', 'message', '',
-     _('use text as commit message'), _('TEXT')),
-    ('l', 'logfile', '',
-     _('read commit message from file'), _('FILE')),
+    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
+    ('l', 'logfile', '', _('read commit message from file'), _('FILE')),
 ]
 
 commitopts2 = [
-    ('d', 'date', '',
-     _('record the specified date as commit date'), _('DATE')),
-    ('u', 'user', '',
-     _('record the specified user as committer'), _('USER')),
+    ('d', 'date', '', _('record the specified date as commit date'), _('DATE')),
+    ('u', 'user', '', _('record the specified user as committer'), _('USER')),
 ]
 
 commitopts3 = [
-    (b'D', b'currentdate', None,
-     _(b'record the current date as commit date')),
-    (b'U', b'currentuser', None,
-     _(b'record the current user as committer')),
+    (b'D', b'currentdate', None, _(b'record the current date as commit date')),
+    (b'U', b'currentuser', None, _(b'record the current user as committer')),
 ]
 
 formatteropts = [
-    ('T', 'template', '',
-     _('display with template'), _('TEMPLATE')),
+    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
 ]
 
 templateopts = [
-    ('', 'style', '',
-     _('display using template map file (DEPRECATED)'), _('STYLE')),
-    ('T', 'template', '',
-     _('display with template'), _('TEMPLATE')),
+    (
+        '',
+        'style',
+        '',
+        _('display using template map file (DEPRECATED)'),
+        _('STYLE'),
+    ),
+    ('T', 'template', '', _('display with template'), _('TEMPLATE')),
 ]
 
 logopts = [
     ('p', 'patch', None, _('show patch')),
     ('g', 'git', None, _('use git extended diff format')),
-    ('l', 'limit', '',
-     _('limit number of changes displayed'), _('NUM')),
+    ('l', 'limit', '', _('limit number of changes displayed'), _('NUM')),
     ('M', 'no-merges', None, _('do not show merges')),
     ('', 'stat', None, _('output diffstat-style summary of changes')),
     ('G', 'graph', None, _("show the revision DAG")),
@@ -133,44 +145,70 @@
     ('a', 'text', None, _('treat all files as text')),
     ('g', 'git', None, _('use git extended diff format')),
     ('', 'binary', None, _('generate binary diffs in git mode (default)')),
-    ('', 'nodates', None, _('omit dates from diff headers'))
+    ('', 'nodates', None, _('omit dates from diff headers')),
 ]
 
 diffwsopts = [
-    ('w', 'ignore-all-space', None,
-     _('ignore white space when comparing lines')),
-    ('b', 'ignore-space-change', None,
-     _('ignore changes in the amount of white space')),
-    ('B', 'ignore-blank-lines', None,
-     _('ignore changes whose lines are all blank')),
-    ('Z', 'ignore-space-at-eol', None,
-     _('ignore changes in whitespace at EOL')),
+    (
+        'w',
+        'ignore-all-space',
+        None,
+        _('ignore white space when comparing lines'),
+    ),
+    (
+        'b',
+        'ignore-space-change',
+        None,
+        _('ignore changes in the amount of white space'),
+    ),
+    (
+        'B',
+        'ignore-blank-lines',
+        None,
+        _('ignore changes whose lines are all blank'),
+    ),
+    (
+        'Z',
+        'ignore-space-at-eol',
+        None,
+        _('ignore changes in whitespace at EOL'),
+    ),
 ]
 
-diffopts2 = [
-    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
-    ('p', 'show-function', None, _('show which function each change is in')),
-    ('', 'reverse', None, _('produce a diff that undoes the changes')),
-] + diffwsopts + [
-    ('U', 'unified', '',
-     _('number of lines of context to show'), _('NUM')),
-    ('', 'stat', None, _('output diffstat-style summary of changes')),
-    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
-]
+diffopts2 = (
+    [
+        ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
+        (
+            'p',
+            'show-function',
+            None,
+            _('show which function each change is in'),
+        ),
+        ('', 'reverse', None, _('produce a diff that undoes the changes')),
+    ]
+    + diffwsopts
+    + [
+        ('U', 'unified', '', _('number of lines of context to show'), _('NUM')),
+        ('', 'stat', None, _('output diffstat-style summary of changes')),
+        ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
+    ]
+)
 
 mergetoolopts = [
     ('t', 'tool', '', _('specify merge tool'), _('TOOL')),
 ]
 
 similarityopts = [
-    ('s', 'similarity', '',
-     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
+    (
+        's',
+        'similarity',
+        '',
+        _('guess renamed files by similarity (0<=s<=100)'),
+        _('SIMILARITY'),
+    )
 ]
 
-subrepoopts = [
-    ('S', 'subrepos', None,
-     _('recurse into subrepositories'))
-]
+subrepoopts = [('S', 'subrepos', None, _('recurse into subrepositories'))]
 
 debugrevlogopts = [
     ('c', 'changelog', False, _('open changelog')),
@@ -182,6 +220,7 @@
 # editor text
 _linebelow = "^HG: ------------------------ >8 ------------------------$"
 
+
 def resolvecommitoptions(ui, opts):
     """modify commit options dict to handle related options
 
@@ -189,19 +228,23 @@
     the ``date`` option is set.
     """
     if opts.get('date') and opts.get('currentdate'):
-        raise error.Abort(_('--date and --currentdate are mutually '
-                            'exclusive'))
+        raise error.Abort(
+            _('--date and --currentdate are mutually ' 'exclusive')
+        )
     if opts.get(b'user') and opts.get(b'currentuser'):
-        raise error.Abort(_('--user and --currentuser are mutually '
-                            'exclusive'))
+        raise error.Abort(
+            _('--user and --currentuser are mutually ' 'exclusive')
+        )
 
     datemaydiffer = False  # date-only change should be ignored?
 
     if opts.get(b'currentdate'):
         opts[b'date'] = b'%d %d' % dateutil.makedate()
-    elif (not opts.get('date')
-          and ui.configbool('rewrite', 'update-timestamp')
-          and opts.get('currentdate') is None):
+    elif (
+        not opts.get('date')
+        and ui.configbool('rewrite', 'update-timestamp')
+        and opts.get('currentdate') is None
+    ):
         opts[b'date'] = b'%d %d' % dateutil.makedate()
         datemaydiffer = True
 
@@ -210,6 +253,7 @@
 
     return datemaydiffer
 
+
 def checknotesize(ui, opts):
     """ make sure note is of valid format """
 
@@ -222,24 +266,32 @@
     if b'\n' in note:
         raise error.Abort(_(b"note cannot contain a newline"))
 
+
 def ishunk(x):
     hunkclasses = (crecordmod.uihunk, patch.recordhunk)
     return isinstance(x, hunkclasses)
 
+
 def newandmodified(chunks, originalchunks):
     newlyaddedandmodifiedfiles = set()
     alsorestore = set()
     for chunk in chunks:
-        if (ishunk(chunk) and chunk.header.isnewfile() and chunk not in
-            originalchunks):
+        if (
+            ishunk(chunk)
+            and chunk.header.isnewfile()
+            and chunk not in originalchunks
+        ):
             newlyaddedandmodifiedfiles.add(chunk.header.filename())
-            alsorestore.update(set(chunk.header.files()) -
-                               {chunk.header.filename()})
+            alsorestore.update(
+                set(chunk.header.files()) - {chunk.header.filename()}
+            )
     return newlyaddedandmodifiedfiles, alsorestore
 
+
 def parsealiases(cmd):
     return cmd.split("|")
 
+
 def setupwrapcolorwrite(ui):
     # wrap ui.write so diff output can be labeled/colorized
     def wrapwrite(orig, *args, **kw):
@@ -248,29 +300,34 @@
             orig(chunk, label=label + l)
 
     oldwrite = ui.write
+
     def wrap(*args, **kwargs):
         return wrapwrite(oldwrite, *args, **kwargs)
+
     setattr(ui, 'write', wrap)
     return oldwrite
 
-def filterchunks(ui, originalhunks, usecurses, testfile, match,
-                 operation=None):
+
+def filterchunks(ui, originalhunks, usecurses, testfile, match, operation=None):
     try:
         if usecurses:
             if testfile:
                 recordfn = crecordmod.testdecorator(
-                    testfile, crecordmod.testchunkselector)
+                    testfile, crecordmod.testchunkselector
+                )
             else:
                 recordfn = crecordmod.chunkselector
 
-            return crecordmod.filterpatch(ui, originalhunks, recordfn,
-                                          operation)
+            return crecordmod.filterpatch(
+                ui, originalhunks, recordfn, operation
+            )
     except crecordmod.fallbackerror as e:
         ui.warn('%s\n' % e.message)
         ui.warn(_('falling back to text mode\n'))
 
     return patch.filterpatch(ui, originalhunks, match, operation)
 
+
 def recordfilter(ui, originalhunks, match, operation=None):
     """ Prompts the user to filter the originalhunks and return a list of
     selected hunks.
@@ -282,14 +339,17 @@
     testfile = ui.config('experimental', 'crecordtest')
     oldwrite = setupwrapcolorwrite(ui)
     try:
-        newchunks, newopts = filterchunks(ui, originalhunks, usecurses,
-                                          testfile, match, operation)
+        newchunks, newopts = filterchunks(
+            ui, originalhunks, usecurses, testfile, match, operation
+        )
     finally:
         ui.write = oldwrite
     return newchunks, newopts
 
-def dorecord(ui, repo, commitfunc, cmdsuggest, backupall,
-            filterfn, *pats, **opts):
+
+def dorecord(
+    ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts
+):
     opts = pycompat.byteskwargs(opts)
     if not ui.interactive():
         if cmdsuggest:
@@ -300,7 +360,7 @@
 
     # make sure username is set before going interactive
     if not opts.get('user'):
-        ui.username() # raise exception, username not provided
+        ui.username()  # raise exception, username not provided
 
     def recordfunc(ui, repo, message, match, opts):
         """This is generic record driver.
@@ -321,8 +381,12 @@
         wctx = repo[None]
         merge = len(wctx.parents()) > 1
         if merge:
-            raise error.Abort(_('cannot partially commit a merge '
-                               '(use "hg commit" instead)'))
+            raise error.Abort(
+                _(
+                    'cannot partially commit a merge '
+                    '(use "hg commit" instead)'
+                )
+            )
 
         def fail(f, msg):
             raise error.Abort('%s: %s' % (f, msg))
@@ -339,17 +403,20 @@
 
         with repo.ui.configoverride(overrides, b'record'):
             # subrepoutil.precommit() modifies the status
-            tmpstatus = scmutil.status(copymod.copy(status[0]),
-                                       copymod.copy(status[1]),
-                                       copymod.copy(status[2]),
-                                       copymod.copy(status[3]),
-                                       copymod.copy(status[4]),
-                                       copymod.copy(status[5]),
-                                       copymod.copy(status[6]))
+            tmpstatus = scmutil.status(
+                copymod.copy(status[0]),
+                copymod.copy(status[1]),
+                copymod.copy(status[2]),
+                copymod.copy(status[3]),
+                copymod.copy(status[4]),
+                copymod.copy(status[5]),
+                copymod.copy(status[6]),
+            )
 
             # Force allows -X subrepo to skip the subrepo.
             subs, commitsubs, newstate = subrepoutil.precommit(
-                repo.ui, wctx, tmpstatus, match, force=True)
+                repo.ui, wctx, tmpstatus, match, force=True
+            )
             for s in subs:
                 if s in commitsubs:
                     dirtyreason = wctx.sub(s).dirtyreason(True)
@@ -357,9 +424,13 @@
 
         if not force:
             repo.checkcommitpatterns(wctx, vdirs, match, status, fail)
-        diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True,
-                                         section='commands',
-                                         configprefix='commit.interactive.')
+        diffopts = patch.difffeatureopts(
+            ui,
+            opts=opts,
+            whitespace=True,
+            section='commands',
+            configprefix='commit.interactive.',
+        )
         diffopts.nodates = True
         diffopts.git = True
         diffopts.showfunc = True
@@ -379,8 +450,9 @@
         # version without the edit in the workdir. We also will need to restore
         # files that were the sources of renames so that the patch application
         # works.
-        newlyaddedandmodifiedfiles, alsorestore = newandmodified(chunks,
-                                                                 originalchunks)
+        newlyaddedandmodifiedfiles, alsorestore = newandmodified(
+            chunks, originalchunks
+        )
         contenders = set()
         for h in chunks:
             try:
@@ -401,8 +473,11 @@
         if backupall:
             tobackup = changed
         else:
-            tobackup = [f for f in newfiles if f in modified or f in
-                        newlyaddedandmodifiedfiles]
+            tobackup = [
+                f
+                for f in newfiles
+                if f in modified or f in newlyaddedandmodifiedfiles
+            ]
         backups = {}
         if tobackup:
             backupdir = repo.vfs.join('record-backups')
@@ -414,8 +489,9 @@
         try:
             # backup continues
             for f in tobackup:
-                fd, tmpname = pycompat.mkstemp(prefix=f.replace('/', '_') + '.',
-                                               dir=backupdir)
+                fd, tmpname = pycompat.mkstemp(
+                    prefix=f.replace('/', '_') + '.', dir=backupdir
+                )
                 os.close(fd)
                 ui.debug('backup %r as %r\n' % (f, tmpname))
                 util.copyfile(repo.wjoin(f), tmpname, copystat=True)
@@ -431,12 +507,14 @@
 
             # 2.5 optionally review / modify patch in text editor
             if opts.get('review', False):
-                patchtext = (crecordmod.diffhelptext
-                             + crecordmod.patchhelptext
-                             + fp.read())
-                reviewedpatch = ui.edit(patchtext, "",
-                                        action="diff",
-                                        repopath=repo.path)
+                patchtext = (
+                    crecordmod.diffhelptext
+                    + crecordmod.patchhelptext
+                    + fp.read()
+                )
+                reviewedpatch = ui.edit(
+                    patchtext, "", action="diff", repopath=repo.path
+                )
                 fp.truncate(0)
                 fp.write(reviewedpatch)
                 fp.seek(0)
@@ -446,8 +524,13 @@
             if backups:
                 # Equivalent to hg.revert
                 m = scmutil.matchfiles(repo, set(backups.keys()) | alsorestore)
-                mergemod.update(repo, repo.dirstate.p1(), branchmerge=False,
-                                force=True, matcher=m)
+                mergemod.update(
+                    repo,
+                    repo.dirstate.p1(),
+                    branchmerge=False,
+                    force=True,
+                    matcher=m,
+                )
 
             # 3b. (apply)
             if dopatch:
@@ -497,6 +580,7 @@
 
     return commit(ui, repo, recordinwlock, pats, opts)
 
+
 class dirnode(object):
     """
     Represent a directory in the user's working copy with information required for
@@ -592,11 +676,12 @@
         for st, fpath in self.iterfilepaths():
             yield st, fpath
 
-        #recurse on the subdirs
+        # recurse on the subdirs
         for dirobj in self.subdirs.values():
             for st, fpath in dirobj.tersewalk(terseargs):
                 yield st, fpath
 
+
 def tersedir(statuslist, terseargs):
     """
     Terse the status if all the files in a directory share the same status.
@@ -620,8 +705,15 @@
 
     # creating a dirnode object for the root of the repo
     rootobj = dirnode('')
-    pstatus = ('modified', 'added', 'deleted', 'clean', 'unknown',
-               'ignored', 'removed')
+    pstatus = (
+        'modified',
+        'added',
+        'deleted',
+        'clean',
+        'unknown',
+        'ignored',
+        'removed',
+    )
 
     tersedict = {}
     for attrname in pstatus:
@@ -646,12 +738,14 @@
 
     return tersedlist
 
+
 def _commentlines(raw):
     '''Surround lines with a comment char and a new line'''
     lines = raw.splitlines()
     commentedlines = ['# %s' % line for line in lines]
     return '\n'.join(commentedlines) + '\n'
 
+
 def _conflictsmsg(repo):
     mergestate = mergemod.mergestate.read(repo)
     if not mergestate.active():
@@ -661,18 +755,27 @@
     unresolvedlist = [f for f in mergestate.unresolved() if m(f)]
     if unresolvedlist:
         mergeliststr = '\n'.join(
-            ['    %s' % util.pathto(repo.root, encoding.getcwd(), path)
-             for path in sorted(unresolvedlist)])
-        msg = _('''Unresolved merge conflicts:
+            [
+                '    %s' % util.pathto(repo.root, encoding.getcwd(), path)
+                for path in sorted(unresolvedlist)
+            ]
+        )
+        msg = (
+            _(
+                '''Unresolved merge conflicts:
 
 %s
 
-To mark files as resolved:  hg resolve --mark FILE''') % mergeliststr
+To mark files as resolved:  hg resolve --mark FILE'''
+            )
+            % mergeliststr
+        )
     else:
         msg = _('No unresolved merge conflicts.')
 
     return _commentlines(msg)
 
+
 def morestatus(repo, fm):
     statetuple = statemod.getrepostate(repo)
     label = 'status.morestatus'
@@ -686,6 +789,7 @@
         if helpfulmsg:
             fm.plain('%s\n' % _commentlines(helpfulmsg), label=label)
 
+
 def findpossible(cmd, table, strict=False):
     """
     Return cmd -> (aliases, command table entry)
@@ -724,6 +828,7 @@
 
     return choice, allcmds
 
+
 def findcmd(cmd, table, strict=True):
     """Return (aliases, command table entry) for command string."""
     choice, allcmds = findpossible(cmd, table, strict)
@@ -740,6 +845,7 @@
 
     raise error.UnknownCommand(cmd, allcmds)
 
+
 def changebranch(ui, repo, revs, label):
     """ Change the branch name of given revs to label """
 
@@ -770,6 +876,7 @@
         # avoid import cycle mercurial.cmdutil -> mercurial.context ->
         # mercurial.subrepo -> mercurial.cmdutil
         from . import context
+
         for rev in revs:
             ctx = repo[rev]
             oldbranch = ctx.branch()
@@ -783,8 +890,10 @@
                 except error.ManifestLookupError:
                     return None
 
-            ui.debug("changing branch of '%s' from '%s' to '%s'\n"
-                     % (hex(ctx.node()), oldbranch, label))
+            ui.debug(
+                "changing branch of '%s' from '%s' to '%s'\n"
+                % (hex(ctx.node()), oldbranch, label)
+            )
             extra = ctx.extra()
             extra['branch_change'] = hex(ctx.node())
             # While changing branch of set of linear commits, make sure that
@@ -797,14 +906,17 @@
             if p2 in replacements:
                 p2 = replacements[p2][0]
 
-            mc = context.memctx(repo, (p1, p2),
-                                ctx.description(),
-                                ctx.files(),
-                                filectxfn,
-                                user=ctx.user(),
-                                date=ctx.date(),
-                                extra=extra,
-                                branch=label)
+            mc = context.memctx(
+                repo,
+                (p1, p2),
+                ctx.description(),
+                ctx.files(),
+                filectxfn,
+                user=ctx.user(),
+                date=ctx.date(),
+                extra=extra,
+                branch=label,
+            )
 
             newnode = repo.commitctx(mc)
             replacements[ctx.node()] = (newnode,)
@@ -822,10 +934,12 @@
                 # avoid import cycle mercurial.cmdutil -> mercurial.hg ->
                 # mercurial.cmdutil
                 from . import hg
+
                 hg.update(repo, newid[0], quietempty=True)
 
         ui.status(_("changed branch on %d changesets\n") % len(replacements))
 
+
 def findrepo(p):
     while not os.path.isdir(os.path.join(p, ".hg")):
         oldp, p = p, os.path.dirname(p)
@@ -834,6 +948,7 @@
 
     return p
 
+
 def bailifchanged(repo, merge=True, hint=None):
     """ enforce the precondition that working directory must be clean.
 
@@ -852,14 +967,16 @@
     for s in sorted(ctx.substate):
         ctx.sub(s).bailifchanged(hint=hint)
 
+
 def logmessage(ui, opts):
     """ get the log message according to -m and -l option """
     message = opts.get('message')
     logfile = opts.get('logfile')
 
     if message and logfile:
-        raise error.Abort(_('options --message and --logfile are mutually '
-                           'exclusive'))
+        raise error.Abort(
+            _('options --message and --logfile are mutually ' 'exclusive')
+        )
     if not message and logfile:
         try:
             if isstdiofilename(logfile):
@@ -867,10 +984,13 @@
             else:
                 message = '\n'.join(util.readfile(logfile).splitlines())
         except IOError as inst:
-            raise error.Abort(_("can't read commit message '%s': %s") %
-                             (logfile, encoding.strtolocal(inst.strerror)))
+            raise error.Abort(
+                _("can't read commit message '%s': %s")
+                % (logfile, encoding.strtolocal(inst.strerror))
+            )
     return message
 
+
 def mergeeditform(ctxorbool, baseformname):
     """return appropriate editform name (referencing a committemplate)
 
@@ -888,8 +1008,10 @@
 
     return baseformname + ".normal"
 
-def getcommiteditor(edit=False, finishdesc=None, extramsg=None,
-                    editform='', **opts):
+
+def getcommiteditor(
+    edit=False, finishdesc=None, extramsg=None, editform='', **opts
+):
     """get appropriate commit message editor according to '--edit' option
 
     'finishdesc' is a function to be called with edited commit message
@@ -910,15 +1032,15 @@
     they are specific for usage in MQ.
     """
     if edit or finishdesc or extramsg:
-        return lambda r, c, s: commitforceeditor(r, c, s,
-                                                 finishdesc=finishdesc,
-                                                 extramsg=extramsg,
-                                                 editform=editform)
+        return lambda r, c, s: commitforceeditor(
+            r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform
+        )
     elif editform:
         return lambda r, c, s: commiteditor(r, c, s, editform=editform)
     else:
         return commiteditor
 
+
 def _escapecommandtemplate(tmpl):
     parts = []
     for typ, start, end in templater.scantemplate(tmpl, raw=True):
@@ -928,6 +1050,7 @@
             parts.append(tmpl[start:end])
     return b''.join(parts)
 
+
 def rendercommandtemplate(ui, tmpl, props):
     r"""Expand a literal template 'tmpl' in a way suitable for command line
 
@@ -946,6 +1069,7 @@
     t = formatter.maketemplater(ui, _escapecommandtemplate(tmpl))
     return t.renderdefault(props)
 
+
 def rendertemplate(ctx, tmpl, props=None):
     """Expand a literal template 'tmpl' byte-string against one changeset
 
@@ -954,13 +1078,15 @@
     """
     repo = ctx.repo()
     tres = formatter.templateresources(repo.ui, repo)
-    t = formatter.maketemplater(repo.ui, tmpl, defaults=templatekw.keywords,
-                                resources=tres)
+    t = formatter.maketemplater(
+        repo.ui, tmpl, defaults=templatekw.keywords, resources=tres
+    )
     mapping = {'ctx': ctx}
     if props:
         mapping.update(props)
     return t.renderdefault(mapping)
 
+
 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
     r"""Convert old-style filename format string to template string
 
@@ -1018,17 +1144,20 @@
                 break
             newname.append(stringutil.escapestr(pat[i:n]))
             if n + 2 > end:
-                raise error.Abort(_("incomplete format spec in output "
-                                    "filename"))
-            c = pat[n + 1:n + 2]
+                raise error.Abort(
+                    _("incomplete format spec in output " "filename")
+                )
+            c = pat[n + 1 : n + 2]
             i = n + 2
             try:
                 newname.append(expander[c])
             except KeyError:
-                raise error.Abort(_("invalid format spec '%%%s' in output "
-                                    "filename") % c)
+                raise error.Abort(
+                    _("invalid format spec '%%%s' in output " "filename") % c
+                )
     return ''.join(newname)
 
+
 def makefilename(ctx, pat, **props):
     if not pat:
         return pat
@@ -1038,10 +1167,12 @@
     # disable the expansion.
     return rendertemplate(ctx, tmpl, pycompat.byteskwargs(props))
 
+
 def isstdiofilename(pat):
     """True if the given pat looks like a filename denoting stdin/stdout"""
     return not pat or pat == '-'
 
+
 class _unclosablefile(object):
     def __init__(self, fp):
         self._fp = fp
@@ -1061,6 +1192,7 @@
     def __exit__(self, exc_type, exc_value, exc_tb):
         pass
 
+
 def makefileobj(ctx, pat, mode='wb', **props):
     writable = mode not in ('r', 'rb')
 
@@ -1074,6 +1206,7 @@
     fn = makefilename(ctx, pat, **props)
     return open(fn, mode)
 
+
 def openstorage(repo, cmd, file_, opts, returnrevlog=False):
     """opens the changelog, manifest, a filelog or a given revlog"""
     cl = opts['changelog']
@@ -1088,8 +1221,10 @@
         if file_:
             msg = _('cannot specify filename with --changelog or --manifest')
         elif not repo:
-            msg = _('cannot specify --changelog or --manifest or --dir '
-                    'without a repository')
+            msg = _(
+                'cannot specify --changelog or --manifest or --dir '
+                'without a repository'
+            )
     if msg:
         raise error.Abort(msg)
 
@@ -1099,8 +1234,12 @@
             r = repo.unfiltered().changelog
         elif dir:
             if 'treemanifest' not in repo.requirements:
-                raise error.Abort(_("--dir can only be used on repos with "
-                                   "treemanifest enabled"))
+                raise error.Abort(
+                    _(
+                        "--dir can only be used on repos with "
+                        "treemanifest enabled"
+                    )
+                )
             if not dir.endswith('/'):
                 dir = dir + '/'
             dirlog = repo.manifestlog.getstorage(dir)
@@ -1131,10 +1270,12 @@
             raise error.CommandError(cmd, _('invalid arguments'))
         if not os.path.isfile(file_):
             raise error.Abort(_("revlog '%s' not found") % file_)
-        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
-                          file_[:-2] + ".i")
+        r = revlog.revlog(
+            vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + ".i"
+        )
     return r
 
+
 def openrevlog(repo, cmd, file_, opts):
     """Obtain a revlog backing storage of an item.
 
@@ -1147,6 +1288,7 @@
     """
     return openstorage(repo, cmd, file_, opts, returnrevlog=True)
 
+
 def copy(ui, repo, pats, opts, rename=False):
     # called with the repo lock held
     #
@@ -1159,6 +1301,7 @@
     wctx = repo[None]
 
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
+
     def walkpat(pat):
         srcs = []
         if after:
@@ -1174,8 +1317,13 @@
                 if exact and state == '?':
                     ui.warn(_('%s: not copying - file is not managed\n') % rel)
                 if exact and state == 'r':
-                    ui.warn(_('%s: not copying - file has been marked for'
-                              ' remove\n') % rel)
+                    ui.warn(
+                        _(
+                            '%s: not copying - file has been marked for'
+                            ' remove\n'
+                        )
+                        % rel
+                    )
                 continue
             # abs: hgsep
             # rel: ossep
@@ -1202,20 +1350,26 @@
         # check for collisions
         prevsrc = targets.get(abstarget)
         if prevsrc is not None:
-            ui.warn(_('%s: not overwriting - %s collides with %s\n') %
-                    (reltarget, repo.pathto(abssrc, cwd),
-                     repo.pathto(prevsrc, cwd)))
-            return True # report a failure
+            ui.warn(
+                _('%s: not overwriting - %s collides with %s\n')
+                % (
+                    reltarget,
+                    repo.pathto(abssrc, cwd),
+                    repo.pathto(prevsrc, cwd),
+                )
+            )
+            return True  # report a failure
 
         # check for overwrites
         exists = os.path.lexists(target)
         samefile = False
         if exists and abssrc != abstarget:
-            if (repo.dirstate.normalize(abssrc) ==
-                repo.dirstate.normalize(abstarget)):
+            if repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(
+                abstarget
+            ):
                 if not rename:
                     ui.warn(_("%s: can't copy - same file\n") % reltarget)
-                    return True # report a failure
+                    return True  # report a failure
                 exists = False
                 samefile = True
 
@@ -1228,11 +1382,21 @@
                     else:
                         flags = '--force'
                     if rename:
-                        hint = _("('hg rename %s' to replace the file by "
-                                 'recording a rename)\n') % flags
+                        hint = (
+                            _(
+                                "('hg rename %s' to replace the file by "
+                                'recording a rename)\n'
+                            )
+                            % flags
+                        )
                     else:
-                        hint = _("('hg copy %s' to replace the file by "
-                                 'recording a copy)\n') % flags
+                        hint = (
+                            _(
+                                "('hg copy %s' to replace the file by "
+                                'recording a copy)\n'
+                            )
+                            % flags
+                        )
                 else:
                     msg = _('%s: not overwriting - file exists\n')
                     if rename:
@@ -1241,17 +1405,21 @@
                         hint = _("('hg copy --after' to record the copy)\n")
                 ui.warn(msg % reltarget)
                 ui.warn(hint)
-                return True # report a failure
+                return True  # report a failure
 
         if after:
             if not exists:
                 if rename:
-                    ui.warn(_('%s: not recording move - %s does not exist\n') %
-                            (relsrc, reltarget))
+                    ui.warn(
+                        _('%s: not recording move - %s does not exist\n')
+                        % (relsrc, reltarget)
+                    )
                 else:
-                    ui.warn(_('%s: not recording copy - %s does not exist\n') %
-                            (relsrc, reltarget))
-                return True # report a failure
+                    ui.warn(
+                        _('%s: not recording copy - %s does not exist\n')
+                        % (relsrc, reltarget)
+                    )
+                return True  # report a failure
         elif not dryrun:
             try:
                 if exists:
@@ -1273,9 +1441,11 @@
                     ui.warn(_('%s: deleted in working directory\n') % relsrc)
                     srcexists = False
                 else:
-                    ui.warn(_('%s: cannot copy - %s\n') %
-                            (relsrc, encoding.strtolocal(inst.strerror)))
-                    return True # report a failure
+                    ui.warn(
+                        _('%s: cannot copy - %s\n')
+                        % (relsrc, encoding.strtolocal(inst.strerror))
+                    )
+                    return True  # report a failure
 
         if ui.verbose or not exact:
             if rename:
@@ -1286,8 +1456,9 @@
         targets[abstarget] = abssrc
 
         # fix up dirstate
-        scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
-                             dryrun=dryrun, cwd=cwd)
+        scmutil.dirstatecopy(
+            ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd
+        )
         if rename and not dryrun:
             if not after and srcexists and not samefile:
                 rmdir = repo.ui.configbool('experimental', 'removeemptydirs')
@@ -1310,8 +1481,9 @@
                 striplen += len(pycompat.ossep)
             res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
         elif destdirexists:
-            res = lambda p: os.path.join(dest,
-                                         os.path.basename(util.localpath(p)))
+            res = lambda p: os.path.join(
+                dest, os.path.basename(util.localpath(p))
+            )
         else:
             res = lambda p: dest
         return res
@@ -1323,8 +1495,9 @@
     def targetpathafterfn(pat, dest, srcs):
         if matchmod.patkind(pat):
             # a mercurial pattern
-            res = lambda p: os.path.join(dest,
-                                         os.path.basename(util.localpath(p)))
+            res = lambda p: os.path.join(
+                dest, os.path.basename(util.localpath(p))
+            )
         else:
             abspfx = pathutil.canonpath(repo.root, cwd, pat)
             if len(abspfx) < len(srcs[0][0]):
@@ -1349,13 +1522,13 @@
                         striplen1 += len(pycompat.ossep)
                     if evalpath(striplen1) > score:
                         striplen = striplen1
-                res = lambda p: os.path.join(dest,
-                                             util.localpath(p)[striplen:])
+                res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
             else:
                 # a file
                 if destdirexists:
-                    res = lambda p: os.path.join(dest,
-                                        os.path.basename(util.localpath(p)))
+                    res = lambda p: os.path.join(
+                        dest, os.path.basename(util.localpath(p))
+                    )
                 else:
                     res = lambda p: dest
         return res
@@ -1369,8 +1542,12 @@
     destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
     if not destdirexists:
         if len(pats) > 1 or matchmod.patkind(pats[0]):
-            raise error.Abort(_('with multiple sources, destination must be an '
-                               'existing directory'))
+            raise error.Abort(
+                _(
+                    'with multiple sources, destination must be an '
+                    'existing directory'
+                )
+            )
         if util.endswithsep(dest):
             raise error.Abort(_('destination %s is not a directory') % dest)
 
@@ -1394,10 +1571,11 @@
 
     return errors != 0
 
+
 ## facility to let extensions process additional data into an import patch
 # list of identifiers to be executed in order
 extrapreimport = []  # run before commit
-extrapostimport = [] # run after commit
+extrapostimport = []  # run after commit
 # mapping from identifier to actual import function
 #
 # 'preimport' are run before the commit is made and are provided the following
@@ -1415,6 +1593,7 @@
 # - ctx: the changectx created by import.
 extrapostimportmap = {}
 
+
 def tryimportone(ui, repo, patchdata, parents, opts, msgs, updatefunc):
     """Utility function used by commands.import to import a single patch
 
@@ -1489,8 +1668,12 @@
         except error.RepoError:
             p1, p2 = parents
         if p2.node() == nullid:
-            ui.warn(_("warning: import the patch as a normal revision\n"
-                      "(use --exact to import the patch as a merge)\n"))
+            ui.warn(
+                _(
+                    "warning: import the patch as a normal revision\n"
+                    "(use --exact to import the patch as a merge)\n"
+                )
+            )
     else:
         p1, p2 = parents
 
@@ -1507,8 +1690,16 @@
         partial = opts.get('partial', False)
         files = set()
         try:
-            patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix,
-                        files=files, eolmode=None, similarity=sim / 100.0)
+            patch.patch(
+                ui,
+                repo,
+                tmpname,
+                strip=strip,
+                prefix=prefix,
+                files=files,
+                eolmode=None,
+                similarity=sim / 100.0,
+            )
         except error.PatchError as e:
             if not partial:
                 raise error.Abort(pycompat.bytestr(e))
@@ -1531,8 +1722,9 @@
             if opts.get('exact'):
                 editor = None
             else:
-                editor = getcommiteditor(editform=editform,
-                                         **pycompat.strkwargs(opts))
+                editor = getcommiteditor(
+                    editform=editform, **pycompat.strkwargs(opts)
+                )
             extra = {}
             for idfunc in extrapreimport:
                 extrapreimportmap[idfunc](repo, patchdata, extra, opts)
@@ -1540,9 +1732,9 @@
             if partial:
                 overrides[('ui', 'allowemptycommit')] = True
             with repo.ui.configoverride(overrides, 'import'):
-                n = repo.commit(message, user,
-                                date, match=m,
-                                editor=editor, extra=extra)
+                n = repo.commit(
+                    message, user, date, match=m, editor=editor, extra=extra
+                )
                 for idfunc in extrapostimport:
                     extrapostimportmap[idfunc](repo[n])
     else:
@@ -1554,22 +1746,34 @@
         try:
             files = set()
             try:
-                patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix,
-                                files, eolmode=None)
+                patch.patchrepo(
+                    ui,
+                    repo,
+                    p1,
+                    store,
+                    tmpname,
+                    strip,
+                    prefix,
+                    files,
+                    eolmode=None,
+                )
             except error.PatchError as e:
                 raise error.Abort(stringutil.forcebytestr(e))
             if opts.get('exact'):
                 editor = None
             else:
                 editor = getcommiteditor(editform='import.bypass')
-            memctx = context.memctx(repo, (p1.node(), p2.node()),
-                                    message,
-                                    files=files,
-                                    filectxfn=store,
-                                    user=user,
-                                    date=date,
-                                    branch=branch,
-                                    editor=editor)
+            memctx = context.memctx(
+                repo,
+                (p1.node(), p2.node()),
+                message,
+                files=files,
+                filectxfn=store,
+                user=user,
+                date=date,
+                branch=branch,
+                editor=editor,
+            )
             n = memctx.commit()
         finally:
             store.close()
@@ -1585,6 +1789,7 @@
         msg = _('created %s') % short(n)
     return msg, n, rejects
 
+
 # facility to let extensions include additional data in an exported patch
 # list of identifiers to be executed in order
 extraexport = []
@@ -1593,6 +1798,7 @@
 # it is given two arguments (sequencenumber, changectx)
 extraexportmap = {}
 
+
 def _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts):
     node = scmutil.binnode(ctx)
     parents = [p.node() for p in ctx.parents() if p]
@@ -1610,8 +1816,9 @@
     fm.write('user', '# User %s\n', ctx.user())
     fm.plain('# Date %d %d\n' % ctx.date())
     fm.write('date', '#      %s\n', fm.formatdate(ctx.date()))
-    fm.condwrite(branch and branch != 'default',
-                 'branch', '# Branch %s\n', branch)
+    fm.condwrite(
+        branch and branch != 'default', 'branch', '# Branch %s\n', branch
+    )
     fm.write('node', '# Node ID %s\n', hex(node))
     fm.plain('# Parent  %s\n' % hex(prev))
     if len(parents) > 1:
@@ -1636,6 +1843,7 @@
         # TODO: make it structured?
         fm.data(diff=b''.join(chunkiter))
 
+
 def _exportfile(repo, revs, fm, dest, switch_parent, diffopts, match):
     """Export changesets to stdout or a single file"""
     for seqno, rev in enumerate(revs, 1):
@@ -1645,8 +1853,10 @@
         fm.startitem()
         _exportsingle(repo, ctx, fm, match, switch_parent, seqno, diffopts)
 
-def _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, diffopts,
-                      match):
+
+def _exportfntemplate(
+    repo, revs, basefm, fntemplate, switch_parent, diffopts, match
+):
     """Export changesets to possibly multiple files"""
     total = len(revs)
     revwidth = max(len(str(rev)) for rev in revs)
@@ -1654,8 +1864,9 @@
 
     for seqno, rev in enumerate(revs, 1):
         ctx = repo[rev]
-        dest = makefilename(ctx, fntemplate,
-                            total=total, seqno=seqno, revwidth=revwidth)
+        dest = makefilename(
+            ctx, fntemplate, total=total, seqno=seqno, revwidth=revwidth
+        )
         filemap.setdefault(dest, []).append((seqno, rev))
 
     for dest in filemap:
@@ -1664,8 +1875,10 @@
             for seqno, rev in filemap[dest]:
                 fm.startitem()
                 ctx = repo[rev]
-                _exportsingle(repo, ctx, fm, match, switch_parent, seqno,
-                              diffopts)
+                _exportsingle(
+                    repo, ctx, fm, match, switch_parent, seqno, diffopts
+                )
+
 
 def _prefetchchangedfiles(repo, revs, match):
     allfiles = set()
@@ -1675,8 +1888,16 @@
                 allfiles.add(file)
     scmutil.prefetchfiles(repo, revs, scmutil.matchfiles(repo, allfiles))
 
-def export(repo, revs, basefm, fntemplate='hg-%h.patch', switch_parent=False,
-           opts=None, match=None):
+
+def export(
+    repo,
+    revs,
+    basefm,
+    fntemplate='hg-%h.patch',
+    switch_parent=False,
+    opts=None,
+    match=None,
+):
     '''export changesets as hg patches
 
     Args:
@@ -1704,8 +1925,10 @@
     if not fntemplate:
         _exportfile(repo, revs, basefm, '<unnamed>', switch_parent, opts, match)
     else:
-        _exportfntemplate(repo, revs, basefm, fntemplate, switch_parent, opts,
-                          match)
+        _exportfntemplate(
+            repo, revs, basefm, fntemplate, switch_parent, opts, match
+        )
+
 
 def exportfile(repo, revs, fp, switch_parent=False, opts=None, match=None):
     """Export changesets to the given file stream"""
@@ -1715,6 +1938,7 @@
     with formatter.formatter(repo.ui, fp, 'export', {}) as fm:
         _exportfile(repo, revs, fm, dest, switch_parent, opts, match)
 
+
 def showmarker(fm, marker, index=None):
     """utility function to display obsolescence marker in a readable way
 
@@ -1723,13 +1947,17 @@
         fm.write('index', '%i ', index)
     fm.write('prednode', '%s ', hex(marker.prednode()))
     succs = marker.succnodes()
-    fm.condwrite(succs, 'succnodes', '%s ',
-                 fm.formatlist(map(hex, succs), name='node'))
+    fm.condwrite(
+        succs, 'succnodes', '%s ', fm.formatlist(map(hex, succs), name='node')
+    )
     fm.write('flag', '%X ', marker.flags())
     parents = marker.parentnodes()
     if parents is not None:
-        fm.write('parentnodes', '{%s} ',
-                 fm.formatlist(map(hex, parents), name='node', sep=', '))
+        fm.write(
+            'parentnodes',
+            '{%s} ',
+            fm.formatlist(map(hex, parents), name='node', sep=', '),
+        )
     fm.write('date', '(%s) ', fm.formatdate(marker.date()))
     meta = marker.metadata().copy()
     meta.pop('date', None)
@@ -1737,6 +1965,7 @@
     fm.write('metadata', '{%s}', fm.formatdict(smeta, fmt='%r: %r', sep=', '))
     fm.plain('\n')
 
+
 def finddate(ui, repo, date):
     """Find the tipmost changeset that matches the given date spec"""
 
@@ -1752,18 +1981,22 @@
     for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
         rev = ctx.rev()
         if rev in results:
-            ui.status(_("found revision %s from %s\n") %
-                      (rev, dateutil.datestr(results[rev])))
+            ui.status(
+                _("found revision %s from %s\n")
+                % (rev, dateutil.datestr(results[rev]))
+            )
             return '%d' % rev
 
     raise error.Abort(_("revision matching date not found"))
 
+
 def increasingwindows(windowsize=8, sizelimit=512):
     while True:
         yield windowsize
         if windowsize < sizelimit:
             windowsize *= 2
 
+
 def _walkrevs(repo, opts):
     # Default --rev value depends on --follow but --follow behavior
     # depends on revisions resolved from --rev...
@@ -1779,9 +2012,11 @@
         revs.reverse()
     return revs
 
+
 class FileWalkError(Exception):
     pass
 
+
 def walkfilerevs(repo, match, follow, revs, fncache):
     '''Walks the file history for the matched files.
 
@@ -1793,6 +2028,7 @@
     wanted = set()
     copies = []
     minrev, maxrev = min(revs), max(revs)
+
     def filerevs(filelog, last):
         """
         Only files, no patterns.  Check the history of each file.
@@ -1817,17 +2053,21 @@
                 if p != nullrev:
                     parentlinkrevs.append(filelog.linkrev(p))
             n = filelog.node(j)
-            revs.append((linkrev, parentlinkrevs,
-                         follow and filelog.renamed(n)))
+            revs.append(
+                (linkrev, parentlinkrevs, follow and filelog.renamed(n))
+            )
 
         return reversed(revs)
+
     def iterfiles():
         pctx = repo['.']
         for filename in match.files():
             if follow:
                 if filename not in pctx:
-                    raise error.Abort(_('cannot follow file not in parent '
-                                       'revision: "%s"') % filename)
+                    raise error.Abort(
+                        _('cannot follow file not in parent revision: "%s"')
+                        % filename
+                    )
                 yield filename, pctx[filename].filenode()
             else:
                 yield filename, None
@@ -1842,7 +2082,8 @@
                 # try to find matching entries on the slow path.
                 if follow:
                     raise error.Abort(
-                        _('cannot follow nonexistent file: "%s"') % file_)
+                        _('cannot follow nonexistent file: "%s"') % file_
+                    )
                 raise FileWalkError("Cannot walk via filelog")
             else:
                 continue
@@ -1879,6 +2120,7 @@
 
     return wanted
 
+
 class _followfilter(object):
     def __init__(self, repo, onlyfirst=False):
         self.repo = repo
@@ -1891,8 +2133,9 @@
             if self.onlyfirst:
                 return self.repo.changelog.parentrevs(rev)[0:1]
             else:
-                return filter(lambda x: x != nullrev,
-                              self.repo.changelog.parentrevs(rev))
+                return filter(
+                    lambda x: x != nullrev, self.repo.changelog.parentrevs(rev)
+                )
 
         if self.startrev == nullrev:
             self.startrev = rev
@@ -1917,6 +2160,7 @@
 
         return False
 
+
 def walkchangerevs(repo, match, opts, prepare):
     '''Iterate over files and the revs in which they changed.
 
@@ -1972,8 +2216,9 @@
         # changed files
 
         if follow:
-            raise error.Abort(_('can only follow copies/renames for explicit '
-                               'filenames'))
+            raise error.Abort(
+                _('can only follow copies/renames for explicit filenames')
+            )
 
         # The slow path checks files modified in every changeset.
         # This is really slow on large repos, so compute the set lazily.
@@ -2023,9 +2268,12 @@
     def iterate():
         if follow and match.always():
             ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
+
             def want(rev):
                 return ff.match(rev) and rev in wanted
+
         else:
+
             def want(rev):
                 return rev in wanted
 
@@ -2044,6 +2292,7 @@
                 fns = fncache.get(rev)
                 ctx = change(rev)
                 if not fns:
+
                     def fns_generator():
                         if allfiles:
                             fiter = iter(ctx)
@@ -2052,6 +2301,7 @@
                         for f in fiter:
                             if match(f):
                                 yield f
+
                     fns = fns_generator()
                 prepare(ctx, fns)
             for rev in nrevs:
@@ -2062,6 +2312,7 @@
 
     return iterate()
 
+
 def add(ui, repo, match, prefix, uipathfn, explicitonly, **opts):
     bad = []
 
@@ -2078,16 +2329,24 @@
     dirstate = repo.dirstate
     # We don't want to just call wctx.walk here, since it would return a lot of
     # clean files, which we aren't interested in, and doing so takes time.
-    for f in sorted(dirstate.walk(badmatch, subrepos=sorted(wctx.substate),
-                                  unknown=True, ignored=False, full=False)):
+    for f in sorted(
+        dirstate.walk(
+            badmatch,
+            subrepos=sorted(wctx.substate),
+            unknown=True,
+            ignored=False,
+            full=False,
+        )
+    ):
         exact = match.exact(f)
         if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
             if cca:
                 cca(f)
             names.append(f)
             if ui.verbose or not exact:
-                ui.status(_('adding %s\n') % uipathfn(f),
-                          label='ui.addremove.added')
+                ui.status(
+                    _('adding %s\n') % uipathfn(f), label='ui.addremove.added'
+                )
 
     for subpath in sorted(wctx.substate):
         sub = wctx.sub(subpath)
@@ -2096,20 +2355,24 @@
             subprefix = repo.wvfs.reljoin(prefix, subpath)
             subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
             if opts.get(r'subrepos'):
-                bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, False,
-                                   **opts))
+                bad.extend(
+                    sub.add(ui, submatch, subprefix, subuipathfn, False, **opts)
+                )
             else:
-                bad.extend(sub.add(ui, submatch, subprefix, subuipathfn, True,
-                                   **opts))
+                bad.extend(
+                    sub.add(ui, submatch, subprefix, subuipathfn, True, **opts)
+                )
         except error.LookupError:
-            ui.status(_("skipping missing subrepository: %s\n")
-                           % uipathfn(subpath))
+            ui.status(
+                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+            )
 
     if not opts.get(r'dry_run'):
         rejected = wctx.add(names, prefix)
         bad.extend(f for f in rejected if f in match.files())
     return bad
 
+
 def addwebdirpath(repo, serverpath, webconf):
     webconf[serverpath] = repo.root
     repo.ui.debug('adding %s = %s\n' % (serverpath, repo.root))
@@ -2119,8 +2382,10 @@
         for subpath in ctx.substate:
             ctx.sub(subpath).addwebdirpath(serverpath, webconf)
 
-def forget(ui, repo, match, prefix, uipathfn, explicitonly, dryrun,
-           interactive):
+
+def forget(
+    ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
+):
     if dryrun and interactive:
         raise error.Abort(_("cannot specify both --dry-run and --interactive"))
     bad = []
@@ -2139,14 +2404,19 @@
         subprefix = repo.wvfs.reljoin(prefix, subpath)
         subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
         try:
-            subbad, subforgot = sub.forget(submatch, subprefix, subuipathfn,
-                                           dryrun=dryrun,
-                                           interactive=interactive)
+            subbad, subforgot = sub.forget(
+                submatch,
+                subprefix,
+                subuipathfn,
+                dryrun=dryrun,
+                interactive=interactive,
+            )
             bad.extend([subpath + '/' + f for f in subbad])
             forgot.extend([subpath + '/' + f for f in subforgot])
         except error.LookupError:
-            ui.status(_("skipping missing subrepository: %s\n")
-                           % uipathfn(subpath))
+            ui.status(
+                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+            )
 
     if not explicitonly:
         for f in match.files():
@@ -2159,42 +2429,48 @@
                         # purely from data cached by the status walk above.
                         if repo.dirstate.normalize(f) in repo.dirstate:
                             continue
-                        ui.warn(_('not removing %s: '
-                                  'file is already untracked\n')
-                                % uipathfn(f))
+                        ui.warn(
+                            _('not removing %s: file is already untracked\n')
+                            % uipathfn(f)
+                        )
                     bad.append(f)
 
     if interactive:
-        responses = _('[Ynsa?]'
-                      '$$ &Yes, forget this file'
-                      '$$ &No, skip this file'
-                      '$$ &Skip remaining files'
-                      '$$ Include &all remaining files'
-                      '$$ &? (display help)')
+        responses = _(
+            '[Ynsa?]'
+            '$$ &Yes, forget this file'
+            '$$ &No, skip this file'
+            '$$ &Skip remaining files'
+            '$$ Include &all remaining files'
+            '$$ &? (display help)'
+        )
         for filename in forget[:]:
-            r = ui.promptchoice(_('forget %s %s') %
-                                (uipathfn(filename), responses))
-            if r == 4: # ?
+            r = ui.promptchoice(
+                _('forget %s %s') % (uipathfn(filename), responses)
+            )
+            if r == 4:  # ?
                 while r == 4:
                     for c, t in ui.extractchoices(responses)[1]:
                         ui.write('%s - %s\n' % (c, encoding.lower(t)))
-                    r = ui.promptchoice(_('forget %s %s') %
-                                        (uipathfn(filename), responses))
-            if r == 0: # yes
+                    r = ui.promptchoice(
+                        _('forget %s %s') % (uipathfn(filename), responses)
+                    )
+            if r == 0:  # yes
                 continue
-            elif r == 1: # no
+            elif r == 1:  # no
                 forget.remove(filename)
-            elif r == 2: # Skip
+            elif r == 2:  # Skip
                 fnindex = forget.index(filename)
                 del forget[fnindex:]
                 break
-            elif r == 3: # All
+            elif r == 3:  # All
                 break
 
     for f in forget:
         if ui.verbose or not match.exact(f) or interactive:
-            ui.status(_('removing %s\n') % uipathfn(f),
-                      label='ui.addremove.removed')
+            ui.status(
+                _('removing %s\n') % uipathfn(f), label='ui.addremove.removed'
+            )
 
     if not dryrun:
         rejected = wctx.forget(forget, prefix)
@@ -2202,6 +2478,7 @@
         forgot.extend(f for f in forget if f not in rejected)
     return bad, forgot
 
+
 def files(ui, ctx, m, uipathfn, fm, fmt, subrepos):
     ret = 1
 
@@ -2219,21 +2496,27 @@
     for subpath in sorted(ctx.substate):
         submatch = matchmod.subdirmatcher(subpath, m)
         subuipathfn = scmutil.subdiruipathfn(subpath, uipathfn)
-        if (subrepos or m.exact(subpath) or any(submatch.files())):
+        if subrepos or m.exact(subpath) or any(submatch.files()):
             sub = ctx.sub(subpath)
             try:
                 recurse = m.exact(subpath) or subrepos
-                if sub.printfiles(ui, submatch, subuipathfn, fm, fmt,
-                                  recurse) == 0:
+                if (
+                    sub.printfiles(ui, submatch, subuipathfn, fm, fmt, recurse)
+                    == 0
+                ):
                     ret = 0
             except error.LookupError:
-                ui.status(_("skipping missing subrepository: %s\n")
-                               % uipathfn(subpath))
+                ui.status(
+                    _("skipping missing subrepository: %s\n")
+                    % uipathfn(subpath)
+                )
 
     return ret
 
-def remove(ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun,
-           warnings=None):
+
+def remove(
+    ui, repo, m, prefix, uipathfn, after, force, subrepos, dryrun, warnings=None
+):
     ret = 0
     s = repo.status(match=m, clean=True)
     modified, added, deleted, clean = s[0], s[1], s[3], s[6]
@@ -2247,8 +2530,9 @@
         warn = False
 
     subs = sorted(wctx.substate)
-    progress = ui.makeprogress(_('searching'), total=len(subs),
-                               unit=_('subrepos'))
+    progress = ui.makeprogress(
+        _('searching'), total=len(subs), unit=_('subrepos')
+    )
     for subpath in subs:
         submatch = matchmod.subdirmatcher(subpath, m)
         subprefix = repo.wvfs.reljoin(prefix, subpath)
@@ -2257,20 +2541,30 @@
             progress.increment()
             sub = wctx.sub(subpath)
             try:
-                if sub.removefiles(submatch, subprefix, subuipathfn, after,
-                                   force, subrepos, dryrun, warnings):
+                if sub.removefiles(
+                    submatch,
+                    subprefix,
+                    subuipathfn,
+                    after,
+                    force,
+                    subrepos,
+                    dryrun,
+                    warnings,
+                ):
                     ret = 1
             except error.LookupError:
-                warnings.append(_("skipping missing subrepository: %s\n")
-                               % uipathfn(subpath))
+                warnings.append(
+                    _("skipping missing subrepository: %s\n")
+                    % uipathfn(subpath)
+                )
     progress.complete()
 
     # warn about failure to delete explicit files/dirs
     deleteddirs = util.dirs(deleted)
     files = m.files()
-    progress = ui.makeprogress(_('deleting'), total=len(files),
-                               unit=_('files'))
+    progress = ui.makeprogress(_('deleting'), total=len(files), unit=_('files'))
     for f in files:
+
         def insubrepo():
             for subpath in wctx.substate:
                 if f.startswith(subpath + '/'):
@@ -2279,17 +2573,18 @@
 
         progress.increment()
         isdir = f in deleteddirs or wctx.hasdir(f)
-        if (f in repo.dirstate or isdir or f == '.'
-            or insubrepo() or f in subs):
+        if f in repo.dirstate or isdir or f == '.' or insubrepo() or f in subs:
             continue
 
         if repo.wvfs.exists(f):
             if repo.wvfs.isdir(f):
-                warnings.append(_('not removing %s: no tracked files\n')
-                        % uipathfn(f))
+                warnings.append(
+                    _('not removing %s: no tracked files\n') % uipathfn(f)
+                )
             else:
-                warnings.append(_('not removing %s: file is untracked\n')
-                        % uipathfn(f))
+                warnings.append(
+                    _('not removing %s: file is untracked\n') % uipathfn(f)
+                )
         # missing files will generate a warning elsewhere
         ret = 1
     progress.complete()
@@ -2299,40 +2594,52 @@
     elif after:
         list = deleted
         remaining = modified + added + clean
-        progress = ui.makeprogress(_('skipping'), total=len(remaining),
-                                   unit=_('files'))
+        progress = ui.makeprogress(
+            _('skipping'), total=len(remaining), unit=_('files')
+        )
         for f in remaining:
             progress.increment()
             if ui.verbose or (f in files):
-                warnings.append(_('not removing %s: file still exists\n')
-                                % uipathfn(f))
+                warnings.append(
+                    _('not removing %s: file still exists\n') % uipathfn(f)
+                )
             ret = 1
         progress.complete()
     else:
         list = deleted + clean
-        progress = ui.makeprogress(_('skipping'),
-                                   total=(len(modified) + len(added)),
-                                   unit=_('files'))
+        progress = ui.makeprogress(
+            _('skipping'), total=(len(modified) + len(added)), unit=_('files')
+        )
         for f in modified:
             progress.increment()
-            warnings.append(_('not removing %s: file is modified (use -f'
-                      ' to force removal)\n') % uipathfn(f))
+            warnings.append(
+                _(
+                    'not removing %s: file is modified (use -f'
+                    ' to force removal)\n'
+                )
+                % uipathfn(f)
+            )
             ret = 1
         for f in added:
             progress.increment()
-            warnings.append(_("not removing %s: file has been marked for add"
-                      " (use 'hg forget' to undo add)\n") % uipathfn(f))
+            warnings.append(
+                _(
+                    "not removing %s: file has been marked for add"
+                    " (use 'hg forget' to undo add)\n"
+                )
+                % uipathfn(f)
+            )
             ret = 1
         progress.complete()
 
     list = sorted(list)
-    progress = ui.makeprogress(_('deleting'), total=len(list),
-                               unit=_('files'))
+    progress = ui.makeprogress(_('deleting'), total=len(list), unit=_('files'))
     for f in list:
         if ui.verbose or not m.exact(f):
             progress.increment()
-            ui.status(_('removing %s\n') % uipathfn(f),
-                      label='ui.addremove.removed')
+            ui.status(
+                _('removing %s\n') % uipathfn(f), label='ui.addremove.removed'
+            )
     progress.complete()
 
     if not dryrun:
@@ -2340,9 +2647,10 @@
             if not after:
                 for f in list:
                     if f in added:
-                        continue # we never unlink added files on remove
-                    rmdir = repo.ui.configbool('experimental',
-                                               'removeemptydirs')
+                        continue  # we never unlink added files on remove
+                    rmdir = repo.ui.configbool(
+                        'experimental', 'removeemptydirs'
+                    )
                     repo.wvfs.unlinkpath(f, ignoremissing=True, rmdir=rmdir)
             repo[None].forget(list)
 
@@ -2352,9 +2660,11 @@
 
     return ret
 
+
 def _catfmtneedsdata(fm):
     return not fm.datahint() or 'data' in fm.datahint()
 
+
 def _updatecatformatter(fm, ctx, matcher, path, decode):
     """Hook for adding data to the formatter used by ``hg cat``.
 
@@ -2373,6 +2683,7 @@
     fm.write('data', '%s', data)
     fm.data(path=path)
 
+
 def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
     err = 1
     opts = pycompat.byteskwargs(opts)
@@ -2380,8 +2691,9 @@
     def write(path):
         filename = None
         if fntemplate:
-            filename = makefilename(ctx, fntemplate,
-                                    pathname=os.path.join(prefix, path))
+            filename = makefilename(
+                ctx, fntemplate, pathname=os.path.join(prefix, path)
+            )
             # attempt to create the directory if it does not already exist
             try:
                 os.makedirs(os.path.dirname(filename))
@@ -2418,15 +2730,22 @@
         try:
             submatch = matchmod.subdirmatcher(subpath, matcher)
             subprefix = os.path.join(prefix, subpath)
-            if not sub.cat(submatch, basefm, fntemplate, subprefix,
-                           **pycompat.strkwargs(opts)):
+            if not sub.cat(
+                submatch,
+                basefm,
+                fntemplate,
+                subprefix,
+                **pycompat.strkwargs(opts)
+            ):
                 err = 0
         except error.RepoLookupError:
-            ui.status(_("skipping missing subrepository: %s\n") %
-                      uipathfn(subpath))
+            ui.status(
+                _("skipping missing subrepository: %s\n") % uipathfn(subpath)
+            )
 
     return err
 
+
 def commit(ui, repo, commitfunc, pats, opts):
     '''commit the specified files or all outstanding changes'''
     date = opts.get('date')
@@ -2446,22 +2765,24 @@
             uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
             if scmutil.addremove(repo, matcher, "", uipathfn, opts) != 0:
                 raise error.Abort(
-                    _("failed to mark all new/missing files as added/removed"))
+                    _("failed to mark all new/missing files as added/removed")
+                )
 
         return commitfunc(ui, repo, message, matcher, opts)
 
+
 def samefile(f, ctx1, ctx2):
     if f in ctx1.manifest():
         a = ctx1.filectx(f)
         if f in ctx2.manifest():
             b = ctx2.filectx(f)
-            return (not a.cmp(b)
-                    and a.flags() == b.flags())
+            return not a.cmp(b) and a.flags() == b.flags()
         else:
             return False
     else:
         return f not in ctx2.manifest()
 
+
 def amend(ui, repo, old, extra, pats, opts):
     # avoid cycle context -> subrepo -> cmdutil
     from . import context
@@ -2469,7 +2790,7 @@
     # amend will reuse the existing user if not specified, but the obsolete
     # marker creation requires that the current user's name is specified.
     if obsolete.isenabled(repo, obsolete.createmarkersopt):
-        ui.username() # raise exception if username not set
+        ui.username()  # raise exception if username not set
 
     ui.note(_('amending changeset %s\n') % old)
     base = old.p1()
@@ -2514,17 +2835,20 @@
         matcher = scmutil.match(wctx, pats, opts)
         relative = scmutil.anypats(pats, opts)
         uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
-        if (opts.get('addremove')
-            and scmutil.addremove(repo, matcher, "", uipathfn, opts)):
+        if opts.get('addremove') and scmutil.addremove(
+            repo, matcher, "", uipathfn, opts
+        ):
             raise error.Abort(
-                _("failed to mark all new/missing files as added/removed"))
+                _("failed to mark all new/missing files as added/removed")
+            )
 
         # Check subrepos. This depends on in-place wctx._status update in
         # subrepo.precommit(). To minimize the risk of this hack, we do
         # nothing if .hgsub does not exist.
         if '.hgsub' in wctx or '.hgsub' in old:
             subs, commitsubs, newsubstate = subrepoutil.precommit(
-                ui, wctx, wctx._status, matcher)
+                ui, wctx, wctx._status, matcher
+            )
             # amend should abort if commitsubrepos is enabled
             assert not commitsubs
             if subs:
@@ -2535,7 +2859,7 @@
 
         filestoamend = set(f for f in wctx.files() if matcher(f))
 
-        changes = (len(filestoamend) > 0)
+        changes = len(filestoamend) > 0
         if changes:
             # Recompute copies (avoid recording a -> b -> a)
             copied = copies.pathcopies(base, wctx, matcher)
@@ -2549,8 +2873,11 @@
             # was removed, it's no longer relevant. If X is missing (aka
             # deleted), old X must be preserved.
             files.update(filestoamend)
-            files = [f for f in files if (f not in filestoamend
-                                          or not samefile(f, wctx, base))]
+            files = [
+                f
+                for f in files
+                if (f not in filestoamend or not samefile(f, wctx, base))
+            ]
 
             def filectxfn(repo, ctx_, path):
                 try:
@@ -2568,14 +2895,19 @@
 
                     fctx = wctx[path]
                     flags = fctx.flags()
-                    mctx = context.memfilectx(repo, ctx_,
-                                              fctx.path(), fctx.data(),
-                                              islink='l' in flags,
-                                              isexec='x' in flags,
-                                              copysource=copied.get(path))
+                    mctx = context.memfilectx(
+                        repo,
+                        ctx_,
+                        fctx.path(),
+                        fctx.data(),
+                        islink='l' in flags,
+                        isexec='x' in flags,
+                        copysource=copied.get(path),
+                    )
                     return mctx
                 except KeyError:
                     return None
+
         else:
             ui.note(_('copying changeset %s to %s\n') % (old, base))
 
@@ -2607,22 +2939,26 @@
         pureextra = extra.copy()
         extra['amend_source'] = old.hex()
 
-        new = context.memctx(repo,
-                             parents=[base.node(), old.p2().node()],
-                             text=message,
-                             files=files,
-                             filectxfn=filectxfn,
-                             user=user,
-                             date=date,
-                             extra=extra,
-                             editor=editor)
+        new = context.memctx(
+            repo,
+            parents=[base.node(), old.p2().node()],
+            text=message,
+            files=files,
+            filectxfn=filectxfn,
+            user=user,
+            date=date,
+            extra=extra,
+            editor=editor,
+        )
 
         newdesc = changelog.stripdesc(new.description())
-        if ((not changes)
+        if (
+            (not changes)
             and newdesc == old.description()
             and user == old.user()
             and (date == old.date() or datemaydiffer)
-            and pureextra == old.extra()):
+            and pureextra == old.extra()
+        ):
             # nothing changed. continuing here would create a new node
             # anyway because of the amend_source noise.
             #
@@ -2641,9 +2977,15 @@
         if opts.get('note'):
             obsmetadata = {'note': encoding.fromlocal(opts['note'])}
         backup = ui.configbool('rewrite', 'backup-bundle')
-        scmutil.cleanupnodes(repo, mapping, 'amend', metadata=obsmetadata,
-                             fixphase=True, targetphase=commitphase,
-                             backup=backup)
+        scmutil.cleanupnodes(
+            repo,
+            mapping,
+            'amend',
+            metadata=obsmetadata,
+            fixphase=True,
+            targetphase=commitphase,
+            backup=backup,
+        )
 
         # Fixing the dirstate because localrepo.commitctx does not update
         # it. This is rather convenient because we did not need to update
@@ -2666,14 +3008,24 @@
 
     return newid
 
+
 def commiteditor(repo, ctx, subs, editform=''):
     if ctx.description():
         return ctx.description()
-    return commitforceeditor(repo, ctx, subs, editform=editform,
-                             unchangedmessagedetection=True)
-
-def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
-                      editform='', unchangedmessagedetection=False):
+    return commitforceeditor(
+        repo, ctx, subs, editform=editform, unchangedmessagedetection=True
+    )
+
+
+def commitforceeditor(
+    repo,
+    ctx,
+    subs,
+    finishdesc=None,
+    extramsg=None,
+    editform='',
+    unchangedmessagedetection=False,
+):
     if not extramsg:
         extramsg = _("Leave message empty to abort commit.")
 
@@ -2684,7 +3036,8 @@
         ref = '.'.join(forms)
         if repo.ui.config('committemplate', ref):
             templatetext = committext = buildcommittemplate(
-                repo, ctx, subs, extramsg, ref)
+                repo, ctx, subs, extramsg, ref
+            )
             break
         forms.pop()
     else:
@@ -2699,16 +3052,22 @@
     repo.dirstate.write(tr)
     pending = tr and tr.writepending() and repo.root
 
-    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
-                              editform=editform, pending=pending,
-                              repopath=repo.path, action='commit')
+    editortext = repo.ui.edit(
+        committext,
+        ctx.user(),
+        ctx.extra(),
+        editform=editform,
+        pending=pending,
+        repopath=repo.path,
+        action='commit',
+    )
     text = editortext
 
     # strip away anything below this special string (used for editors that want
     # to display the diff)
     stripbelow = re.search(_linebelow, text, flags=re.MULTILINE)
     if stripbelow:
-        text = text[:stripbelow.start()]
+        text = text[: stripbelow.start()]
 
     text = re.sub("(?m)^HG:.*(\n|$)", "", text)
     os.chdir(olddir)
@@ -2722,32 +3081,43 @@
 
     return text
 
+
 def buildcommittemplate(repo, ctx, subs, extramsg, ref):
     ui = repo.ui
     spec = formatter.templatespec(ref, None, None)
     t = logcmdutil.changesettemplater(ui, repo, spec)
-    t.t.cache.update((k, templater.unquotestring(v))
-                     for k, v in repo.ui.configitems('committemplate'))
+    t.t.cache.update(
+        (k, templater.unquotestring(v))
+        for k, v in repo.ui.configitems('committemplate')
+    )
 
     if not extramsg:
-        extramsg = '' # ensure that extramsg is string
+        extramsg = ''  # ensure that extramsg is string
 
     ui.pushbuffer()
     t.show(ctx, extramsg=extramsg)
     return ui.popbuffer()
 
+
 def hgprefix(msg):
     return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])
 
+
 def buildcommittext(repo, ctx, subs, extramsg):
     edittext = []
     modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
     if ctx.description():
         edittext.append(ctx.description())
     edittext.append("")
-    edittext.append("") # Empty line between message and comments.
-    edittext.append(hgprefix(_("Enter commit message."
-                      "  Lines beginning with 'HG:' are removed.")))
+    edittext.append("")  # Empty line between message and comments.
+    edittext.append(
+        hgprefix(
+            _(
+                "Enter commit message."
+                "  Lines beginning with 'HG:' are removed."
+            )
+        )
+    )
     edittext.append(hgprefix(extramsg))
     edittext.append("HG: --")
     edittext.append(hgprefix(_("user: %s") % ctx.user()))
@@ -2767,14 +3137,21 @@
 
     return "\n".join(edittext)
 
+
 def commitstatus(repo, node, branch, bheads=None, opts=None):
     if opts is None:
         opts = {}
     ctx = repo[node]
     parents = ctx.parents()
 
-    if (not opts.get('amend') and bheads and node not in bheads and not
-        [x for x in parents if x.node() in bheads and x.branch() == branch]):
+    if (
+        not opts.get('amend')
+        and bheads
+        and node not in bheads
+        and not [
+            x for x in parents if x.node() in bheads and x.branch() == branch
+        ]
+    ):
         repo.ui.status(_('created new head\n'))
         # The message is not printed for initial roots. For the other
         # changesets, it is printed in the following situations:
@@ -2815,9 +3192,11 @@
     elif repo.ui.verbose:
         repo.ui.write(_('committed changeset %d:%s\n') % (ctx.rev(), ctx))
 
+
 def postcommitstatus(repo, pats, opts):
     return repo.status(match=scmutil.match(repo[None], pats, opts))
 
+
 def revert(ui, repo, ctx, parents, *pats, **opts):
     opts = pycompat.byteskwargs(opts)
     parent, p2 = parents
@@ -2875,8 +3254,9 @@
             # Find status of all files in `names`.
             m = scmutil.matchfiles(repo, names)
 
-            changes = repo.status(node1=node, match=m,
-                                  unknown=True, ignored=True, clean=True)
+            changes = repo.status(
+                node1=node, match=m, unknown=True, ignored=True, clean=True
+            )
         else:
             changes = repo.status(node1=node, match=m)
             for kind in changes:
@@ -2886,12 +3266,12 @@
             m = scmutil.matchfiles(repo, names)
 
         modified = set(changes.modified)
-        added    = set(changes.added)
-        removed  = set(changes.removed)
+        added = set(changes.added)
+        removed = set(changes.removed)
         _deleted = set(changes.deleted)
-        unknown  = set(changes.unknown)
+        unknown = set(changes.unknown)
         unknown.update(changes.ignored)
-        clean    = set(changes.clean)
+        clean = set(changes.clean)
         modadded = set()
 
         # We need to account for the state of the file in the dirstate,
@@ -2908,8 +3288,8 @@
         else:
             changes = repo.status(node1=parent, match=m)
             dsmodified = set(changes.modified)
-            dsadded    = set(changes.added)
-            dsremoved  = set(changes.removed)
+            dsadded = set(changes.added)
+            dsremoved = set(changes.removed)
             # store all local modifications, useful later for rename detection
             localchanges = dsmodified | dsadded
 
@@ -2924,7 +3304,7 @@
 
             # tell newly modified files apart.
             dsmodified &= modified
-            dsmodified |= modified & dsadded # dirstate added may need backup
+            dsmodified |= modified & dsadded  # dirstate added may need backup
             modified -= dsmodified
 
             # We need to wait for some post-processing to update this set
@@ -2989,24 +3369,25 @@
 
         # action to be actually performed by revert
         # (<list of file>, <message>) tuple
-        actions = {'revert': ([], _('reverting %s\n')),
-                   'add': ([], _('adding %s\n')),
-                   'remove': ([], _('removing %s\n')),
-                   'drop': ([], _('removing %s\n')),
-                   'forget': ([], _('forgetting %s\n')),
-                   'undelete': ([], _('undeleting %s\n')),
-                   'noop': (None, _('no changes needed to %s\n')),
-                   'unknown': (None, _('file not managed: %s\n')),
-                  }
+        actions = {
+            'revert': ([], _('reverting %s\n')),
+            'add': ([], _('adding %s\n')),
+            'remove': ([], _('removing %s\n')),
+            'drop': ([], _('removing %s\n')),
+            'forget': ([], _('forgetting %s\n')),
+            'undelete': ([], _('undeleting %s\n')),
+            'noop': (None, _('no changes needed to %s\n')),
+            'unknown': (None, _('file not managed: %s\n')),
+        }
 
         # "constant" that convey the backup strategy.
         # All set to `discard` if `no-backup` is set do avoid checking
         # no_backup lower in the code.
         # These values are ordered for comparison purposes
-        backupinteractive = 3 # do backup if interactively modified
+        backupinteractive = 3  # do backup if interactively modified
         backup = 2  # unconditionally do backup
-        check = 1   # check if the existing file differs from target
-        discard = 0 # never do backup
+        check = 1  # check if the existing file differs from target
+        discard = 0  # never do backup
         if opts.get('no_backup'):
             backupinteractive = backup = check = discard
         if interactive:
@@ -3024,36 +3405,35 @@
             #   file state
             #   action
             #   make backup
-
             ## Sets that result in changes to files on disk
             # Modified compared to target, no local change
-            (modified,      actions['revert'],   discard),
+            (modified, actions['revert'], discard),
             # Modified compared to target, but local file is deleted
-            (deleted,       actions['revert'],   discard),
+            (deleted, actions['revert'], discard),
             # Modified compared to target, local change
-            (dsmodified,    actions['revert'],   dsmodifiedbackup),
+            (dsmodified, actions['revert'], dsmodifiedbackup),
             # Added since target
-            (added,         actions['remove'],   discard),
+            (added, actions['remove'], discard),
             # Added in working directory
-            (dsadded,       actions['forget'],   discard),
+            (dsadded, actions['forget'], discard),
             # Added since target, have local modification
-            (modadded,      backupanddel,        backup),
+            (modadded, backupanddel, backup),
             # Added since target but file is missing in working directory
-            (deladded,      actions['drop'],   discard),
+            (deladded, actions['drop'], discard),
             # Removed since target, before working copy parent
-            (removed,       actions['add'],      discard),
+            (removed, actions['add'], discard),
             # Same as `removed` but an unknown file exists at the same path
-            (removunk,      actions['add'],      check),
+            (removunk, actions['add'], check),
             # Removed since target, marked as such in working copy parent
-            (dsremoved,     actions['undelete'], discard),
+            (dsremoved, actions['undelete'], discard),
             # Same as `dsremoved` but an unknown file exists at the same path
-            (dsremovunk,    actions['undelete'], check),
+            (dsremovunk, actions['undelete'], check),
             ## the following sets do not result in any file changes
             # File with no modification
-            (clean,         actions['noop'],     discard),
+            (clean, actions['noop'], discard),
             # Existing file, not tracked anywhere
-            (unknown,       actions['unknown'],  discard),
-            )
+            (unknown, actions['unknown'], discard),
+        )
 
         for abs, exact in sorted(names.items()):
             # target file to be touched on disk (relative to cwd)
@@ -3071,12 +3451,15 @@
                         # .orig files (issue4793)
                         if dobackup == backupinteractive:
                             tobackup.add(abs)
-                        elif (backup <= dobackup or wctx[abs].cmp(ctx[abs])):
+                        elif backup <= dobackup or wctx[abs].cmp(ctx[abs]):
                             absbakname = scmutil.backuppath(ui, repo, abs)
-                            bakname = os.path.relpath(absbakname,
-                                                      start=repo.root)
-                            ui.note(_('saving current version of %s as %s\n') %
-                                    (uipathfn(abs), uipathfn(bakname)))
+                            bakname = os.path.relpath(
+                                absbakname, start=repo.root
+                            )
+                            ui.note(
+                                _('saving current version of %s as %s\n')
+                                % (uipathfn(abs), uipathfn(bakname))
+                            )
                             if not opts.get('dry_run'):
                                 if interactive:
                                     util.copyfile(target, absbakname)
@@ -3094,25 +3477,49 @@
             oplist = [actions[name][0] for name in needdata]
             prefetch = scmutil.prefetchfiles
             matchfiles = scmutil.matchfiles
-            prefetch(repo, [ctx.rev()],
-                     matchfiles(repo,
-                                [f for sublist in oplist for f in sublist]))
+            prefetch(
+                repo,
+                [ctx.rev()],
+                matchfiles(repo, [f for sublist in oplist for f in sublist]),
+            )
             match = scmutil.match(repo[None], pats)
-            _performrevert(repo, parents, ctx, names, uipathfn, actions,
-                           match, interactive, tobackup)
+            _performrevert(
+                repo,
+                parents,
+                ctx,
+                names,
+                uipathfn,
+                actions,
+                match,
+                interactive,
+                tobackup,
+            )
 
         if targetsubs:
             # Revert the subrepos on the revert list
             for sub in targetsubs:
                 try:
-                    wctx.sub(sub).revert(ctx.substate[sub], *pats,
-                                         **pycompat.strkwargs(opts))
+                    wctx.sub(sub).revert(
+                        ctx.substate[sub], *pats, **pycompat.strkwargs(opts)
+                    )
                 except KeyError:
-                    raise error.Abort("subrepository '%s' does not exist in %s!"
-                                      % (sub, short(ctx.node())))
-
-def _performrevert(repo, parents, ctx, names, uipathfn, actions,
-                   match, interactive=False, tobackup=None):
+                    raise error.Abort(
+                        "subrepository '%s' does not exist in %s!"
+                        % (sub, short(ctx.node()))
+                    )
+
+
+def _performrevert(
+    repo,
+    parents,
+    ctx,
+    names,
+    uipathfn,
+    actions,
+    match,
+    interactive=False,
+    tobackup=None,
+):
     """function that actually perform all the actions computed for revert
 
     This is an independent function to let extensions plug in and react to
@@ -3145,7 +3552,8 @@
     for f in actions['forget'][0]:
         if interactive:
             choice = repo.ui.promptchoice(
-                _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
+                _("forget added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
+            )
             if choice == 0:
                 prntstatusmsg('forget', f)
                 repo.dirstate.drop(f)
@@ -3158,7 +3566,8 @@
         audit_path(f)
         if interactive:
             choice = repo.ui.promptchoice(
-                _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f))
+                _("remove added file %s (Yn)?$$ &Yes $$ &No") % uipathfn(f)
+            )
             if choice == 0:
                 prntstatusmsg('remove', f)
                 doremove(f)
@@ -3187,15 +3596,19 @@
         # Prompt the user for changes to revert
         torevert = [f for f in actions['revert'][0] if f not in excluded_files]
         m = scmutil.matchfiles(repo, torevert)
-        diffopts = patch.difffeatureopts(repo.ui, whitespace=True,
-                                         section='commands',
-                                         configprefix='revert.interactive.')
+        diffopts = patch.difffeatureopts(
+            repo.ui,
+            whitespace=True,
+            section='commands',
+            configprefix='revert.interactive.',
+        )
         diffopts.nodates = True
         diffopts.git = True
         operation = 'apply'
         if node == parent:
-            if repo.ui.configbool('experimental',
-                                  'revert.interactive.select-to-keep'):
+            if repo.ui.configbool(
+                'experimental', 'revert.interactive.select-to-keep'
+            ):
                 operation = 'keep'
             else:
                 operation = 'discard'
@@ -3208,8 +3621,9 @@
 
         try:
 
-            chunks, opts = recordfilter(repo.ui, originalchunks, match,
-                                        operation=operation)
+            chunks, opts = recordfilter(
+                repo.ui, originalchunks, match, operation=operation
+            )
             if operation == 'discard':
                 chunks = patch.reversehunks(chunks)
 
@@ -3222,7 +3636,8 @@
         # alsorestore value. Ideally we'd be able to partially revert
         # copied/renamed files.
         newlyaddedandmodifiedfiles, unusedalsorestore = newandmodified(
-                chunks, originalchunks)
+            chunks, originalchunks
+        )
         if tobackup is None:
             tobackup = set()
         # Apply changes
@@ -3273,7 +3688,8 @@
     for f in actions['undelete'][0]:
         if interactive:
             choice = repo.ui.promptchoice(
-                _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f)
+                _("add back removed file %s (Yn)?$$ &Yes $$ &No") % f
+            )
             if choice == 0:
                 prntstatusmsg('undelete', f)
                 checkout(f)
@@ -3291,6 +3707,7 @@
         if f in copied:
             repo.dirstate.copy(copied[f], f)
 
+
 # a list of (ui, repo, otherpeer, opts, missing) functions called by
 # commands.outgoing.  "missing" is the "missing" attribute of the result
 # of "findcommonoutgoing()"
@@ -3318,19 +3735,27 @@
     # Check for non-clearable states first, so things like rebase will take
     # precedence over update.
     for state in statemod._unfinishedstates:
-        if (state._clearable or (commit and state._allowcommit) or
-            state._reportonly):
+        if (
+            state._clearable
+            or (commit and state._allowcommit)
+            or state._reportonly
+        ):
             continue
         if state.isunfinished(repo):
             raise error.Abort(state.msg(), hint=state.hint())
 
     for s in statemod._unfinishedstates:
-        if (not s._clearable or (commit and s._allowcommit) or
-            (s._opname == 'merge' and skipmerge) or s._reportonly):
+        if (
+            not s._clearable
+            or (commit and s._allowcommit)
+            or (s._opname == 'merge' and skipmerge)
+            or s._reportonly
+        ):
             continue
         if s.isunfinished(repo):
             raise error.Abort(s.msg(), hint=s.hint())
 
+
 def clearunfinished(repo):
     '''Check for unfinished operations (as above), and clear the ones
     that are clearable.
@@ -3347,6 +3772,7 @@
         if s._clearable and s.isunfinished(repo):
             util.unlink(repo.vfs.join(s._fname))
 
+
 def getunfinishedstate(repo):
     '''Checks for unfinished operations and returns the statecheck object
         for it'''
@@ -3355,6 +3781,7 @@
             return state
     return None
 
+
 def howtocontinue(repo):
     '''Check for an unfinished operation and return the command to finish
     it.
@@ -3376,6 +3803,7 @@
         return contmsg % _("hg commit"), False
     return None, None
 
+
 def checkafterresolved(repo):
     '''Inform the user about the next action after completing hg resolve
 
@@ -3391,6 +3819,7 @@
         else:
             repo.ui.note("%s\n" % msg)
 
+
 def wrongtooltocontinue(repo, task):
     '''Raise an abort suggesting how to properly continue if there is an
     active task.
@@ -3406,6 +3835,7 @@
         hint = after[0]
     raise error.Abort(_('no %s in progress') % task, hint=hint)
 
+
 def abortgraft(ui, repo, graftstate):
     """abort the interrupted graft and rollbacks to the state before interrupted
     graft"""
@@ -3426,30 +3856,38 @@
     # whether to strip or not
     cleanup = False
     from . import hg
+
     if newnodes:
         newnodes = [repo[r].rev() for r in newnodes]
         cleanup = True
         # checking that none of the newnodes has turned public or is public
         immutable = [c for c in newnodes if not repo[c].mutable()]
         if immutable:
-            repo.ui.warn(_("cannot clean up public changesets %s\n")
-                         % ', '.join(bytes(repo[r]) for r in immutable),
-                         hint=_("see 'hg help phases' for details"))
+            repo.ui.warn(
+                _("cannot clean up public changesets %s\n")
+                % ', '.join(bytes(repo[r]) for r in immutable),
+                hint=_("see 'hg help phases' for details"),
+            )
             cleanup = False
 
         # checking that no new nodes are created on top of grafted revs
         desc = set(repo.changelog.descendants(newnodes))
         if desc - set(newnodes):
-            repo.ui.warn(_("new changesets detected on destination "
-                           "branch, can't strip\n"))
+            repo.ui.warn(
+                _(
+                    "new changesets detected on destination "
+                    "branch, can't strip\n"
+                )
+            )
             cleanup = False
 
         if cleanup:
             with repo.wlock(), repo.lock():
                 hg.updaterepo(repo, startctx.node(), overwrite=True)
                 # stripping the new nodes created
-                strippoints = [c.node() for c in repo.set("roots(%ld)",
-                                                          newnodes)]
+                strippoints = [
+                    c.node() for c in repo.set("roots(%ld)", newnodes)
+                ]
                 repair.strip(repo.ui, repo, strippoints, backup=False)
 
     if not cleanup:
@@ -3462,6 +3900,7 @@
     graftstate.delete()
     return 0
 
+
 def readgraftstate(repo, graftstate):
     """read the graft state file and return a dict of the data stored in it"""
     try:
@@ -3470,6 +3909,7 @@
         nodes = repo.vfs.read('graftstate').splitlines()
         return {'nodes': nodes}
 
+
 def hgabortgraft(ui, repo):
     """ abort logic for aborting graft using 'hg abort'"""
     with repo.wlock():
--- a/mercurial/color.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/color.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,12 +16,11 @@
     pycompat,
 )
 
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 try:
     import curses
+
     # Mapping from effect name to terminfo attribute name (or raw code) or
     # color number.  This will also force-load the curses module.
     _baseterminfoparams = {
@@ -72,7 +71,7 @@
     'purple_background': 45,
     'cyan_background': 46,
     'white_background': 47,
-    }
+}
 
 _defaultstyles = {
     'grep.match': 'red bold',
@@ -147,9 +146,11 @@
     'tags.local': 'black bold',
 }
 
+
 def loadcolortable(ui, extname, colortable):
     _defaultstyles.update(colortable)
 
+
 def _terminfosetup(ui, mode, formatted):
     '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
 
@@ -186,10 +187,15 @@
         # Only warn about missing terminfo entries if we explicitly asked for
         # terminfo mode and we're in a formatted terminal.
         if mode == "terminfo" and formatted:
-            ui.warn(_("no terminfo entry for setab/setaf: reverting to "
-              "ECMA-48 color\n"))
+            ui.warn(
+                _(
+                    "no terminfo entry for setab/setaf: reverting to "
+                    "ECMA-48 color\n"
+                )
+            )
         ui._terminfoparams.clear()
 
+
 def setup(ui):
     """configure color on a ui
 
@@ -200,6 +206,7 @@
     if mode and mode != 'debug':
         configstyles(ui)
 
+
 def _modesetup(ui):
     if ui.plain('color'):
         return None
@@ -207,7 +214,7 @@
     if config == 'debug':
         return 'debug'
 
-    auto = (config == 'auto')
+    auto = config == 'auto'
     always = False
     if not auto and stringutil.parsebool(config):
         # We want the config to behave like a boolean, "on" is actually auto,
@@ -220,8 +227,9 @@
     if not always and not auto:
         return None
 
-    formatted = (always or (encoding.environ.get('TERM') != 'dumb'
-                 and ui.formatted()))
+    formatted = always or (
+        encoding.environ.get('TERM') != 'dumb' and ui.formatted()
+    )
 
     mode = ui.config('color', 'mode')
 
@@ -285,6 +293,7 @@
         return realmode
     return None
 
+
 def configstyles(ui):
     ui._styles.update(_defaultstyles)
     for status, cfgeffects in ui.configitems('color'):
@@ -297,11 +306,16 @@
                 if valideffect(ui, e):
                     good.append(e)
                 else:
-                    ui.warn(_("ignoring unknown color/effect %s "
-                              "(configured in color.%s)\n")
-                            % (stringutil.pprint(e), status))
+                    ui.warn(
+                        _(
+                            "ignoring unknown color/effect %s "
+                            "(configured in color.%s)\n"
+                        )
+                        % (stringutil.pprint(e), status)
+                    )
             ui._styles[status] = ' '.join(good)
 
+
 def _activeeffects(ui):
     '''Return the effects map for the color mode set on the ui.'''
     if ui._colormode == 'win32':
@@ -310,11 +324,13 @@
         return _effects
     return {}
 
+
 def valideffect(ui, effect):
     'Determine if the effect is valid or not.'
-    return ((not ui._terminfoparams and effect in _activeeffects(ui))
-             or (effect in ui._terminfoparams
-                 or effect[:-11] in ui._terminfoparams))
+    return (not ui._terminfoparams and effect in _activeeffects(ui)) or (
+        effect in ui._terminfoparams or effect[:-11] in ui._terminfoparams
+    )
+
 
 def _effect_str(ui, effect):
     '''Helper function for render_effects().'''
@@ -337,6 +353,7 @@
     else:
         return curses.tparm(curses.tigetstr(r'setaf'), val)
 
+
 def _mergeeffects(text, start, stop):
     """Insert start sequence at every occurrence of stop sequence
 
@@ -354,28 +371,35 @@
         parts.extend([start, t, stop])
     return ''.join(parts)
 
+
 def _render_effects(ui, text, effects):
     'Wrap text in commands to turn on each effect.'
     if not text:
         return text
     if ui._terminfoparams:
-        start = ''.join(_effect_str(ui, effect)
-                        for effect in ['none'] + effects.split())
+        start = ''.join(
+            _effect_str(ui, effect) for effect in ['none'] + effects.split()
+        )
         stop = _effect_str(ui, 'none')
     else:
         activeeffects = _activeeffects(ui)
-        start = [pycompat.bytestr(activeeffects[e])
-                 for e in ['none'] + effects.split()]
+        start = [
+            pycompat.bytestr(activeeffects[e])
+            for e in ['none'] + effects.split()
+        ]
         start = '\033[' + ';'.join(start) + 'm'
         stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
     return _mergeeffects(text, start, stop)
 
+
 _ansieffectre = re.compile(br'\x1b\[[0-9;]*m')
 
+
 def stripeffects(text):
     """Strip ANSI control codes which could be inserted by colorlabel()"""
     return _ansieffectre.sub('', text)
 
+
 def colorlabel(ui, msg, label):
     """add color control code according to the mode"""
     if ui._colormode == 'debug':
@@ -394,10 +418,12 @@
                 effects.append(l)
         effects = ' '.join(effects)
         if effects:
-            msg = '\n'.join([_render_effects(ui, line, effects)
-                             for line in msg.split('\n')])
+            msg = '\n'.join(
+                [_render_effects(ui, line, effects) for line in msg.split('\n')]
+            )
     return msg
 
+
 w32effects = None
 if pycompat.iswindows:
     import ctypes
@@ -409,24 +435,27 @@
     _INVALID_HANDLE_VALUE = -1
 
     class _COORD(ctypes.Structure):
-        _fields_ = [(r'X', ctypes.c_short),
-                    (r'Y', ctypes.c_short)]
+        _fields_ = [(r'X', ctypes.c_short), (r'Y', ctypes.c_short)]
 
     class _SMALL_RECT(ctypes.Structure):
-        _fields_ = [(r'Left', ctypes.c_short),
-                    (r'Top', ctypes.c_short),
-                    (r'Right', ctypes.c_short),
-                    (r'Bottom', ctypes.c_short)]
+        _fields_ = [
+            (r'Left', ctypes.c_short),
+            (r'Top', ctypes.c_short),
+            (r'Right', ctypes.c_short),
+            (r'Bottom', ctypes.c_short),
+        ]
 
     class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
-        _fields_ = [(r'dwSize', _COORD),
-                    (r'dwCursorPosition', _COORD),
-                    (r'wAttributes', _WORD),
-                    (r'srWindow', _SMALL_RECT),
-                    (r'dwMaximumWindowSize', _COORD)]
+        _fields_ = [
+            (r'dwSize', _COORD),
+            (r'dwCursorPosition', _COORD),
+            (r'wAttributes', _WORD),
+            (r'srWindow', _SMALL_RECT),
+            (r'dwMaximumWindowSize', _COORD),
+        ]
 
-    _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
-    _STD_ERROR_HANDLE = 0xfffffff4  # (DWORD)-12
+    _STD_OUTPUT_HANDLE = 0xFFFFFFF5  # (DWORD)-11
+    _STD_ERROR_HANDLE = 0xFFFFFFF4  # (DWORD)-12
 
     _FOREGROUND_BLUE = 0x0001
     _FOREGROUND_GREEN = 0x0002
@@ -453,40 +482,44 @@
         'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
         'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
         'bold': _FOREGROUND_INTENSITY,
-        'black_background': 0x100,                  # unused value > 0x0f
+        'black_background': 0x100,  # unused value > 0x0f
         'red_background': _BACKGROUND_RED,
         'green_background': _BACKGROUND_GREEN,
         'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
         'blue_background': _BACKGROUND_BLUE,
         'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
         'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
-        'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
-                             _BACKGROUND_BLUE),
+        'white_background': (
+            _BACKGROUND_RED | _BACKGROUND_GREEN | _BACKGROUND_BLUE
+        ),
         'bold_background': _BACKGROUND_INTENSITY,
         'underline': _COMMON_LVB_UNDERSCORE,  # double-byte charsets only
-        'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
+        'inverse': _COMMON_LVB_REVERSE_VIDEO,  # double-byte charsets only
     }
 
-    passthrough = {_FOREGROUND_INTENSITY,
-                   _BACKGROUND_INTENSITY,
-                   _COMMON_LVB_UNDERSCORE,
-                   _COMMON_LVB_REVERSE_VIDEO}
+    passthrough = {
+        _FOREGROUND_INTENSITY,
+        _BACKGROUND_INTENSITY,
+        _COMMON_LVB_UNDERSCORE,
+        _COMMON_LVB_REVERSE_VIDEO,
+    }
 
     stdout = _kernel32.GetStdHandle(
-                  _STD_OUTPUT_HANDLE)  # don't close the handle returned
+        _STD_OUTPUT_HANDLE
+    )  # don't close the handle returned
     if stdout is None or stdout == _INVALID_HANDLE_VALUE:
         w32effects = None
     else:
         csbi = _CONSOLE_SCREEN_BUFFER_INFO()
-        if not _kernel32.GetConsoleScreenBufferInfo(
-                    stdout, ctypes.byref(csbi)):
+        if not _kernel32.GetConsoleScreenBufferInfo(stdout, ctypes.byref(csbi)):
             # stdout may not support GetConsoleScreenBufferInfo()
             # when called from subprocess or redirected
             w32effects = None
         else:
             origattr = csbi.wAttributes
-            ansire = re.compile(br'\033\[([^m]*)m([^\033]*)(.*)',
-                                re.MULTILINE | re.DOTALL)
+            ansire = re.compile(
+                br'\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL
+            )
 
     def win32print(ui, writefunc, text, **opts):
         label = opts.get(r'label', '')
@@ -497,10 +530,10 @@
                 return origattr
             elif val in passthrough:
                 return attr | val
-            elif val > 0x0f:
-                return (val & 0x70) | (attr & 0x8f)
+            elif val > 0x0F:
+                return (val & 0x70) | (attr & 0x8F)
             else:
-                return (val & 0x07) | (attr & 0xf8)
+                return (val & 0x07) | (attr & 0xF8)
 
         # determine console attributes based on labels
         for l in label.split():
--- a/mercurial/commands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/commands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -80,37 +80,67 @@
 # common command options
 
 globalopts = [
-    ('R', 'repository', '',
-     _('repository root directory or name of overlay bundle file'),
-     _('REPO')),
-    ('', 'cwd', '',
-     _('change working directory'), _('DIR')),
-    ('y', 'noninteractive', None,
-     _('do not prompt, automatically pick the first choice for all prompts')),
+    (
+        'R',
+        'repository',
+        '',
+        _('repository root directory or name of overlay bundle file'),
+        _('REPO'),
+    ),
+    ('', 'cwd', '', _('change working directory'), _('DIR')),
+    (
+        'y',
+        'noninteractive',
+        None,
+        _('do not prompt, automatically pick the first choice for all prompts'),
+    ),
     ('q', 'quiet', None, _('suppress output')),
     ('v', 'verbose', None, _('enable additional output')),
-    ('', 'color', '',
-     # i18n: 'always', 'auto', 'never', and 'debug' are keywords
-     # and should not be translated
-     _("when to colorize (boolean, always, auto, never, or debug)"),
-     _('TYPE')),
-    ('', 'config', [],
-     _('set/override config option (use \'section.name=value\')'),
-     _('CONFIG')),
+    (
+        '',
+        'color',
+        '',
+        # i18n: 'always', 'auto', 'never', and 'debug' are keywords
+        # and should not be translated
+        _("when to colorize (boolean, always, auto, never, or debug)"),
+        _('TYPE'),
+    ),
+    (
+        '',
+        'config',
+        [],
+        _('set/override config option (use \'section.name=value\')'),
+        _('CONFIG'),
+    ),
     ('', 'debug', None, _('enable debugging output')),
     ('', 'debugger', None, _('start debugger')),
-    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
-     _('ENCODE')),
-    ('', 'encodingmode', encoding.encodingmode,
-     _('set the charset encoding mode'), _('MODE')),
+    (
+        '',
+        'encoding',
+        encoding.encoding,
+        _('set the charset encoding'),
+        _('ENCODE'),
+    ),
+    (
+        '',
+        'encodingmode',
+        encoding.encodingmode,
+        _('set the charset encoding mode'),
+        _('MODE'),
+    ),
     ('', 'traceback', None, _('always print a traceback on exception')),
     ('', 'time', None, _('time how long the command takes')),
     ('', 'profile', None, _('print command execution profile')),
     ('', 'version', None, _('output version information and exit')),
     ('h', 'help', None, _('display help and exit')),
     ('', 'hidden', False, _('consider hidden changesets')),
-    ('', 'pager', 'auto',
-     _("when to paginate (boolean, always, auto, or never)"), _('TYPE')),
+    (
+        '',
+        'pager',
+        'auto',
+        _("when to paginate (boolean, always, auto, or never)"),
+        _('TYPE'),
+    ),
 ]
 
 dryrunopts = cmdutil.dryrunopts
@@ -132,9 +162,13 @@
 
 # Commands start here, listed alphabetically
 
-@command('abort',
-    dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
-    helpbasic=True)
+
+@command(
+    'abort',
+    dryrunopts,
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+    helpbasic=True,
+)
 def abort(ui, repo, **opts):
     """abort an unfinished operation (EXPERIMENTAL)
 
@@ -148,18 +182,27 @@
     if not abortstate:
         raise error.Abort(_('no operation in progress'))
     if not abortstate.abortfunc:
-        raise error.Abort((_("%s in progress but does not support 'hg abort'") %
-                            (abortstate._opname)), hint=abortstate.hint())
+        raise error.Abort(
+            (
+                _("%s in progress but does not support 'hg abort'")
+                % (abortstate._opname)
+            ),
+            hint=abortstate.hint(),
+        )
     if dryrun:
         ui.status(_('%s in progress, will be aborted\n') % (abortstate._opname))
         return
     return abortstate.abortfunc(ui, repo)
 
-@command('add',
+
+@command(
+    'add',
     walkopts + subrepoopts + dryrunopts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    helpbasic=True, inferrepo=True)
+    helpbasic=True,
+    inferrepo=True,
+)
 def add(ui, repo, *pats, **opts):
     """add the specified files on the next commit
 
@@ -208,11 +251,14 @@
     rejected = cmdutil.add(ui, repo, m, "", uipathfn, False, **opts)
     return rejected and 1 or 0
 
-@command('addremove',
+
+@command(
+    'addremove',
     similarityopts + subrepoopts + walkopts + dryrunopts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    inferrepo=True)
+    inferrepo=True,
+)
 def addremove(ui, repo, *pats, **opts):
     """add all new files, delete all missing files
 
@@ -283,23 +329,40 @@
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=relative)
     return scmutil.addremove(repo, matcher, "", uipathfn, opts)
 
-@command('annotate|blame',
-    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
-    ('', 'follow', None,
-     _('follow copies/renames and list the filename (DEPRECATED)')),
-    ('', 'no-follow', None, _("don't follow copies and renames")),
-    ('a', 'text', None, _('treat all files as text')),
-    ('u', 'user', None, _('list the author (long with -v)')),
-    ('f', 'file', None, _('list the filename')),
-    ('d', 'date', None, _('list the date (short with -q)')),
-    ('n', 'number', None, _('list the revision number (default)')),
-    ('c', 'changeset', None, _('list the changeset')),
-    ('l', 'line-number', None, _('show line number at the first appearance')),
-    ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
-    ] + diffwsopts + walkopts + formatteropts,
+
+@command(
+    'annotate|blame',
+    [
+        ('r', 'rev', '', _('annotate the specified revision'), _('REV')),
+        (
+            '',
+            'follow',
+            None,
+            _('follow copies/renames and list the filename (DEPRECATED)'),
+        ),
+        ('', 'no-follow', None, _("don't follow copies and renames")),
+        ('a', 'text', None, _('treat all files as text')),
+        ('u', 'user', None, _('list the author (long with -v)')),
+        ('f', 'file', None, _('list the filename')),
+        ('d', 'date', None, _('list the date (short with -q)')),
+        ('n', 'number', None, _('list the revision number (default)')),
+        ('c', 'changeset', None, _('list the changeset')),
+        (
+            'l',
+            'line-number',
+            None,
+            _('show line number at the first appearance'),
+        ),
+        ('', 'skip', [], _('revision to not display (EXPERIMENTAL)'), _('REV')),
+    ]
+    + diffwsopts
+    + walkopts
+    + formatteropts,
     _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
-    helpbasic=True, inferrepo=True)
+    helpbasic=True,
+    inferrepo=True,
+)
 def annotate(ui, repo, *pats, **opts):
     """show changeset information by line for each file
 
@@ -347,8 +410,12 @@
         # to mimic the behavior of Mercurial before version 1.5
         opts['file'] = True
 
-    if (not opts.get('user') and not opts.get('changeset')
-        and not opts.get('date') and not opts.get('file')):
+    if (
+        not opts.get('user')
+        and not opts.get('changeset')
+        and not opts.get('date')
+        and not opts.get('file')
+    ):
         opts['number'] = True
 
     linenumber = opts.get('line_number') is not None
@@ -365,8 +432,10 @@
     if ui.debugflag:
         shorthex = pycompat.identity
     else:
+
         def shorthex(h):
             return h[:12]
+
     if ui.quiet:
         datefunc = dateutil.shortdate
     else:
@@ -379,17 +448,21 @@
                     return '%d' % ctx.p1().rev()
                 else:
                     return '%d' % rev
+
         else:
+
             def formatrev(rev):
                 if rev == wdirrev:
                     return '%d+' % ctx.p1().rev()
                 else:
                     return '%d ' % rev
+
         def formathex(h):
             if h == wdirhex:
                 return '%s+' % shorthex(hex(ctx.p1().node()))
             else:
                 return '%s ' % shorthex(h)
+
     else:
         formatrev = b'%d'.__mod__
         formathex = shorthex
@@ -410,17 +483,27 @@
     }
 
     if rootfm.isplain():
+
         def makefunc(get, fmt):
             return lambda x: fmt(get(x))
+
     else:
+
         def makefunc(get, fmt):
             return get
+
     datahint = rootfm.datahint()
-    funcmap = [(makefunc(get, fmt), sep) for fn, sep, get, fmt in opmap
-               if opts.get(opnamemap.get(fn, fn)) or fn in datahint]
-    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
-    fields = ' '.join(fn for fn, sep, get, fmt in opmap
-                      if opts.get(opnamemap.get(fn, fn)) or fn in datahint)
+    funcmap = [
+        (makefunc(get, fmt), sep)
+        for fn, sep, get, fmt in opmap
+        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
+    ]
+    funcmap[0] = (funcmap[0][0], '')  # no separator in front of first column
+    fields = ' '.join(
+        fn
+        for fn, sep, get, fmt in opmap
+        if opts.get(opnamemap.get(fn, fn)) or fn in datahint
+    )
 
     def bad(x, y):
         raise error.Abort("%s: %s" % (x, y))
@@ -428,8 +511,9 @@
     m = scmutil.match(ctx, pats, opts, badfn=bad)
 
     follow = not opts.get('no_follow')
-    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
-                                     whitespace=True)
+    diffopts = patch.difffeatureopts(
+        ui, opts, section='annotate', whitespace=True
+    )
     skiprevs = opts.get('skip')
     if skiprevs:
         skiprevs = scmutil.revrange(repo, skiprevs)
@@ -444,8 +528,9 @@
             continue
 
         fm = rootfm.nested('lines', tmpl='{rev}: {line}')
-        lines = fctx.annotate(follow=follow, skiprevs=skiprevs,
-                              diffopts=diffopts)
+        lines = fctx.annotate(
+            follow=follow, skiprevs=skiprevs, diffopts=diffopts
+        )
         if not lines:
             fm.end()
             continue
@@ -478,15 +563,26 @@
 
     rootfm.end()
 
-@command('archive',
-    [('', 'no-decode', None, _('do not pass files through decoders')),
-    ('p', 'prefix', '', _('directory prefix for files in archive'),
-     _('PREFIX')),
-    ('r', 'rev', '', _('revision to distribute'), _('REV')),
-    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
-    ] + subrepoopts + walkopts,
+
+@command(
+    'archive',
+    [
+        ('', 'no-decode', None, _('do not pass files through decoders')),
+        (
+            'p',
+            'prefix',
+            '',
+            _('directory prefix for files in archive'),
+            _('PREFIX'),
+        ),
+        ('r', 'rev', '', _('revision to distribute'), _('REV')),
+        ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
+    ]
+    + subrepoopts
+    + walkopts,
     _('[OPTION]... DEST'),
-    helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def archive(ui, repo, dest, **opts):
     '''create an unversioned archive of a repository revision
 
@@ -553,21 +649,46 @@
 
     prefix = cmdutil.makefilename(ctx, prefix)
     match = scmutil.match(ctx, [], opts)
-    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
-                     match, prefix, subrepos=opts.get('subrepos'))
-
-@command('backout',
-    [('', 'merge', None, _('merge with old dirstate parent after backout')),
-    ('', 'commit', None,
-     _('commit if no conflicts were encountered (DEPRECATED)')),
-    ('', 'no-commit', None, _('do not commit')),
-    ('', 'parent', '',
-     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
-    ('r', 'rev', '', _('revision to backout'), _('REV')),
-    ('e', 'edit', False, _('invoke editor on commit messages')),
-    ] + mergetoolopts + walkopts + commitopts + commitopts2,
+    archival.archive(
+        repo,
+        dest,
+        node,
+        kind,
+        not opts.get('no_decode'),
+        match,
+        prefix,
+        subrepos=opts.get('subrepos'),
+    )
+
+
+@command(
+    'backout',
+    [
+        ('', 'merge', None, _('merge with old dirstate parent after backout')),
+        (
+            '',
+            'commit',
+            None,
+            _('commit if no conflicts were encountered (DEPRECATED)'),
+        ),
+        ('', 'no-commit', None, _('do not commit')),
+        (
+            '',
+            'parent',
+            '',
+            _('parent to choose when backing out merge (DEPRECATED)'),
+            _('REV'),
+        ),
+        ('r', 'rev', '', _('revision to backout'), _('REV')),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+    ]
+    + mergetoolopts
+    + walkopts
+    + commitopts
+    + commitopts2,
     _('[OPTION]... [-r] REV'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def backout(ui, repo, node=None, rev=None, **opts):
     '''reverse effect of earlier changeset
 
@@ -623,6 +744,7 @@
     with repo.wlock(), repo.lock():
         return _dobackout(ui, repo, node, rev, **opts)
 
+
 def _dobackout(ui, repo, node=None, rev=None, **opts):
     opts = pycompat.byteskwargs(opts)
     if opts.get('commit') and opts.get('no_commit'):
@@ -659,8 +781,9 @@
             raise error.Abort(_('cannot backout a merge changeset'))
         p = repo.lookup(opts['parent'])
         if p not in (p1, p2):
-            raise error.Abort(_('%s is not a parent of %s') %
-                             (short(p), short(node)))
+            raise error.Abort(
+                _('%s is not a parent of %s') % (short(p), short(node))
+            )
         parent = p
     else:
         if opts.get('parent'):
@@ -675,14 +798,20 @@
         with dirstateguard.dirstateguard(repo, 'backout'):
             overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
             with ui.configoverride(overrides, 'backout'):
-                stats = mergemod.update(repo, parent, branchmerge=True,
-                                        force=True, ancestor=node,
-                                        mergeancestor=False)
+                stats = mergemod.update(
+                    repo,
+                    parent,
+                    branchmerge=True,
+                    force=True,
+                    ancestor=node,
+                    mergeancestor=False,
+                )
             repo.setparents(op1, op2)
         hg._showstats(repo, stats)
         if stats.unresolvedcount:
-            repo.ui.status(_("use 'hg resolve' to retry unresolved "
-                             "file merges\n"))
+            repo.ui.status(
+                _("use 'hg resolve' to retry unresolved " "file merges\n")
+            )
             return 1
     else:
         hg.clean(repo, node, show_stats=False)
@@ -690,21 +819,23 @@
         cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())
 
     if opts.get('no_commit'):
-        msg = _("changeset %s backed out, "
-                "don't forget to commit.\n")
+        msg = _("changeset %s backed out, " "don't forget to commit.\n")
         ui.status(msg % short(node))
         return 0
 
     def commitfunc(ui, repo, message, match, opts):
         editform = 'backout'
-        e = cmdutil.getcommiteditor(editform=editform,
-                                    **pycompat.strkwargs(opts))
+        e = cmdutil.getcommiteditor(
+            editform=editform, **pycompat.strkwargs(opts)
+        )
         if not message:
             # we don't translate commit messages
             message = "Backed out changeset %s" % short(node)
             e = cmdutil.getcommiteditor(edit=True, editform=editform)
-        return repo.commit(message, opts.get('user'), opts.get('date'),
-                           match, editor=e)
+        return repo.commit(
+            message, opts.get('user'), opts.get('date'), match, editor=e
+        )
+
     newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
     if not newnode:
         ui.status(_("nothing changed\n"))
@@ -713,30 +844,53 @@
 
     def nice(node):
         return '%d:%s' % (repo.changelog.rev(node), short(node))
-    ui.status(_('changeset %s backs out changeset %s\n') %
-              (nice(repo.changelog.tip()), nice(node)))
+
+    ui.status(
+        _('changeset %s backs out changeset %s\n')
+        % (nice(repo.changelog.tip()), nice(node))
+    )
     if opts.get('merge') and op1 != node:
         hg.clean(repo, op1, show_stats=False)
-        ui.status(_('merging with changeset %s\n')
-                  % nice(repo.changelog.tip()))
+        ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
         overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
         with ui.configoverride(overrides, 'backout'):
             return hg.merge(repo, hex(repo.changelog.tip()))
     return 0
 
-@command('bisect',
-    [('r', 'reset', False, _('reset bisect state')),
-    ('g', 'good', False, _('mark changeset good')),
-    ('b', 'bad', False, _('mark changeset bad')),
-    ('s', 'skip', False, _('skip testing changeset')),
-    ('e', 'extend', False, _('extend the bisect range')),
-    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
-    ('U', 'noupdate', False, _('do not update to target'))],
+
+@command(
+    'bisect',
+    [
+        ('r', 'reset', False, _('reset bisect state')),
+        ('g', 'good', False, _('mark changeset good')),
+        ('b', 'bad', False, _('mark changeset bad')),
+        ('s', 'skip', False, _('skip testing changeset')),
+        ('e', 'extend', False, _('extend the bisect range')),
+        (
+            'c',
+            'command',
+            '',
+            _('use command to check changeset state'),
+            _('CMD'),
+        ),
+        ('U', 'noupdate', False, _('do not update to target')),
+    ],
     _("[-gbsr] [-U] [-c CMD] [REV]"),
-    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
-def bisect(ui, repo, rev=None, extra=None, command=None,
-               reset=None, good=None, bad=None, skip=None, extend=None,
-               noupdate=None):
+    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
+)
+def bisect(
+    ui,
+    repo,
+    rev=None,
+    extra=None,
+    command=None,
+    reset=None,
+    good=None,
+    bad=None,
+    skip=None,
+    extend=None,
+    noupdate=None,
+):
     """subdivision search of changesets
 
     This command helps to find changesets which introduce problems. To
@@ -843,8 +997,9 @@
     enabled = [x for x in incompatibles if incompatibles[x]]
 
     if len(enabled) > 1:
-        raise error.Abort(_('%s and %s are incompatible') %
-                          tuple(sorted(enabled)[0:2]))
+        raise error.Abort(
+            _('%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
+        )
 
     if reset:
         hbisect.resetstate(repo)
@@ -884,8 +1039,12 @@
             try:
                 node = state['current'][0]
             except LookupError:
-                raise error.Abort(_('current bisect revision is unknown - '
-                                   'start a new bisect to fix'))
+                raise error.Abort(
+                    _(
+                        'current bisect revision is unknown - '
+                        'start a new bisect to fix'
+                    )
+                )
         else:
             node, p2 = repo.dirstate.parents()
             if p2 != nullid:
@@ -897,8 +1056,11 @@
                 # update state
                 state['current'] = [node]
                 hbisect.save_state(repo, state)
-                status = ui.system(command, environ={'HG_NODE': hex(node)},
-                                   blockedtag='bisect_check')
+                status = ui.system(
+                    command,
+                    environ={'HG_NODE': hex(node)},
+                    blockedtag='bisect_check',
+                )
                 if status == 125:
                     transition = "skip"
                 elif status == 0:
@@ -912,8 +1074,9 @@
                     transition = "bad"
                 state[transition].append(node)
                 ctx = repo[node]
-                ui.status(_('changeset %d:%s: %s\n') % (ctx.rev(), ctx,
-                                                        transition))
+                ui.status(
+                    _('changeset %d:%s: %s\n') % (ctx.rev(), ctx, transition)
+                )
                 hbisect.checkstate(state)
                 # bisect
                 nodes, changesets, bgood = hbisect.bisect(repo, state)
@@ -934,8 +1097,10 @@
         if not changesets:
             extendnode = hbisect.extendrange(repo, state, nodes, good)
             if extendnode is not None:
-                ui.write(_("Extending search to changeset %d:%s\n")
-                         % (extendnode.rev(), extendnode))
+                ui.write(
+                    _("Extending search to changeset %d:%s\n")
+                    % (extendnode.rev(), extendnode)
+                )
                 state['current'] = [extendnode.node()]
                 hbisect.save_state(repo, state)
                 return mayupdate(repo, extendnode.node())
@@ -944,30 +1109,39 @@
     if changesets == 0:
         hbisect.printresult(ui, repo, state, displayer, nodes, good)
     else:
-        assert len(nodes) == 1 # only a single node can be tested next
+        assert len(nodes) == 1  # only a single node can be tested next
         node = nodes[0]
         # compute the approximate number of remaining tests
         tests, size = 0, 2
         while size <= changesets:
             tests, size = tests + 1, size * 2
         rev = repo.changelog.rev(node)
-        ui.write(_("Testing changeset %d:%s "
-                   "(%d changesets remaining, ~%d tests)\n")
-                 % (rev, short(node), changesets, tests))
+        ui.write(
+            _(
+                "Testing changeset %d:%s "
+                "(%d changesets remaining, ~%d tests)\n"
+            )
+            % (rev, short(node), changesets, tests)
+        )
         state['current'] = [node]
         hbisect.save_state(repo, state)
         return mayupdate(repo, node)
 
-@command('bookmarks|bookmark',
-    [('f', 'force', False, _('force')),
-    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
-    ('d', 'delete', False, _('delete a given bookmark')),
-    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
-    ('i', 'inactive', False, _('mark a bookmark inactive')),
-    ('l', 'list', False, _('list existing bookmarks')),
-    ] + formatteropts,
+
+@command(
+    'bookmarks|bookmark',
+    [
+        ('f', 'force', False, _('force')),
+        ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
+        ('d', 'delete', False, _('delete a given bookmark')),
+        ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
+        ('i', 'inactive', False, _('mark a bookmark inactive')),
+        ('l', 'list', False, _('list existing bookmarks')),
+    ]
+    + formatteropts,
     _('hg bookmarks [OPTIONS]... [NAME]...'),
-    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def bookmark(ui, repo, *names, **opts):
     '''create a new bookmark or list existing bookmarks
 
@@ -1035,8 +1209,9 @@
 
     selactions = [k for k in ['delete', 'rename', 'list'] if opts.get(k)]
     if len(selactions) > 1:
-        raise error.Abort(_('--%s and --%s are incompatible')
-                          % tuple(selactions[:2]))
+        raise error.Abort(
+            _('--%s and --%s are incompatible') % tuple(selactions[:2])
+        )
     if selactions:
         action = selactions[0]
     elif names or rev:
@@ -1081,14 +1256,22 @@
     else:
         raise error.ProgrammingError('invalid action: %s' % action)
 
-@command('branch',
-    [('f', 'force', None,
-     _('set branch name even if it shadows an existing branch')),
-     ('C', 'clean', None, _('reset branch name to parent branch name')),
-     ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
+
+@command(
+    'branch',
+    [
+        (
+            'f',
+            'force',
+            None,
+            _('set branch name even if it shadows an existing branch'),
+        ),
+        ('C', 'clean', None, _('reset branch name to parent branch name')),
+        ('r', 'rev', [], _('change branches of the given revs (EXPERIMENTAL)')),
     ],
     _('[-fC] [NAME]'),
-    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def branch(ui, repo, label=None, **opts):
     """set or show the current branch name
 
@@ -1142,10 +1325,11 @@
 
             if not opts.get('force') and label in repo.branchmap():
                 if label not in [p.branch() for p in repo[None].parents()]:
-                    raise error.Abort(_('a branch of the same name already'
-                                       ' exists'),
-                                     # i18n: "it" refers to an existing branch
-                                     hint=_("use 'hg update' to switch to it"))
+                    raise error.Abort(
+                        _('a branch of the same name already' ' exists'),
+                        # i18n: "it" refers to an existing branch
+                        hint=_("use 'hg update' to switch to it"),
+                    )
 
             repo.dirstate.setbranch(label)
             ui.status(_('marked working directory as branch %s\n') % label)
@@ -1154,18 +1338,31 @@
             for n, h, t, c in repo.branchmap().iterbranches():
                 if n != "default" and not c:
                     return 0
-            ui.status(_('(branches are permanent and global, '
-                        'did you want a bookmark?)\n'))
-
-@command('branches',
-    [('a', 'active', False,
-      _('show only branches that have unmerged heads (DEPRECATED)')),
-     ('c', 'closed', False, _('show normal and closed branches')),
-     ('r', 'rev', [], _('show branch name(s) of the given rev'))
-    ] + formatteropts,
+            ui.status(
+                _(
+                    '(branches are permanent and global, '
+                    'did you want a bookmark?)\n'
+                )
+            )
+
+
+@command(
+    'branches',
+    [
+        (
+            'a',
+            'active',
+            False,
+            _('show only branches that have unmerged heads (DEPRECATED)'),
+        ),
+        ('c', 'closed', False, _('show normal and closed branches')),
+        ('r', 'rev', [], _('show branch name(s) of the given rev')),
+    ]
+    + formatteropts,
     _('[-c]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def branches(ui, repo, active=False, closed=False, **opts):
     """list repository named branches
 
@@ -1212,8 +1409,7 @@
             openheads = set(repo.branchmap().iteropen(heads))
             isactive = bool(openheads & allheads)
         branches.append((tag, repo[tip], isactive, not isclosed))
-    branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
-                  reverse=True)
+    branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True)
 
     for tag, ctx, isactive, isopen in branches:
         if active and not isactive:
@@ -1229,7 +1425,7 @@
         else:
             label = 'branches.inactive'
             notice = _(' (inactive)')
-        current = (tag == repo.dirstate.branch())
+        current = tag == repo.dirstate.branch()
         if current:
             label = 'branches.current'
 
@@ -1238,8 +1434,14 @@
         rev = ctx.rev()
         padsize = max(31 - len("%d" % rev) - encoding.colwidth(tag), 0)
         fmt = ' ' * padsize + ' %d:%s'
-        fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
-                     label='log.changeset changeset.%s' % ctx.phasestr())
+        fm.condwrite(
+            not ui.quiet,
+            'rev node',
+            fmt,
+            rev,
+            hexfunc(ctx.node()),
+            label='log.changeset changeset.%s' % ctx.phasestr(),
+        )
         fm.context(ctx=ctx)
         fm.data(active=isactive, closed=not isopen, current=current)
         if not ui.quiet:
@@ -1247,20 +1449,39 @@
         fm.plain('\n')
     fm.end()
 
-@command('bundle',
-    [('f', 'force', None, _('run even when the destination is unrelated')),
-    ('r', 'rev', [], _('a changeset intended to be added to the destination'),
-     _('REV')),
-    ('b', 'branch', [], _('a specific branch you would like to bundle'),
-     _('BRANCH')),
-    ('', 'base', [],
-     _('a base changeset assumed to be available at the destination'),
-     _('REV')),
-    ('a', 'all', None, _('bundle all changesets in the repository')),
-    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
-    ] + remoteopts,
+
+@command(
+    'bundle',
+    [
+        ('f', 'force', None, _('run even when the destination is unrelated')),
+        (
+            'r',
+            'rev',
+            [],
+            _('a changeset intended to be added to the destination'),
+            _('REV'),
+        ),
+        (
+            'b',
+            'branch',
+            [],
+            _('a specific branch you would like to bundle'),
+            _('BRANCH'),
+        ),
+        (
+            '',
+            'base',
+            [],
+            _('a base changeset assumed to be available at the destination'),
+            _('REV'),
+        ),
+        ('a', 'all', None, _('bundle all changesets in the repository')),
+        ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
+    ]
+    + remoteopts,
     _('[-f] [-t BUNDLESPEC] [-a] [-r REV]... [--base REV]... FILE [DEST]'),
-    helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def bundle(ui, repo, fname, dest=None, **opts):
     """create a bundle file
 
@@ -1301,33 +1522,41 @@
     try:
         bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
     except error.UnsupportedBundleSpecification as e:
-        raise error.Abort(pycompat.bytestr(e),
-                          hint=_("see 'hg help bundlespec' for supported "
-                                 "values for --type"))
+        raise error.Abort(
+            pycompat.bytestr(e),
+            hint=_(
+                "see 'hg help bundlespec' for supported " "values for --type"
+            ),
+        )
     cgversion = bundlespec.contentopts["cg.version"]
 
     # Packed bundles are a pseudo bundle format for now.
     if cgversion == 's1':
-        raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
-                          hint=_("use 'hg debugcreatestreamclonebundle'"))
+        raise error.Abort(
+            _('packed bundles cannot be produced by "hg bundle"'),
+            hint=_("use 'hg debugcreatestreamclonebundle'"),
+        )
 
     if opts.get('all'):
         if dest:
-            raise error.Abort(_("--all is incompatible with specifying "
-                                "a destination"))
+            raise error.Abort(
+                _("--all is incompatible with specifying " "a destination")
+            )
         if opts.get('base'):
             ui.warn(_("ignoring --base because --all was specified\n"))
         base = [nullrev]
     else:
         base = scmutil.revrange(repo, opts.get('base'))
     if cgversion not in changegroup.supportedoutgoingversions(repo):
-        raise error.Abort(_("repository does not support bundle version %s") %
-                          cgversion)
+        raise error.Abort(
+            _("repository does not support bundle version %s") % cgversion
+        )
 
     if base:
         if dest:
-            raise error.Abort(_("--base is incompatible with specifying "
-                               "a destination"))
+            raise error.Abort(
+                _("--base is incompatible with specifying " "a destination")
+            )
         common = [repo[rev].node() for rev in base]
         heads = [repo[r].node() for r in revs] if revs else None
         outgoing = discovery.outgoing(repo, common, heads)
@@ -1338,16 +1567,15 @@
         revs = [repo[r].hex() for r in revs]
         revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
         heads = revs and pycompat.maplist(repo.lookup, revs) or revs
-        outgoing = discovery.findcommonoutgoing(repo, other,
-                                                onlyheads=heads,
-                                                force=opts.get('force'),
-                                                portable=True)
+        outgoing = discovery.findcommonoutgoing(
+            repo, other, onlyheads=heads, force=opts.get('force'), portable=True
+        )
 
     if not outgoing.missing:
         scmutil.nochangesfound(ui, repo, not base and outgoing.excluded)
         return 1
 
-    if cgversion == '01': #bundle1
+    if cgversion == '01':  # bundle1
         bversion = 'HG10' + bundlespec.wirecompression
         bcompression = None
     elif cgversion in ('02', '03'):
@@ -1355,15 +1583,17 @@
         bcompression = bundlespec.wirecompression
     else:
         raise error.ProgrammingError(
-            'bundle: unexpected changegroup version %s' % cgversion)
+            'bundle: unexpected changegroup version %s' % cgversion
+        )
 
     # TODO compression options should be derived from bundlespec parsing.
     # This is a temporary hack to allow adjusting bundle compression
     # level without a) formalizing the bundlespec changes to declare it
     # b) introducing a command flag.
     compopts = {}
-    complevel = ui.configint('experimental',
-                             'bundlecomplevel.' + bundlespec.compression)
+    complevel = ui.configint(
+        'experimental', 'bundlecomplevel.' + bundlespec.compression
+    )
     if complevel is None:
         complevel = ui.configint('experimental', 'bundlecomplevel')
     if complevel is not None:
@@ -1376,20 +1606,39 @@
     if repo.ui.configbool('experimental', 'bundle-phases'):
         bundlespec.contentopts['phases'] = True
 
-    bundle2.writenewbundle(ui, repo, 'bundle', fname, bversion, outgoing,
-                           bundlespec.contentopts, compression=bcompression,
-                           compopts=compopts)
-
-@command('cat',
-    [('o', 'output', '',
-     _('print output to file with formatted name'), _('FORMAT')),
-    ('r', 'rev', '', _('print the given revision'), _('REV')),
-    ('', 'decode', None, _('apply any matching decode filter')),
-    ] + walkopts + formatteropts,
+    bundle2.writenewbundle(
+        ui,
+        repo,
+        'bundle',
+        fname,
+        bversion,
+        outgoing,
+        bundlespec.contentopts,
+        compression=bcompression,
+        compopts=compopts,
+    )
+
+
+@command(
+    'cat',
+    [
+        (
+            'o',
+            'output',
+            '',
+            _('print output to file with formatted name'),
+            _('FORMAT'),
+        ),
+        ('r', 'rev', '', _('print the given revision'), _('REV')),
+        ('', 'decode', None, _('apply any matching decode filter')),
+    ]
+    + walkopts
+    + formatteropts,
     _('[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def cat(ui, repo, file1, *pats, **opts):
     """output the current or given revision of files
 
@@ -1440,27 +1689,60 @@
         ui.pager('cat')
         fm = ui.formatter('cat', opts)
     with fm:
-        return cmdutil.cat(ui, repo, ctx, m, fm, fntemplate, '',
-                           **pycompat.strkwargs(opts))
-
-@command('clone',
-    [('U', 'noupdate', None, _('the clone will include an empty working '
-                               'directory (only a repository)')),
-    ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
-        _('REV')),
-    ('r', 'rev', [], _('do not clone everything, but include this changeset'
-                       ' and its ancestors'), _('REV')),
-    ('b', 'branch', [], _('do not clone everything, but include this branch\'s'
-                          ' changesets and their ancestors'), _('BRANCH')),
-    ('', 'pull', None, _('use pull protocol to copy metadata')),
-    ('', 'uncompressed', None,
-       _('an alias to --stream (DEPRECATED)')),
-    ('', 'stream', None,
-       _('clone with minimal data processing')),
-    ] + remoteopts,
+        return cmdutil.cat(
+            ui, repo, ctx, m, fm, fntemplate, '', **pycompat.strkwargs(opts)
+        )
+
+
+@command(
+    'clone',
+    [
+        (
+            'U',
+            'noupdate',
+            None,
+            _(
+                'the clone will include an empty working '
+                'directory (only a repository)'
+            ),
+        ),
+        (
+            'u',
+            'updaterev',
+            '',
+            _('revision, tag, or branch to check out'),
+            _('REV'),
+        ),
+        (
+            'r',
+            'rev',
+            [],
+            _(
+                'do not clone everything, but include this changeset'
+                ' and its ancestors'
+            ),
+            _('REV'),
+        ),
+        (
+            'b',
+            'branch',
+            [],
+            _(
+                'do not clone everything, but include this branch\'s'
+                ' changesets and their ancestors'
+            ),
+            _('BRANCH'),
+        ),
+        ('', 'pull', None, _('use pull protocol to copy metadata')),
+        ('', 'uncompressed', None, _('an alias to --stream (DEPRECATED)')),
+        ('', 'stream', None, _('clone with minimal data processing')),
+    ]
+    + remoteopts,
     _('[OPTION]... SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
-    helpbasic=True, norepo=True)
+    helpbasic=True,
+    norepo=True,
+)
 def clone(ui, source, dest=None, **opts):
     """make a copy of an existing repository
 
@@ -1587,34 +1869,55 @@
         if opts.get('exclude'):
             excludepats = narrowspec.parsepatterns(opts.get('exclude'))
 
-    r = hg.clone(ui, opts, source, dest,
-                 pull=opts.get('pull'),
-                 stream=opts.get('stream') or opts.get('uncompressed'),
-                 revs=opts.get('rev'),
-                 update=opts.get('updaterev') or not opts.get('noupdate'),
-                 branch=opts.get('branch'),
-                 shareopts=opts.get('shareopts'),
-                 storeincludepats=includepats,
-                 storeexcludepats=excludepats,
-                 depth=opts.get('depth') or None)
+    r = hg.clone(
+        ui,
+        opts,
+        source,
+        dest,
+        pull=opts.get('pull'),
+        stream=opts.get('stream') or opts.get('uncompressed'),
+        revs=opts.get('rev'),
+        update=opts.get('updaterev') or not opts.get('noupdate'),
+        branch=opts.get('branch'),
+        shareopts=opts.get('shareopts'),
+        storeincludepats=includepats,
+        storeexcludepats=excludepats,
+        depth=opts.get('depth') or None,
+    )
 
     return r is None
 
-@command('commit|ci',
-    [('A', 'addremove', None,
-     _('mark new/missing files as added/removed before committing')),
-    ('', 'close-branch', None,
-     _('mark a branch head as closed')),
-    ('', 'amend', None, _('amend the parent of the working directory')),
-    ('s', 'secret', None, _('use the secret phase for committing')),
-    ('e', 'edit', None, _('invoke editor on commit messages')),
-    ('', 'force-close-branch', None,
-     _('forcibly close branch from a non-head changeset (ADVANCED)')),
-    ('i', 'interactive', None, _('use interactive mode')),
-    ] + walkopts + commitopts + commitopts2 + subrepoopts,
+
+@command(
+    'commit|ci',
+    [
+        (
+            'A',
+            'addremove',
+            None,
+            _('mark new/missing files as added/removed before committing'),
+        ),
+        ('', 'close-branch', None, _('mark a branch head as closed')),
+        ('', 'amend', None, _('amend the parent of the working directory')),
+        ('s', 'secret', None, _('use the secret phase for committing')),
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        (
+            '',
+            'force-close-branch',
+            None,
+            _('forcibly close branch from a non-head changeset (ADVANCED)'),
+        ),
+        ('i', 'interactive', None, _('use interactive mode')),
+    ]
+    + walkopts
+    + commitopts
+    + commitopts2
+    + subrepoopts,
     _('[OPTION]... [FILE]...'),
-    helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
-    inferrepo=True)
+    helpcategory=command.CATEGORY_COMMITTING,
+    helpbasic=True,
+    inferrepo=True,
+)
 def commit(ui, repo, *pats, **opts):
     """commit the specified files or all outstanding changes
 
@@ -1674,12 +1977,13 @@
     with repo.wlock(), repo.lock():
         return _docommit(ui, repo, *pats, **opts)
 
+
 def _docommit(ui, repo, *pats, **opts):
     if opts.get(r'interactive'):
         opts.pop(r'interactive')
-        ret = cmdutil.dorecord(ui, repo, commit, None, False,
-                               cmdutil.recordfilter, *pats,
-                               **opts)
+        ret = cmdutil.dorecord(
+            ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts
+        )
         # ret can be 0 (no changes to record) or the value returned by
         # commit(), 1 if nothing changed or None on success.
         return 1 if ret == 0 else ret
@@ -1701,18 +2005,26 @@
         extra['close'] = '1'
 
         if repo['.'].closesbranch():
-            raise error.Abort(_('current revision is already a branch closing'
-                                ' head'))
+            raise error.Abort(
+                _('current revision is already a branch closing' ' head')
+            )
         elif not bheads:
             raise error.Abort(_('branch "%s" has no heads to close') % branch)
-        elif (branch == repo['.'].branch() and repo['.'].node() not in bheads
-              and not opts.get('force_close_branch')):
-            hint = _('use --force-close-branch to close branch from a non-head'
-                     ' changeset')
+        elif (
+            branch == repo['.'].branch()
+            and repo['.'].node() not in bheads
+            and not opts.get('force_close_branch')
+        ):
+            hint = _(
+                'use --force-close-branch to close branch from a non-head'
+                ' changeset'
+            )
             raise error.Abort(_('can only close branch heads'), hint=hint)
         elif opts.get('amend'):
-            if (repo['.'].p1().branch() != branch and
-                repo['.'].p2().branch() != branch):
+            if (
+                repo['.'].p1().branch() != branch
+                and repo['.'].p2().branch() != branch
+            ):
                 raise error.Abort(_('can only close branch heads'))
 
     if opts.get('amend'):
@@ -1736,6 +2048,7 @@
             ui.status(_("nothing changed\n"))
             return 1
     else:
+
         def commitfunc(ui, repo, message, match, opts):
             overrides = {}
             if opts.get('secret'):
@@ -1744,24 +2057,33 @@
             baseui = repo.baseui
             with baseui.configoverride(overrides, 'commit'):
                 with ui.configoverride(overrides, 'commit'):
-                    editform = cmdutil.mergeeditform(repo[None],
-                                                     'commit.normal')
+                    editform = cmdutil.mergeeditform(
+                        repo[None], 'commit.normal'
+                    )
                     editor = cmdutil.getcommiteditor(
-                        editform=editform, **pycompat.strkwargs(opts))
-                    return repo.commit(message,
-                                       opts.get('user'),
-                                       opts.get('date'),
-                                       match,
-                                       editor=editor,
-                                       extra=extra)
+                        editform=editform, **pycompat.strkwargs(opts)
+                    )
+                    return repo.commit(
+                        message,
+                        opts.get('user'),
+                        opts.get('date'),
+                        match,
+                        editor=editor,
+                        extra=extra,
+                    )
 
         node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
 
         if not node:
             stat = cmdutil.postcommitstatus(repo, pats, opts)
             if stat[3]:
-                ui.status(_("nothing changed (%d missing files, see "
-                            "'hg status')\n") % len(stat[3]))
+                ui.status(
+                    _(
+                        "nothing changed (%d missing files, see "
+                        "'hg status')\n"
+                    )
+                    % len(stat[3])
+                )
             else:
                 ui.status(_("nothing changed\n"))
             return 1
@@ -1769,18 +2091,32 @@
     cmdutil.commitstatus(repo, node, branch, bheads, opts)
 
     if not ui.quiet and ui.configbool('commands', 'commit.post-status'):
-        status(ui, repo, modified=True, added=True, removed=True, deleted=True,
-               unknown=True, subrepos=opts.get('subrepos'))
-
-@command('config|showconfig|debugconfig',
-    [('u', 'untrusted', None, _('show untrusted configuration options')),
-     ('e', 'edit', None, _('edit user config')),
-     ('l', 'local', None, _('edit repository config')),
-     ('g', 'global', None, _('edit global config'))] + formatteropts,
+        status(
+            ui,
+            repo,
+            modified=True,
+            added=True,
+            removed=True,
+            deleted=True,
+            unknown=True,
+            subrepos=opts.get('subrepos'),
+        )
+
+
+@command(
+    'config|showconfig|debugconfig',
+    [
+        ('u', 'untrusted', None, _('show untrusted configuration options')),
+        ('e', 'edit', None, _('edit user config')),
+        ('l', 'local', None, _('edit repository config')),
+        ('g', 'global', None, _('edit global config')),
+    ]
+    + formatteropts,
     _('[-u] [NAME]...'),
     helpcategory=command.CATEGORY_HELP,
     optionalrepo=True,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def config(ui, repo, *values, **opts):
     """show combined config settings from all hgrc files
 
@@ -1846,9 +2182,12 @@
             fp.close()
 
         editor = ui.geteditor()
-        ui.system("%s \"%s\"" % (editor, f),
-                  onerr=error.Abort, errprefix=_("edit failed"),
-                  blockedtag='config_edit')
+        ui.system(
+            "%s \"%s\"" % (editor, f),
+            onerr=error.Abort,
+            errprefix=_("edit failed"),
+            blockedtag='config_edit',
+        )
         return
     ui.pager('config')
     fm = ui.formatter('config', opts)
@@ -1866,7 +2205,7 @@
     if values:
         selsections = [v for v in values if '.' not in v]
         selentries = [v for v in values if '.' in v]
-    uniquesel = (len(selentries) == 1 and not selsections)
+    uniquesel = len(selentries) == 1 and not selsections
     selsections = set(selsections)
     selentries = set(selentries)
 
@@ -1895,9 +2234,13 @@
         return 0
     return 1
 
-@command('continue',
-    dryrunopts, helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
-    helpbasic=True)
+
+@command(
+    'continue',
+    dryrunopts,
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+    helpbasic=True,
+)
 def continuecmd(ui, repo, **opts):
     """resumes an interrupted operation (EXPERIMENTAL)
 
@@ -1911,20 +2254,30 @@
     if not contstate:
         raise error.Abort(_('no operation in progress'))
     if not contstate.continuefunc:
-        raise error.Abort((_("%s in progress but does not support "
-                             "'hg continue'") % (contstate._opname)),
-                             hint=contstate.continuemsg())
+        raise error.Abort(
+            (
+                _("%s in progress but does not support " "'hg continue'")
+                % (contstate._opname)
+            ),
+            hint=contstate.continuemsg(),
+        )
     if dryrun:
         ui.status(_('%s in progress, will be resumed\n') % (contstate._opname))
         return
     return contstate.continuefunc(ui, repo)
 
-@command('copy|cp',
-    [('A', 'after', None, _('record a copy that has already occurred')),
-    ('f', 'force', None, _('forcibly copy over an existing managed file')),
-    ] + walkopts + dryrunopts,
+
+@command(
+    'copy|cp',
+    [
+        ('A', 'after', None, _('record a copy that has already occurred')),
+        ('f', 'force', None, _('forcibly copy over an existing managed file')),
+    ]
+    + walkopts
+    + dryrunopts,
     _('[OPTION]... SOURCE... DEST'),
-    helpcategory=command.CATEGORY_FILE_CONTENTS)
+    helpcategory=command.CATEGORY_FILE_CONTENTS,
+)
 def copy(ui, repo, *pats, **opts):
     """mark files as copied for the next commit
 
@@ -1945,10 +2298,14 @@
     with repo.wlock(False):
         return cmdutil.copy(ui, repo, pats, opts)
 
+
 @command(
-    'debugcommands', [], _('[COMMAND]'),
+    'debugcommands',
+    [],
+    _('[COMMAND]'),
     helpcategory=command.CATEGORY_HELP,
-    norepo=True)
+    norepo=True,
+)
 def debugcommands(ui, cmd='', *args):
     """list all available commands and options"""
     for cmd, vals in sorted(table.iteritems()):
@@ -1956,11 +2313,14 @@
         opts = ', '.join([i[1] for i in vals[1]])
         ui.write('%s: %s\n' % (cmd, opts))
 
-@command('debugcomplete',
+
+@command(
+    'debugcomplete',
     [('o', 'options', None, _('show the command options'))],
     _('[-o] CMD'),
     helpcategory=command.CATEGORY_HELP,
-    norepo=True)
+    norepo=True,
+)
 def debugcomplete(ui, cmd='', **opts):
     """returns the completion list associated with the given command"""
 
@@ -1985,13 +2345,23 @@
         cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
     ui.write("%s\n" % "\n".join(sorted(cmdlist)))
 
-@command('diff',
-    [('r', 'rev', [], _('revision'), _('REV')),
-    ('c', 'change', '', _('change made by revision'), _('REV'))
-    ] + diffopts + diffopts2 + walkopts + subrepoopts,
+
+@command(
+    'diff',
+    [
+        ('r', 'rev', [], _('revision'), _('REV')),
+        ('c', 'change', '', _('change made by revision'), _('REV')),
+    ]
+    + diffopts
+    + diffopts2
+    + walkopts
+    + subrepoopts,
     _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
-    helpbasic=True, inferrepo=True, intents={INTENT_READONLY})
+    helpbasic=True,
+    inferrepo=True,
+    intents={INTENT_READONLY},
+)
 def diff(ui, repo, *pats, **opts):
     """diff repository (or selected files)
 
@@ -2075,21 +2445,46 @@
     m = scmutil.match(ctx2, pats, opts)
     m = repo.narrowmatch(m)
     ui.pager('diff')
-    logcmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
-                              listsubrepos=opts.get('subrepos'),
-                              root=opts.get('root'))
-
-@command('export',
-    [('B', 'bookmark', '',
-     _('export changes only reachable by given bookmark'), _('BOOKMARK')),
-    ('o', 'output', '',
-     _('print output to file with formatted name'), _('FORMAT')),
-    ('', 'switch-parent', None, _('diff against the second parent')),
-    ('r', 'rev', [], _('revisions to export'), _('REV')),
-    ] + diffopts + formatteropts,
+    logcmdutil.diffordiffstat(
+        ui,
+        repo,
+        diffopts,
+        node1,
+        node2,
+        m,
+        stat=stat,
+        listsubrepos=opts.get('subrepos'),
+        root=opts.get('root'),
+    )
+
+
+@command(
+    'export',
+    [
+        (
+            'B',
+            'bookmark',
+            '',
+            _('export changes only reachable by given bookmark'),
+            _('BOOKMARK'),
+        ),
+        (
+            'o',
+            'output',
+            '',
+            _('print output to file with formatted name'),
+            _('FORMAT'),
+        ),
+        ('', 'switch-parent', None, _('diff against the second parent')),
+        ('r', 'rev', [], _('revisions to export'), _('REV')),
+    ]
+    + diffopts
+    + formatteropts,
     _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
-    helpbasic=True, intents={INTENT_READONLY})
+    helpbasic=True,
+    intents={INTENT_READONLY},
+)
 def export(ui, repo, *changesets, **opts):
     """dump the header and diffs for one or more changesets
 
@@ -2200,17 +2595,29 @@
         ui.pager('export')
         fm = ui.formatter('export', opts)
     with fm:
-        cmdutil.export(repo, revs, fm, fntemplate=fntemplate,
-                       switch_parent=opts.get('switch_parent'),
-                       opts=patch.diffallopts(ui, opts))
-
-@command('files',
-    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
-     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-    ] + walkopts + formatteropts + subrepoopts,
+        cmdutil.export(
+            repo,
+            revs,
+            fm,
+            fntemplate=fntemplate,
+            switch_parent=opts.get('switch_parent'),
+            opts=patch.diffallopts(ui, opts),
+        )
+
+
+@command(
+    'files',
+    [
+        ('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
+        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+    ]
+    + walkopts
+    + formatteropts
+    + subrepoopts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def files(ui, repo, *pats, **opts):
     """list tracked files
 
@@ -2280,16 +2687,21 @@
     ui.pager('files')
     uipathfn = scmutil.getuipathfn(ctx.repo(), legacyrelativevalue=True)
     with ui.formatter('files', opts) as fm:
-        return cmdutil.files(ui, ctx, m, uipathfn, fm, fmt,
-                             opts.get('subrepos'))
+        return cmdutil.files(
+            ui, ctx, m, uipathfn, fm, fmt, opts.get('subrepos')
+        )
+
 
 @command(
     'forget',
-    [('i', 'interactive', None, _('use interactive mode')),
-    ] + walkopts + dryrunopts,
+    [('i', 'interactive', None, _('use interactive mode')),]
+    + walkopts
+    + dryrunopts,
     _('[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    helpbasic=True, inferrepo=True)
+    helpbasic=True,
+    inferrepo=True,
+)
 def forget(ui, repo, *pats, **opts):
     """forget the specified files on the next commit
 
@@ -2326,31 +2738,56 @@
     m = scmutil.match(repo[None], pats, opts)
     dryrun, interactive = opts.get('dry_run'), opts.get('interactive')
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
-    rejected = cmdutil.forget(ui, repo, m, prefix="", uipathfn=uipathfn,
-                              explicitonly=False, dryrun=dryrun,
-                              interactive=interactive)[0]
+    rejected = cmdutil.forget(
+        ui,
+        repo,
+        m,
+        prefix="",
+        uipathfn=uipathfn,
+        explicitonly=False,
+        dryrun=dryrun,
+        interactive=interactive,
+    )[0]
     return rejected and 1 or 0
 
+
 @command(
     'graft',
-    [('r', 'rev', [], _('revisions to graft'), _('REV')),
-     ('', 'base', '',
-      _('base revision when doing the graft merge (ADVANCED)'), _('REV')),
-     ('c', 'continue', False, _('resume interrupted graft')),
-     ('', 'stop', False, _('stop interrupted graft')),
-     ('', 'abort', False, _('abort interrupted graft')),
-     ('e', 'edit', False, _('invoke editor on commit messages')),
-     ('', 'log', None, _('append graft info to log message')),
-     ('', 'no-commit', None,
-      _("don't commit, just apply the changes in working directory")),
-     ('f', 'force', False, _('force graft')),
-     ('D', 'currentdate', False,
-      _('record the current date as commit date')),
-     ('U', 'currentuser', False,
-      _('record the current user as committer'))]
-    + commitopts2 + mergetoolopts  + dryrunopts,
+    [
+        ('r', 'rev', [], _('revisions to graft'), _('REV')),
+        (
+            '',
+            'base',
+            '',
+            _('base revision when doing the graft merge (ADVANCED)'),
+            _('REV'),
+        ),
+        ('c', 'continue', False, _('resume interrupted graft')),
+        ('', 'stop', False, _('stop interrupted graft')),
+        ('', 'abort', False, _('abort interrupted graft')),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+        ('', 'log', None, _('append graft info to log message')),
+        (
+            '',
+            'no-commit',
+            None,
+            _("don't commit, just apply the changes in working directory"),
+        ),
+        ('f', 'force', False, _('force graft')),
+        (
+            'D',
+            'currentdate',
+            False,
+            _('record the current date as commit date'),
+        ),
+        ('U', 'currentuser', False, _('record the current user as committer')),
+    ]
+    + commitopts2
+    + mergetoolopts
+    + dryrunopts,
     _('[OPTION]... [-r REV]... REV...'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+)
 def graft(ui, repo, *revs, **opts):
     '''copy changes from other branches onto the current branch
 
@@ -2449,11 +2886,16 @@
     with repo.wlock():
         return _dograft(ui, repo, *revs, **opts)
 
+
 def _dograft(ui, repo, *revs, **opts):
     opts = pycompat.byteskwargs(opts)
     if revs and opts.get('rev'):
-        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
-                  'revision ordering!\n'))
+        ui.warn(
+            _(
+                'warning: inconsistent use of --rev might give unexpected '
+                'revision ordering!\n'
+            )
+        )
 
     revs = list(revs)
     revs.extend(opts.get('rev'))
@@ -2474,45 +2916,68 @@
     if not opts.get('date') and opts.get('currentdate'):
         opts['date'] = "%d %d" % dateutil.makedate()
 
-    editor = cmdutil.getcommiteditor(editform='graft',
-                                     **pycompat.strkwargs(opts))
+    editor = cmdutil.getcommiteditor(
+        editform='graft', **pycompat.strkwargs(opts)
+    )
 
     cont = False
     if opts.get('no_commit'):
         if opts.get('edit'):
-            raise error.Abort(_("cannot specify --no-commit and "
-                                "--edit together"))
+            raise error.Abort(
+                _("cannot specify --no-commit and " "--edit together")
+            )
         if opts.get('currentuser'):
-            raise error.Abort(_("cannot specify --no-commit and "
-                                "--currentuser together"))
+            raise error.Abort(
+                _("cannot specify --no-commit and " "--currentuser together")
+            )
         if opts.get('currentdate'):
-            raise error.Abort(_("cannot specify --no-commit and "
-                                "--currentdate together"))
+            raise error.Abort(
+                _("cannot specify --no-commit and " "--currentdate together")
+            )
         if opts.get('log'):
-            raise error.Abort(_("cannot specify --no-commit and "
-                                "--log together"))
+            raise error.Abort(
+                _("cannot specify --no-commit and " "--log together")
+            )
 
     graftstate = statemod.cmdstate(repo, 'graftstate')
 
     if opts.get('stop'):
         if opts.get('continue'):
-            raise error.Abort(_("cannot use '--continue' and "
-                                "'--stop' together"))
+            raise error.Abort(
+                _("cannot use '--continue' and " "'--stop' together")
+            )
         if opts.get('abort'):
             raise error.Abort(_("cannot use '--abort' and '--stop' together"))
 
-        if any((opts.get('edit'), opts.get('log'), opts.get('user'),
-                opts.get('date'), opts.get('currentdate'),
-                opts.get('currentuser'), opts.get('rev'))):
+        if any(
+            (
+                opts.get('edit'),
+                opts.get('log'),
+                opts.get('user'),
+                opts.get('date'),
+                opts.get('currentdate'),
+                opts.get('currentuser'),
+                opts.get('rev'),
+            )
+        ):
             raise error.Abort(_("cannot specify any other flag with '--stop'"))
         return _stopgraft(ui, repo, graftstate)
     elif opts.get('abort'):
         if opts.get('continue'):
-            raise error.Abort(_("cannot use '--continue' and "
-                                "'--abort' together"))
-        if any((opts.get('edit'), opts.get('log'), opts.get('user'),
-                opts.get('date'), opts.get('currentdate'),
-                opts.get('currentuser'), opts.get('rev'))):
+            raise error.Abort(
+                _("cannot use '--continue' and " "'--abort' together")
+            )
+        if any(
+            (
+                opts.get('edit'),
+                opts.get('log'),
+                opts.get('user'),
+                opts.get('date'),
+                opts.get('currentdate'),
+                opts.get('currentuser'),
+                opts.get('rev'),
+            )
+        ):
             raise error.Abort(_("cannot specify any other flag with '--abort'"))
 
         return cmdutil.abortgraft(ui, repo, graftstate)
@@ -2568,8 +3033,9 @@
         # don't mutate while iterating, create a copy
         for rev in list(revs):
             if rev in ancestors:
-                ui.warn(_('skipping ancestor revision %d:%s\n') %
-                        (rev, repo[rev]))
+                ui.warn(
+                    _('skipping ancestor revision %d:%s\n') % (rev, repo[rev])
+                )
                 # XXX remove on list is slow
                 revs.remove(rev)
         if not revs:
@@ -2597,26 +3063,42 @@
                 except error.RepoLookupError:
                     r = None
                 if r in revs:
-                    ui.warn(_('skipping revision %d:%s '
-                              '(already grafted to %d:%s)\n')
-                            % (r, repo[r], rev, ctx))
+                    ui.warn(
+                        _(
+                            'skipping revision %d:%s '
+                            '(already grafted to %d:%s)\n'
+                        )
+                        % (r, repo[r], rev, ctx)
+                    )
                     revs.remove(r)
                 elif ids[n] in revs:
                     if r is None:
-                        ui.warn(_('skipping already grafted revision %d:%s '
-                                  '(%d:%s also has unknown origin %s)\n')
-                                % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
+                        ui.warn(
+                            _(
+                                'skipping already grafted revision %d:%s '
+                                '(%d:%s also has unknown origin %s)\n'
+                            )
+                            % (ids[n], repo[ids[n]], rev, ctx, n[:12])
+                        )
                     else:
-                        ui.warn(_('skipping already grafted revision %d:%s '
-                                  '(%d:%s also has origin %d:%s)\n')
-                                % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
+                        ui.warn(
+                            _(
+                                'skipping already grafted revision %d:%s '
+                                '(%d:%s also has origin %d:%s)\n'
+                            )
+                            % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])
+                        )
                     revs.remove(ids[n])
             elif ctx.hex() in ids:
                 r = ids[ctx.hex()]
                 if r in revs:
-                    ui.warn(_('skipping already grafted revision %d:%s '
-                              '(was grafted from %d:%s)\n') %
-                            (r, repo[r], rev, ctx))
+                    ui.warn(
+                        _(
+                            'skipping already grafted revision %d:%s '
+                            '(was grafted from %d:%s)\n'
+                        )
+                        % (r, repo[r], rev, ctx)
+                    )
                     revs.remove(r)
         if not revs:
             return -1
@@ -2624,8 +3106,11 @@
     if opts.get('no_commit'):
         statedata['no_commit'] = True
     for pos, ctx in enumerate(repo.set("%ld", revs)):
-        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
-                               ctx.description().split('\n', 1)[0])
+        desc = '%d:%s "%s"' % (
+            ctx.rev(),
+            ctx,
+            ctx.description().split('\n', 1)[0],
+        )
         names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
         if names:
             desc += ' (%s)' % ' '.join(names)
@@ -2669,19 +3154,21 @@
                 graftstate.save(stateversion, statedata)
                 hint = _("use 'hg resolve' and 'hg graft --continue'")
                 raise error.Abort(
-                    _("unresolved conflicts, can't continue"),
-                    hint=hint)
+                    _("unresolved conflicts, can't continue"), hint=hint
+                )
         else:
             cont = False
 
         # commit if --no-commit is false
         if not opts.get('no_commit'):
-            node = repo.commit(text=message, user=user, date=date, extra=extra,
-                               editor=editor)
+            node = repo.commit(
+                text=message, user=user, date=date, extra=extra, editor=editor
+            )
             if node is None:
                 ui.warn(
-                    _('note: graft of %d:%s created no changes to commit\n') %
-                    (ctx.rev(), ctx))
+                    _('note: graft of %d:%s created no changes to commit\n')
+                    % (ctx.rev(), ctx)
+                )
             # checking that newnodes exist because old state files won't have it
             elif statedata.get('newnodes') is not None:
                 statedata['newnodes'].append(node)
@@ -2692,6 +3179,7 @@
 
     return 0
 
+
 def _stopgraft(ui, repo, graftstate):
     """stop the interrupted graft"""
     if not graftstate.exists():
@@ -2703,36 +3191,72 @@
     ui.status(_("working directory is now at %s\n") % pctx.hex()[:12])
     return 0
 
+
 statemod.addunfinished(
-    'graft', fname='graftstate', clearable=True, stopflag=True,
-    continueflag=True, abortfunc=cmdutil.hgabortgraft,
-    cmdhint=_("use 'hg graft --continue' or 'hg graft --stop' to stop")
+    'graft',
+    fname='graftstate',
+    clearable=True,
+    stopflag=True,
+    continueflag=True,
+    abortfunc=cmdutil.hgabortgraft,
+    cmdhint=_("use 'hg graft --continue' or 'hg graft --stop' to stop"),
 )
 
-@command('grep',
-    [('0', 'print0', None, _('end fields with NUL')),
-    ('', 'all', None, _('print all revisions that match (DEPRECATED) ')),
-    ('', 'diff', None, _('print all revisions when the term was introduced '
-                         'or removed')),
-    ('a', 'text', None, _('treat all files as text')),
-    ('f', 'follow', None,
-     _('follow changeset history,'
-       ' or file history across copies and renames')),
-    ('i', 'ignore-case', None, _('ignore case when matching')),
-    ('l', 'files-with-matches', None,
-     _('print only filenames and revisions that match')),
-    ('n', 'line-number', None, _('print matching line numbers')),
-    ('r', 'rev', [],
-     _('only search files changed within revision range'), _('REV')),
-    ('', 'all-files', None,
-     _('include all files in the changeset while grepping (EXPERIMENTAL)')),
-    ('u', 'user', None, _('list the author (long with -v)')),
-    ('d', 'date', None, _('list the date (short with -q)')),
-    ] + formatteropts + walkopts,
+
+@command(
+    'grep',
+    [
+        ('0', 'print0', None, _('end fields with NUL')),
+        ('', 'all', None, _('print all revisions that match (DEPRECATED) ')),
+        (
+            '',
+            'diff',
+            None,
+            _('print all revisions when the term was introduced ' 'or removed'),
+        ),
+        ('a', 'text', None, _('treat all files as text')),
+        (
+            'f',
+            'follow',
+            None,
+            _(
+                'follow changeset history,'
+                ' or file history across copies and renames'
+            ),
+        ),
+        ('i', 'ignore-case', None, _('ignore case when matching')),
+        (
+            'l',
+            'files-with-matches',
+            None,
+            _('print only filenames and revisions that match'),
+        ),
+        ('n', 'line-number', None, _('print matching line numbers')),
+        (
+            'r',
+            'rev',
+            [],
+            _('only search files changed within revision range'),
+            _('REV'),
+        ),
+        (
+            '',
+            'all-files',
+            None,
+            _(
+                'include all files in the changeset while grepping (EXPERIMENTAL)'
+            ),
+        ),
+        ('u', 'user', None, _('list the author (long with -v)')),
+        ('d', 'date', None, _('list the date (short with -q)')),
+    ]
+    + formatteropts
+    + walkopts,
     _('[OPTION]... PATTERN [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def grep(ui, repo, pattern, *pats, **opts):
     """search revision history for a pattern in specified files
 
@@ -2841,6 +3365,7 @@
 
     matches = {}
     copies = {}
+
     def grepbody(fn, rev, body):
         matches[rev].setdefault(fn, [])
         m = matches[rev][fn]
@@ -2864,6 +3389,7 @@
                     yield ('+', b[i])
 
     uipathfn = scmutil.getuipathfn(repo)
+
     def display(fm, fn, ctx, pstates, states):
         rev = scmutil.intrev(ctx)
         if fm.isplain():
@@ -2875,6 +3401,7 @@
         else:
             datefmt = '%a %b %d %H:%M:%S %Y %1%2'
         found = False
+
         @util.cachefunc
         def binary():
             flog = getfile(fn)
@@ -2900,14 +3427,32 @@
             ]
             if diff:
                 cols.append(
-                    ('change', '%s', change, True,
-                     'grep.inserted ' if change == '+' else 'grep.deleted ')
+                    (
+                        'change',
+                        '%s',
+                        change,
+                        True,
+                        'grep.inserted ' if change == '+' else 'grep.deleted ',
+                    )
                 )
-            cols.extend([
-                ('user', '%s', formatuser(ctx.user()), opts.get('user'), ''),
-                ('date', '%s', fm.formatdate(ctx.date(), datefmt),
-                 opts.get('date'), ''),
-            ])
+            cols.extend(
+                [
+                    (
+                        'user',
+                        '%s',
+                        formatuser(ctx.user()),
+                        opts.get('user'),
+                        '',
+                    ),
+                    (
+                        'date',
+                        '%s',
+                        fm.formatdate(ctx.date(), datefmt),
+                        opts.get('date'),
+                        '',
+                    ),
+                ]
+            )
             for name, fmt, data, cond, extra_label in cols:
                 if cond:
                     fm.plain(sep, label='grep.sep')
@@ -2950,6 +3495,7 @@
     follow = opts.get('follow')
 
     getrenamed = scmutil.getrenamedfn(repo)
+
     def prep(ctx, fns):
         rev = ctx.rev()
         pctx = ctx.p1()
@@ -3019,16 +3565,26 @@
 
     return not found
 
-@command('heads',
-    [('r', 'rev', '',
-     _('show only heads which are descendants of STARTREV'), _('STARTREV')),
-    ('t', 'topo', False, _('show topological heads only')),
-    ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
-    ('c', 'closed', False, _('show normal and closed branch heads')),
-    ] + templateopts,
+
+@command(
+    'heads',
+    [
+        (
+            'r',
+            'rev',
+            '',
+            _('show only heads which are descendants of STARTREV'),
+            _('STARTREV'),
+        ),
+        ('t', 'topo', False, _('show topological heads only')),
+        ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
+        ('c', 'closed', False, _('show normal and closed branch heads')),
+    ]
+    + templateopts,
     _('[-ct] [-r STARTREV] [REV]...'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def heads(ui, repo, *branchrevs, **opts):
     """show branch heads
 
@@ -3070,8 +3626,9 @@
         heads = [repo[h] for h in heads]
 
     if branchrevs:
-        branches = set(repo[r].branch()
-                       for r in scmutil.revrange(repo, branchrevs))
+        branches = set(
+            repo[r].branch() for r in scmutil.revrange(repo, branchrevs)
+        )
         heads = [h for h in heads if h.branch() in branches]
 
     if opts.get('active') and branchrevs:
@@ -3091,23 +3648,32 @@
         return 1
 
     ui.pager('heads')
-    heads = sorted(heads, key=lambda x: -x.rev())
+    heads = sorted(heads, key=lambda x: -(x.rev()))
     displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
     for ctx in heads:
         displayer.show(ctx)
     displayer.close()
 
-@command('help',
-    [('e', 'extension', None, _('show only help for extensions')),
-     ('c', 'command', None, _('show only help for commands')),
-     ('k', 'keyword', None, _('show topics matching keyword')),
-     ('s', 'system', [],
-      _('show help for specific platform(s)'), _('PLATFORM')),
-     ],
+
+@command(
+    'help',
+    [
+        ('e', 'extension', None, _('show only help for extensions')),
+        ('c', 'command', None, _('show only help for commands')),
+        ('k', 'keyword', None, _('show topics matching keyword')),
+        (
+            's',
+            'system',
+            [],
+            _('show help for specific platform(s)'),
+            _('PLATFORM'),
+        ),
+    ],
     _('[-eck] [-s PLATFORM] [TOPIC]'),
     helpcategory=command.CATEGORY_HELP,
     norepo=True,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def help_(ui, name=None, **opts):
     """show help for a given topic or a help overview
 
@@ -3139,21 +3705,35 @@
     ui.write(formatted)
 
 
-@command('identify|id',
-    [('r', 'rev', '',
-     _('identify the specified revision'), _('REV')),
-    ('n', 'num', None, _('show local revision number')),
-    ('i', 'id', None, _('show global revision id')),
-    ('b', 'branch', None, _('show branch')),
-    ('t', 'tags', None, _('show tags')),
-    ('B', 'bookmarks', None, _('show bookmarks')),
-    ] + remoteopts + formatteropts,
+@command(
+    'identify|id',
+    [
+        ('r', 'rev', '', _('identify the specified revision'), _('REV')),
+        ('n', 'num', None, _('show local revision number')),
+        ('i', 'id', None, _('show global revision id')),
+        ('b', 'branch', None, _('show branch')),
+        ('t', 'tags', None, _('show tags')),
+        ('B', 'bookmarks', None, _('show bookmarks')),
+    ]
+    + remoteopts
+    + formatteropts,
     _('[-nibtB] [-r REV] [SOURCE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
     optionalrepo=True,
-    intents={INTENT_READONLY})
-def identify(ui, repo, source=None, rev=None,
-             num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
+    intents={INTENT_READONLY},
+)
+def identify(
+    ui,
+    repo,
+    source=None,
+    rev=None,
+    num=None,
+    id=None,
+    branch=None,
+    tags=None,
+    bookmarks=None,
+    **opts
+):
     """identify the working directory or specified revision
 
     Print a summary identifying the repository state at REV using one or
@@ -3202,8 +3782,9 @@
 
     opts = pycompat.byteskwargs(opts)
     if not repo and not source:
-        raise error.Abort(_("there is no Mercurial repository here "
-                           "(.hg not found)"))
+        raise error.Abort(
+            _("there is no Mercurial repository here " "(.hg not found)")
+        )
 
     default = not (num or id or branch or tags or bookmarks)
     output = []
@@ -3211,7 +3792,7 @@
 
     if source:
         source, branches = hg.parseurl(ui.expandpath(source))
-        peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
+        peer = hg.peer(repo or ui, opts, source)  # only pass ui when no repo
         repo = peer.local()
         revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
 
@@ -3221,7 +3802,8 @@
     if not repo:
         if num or branch or tags:
             raise error.Abort(
-                _("can't query remote revision number, branch, or tags"))
+                _("can't query remote revision number, branch, or tags")
+            )
         if not rev and revs:
             rev = revs[0]
         if not rev:
@@ -3239,8 +3821,11 @@
 
             if 'bookmarks' in peer.listkeys('namespaces'):
                 hexremoterev = hex(remoterev)
-                bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
-                       if bmr == hexremoterev]
+                bms = [
+                    bm
+                    for bm, bmr in peer.listkeys('bookmarks').iteritems()
+                    if bmr == hexremoterev
+                ]
 
             return sorted(bms)
 
@@ -3282,8 +3867,11 @@
                 numoutput = ["%d" % p.rev() for p in parents]
                 output.append("%s%s" % ('+'.join(numoutput), dirty))
 
-            fm.data(parents=fm.formatlist([fm.hexfunc(p.node())
-                                           for p in parents], name='node'))
+            fm.data(
+                parents=fm.formatlist(
+                    [fm.hexfunc(p.node()) for p in parents], name='node'
+                )
+            )
         else:
             hexoutput = fm.hexfunc(ctx.node())
             if default or id:
@@ -3327,29 +3915,56 @@
     fm.plain("%s\n" % ' '.join(output))
     fm.end()
 
-@command('import|patch',
-    [('p', 'strip', 1,
-     _('directory strip option for patch. This has the same '
-       'meaning as the corresponding patch option'), _('NUM')),
-    ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
-    ('e', 'edit', False, _('invoke editor on commit messages')),
-    ('f', 'force', None,
-     _('skip check for outstanding uncommitted changes (DEPRECATED)')),
-    ('', 'no-commit', None,
-     _("don't commit, just update the working directory")),
-    ('', 'bypass', None,
-     _("apply patch without touching the working directory")),
-    ('', 'partial', None,
-     _('commit even if some hunks fail')),
-    ('', 'exact', None,
-     _('abort if patch would apply lossily')),
-    ('', 'prefix', '',
-     _('apply patch to subdirectory'), _('DIR')),
-    ('', 'import-branch', None,
-     _('use any branch information in patch (implied by --exact)'))] +
-    commitopts + commitopts2 + similarityopts,
+
+@command(
+    'import|patch',
+    [
+        (
+            'p',
+            'strip',
+            1,
+            _(
+                'directory strip option for patch. This has the same '
+                'meaning as the corresponding patch option'
+            ),
+            _('NUM'),
+        ),
+        ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (
+            'f',
+            'force',
+            None,
+            _('skip check for outstanding uncommitted changes (DEPRECATED)'),
+        ),
+        (
+            '',
+            'no-commit',
+            None,
+            _("don't commit, just update the working directory"),
+        ),
+        (
+            '',
+            'bypass',
+            None,
+            _("apply patch without touching the working directory"),
+        ),
+        ('', 'partial', None, _('commit even if some hunks fail')),
+        ('', 'exact', None, _('abort if patch would apply lossily')),
+        ('', 'prefix', '', _('apply patch to subdirectory'), _('DIR')),
+        (
+            '',
+            'import-branch',
+            None,
+            _('use any branch information in patch (implied by --exact)'),
+        ),
+    ]
+    + commitopts
+    + commitopts2
+    + similarityopts,
     _('[OPTION]... PATCH...'),
-    helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def import_(ui, repo, patch1=None, *patches, **opts):
     """import an ordered set of patches
 
@@ -3484,7 +4099,7 @@
     with repo.wlock():
         if update:
             cmdutil.checkunfinished(repo)
-            if (exact or not opts.get('force')):
+            if exact or not opts.get('force'):
                 cmdutil.bailifchanged(repo)
 
         if not opts.get('no_commit'):
@@ -3501,7 +4116,7 @@
                 if patchurl == '-':
                     ui.status(_('applying patch from stdin\n'))
                     patchfile = ui.fin
-                    patchurl = 'stdin'      # for error message
+                    patchurl = 'stdin'  # for error message
                 else:
                     patchurl = os.path.join(base, patchurl)
                     ui.status(_('applying %s\n') % patchurl)
@@ -3510,10 +4125,9 @@
                 haspatch = False
                 for hunk in patch.split(patchfile):
                     with patch.extract(ui, hunk) as patchdata:
-                        msg, node, rej = cmdutil.tryimportone(ui, repo,
-                                                              patchdata,
-                                                              parents, opts,
-                                                              msgs, hg.clean)
+                        msg, node, rej = cmdutil.tryimportone(
+                            ui, repo, patchdata, parents, opts, msgs, hg.clean
+                        )
                     if msg:
                         haspatch = True
                         ui.note(msg + '\n')
@@ -3523,8 +4137,12 @@
                         parents = [repo[node]]
                     if rej:
                         ui.write_err(_("patch applied partially\n"))
-                        ui.write_err(_("(fix the .rej files and run "
-                                       "`hg commit --amend`)\n"))
+                        ui.write_err(
+                            _(
+                                "(fix the .rej files and run "
+                                "`hg commit --amend`)\n"
+                            )
+                        )
                         ret = 1
                         break
 
@@ -3535,19 +4153,35 @@
                 repo.savecommitmessage('\n* * *\n'.join(msgs))
         return ret
 
-@command('incoming|in',
-    [('f', 'force', None,
-     _('run even if remote repository is unrelated')),
-    ('n', 'newest-first', None, _('show newest record first')),
-    ('', 'bundle', '',
-     _('file to store the bundles into'), _('FILE')),
-    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
-    ('B', 'bookmarks', False, _("compare bookmarks")),
-    ('b', 'branch', [],
-     _('a specific branch you would like to pull'), _('BRANCH')),
-    ] + logopts + remoteopts + subrepoopts,
+
+@command(
+    'incoming|in',
+    [
+        ('f', 'force', None, _('run even if remote repository is unrelated')),
+        ('n', 'newest-first', None, _('show newest record first')),
+        ('', 'bundle', '', _('file to store the bundles into'), _('FILE')),
+        (
+            'r',
+            'rev',
+            [],
+            _('a remote changeset intended to be added'),
+            _('REV'),
+        ),
+        ('B', 'bookmarks', False, _("compare bookmarks")),
+        (
+            'b',
+            'branch',
+            [],
+            _('a specific branch you would like to pull'),
+            _('BRANCH'),
+        ),
+    ]
+    + logopts
+    + remoteopts
+    + subrepoopts,
     _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'),
-    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT)
+    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
+)
 def incoming(ui, repo, source="default", **opts):
     """show new changesets found in source
 
@@ -3605,10 +4239,12 @@
     opts = pycompat.byteskwargs(opts)
     if opts.get('graph'):
         logcmdutil.checkunsupportedgraphflags([], opts)
+
         def display(other, chlist, displayer):
             revdag = logcmdutil.graphrevs(other, chlist, opts)
-            logcmdutil.displaygraph(ui, repo, revdag, displayer,
-                                    graphmod.asciiedges)
+            logcmdutil.displaygraph(
+                ui, repo, revdag, displayer, graphmod.asciiedges
+            )
 
         hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
         return 0
@@ -3617,8 +4253,9 @@
         raise error.Abort(_('cannot combine --bundle and --subrepos'))
 
     if opts.get('bookmarks'):
-        source, branches = hg.parseurl(ui.expandpath(source),
-                                       opts.get('branch'))
+        source, branches = hg.parseurl(
+            ui.expandpath(source), opts.get('branch')
+        )
         other = hg.peer(repo, opts, source)
         if 'bookmarks' not in other.listkeys('namespaces'):
             ui.warn(_("remote doesn't support bookmarks\n"))
@@ -3634,9 +4271,14 @@
         del repo._subtoppath
 
 
-@command('init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
-         helpcategory=command.CATEGORY_REPO_CREATION,
-         helpbasic=True, norepo=True)
+@command(
+    'init',
+    remoteopts,
+    _('[-e CMD] [--remotecmd CMD] [DEST]'),
+    helpcategory=command.CATEGORY_REPO_CREATION,
+    helpbasic=True,
+    norepo=True,
+)
 def init(ui, dest=".", **opts):
     """create a new repository in the given directory
 
@@ -3653,13 +4295,23 @@
     opts = pycompat.byteskwargs(opts)
     hg.peer(ui, opts, ui.expandpath(dest), create=True)
 
-@command('locate',
-    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
-    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-    ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
-    ] + walkopts,
+
+@command(
+    'locate',
+    [
+        ('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
+        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+        (
+            'f',
+            'fullpath',
+            None,
+            _('print complete paths from the filesystem root'),
+        ),
+    ]
+    + walkopts,
     _('[OPTION]... [PATTERN]...'),
-    helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def locate(ui, repo, *pats, **opts):
     """locate files matching specific patterns (DEPRECATED)
 
@@ -3690,8 +4342,9 @@
     ctx = scmutil.revsingle(repo, opts.get('rev'), None)
 
     ret = 1
-    m = scmutil.match(ctx, pats, opts, default='relglob',
-                      badfn=lambda x, y: False)
+    m = scmutil.match(
+        ctx, pats, opts, default='relglob', badfn=lambda x, y: False
+    )
 
     ui.pager('locate')
     if ctx.rev() is None:
@@ -3710,35 +4363,81 @@
 
     return ret
 
-@command('log|history',
-    [('f', 'follow', None,
-     _('follow changeset history, or file history across copies and renames')),
-    ('', 'follow-first', None,
-     _('only follow the first parent of merge changesets (DEPRECATED)')),
-    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
-    ('C', 'copies', None, _('show copied files')),
-    ('k', 'keyword', [],
-     _('do case-insensitive search for a given text'), _('TEXT')),
-    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
-    ('L', 'line-range', [],
-     _('follow line range of specified file (EXPERIMENTAL)'),
-     _('FILE,RANGE')),
-    ('', 'removed', None, _('include revisions where files were removed')),
-    ('m', 'only-merges', None,
-     _('show only merges (DEPRECATED) (use -r "merge()" instead)')),
-    ('u', 'user', [], _('revisions committed by user'), _('USER')),
-    ('', 'only-branch', [],
-     _('show only changesets within the given named branch (DEPRECATED)'),
-     _('BRANCH')),
-    ('b', 'branch', [],
-     _('show changesets within the given named branch'), _('BRANCH')),
-    ('P', 'prune', [],
-     _('do not display revision or any of its ancestors'), _('REV')),
-    ] + logopts + walkopts,
+
+@command(
+    'log|history',
+    [
+        (
+            'f',
+            'follow',
+            None,
+            _(
+                'follow changeset history, or file history across copies and renames'
+            ),
+        ),
+        (
+            '',
+            'follow-first',
+            None,
+            _('only follow the first parent of merge changesets (DEPRECATED)'),
+        ),
+        ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
+        ('C', 'copies', None, _('show copied files')),
+        (
+            'k',
+            'keyword',
+            [],
+            _('do case-insensitive search for a given text'),
+            _('TEXT'),
+        ),
+        ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
+        (
+            'L',
+            'line-range',
+            [],
+            _('follow line range of specified file (EXPERIMENTAL)'),
+            _('FILE,RANGE'),
+        ),
+        ('', 'removed', None, _('include revisions where files were removed')),
+        (
+            'm',
+            'only-merges',
+            None,
+            _('show only merges (DEPRECATED) (use -r "merge()" instead)'),
+        ),
+        ('u', 'user', [], _('revisions committed by user'), _('USER')),
+        (
+            '',
+            'only-branch',
+            [],
+            _(
+                'show only changesets within the given named branch (DEPRECATED)'
+            ),
+            _('BRANCH'),
+        ),
+        (
+            'b',
+            'branch',
+            [],
+            _('show changesets within the given named branch'),
+            _('BRANCH'),
+        ),
+        (
+            'P',
+            'prune',
+            [],
+            _('do not display revision or any of its ancestors'),
+            _('REV'),
+        ),
+    ]
+    + logopts
+    + walkopts,
     _('[OPTION]... [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
-    helpbasic=True, inferrepo=True,
-    intents={INTENT_READONLY})
+    helpbasic=True,
+    inferrepo=True,
+    intents={INTENT_READONLY},
+)
 def log(ui, repo, *pats, **opts):
     """show revision history of entire repository or files
 
@@ -3894,21 +4593,27 @@
         getcopies = scmutil.getcopiesfn(repo, endrev=endrev)
 
     ui.pager('log')
-    displayer = logcmdutil.changesetdisplayer(ui, repo, opts, differ,
-                                              buffered=True)
+    displayer = logcmdutil.changesetdisplayer(
+        ui, repo, opts, differ, buffered=True
+    )
     if opts.get('graph'):
         displayfn = logcmdutil.displaygraphrevs
     else:
         displayfn = logcmdutil.displayrevs
     displayfn(ui, repo, revs, displayer, getcopies)
 
-@command('manifest',
-    [('r', 'rev', '', _('revision to display'), _('REV')),
-     ('', 'all', False, _("list files from all revisions"))]
-         + formatteropts,
+
+@command(
+    'manifest',
+    [
+        ('r', 'rev', '', _('revision to display'), _('REV')),
+        ('', 'all', False, _("list files from all revisions")),
+    ]
+    + formatteropts,
     _('[-r REV]'),
     helpcategory=command.CATEGORY_MAINTENANCE,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def manifest(ui, repo, node=None, rev=None, **opts):
     """output the current or given revision of the project manifest
 
@@ -3965,16 +4670,30 @@
         fm.write('path', '%s\n', f)
     fm.end()
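
The `fm` object above is Mercurial's formatter: `formatteropts` contributes `-T/--template`, and the same typed `fm.write` calls feed plain, templated, and JSON output alike. The usual life cycle, assuming a `ui` and an opts dict as in the surrounding code (the file names are placeholders):

    # Formatter life cycle: one startitem() per record, a typed write()
    # per field, end() to flush buffered output.
    fm = ui.formatter('manifest', opts)
    for path in ['a.txt', 'b.txt']:
        fm.startitem()
        fm.write('path', '%s\n', path)
    fm.end()
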
 
-@command('merge',
-    [('f', 'force', None,
-      _('force a merge including outstanding changes (DEPRECATED)')),
-    ('r', 'rev', '', _('revision to merge'), _('REV')),
-    ('P', 'preview', None,
-     _('review revisions to merge (no merge is performed)')),
-    ('', 'abort', None, _('abort the ongoing merge')),
-     ] + mergetoolopts,
+
+@command(
+    'merge',
+    [
+        (
+            'f',
+            'force',
+            None,
+            _('force a merge including outstanding changes (DEPRECATED)'),
+        ),
+        ('r', 'rev', '', _('revision to merge'), _('REV')),
+        (
+            'P',
+            'preview',
+            None,
+            _('review revisions to merge (no merge is performed)'),
+        ),
+        ('', 'abort', None, _('abort the ongoing merge')),
+    ]
+    + mergetoolopts,
     _('[-P] [[-r] REV]'),
-    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT, helpbasic=True)
+    helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
+    helpbasic=True,
+)
 def merge(ui, repo, node=None, **opts):
     """merge another revision into working directory
 
@@ -4011,8 +4730,10 @@
     if abort:
         state = cmdutil.getunfinishedstate(repo)
         if state and state._opname != 'merge':
-            raise error.Abort(_('cannot abort merge with %s in progress') %
-                                (state._opname), hint=state.hint())
+            raise error.Abort(
+                _('cannot abort merge with %s in progress') % (state._opname),
+                hint=state.hint(),
+            )
         if node:
             raise error.Abort(_("cannot specify a node with --abort"))
         if opts.get('rev'):
@@ -4047,28 +4768,57 @@
     with ui.configoverride(overrides, 'merge'):
         force = opts.get('force')
         labels = ['working copy', 'merge rev']
-        return hg.merge(repo, node, force=force, mergeforce=force,
-                        labels=labels, abort=abort)
+        return hg.merge(
+            repo,
+            node,
+            force=force,
+            mergeforce=force,
+            labels=labels,
+            abort=abort,
+        )
+
 
 statemod.addunfinished(
-    'merge', fname=None, clearable=True, allowcommit=True,
-    cmdmsg=_('outstanding uncommitted merge'), abortfunc=hg.abortmerge,
-    statushint=_('To continue:    hg commit\n'
-                 'To abort:       hg merge --abort'),
-    cmdhint=_("use 'hg commit' or 'hg merge --abort'")
+    'merge',
+    fname=None,
+    clearable=True,
+    allowcommit=True,
+    cmdmsg=_('outstanding uncommitted merge'),
+    abortfunc=hg.abortmerge,
+    statushint=_(
+        'To continue:    hg commit\n' 'To abort:       hg merge --abort'
+    ),
+    cmdhint=_("use 'hg commit' or 'hg merge --abort'"),
 )
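
The one-line `'To continue:    hg commit\n' 'To abort:       hg merge --abort'` above is two adjacent string literals; Python concatenates them at compile time, so black can rejoin the fragments onto one line without changing the runtime value. A self-contained check:

    # Adjacent literals concatenate at compile time, so the reflowed
    # statushint is byte-for-byte identical to the pre-black version.
    statushint = (
        'To continue:    hg commit\n' 'To abort:       hg merge --abort'
    )
    assert statushint == (
        'To continue:    hg commit\nTo abort:       hg merge --abort'
    )
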
 
-@command('outgoing|out',
-    [('f', 'force', None, _('run even when the destination is unrelated')),
-    ('r', 'rev', [],
-     _('a changeset intended to be included in the destination'), _('REV')),
-    ('n', 'newest-first', None, _('show newest record first')),
-    ('B', 'bookmarks', False, _('compare bookmarks')),
-    ('b', 'branch', [], _('a specific branch you would like to push'),
-     _('BRANCH')),
-    ] + logopts + remoteopts + subrepoopts,
+
+@command(
+    'outgoing|out',
+    [
+        ('f', 'force', None, _('run even when the destination is unrelated')),
+        (
+            'r',
+            'rev',
+            [],
+            _('a changeset intended to be included in the destination'),
+            _('REV'),
+        ),
+        ('n', 'newest-first', None, _('show newest record first')),
+        ('B', 'bookmarks', False, _('compare bookmarks')),
+        (
+            'b',
+            'branch',
+            [],
+            _('a specific branch you would like to push'),
+            _('BRANCH'),
+        ),
+    ]
+    + logopts
+    + remoteopts
+    + subrepoopts,
     _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'),
-    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT)
+    helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
+)
 def outgoing(ui, repo, dest=None, **opts):
     """show changesets not found in the destination
 
@@ -4109,8 +4859,10 @@
     # style URLs, so don't overwrite dest.
     path = ui.paths.getpath(dest, default=('default-push', 'default'))
     if not path:
-        raise error.Abort(_('default repository not configured!'),
-                          hint=_("see 'hg help config.paths'"))
+        raise error.Abort(
+            _('default repository not configured!'),
+            hint=_("see 'hg help config.paths'"),
+        )
 
     opts = pycompat.byteskwargs(opts)
     if opts.get('graph'):
@@ -4123,8 +4875,9 @@
         revdag = logcmdutil.graphrevs(repo, o, opts)
         ui.pager('outgoing')
         displayer = logcmdutil.changesetdisplayer(ui, repo, opts, buffered=True)
-        logcmdutil.displaygraph(ui, repo, revdag, displayer,
-                                graphmod.asciiedges)
+        logcmdutil.displaygraph(
+            ui, repo, revdag, displayer, graphmod.asciiedges
+        )
         cmdutil.outgoinghooks(ui, repo, other, opts, o)
         return 0
 
@@ -4144,12 +4897,15 @@
     finally:
         del repo._subtoppath
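
`opts = pycompat.byteskwargs(opts)` near the top of `outgoing` (and of most commands in this file) converts the native-str keys that `**opts` carries on Python 3 back into bytes keys, which is what the later `opts.get(...)` lookups expect. A rough sketch, assuming the helper only rewrites keys (the real implementation lives in `mercurial/pycompat.py`):

    # Sketch: re-encode str keys as bytes so bytes-keyed lookups work.
    def byteskwargs_sketch(dic):
        return {k.encode('latin-1'): v for k, v in dic.items()}

    bopts = byteskwargs_sketch({'force': True, 'rev': ['tip']})
    assert bopts[b'force'] is True and bopts[b'rev'] == ['tip']
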
 
-@command('parents',
-    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
-    ] + templateopts,
+
+@command(
+    'parents',
+    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),]
+    + templateopts,
     _('[-r REV] [FILE]'),
     helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
-    inferrepo=True)
+    inferrepo=True,
+)
 def parents(ui, repo, file_=None, **opts):
     """show the parents of the working directory or revision (DEPRECATED)
 
@@ -4205,9 +4961,15 @@
             displayer.show(repo[n])
     displayer.close()
 
-@command('paths', formatteropts, _('[NAME]'),
+
+@command(
+    'paths',
+    formatteropts,
+    _('[NAME]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
-    optionalrepo=True, intents={INTENT_READONLY})
+    optionalrepo=True,
+    intents={INTENT_READONLY},
+)
 def paths(ui, repo, search=None, **opts):
     """show aliases for remote repositories
 
@@ -4253,8 +5015,11 @@
     opts = pycompat.byteskwargs(opts)
     ui.pager('paths')
     if search:
-        pathitems = [(name, path) for name, path in ui.paths.iteritems()
-                     if name == search]
+        pathitems = [
+            (name, path)
+            for name, path in ui.paths.iteritems()
+            if name == search
+        ]
     else:
         pathitems = sorted(ui.paths.iteritems())
 
@@ -4288,15 +5053,19 @@
     else:
         return 0
 
-@command('phase',
-    [('p', 'public', False, _('set changeset phase to public')),
-     ('d', 'draft', False, _('set changeset phase to draft')),
-     ('s', 'secret', False, _('set changeset phase to secret')),
-     ('f', 'force', False, _('allow to move boundary backward')),
-     ('r', 'rev', [], _('target revision'), _('REV')),
+
+@command(
+    'phase',
+    [
+        ('p', 'public', False, _('set changeset phase to public')),
+        ('d', 'draft', False, _('set changeset phase to draft')),
+        ('s', 'secret', False, _('set changeset phase to secret')),
+        ('f', 'force', False, _('allow to move boundary backward')),
+        ('r', 'rev', [], _('target revision'), _('REV')),
     ],
     _('[-p|-d|-s] [-f] [-r] [REV...]'),
-    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def phase(ui, repo, *revs, **opts):
     """set or show the current phase name
 
@@ -4357,11 +5126,15 @@
         newdata = [getphase(unfi, r) for r in unfi]
         changes = sum(newdata[r] != olddata[r] for r in unfi)
         cl = unfi.changelog
-        rejected = [n for n in nodes
-                    if newdata[cl.rev(n)] < targetphase]
+        rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase]
         if rejected:
-            ui.warn(_('cannot move %i changesets to a higher '
-                      'phase, use --force\n') % len(rejected))
+            ui.warn(
+                _(
+                    'cannot move %i changesets to a higher '
+                    'phase, use --force\n'
+                )
+                % len(rejected)
+            )
             ret = 1
         if changes:
             msg = _('phase changed for %i changesets\n') % changes
@@ -4373,6 +5146,7 @@
             ui.warn(_('no phases changed\n'))
     return ret
 
+
 def postincoming(ui, repo, modheads, optupdate, checkout, brev):
     """Run after a changegroup has been added via pull/unbundle
 
@@ -4397,25 +5171,46 @@
         if currentbranchheads == modheads:
             ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
         elif currentbranchheads > 1:
-            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
-                        "merge)\n"))
+            ui.status(
+                _("(run 'hg heads .' to see heads, 'hg merge' to " "merge)\n")
+            )
         else:
             ui.status(_("(run 'hg heads' to see heads)\n"))
     elif not ui.configbool('commands', 'update.requiredest'):
         ui.status(_("(run 'hg update' to get a working copy)\n"))
 
-@command('pull',
-    [('u', 'update', None,
-     _('update to new branch head if new descendants were pulled')),
-    ('f', 'force', None, _('run even when remote repository is unrelated')),
-    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
-    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
-    ('b', 'branch', [], _('a specific branch you would like to pull'),
-     _('BRANCH')),
-    ] + remoteopts,
+
+@command(
+    'pull',
+    [
+        (
+            'u',
+            'update',
+            None,
+            _('update to new branch head if new descendants were pulled'),
+        ),
+        ('f', 'force', None, _('run even when remote repository is unrelated')),
+        (
+            'r',
+            'rev',
+            [],
+            _('a remote changeset intended to be added'),
+            _('REV'),
+        ),
+        ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
+        (
+            'b',
+            'branch',
+            [],
+            _('a specific branch you would like to pull'),
+            _('BRANCH'),
+        ),
+    ]
+    + remoteopts,
     _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
-    helpbasic=True)
+    helpbasic=True,
+)
 def pull(ui, repo, source="default", **opts):
     """pull changes from the specified source
 
@@ -4456,8 +5251,9 @@
     ui.status(_('pulling from %s\n') % util.hidepassword(source))
     other = hg.peer(repo, opts, source)
     try:
-        revs, checkout = hg.addbranchrevs(repo, other, branches,
-                                          opts.get('rev'))
+        revs, checkout = hg.addbranchrevs(
+            repo, other, branches, opts.get('rev')
+        )
 
         pullopargs = {}
 
@@ -4471,13 +5267,15 @@
             fnodes = []
             revs = revs or []
             if revs and not other.capable('lookup'):
-                err = _("other repository doesn't support revision lookup, "
-                        "so a rev cannot be specified.")
+                err = _(
+                    "other repository doesn't support revision lookup, "
+                    "so a rev cannot be specified."
+                )
                 raise error.Abort(err)
             with other.commandexecutor() as e:
-                fremotebookmarks = e.callcommand('listkeys', {
-                    'namespace': 'bookmarks'
-                })
+                fremotebookmarks = e.callcommand(
+                    'listkeys', {'namespace': 'bookmarks'}
+                )
                 for r in revs:
                     fnodes.append(e.callcommand('lookup', {'key': r}))
             remotebookmarks = fremotebookmarks.result()
@@ -4499,10 +5297,14 @@
             wlock = repo.wlock()
         with wlock:
             pullopargs.update(opts.get('opargs', {}))
-            modheads = exchange.pull(repo, other, heads=nodes,
-                                     force=opts.get('force'),
-                                     bookmarks=opts.get('bookmark', ()),
-                                     opargs=pullopargs).cgresult
+            modheads = exchange.pull(
+                repo,
+                other,
+                heads=nodes,
+                force=opts.get('force'),
+                bookmarks=opts.get('bookmark', ()),
+                opargs=pullopargs,
+            ).cgresult
 
             # brev is a name, which might be a bookmark to be activated at
             # the end of the update. In other words, it is an explicit
@@ -4523,8 +5325,9 @@
                     brev = branches[0]
             repo._subtoppath = source
             try:
-                ret = postincoming(ui, repo, modheads, opts.get('update'),
-                                   checkout, brev)
+                ret = postincoming(
+                    ui, repo, modheads, opts.get('update'), checkout, brev
+                )
             except error.FilteredRepoLookupError as exc:
                 msg = _('cannot update to target: %s') % exc.args[0]
                 exc.args = (msg,) + exc.args[1:]
@@ -4536,21 +5339,45 @@
         other.close()
     return ret
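
The `commandexecutor` block inside `pull` is the wire-protocol batching API: each `callcommand` returns a future immediately so requests can be sent together, and `.result()` blocks until the corresponding reply arrives. The pattern in isolation, assuming `other` is a peer object as in the surrounding code:

    # Futures are requested inside the executor; results may be
    # resolved after the block exits, once the batch has been sent.
    with other.commandexecutor() as e:
        fbookmarks = e.callcommand('listkeys', {'namespace': 'bookmarks'})
        fnode = e.callcommand('lookup', {'key': 'tip'})
    bookmarks = fbookmarks.result()
    node = fnode.result()
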
 
-@command('push',
-    [('f', 'force', None, _('force push')),
-    ('r', 'rev', [],
-     _('a changeset intended to be included in the destination'),
-     _('REV')),
-    ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
-    ('b', 'branch', [],
-     _('a specific branch you would like to push'), _('BRANCH')),
-    ('', 'new-branch', False, _('allow pushing a new branch')),
-    ('', 'pushvars', [], _('variables that can be sent to server (ADVANCED)')),
-    ('', 'publish', False, _('push the changeset as public (EXPERIMENTAL)')),
-    ] + remoteopts,
+
+@command(
+    'push',
+    [
+        ('f', 'force', None, _('force push')),
+        (
+            'r',
+            'rev',
+            [],
+            _('a changeset intended to be included in the destination'),
+            _('REV'),
+        ),
+        ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
+        (
+            'b',
+            'branch',
+            [],
+            _('a specific branch you would like to push'),
+            _('BRANCH'),
+        ),
+        ('', 'new-branch', False, _('allow pushing a new branch')),
+        (
+            '',
+            'pushvars',
+            [],
+            _('variables that can be sent to server (ADVANCED)'),
+        ),
+        (
+            '',
+            'publish',
+            False,
+            _('push the changeset as public (EXPERIMENTAL)'),
+        ),
+    ]
+    + remoteopts,
     _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
-    helpbasic=True)
+    helpbasic=True,
+)
 def push(ui, repo, dest=None, **opts):
     """push changes to the specified destination
 
@@ -4623,8 +5450,10 @@
 
     path = ui.paths.getpath(dest, default=('default-push', 'default'))
     if not path:
-        raise error.Abort(_('default repository not configured!'),
-                         hint=_("see 'hg help config.paths'"))
+        raise error.Abort(
+            _('default repository not configured!'),
+            hint=_("see 'hg help config.paths'"),
+        )
     dest = path.pushloc or path.loc
     branches = (path.branch, opts.get('branch') or [])
     ui.status(_('pushing to %s\n') % util.hidepassword(dest))
@@ -4634,8 +5463,10 @@
     if revs:
         revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
         if not revs:
-            raise error.Abort(_("specified revisions evaluate to an empty set"),
-                             hint=_("use different revision arguments"))
+            raise error.Abort(
+                _("specified revisions evaluate to an empty set"),
+                hint=_("use different revision arguments"),
+            )
     elif path.pushrev:
         # It doesn't make any sense to specify ancestor revisions. So limit
         # to DAG heads to make discovery simpler.
@@ -4643,14 +5474,15 @@
         revs = scmutil.revrange(repo, [expr])
         revs = [repo[rev].node() for rev in revs]
         if not revs:
-            raise error.Abort(_('default push revset for path evaluates to an '
-                                'empty set'))
+            raise error.Abort(
+                _('default push revset for path evaluates to an ' 'empty set')
+            )
 
     repo._subtoppath = dest
     try:
         # push subrepos depth-first for coherent ordering
         c = repo['.']
-        subs = c.substate # only repos that are committed
+        subs = c.substate  # only repos that are committed
         for s in sorted(subs):
             result = c.sub(s).push(opts)
             if result == 0:
@@ -4658,14 +5490,19 @@
     finally:
         del repo._subtoppath
 
-    opargs = dict(opts.get('opargs', {})) # copy opargs since we may mutate it
+    opargs = dict(opts.get('opargs', {}))  # copy opargs since we may mutate it
     opargs.setdefault('pushvars', []).extend(opts.get('pushvars', []))
 
-    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
-                           newbranch=opts.get('new_branch'),
-                           bookmarks=opts.get('bookmark', ()),
-                           publish=opts.get('publish'),
-                           opargs=opargs)
+    pushop = exchange.push(
+        repo,
+        other,
+        opts.get('force'),
+        revs=revs,
+        newbranch=opts.get('new_branch'),
+        bookmarks=opts.get('bookmark', ()),
+        publish=opts.get('publish'),
+        opargs=opargs,
+    )
 
     result = not pushop.cgresult
 
@@ -4677,10 +5514,12 @@
 
     return result
 
-@command('recover',
-    [('','verify', True, "run `hg verify` after successful recover"),
-    ],
-    helpcategory=command.CATEGORY_MAINTENANCE)
+
+@command(
+    'recover',
+    [('', 'verify', True, "run `hg verify` after successful recover"),]
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def recover(ui, repo, **opts):
     """roll back an interrupted transaction
 
@@ -4697,20 +5536,29 @@
         if opts[r'verify']:
             return hg.verify(repo)
         else:
-            msg = _("(verify step skipped, run `hg verify` to check your "
-                    "repository content)\n")
+            msg = _(
+                "(verify step skipped, run  `hg verify` to check your "
+                "repository content)\n"
+            )
             ui.warn(msg)
             return 0
     return 1
 
-@command('remove|rm',
-    [('A', 'after', None, _('record delete for missing files')),
-    ('f', 'force', None,
-     _('forget added files, delete modified files')),
-    ] + subrepoopts + walkopts + dryrunopts,
+
+@command(
+    'remove|rm',
+    [
+        ('A', 'after', None, _('record delete for missing files')),
+        ('f', 'force', None, _('forget added files, delete modified files')),
+    ]
+    + subrepoopts
+    + walkopts
+    + dryrunopts,
     _('[OPTION]... FILE...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    helpbasic=True, inferrepo=True)
+    helpbasic=True,
+    inferrepo=True,
+)
 def remove(ui, repo, *pats, **opts):
     """remove the specified files on the next commit
 
@@ -4759,15 +5607,22 @@
     m = scmutil.match(repo[None], pats, opts)
     subrepos = opts.get('subrepos')
     uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
-    return cmdutil.remove(ui, repo, m, "", uipathfn, after, force, subrepos,
-                          dryrun=dryrun)
-
-@command('rename|move|mv',
-    [('A', 'after', None, _('record a rename that has already occurred')),
-    ('f', 'force', None, _('forcibly move over an existing managed file')),
-    ] + walkopts + dryrunopts,
+    return cmdutil.remove(
+        ui, repo, m, "", uipathfn, after, force, subrepos, dryrun=dryrun
+    )
+
+
+@command(
+    'rename|move|mv',
+    [
+        ('A', 'after', None, _('record a rename that has already occurred')),
+        ('f', 'force', None, _('forcibly move over an existing managed file')),
+    ]
+    + walkopts
+    + dryrunopts,
     _('[OPTION]... SOURCE... DEST'),
-    helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def rename(ui, repo, *pats, **opts):
     """rename files; equivalent of copy + remove
 
@@ -4788,17 +5643,24 @@
     with repo.wlock(False):
         return cmdutil.copy(ui, repo, pats, opts, rename=True)
 
-@command('resolve',
-    [('a', 'all', None, _('select all unresolved files')),
-    ('l', 'list', None, _('list state of files needing merge')),
-    ('m', 'mark', None, _('mark files as resolved')),
-    ('u', 'unmark', None, _('mark files as unresolved')),
-    ('n', 'no-status', None, _('hide status prefix')),
-    ('', 're-merge', None, _('re-merge files'))]
-    + mergetoolopts + walkopts + formatteropts,
+
+@command(
+    'resolve',
+    [
+        ('a', 'all', None, _('select all unresolved files')),
+        ('l', 'list', None, _('list state of files needing merge')),
+        ('m', 'mark', None, _('mark files as resolved')),
+        ('u', 'unmark', None, _('mark files as unresolved')),
+        ('n', 'no-status', None, _('hide status prefix')),
+        ('', 're-merge', None, _('re-merge files')),
+    ]
+    + mergetoolopts
+    + walkopts
+    + formatteropts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    inferrepo=True)
+    inferrepo=True,
+)
 def resolve(ui, repo, *pats, **opts):
     """redo merges or set/view the merge status of files
 
@@ -4854,34 +5716,45 @@
     opts = pycompat.byteskwargs(opts)
     confirm = ui.configbool('commands', 'resolve.confirm')
     flaglist = 'all mark unmark list no_status re_merge'.split()
-    all, mark, unmark, show, nostatus, remerge = [
-        opts.get(o) for o in flaglist]
+    all, mark, unmark, show, nostatus, remerge = [opts.get(o) for o in flaglist]
 
     actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
     if actioncount > 1:
         raise error.Abort(_("too many actions specified"))
-    elif (actioncount == 0
-          and ui.configbool('commands', 'resolve.explicit-re-merge')):
+    elif actioncount == 0 and ui.configbool(
+        'commands', 'resolve.explicit-re-merge'
+    ):
         hint = _('use --mark, --unmark, --list or --re-merge')
         raise error.Abort(_('no action specified'), hint=hint)
     if pats and all:
         raise error.Abort(_("can't specify --all and patterns"))
     if not (all or pats or show or mark or unmark):
-        raise error.Abort(_('no files or directories specified'),
-                         hint=('use --all to re-merge all unresolved files'))
+        raise error.Abort(
+            _('no files or directories specified'),
+            hint='use --all to re-merge all unresolved files',
+        )
 
     if confirm:
         if all:
-            if ui.promptchoice(_(b're-merge all unresolved files (yn)?'
-                                 b'$$ &Yes $$ &No')):
+            if ui.promptchoice(
+                _(b're-merge all unresolved files (yn)?' b'$$ &Yes $$ &No')
+            ):
                 raise error.Abort(_('user quit'))
         if mark and not pats:
-            if ui.promptchoice(_(b'mark all unresolved files as resolved (yn)?'
-                                 b'$$ &Yes $$ &No')):
+            if ui.promptchoice(
+                _(
+                    b'mark all unresolved files as resolved (yn)?'
+                    b'$$ &Yes $$ &No'
+                )
+            ):
                 raise error.Abort(_('user quit'))
         if unmark and not pats:
-            if ui.promptchoice(_(b'mark all resolved files as unresolved (yn)?'
-                                 b'$$ &Yes $$ &No')):
+            if ui.promptchoice(
+                _(
+                    b'mark all resolved files as unresolved (yn)?'
+                    b'$$ &Yes $$ &No'
+                )
+            ):
                 raise error.Abort(_('user quit'))
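
The `$$`-delimited prompts above pack a question and its choices into one translatable literal: the text before the first `$$` is the question, each following segment is a choice, and `&` marks the key that selects it. `ui.promptchoice` returns the zero-based index of the answer, so a truthy return (index 1, i.e. &No) triggers the abort. Restated minimally, assuming this module's `ui`, `error`, and `_`:

    # promptchoice returns 0 for &Yes, 1 for &No; nonzero means quit.
    if ui.promptchoice(_(b'really re-merge (yn)?$$ &Yes $$ &No')):
        raise error.Abort(_('user quit'))
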
 
     uipathfn = scmutil.getuipathfn(repo)
@@ -4901,8 +5774,10 @@
             mergemod.MERGE_RECORD_RESOLVED: ('resolve.resolved', 'R'),
             mergemod.MERGE_RECORD_UNRESOLVED_PATH: ('resolve.unresolved', 'P'),
             mergemod.MERGE_RECORD_RESOLVED_PATH: ('resolve.resolved', 'R'),
-            mergemod.MERGE_RECORD_DRIVER_RESOLVED: ('resolve.driverresolved',
-                                                    'D'),
+            mergemod.MERGE_RECORD_DRIVER_RESOLVED: (
+                'resolve.driverresolved',
+                'D',
+            ),
         }
 
         for f in ms:
@@ -4923,12 +5798,15 @@
 
         if not (ms.active() or repo.dirstate.p2() != nullid):
             raise error.Abort(
-                _('resolve command not applicable when not merging'))
+                _('resolve command not applicable when not merging')
+            )
 
         wctx = repo[None]
 
-        if (ms.mergedriver
-            and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED):
+        if (
+            ms.mergedriver
+            and ms.mdstate() == mergemod.MERGE_DRIVER_STATE_UNMARKED
+        ):
             proceed = mergemod.driverpreprocess(repo, ms, wctx)
             ms.commit()
             # allow mark and unmark to go through
@@ -4959,33 +5837,43 @@
                 exact = m.exact(f)
                 if mark:
                     if exact:
-                        ui.warn(_('not marking %s as it is driver-resolved\n')
-                                % uipathfn(f))
+                        ui.warn(
+                            _('not marking %s as it is driver-resolved\n')
+                            % uipathfn(f)
+                        )
                 elif unmark:
                     if exact:
-                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
-                                % uipathfn(f))
+                        ui.warn(
+                            _('not unmarking %s as it is driver-resolved\n')
+                            % uipathfn(f)
+                        )
                 else:
                     runconclude = True
                 continue
 
             # path conflicts must be resolved manually
-            if ms[f] in (mergemod.MERGE_RECORD_UNRESOLVED_PATH,
-                         mergemod.MERGE_RECORD_RESOLVED_PATH):
+            if ms[f] in (
+                mergemod.MERGE_RECORD_UNRESOLVED_PATH,
+                mergemod.MERGE_RECORD_RESOLVED_PATH,
+            ):
                 if mark:
                     ms.mark(f, mergemod.MERGE_RECORD_RESOLVED_PATH)
                 elif unmark:
                     ms.mark(f, mergemod.MERGE_RECORD_UNRESOLVED_PATH)
                 elif ms[f] == mergemod.MERGE_RECORD_UNRESOLVED_PATH:
-                    ui.warn(_('%s: path conflict must be resolved manually\n')
-                            % uipathfn(f))
+                    ui.warn(
+                        _('%s: path conflict must be resolved manually\n')
+                        % uipathfn(f)
+                    )
                 continue
 
             if mark:
                 if markcheck:
                     fdata = repo.wvfs.tryread(f)
-                    if (filemerge.hasconflictmarkers(fdata) and
-                        ms[f] != mergemod.MERGE_RECORD_RESOLVED):
+                    if (
+                        filemerge.hasconflictmarkers(fdata)
+                        and ms[f] != mergemod.MERGE_RECORD_RESOLVED
+                    ):
                         hasconflictmarkers.append(f)
                 ms.mark(f, mergemod.MERGE_RECORD_RESOLVED)
             elif unmark:
@@ -5015,19 +5903,26 @@
                 # for merges that are complete
                 if complete:
                     try:
-                        util.rename(a + ".resolve",
-                                    scmutil.backuppath(ui, repo, f))
+                        util.rename(
+                            a + ".resolve", scmutil.backuppath(ui, repo, f)
+                        )
                     except OSError as inst:
                         if inst.errno != errno.ENOENT:
                             raise
 
         if hasconflictmarkers:
-            ui.warn(_('warning: the following files still have conflict '
-                      'markers:\n') + ''.join('  ' + uipathfn(f) + '\n'
-                                              for f in hasconflictmarkers))
+            ui.warn(
+                _(
+                    'warning: the following files still have conflict '
+                    'markers:\n'
+                )
+                + ''.join('  ' + uipathfn(f) + '\n' for f in hasconflictmarkers)
+            )
             if markcheck == 'abort' and not all and not pats:
-                raise error.Abort(_('conflict markers detected'),
-                                  hint=_('use --all to mark anyway'))
+                raise error.Abort(
+                    _('conflict markers detected'),
+                    hint=_('use --all to mark anyway'),
+                )
 
         for f in tocomplete:
             try:
@@ -5059,14 +5954,17 @@
                 for f in ms:
                     if not m(f):
                         continue
+
                     def flag(o):
                         if o == 're_merge':
                             return '--re-merge '
                         return '-%s ' % o[0:1]
+
                     flags = ''.join([flag(o) for o in flaglist if opts.get(o)])
                     hint = _("(try: hg resolve %s%s)\n") % (
-                             flags,
-                             ' '.join(pats))
+                        flags,
+                        ' '.join(pats),
+                    )
                     break
             ui.warn(_("arguments do not match paths that need resolving\n"))
             if hint:
@@ -5076,8 +5974,9 @@
             # or there are no driver-resolved files
             # we can't use 'ret' to determine whether any files are unresolved
             # because we might not have tried to resolve some
-            if ((runconclude or not list(ms.driverresolved()))
-                and not list(ms.unresolved())):
+            if (runconclude or not list(ms.driverresolved())) and not list(
+                ms.unresolved()
+            ):
                 proceed = mergemod.driverconclude(repo, ms, wctx)
                 ms.commit()
                 if not proceed:
@@ -5090,20 +5989,30 @@
         ui.status(_('(no more unresolved files)\n'))
         cmdutil.checkafterresolved(repo)
     elif not unresolvedf:
-        ui.status(_('(no more unresolved files -- '
-                    'run "hg resolve --all" to conclude)\n'))
+        ui.status(
+            _(
+                '(no more unresolved files -- '
+                'run "hg resolve --all" to conclude)\n'
+            )
+        )
 
     return ret
 
-@command('revert',
-    [('a', 'all', None, _('revert all changes when no arguments given')),
-    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
-    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
-    ('C', 'no-backup', None, _('do not save backup copies of files')),
-    ('i', 'interactive', None, _('interactively select the changes')),
-    ] + walkopts + dryrunopts,
+
+@command(
+    'revert',
+    [
+        ('a', 'all', None, _('revert all changes when no arguments given')),
+        ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
+        ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
+        ('C', 'no-backup', None, _('do not save backup copies of files')),
+        ('i', 'interactive', None, _('interactively select the changes')),
+    ]
+    + walkopts
+    + dryrunopts,
     _('[OPTION]... [-r REV] [NAME]...'),
-    helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def revert(ui, repo, *pats, **opts):
     """restore files to their checkout state
 
@@ -5150,43 +6059,65 @@
     parent, p2 = repo.dirstate.parents()
     if not opts.get('rev') and p2 != nullid:
         # revert after merge is a trap for new users (issue2915)
-        raise error.Abort(_('uncommitted merge with no revision specified'),
-                         hint=_("use 'hg update' or see 'hg help revert'"))
+        raise error.Abort(
+            _('uncommitted merge with no revision specified'),
+            hint=_("use 'hg update' or see 'hg help revert'"),
+        )
 
     rev = opts.get('rev')
     if rev:
         repo = scmutil.unhidehashlikerevs(repo, [rev], 'nowarn')
     ctx = scmutil.revsingle(repo, rev)
 
-    if (not (pats or opts.get('include') or opts.get('exclude') or
-             opts.get('all') or opts.get('interactive'))):
+    if not (
+        pats
+        or opts.get('include')
+        or opts.get('exclude')
+        or opts.get('all')
+        or opts.get('interactive')
+    ):
         msg = _("no files or directories specified")
         if p2 != nullid:
-            hint = _("uncommitted merge, use --all to discard all changes,"
-                     " or 'hg update -C .' to abort the merge")
+            hint = _(
+                "uncommitted merge, use --all to discard all changes,"
+                " or 'hg update -C .' to abort the merge"
+            )
             raise error.Abort(msg, hint=hint)
         dirty = any(repo.status())
         node = ctx.node()
         if node != parent:
             if dirty:
-                hint = _("uncommitted changes, use --all to discard all"
-                         " changes, or 'hg update %d' to update") % ctx.rev()
+                hint = (
+                    _(
+                        "uncommitted changes, use --all to discard all"
+                        " changes, or 'hg update %d' to update"
+                    )
+                    % ctx.rev()
+                )
             else:
-                hint = _("use --all to revert all files,"
-                         " or 'hg update %d' to update") % ctx.rev()
+                hint = (
+                    _(
+                        "use --all to revert all files,"
+                        " or 'hg update %d' to update"
+                    )
+                    % ctx.rev()
+                )
         elif dirty:
             hint = _("uncommitted changes, use --all to discard all changes")
         else:
             hint = _("use --all to revert all files")
         raise error.Abort(msg, hint=hint)
 
-    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats,
-                          **pycompat.strkwargs(opts))
+    return cmdutil.revert(
+        ui, repo, ctx, (parent, p2), *pats, **pycompat.strkwargs(opts)
+    )
+
 
 @command(
     'rollback',
     dryrunopts + [('f', 'force', False, _('ignore safety measures'))],
-    helpcategory=command.CATEGORY_MAINTENANCE)
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def rollback(ui, repo, **opts):
     """roll back the last transaction (DANGEROUS) (DEPRECATED)
 
@@ -5233,14 +6164,19 @@
     Returns 0 on success, 1 if no rollback data is available.
     """
     if not ui.configbool('ui', 'rollback'):
-        raise error.Abort(_('rollback is disabled because it is unsafe'),
-                          hint=('see `hg help -v rollback` for information'))
-    return repo.rollback(dryrun=opts.get(r'dry_run'),
-                         force=opts.get(r'force'))
+        raise error.Abort(
+            _('rollback is disabled because it is unsafe'),
+            hint='see `hg help -v rollback` for information',
+        )
+    return repo.rollback(dryrun=opts.get(r'dry_run'), force=opts.get(r'force'))
+
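
The `r'dry_run'` and `r'force'` keys above are deliberate: Mercurial's Python 3 source loader rewrites bare string literals in this file to bytes, and an `r` prefix opts a literal out, leaving a native str. Before `byteskwargs` has run, `**opts` still has native-str keys, so those lookups must use `r''` literals. A sketch of the convention (the function name is illustrative):

    def rollback_sketch(repo, **opts):
        # opts has native-str keys here (no byteskwargs yet), so index
        # with r'' literals that the py3 source transformer leaves alone.
        return repo.rollback(
            dryrun=opts.get(r'dry_run'), force=opts.get(r'force')
        )
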
 
 @command(
-    'root', [] + formatteropts, intents={INTENT_READONLY},
-    helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+    'root',
+    [] + formatteropts,
+    intents={INTENT_READONLY},
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def root(ui, repo, **opts):
     """print the root (top) of the current working directory
 
@@ -5264,36 +6200,84 @@
         fm.write('reporoot', '%s\n', repo.root)
         fm.data(hgpath=repo.path, storepath=repo.spath)
 
-@command('serve',
-    [('A', 'accesslog', '', _('name of access log file to write to'),
-     _('FILE')),
-    ('d', 'daemon', None, _('run server in background')),
-    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
-    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
-    # use string type, then we can check if something was passed
-    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
-    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
-     _('ADDR')),
-    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
-     _('PREFIX')),
-    ('n', 'name', '',
-     _('name to show in web pages (default: working directory)'), _('NAME')),
-    ('', 'web-conf', '',
-     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
-    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
-     _('FILE')),
-    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
-    ('', 'stdio', None, _('for remote clients (ADVANCED)')),
-    ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
-    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
-    ('', 'style', '', _('template style to use'), _('STYLE')),
-    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
-    ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
-    ('', 'print-url', None, _('start and print only the URL'))]
-     + subrepoopts,
+
+@command(
+    'serve',
+    [
+        (
+            'A',
+            'accesslog',
+            '',
+            _('name of access log file to write to'),
+            _('FILE'),
+        ),
+        ('d', 'daemon', None, _('run server in background')),
+        ('', 'daemon-postexec', [], _('used internally by daemon mode')),
+        (
+            'E',
+            'errorlog',
+            '',
+            _('name of error log file to write to'),
+            _('FILE'),
+        ),
+        # use string type, then we can check if something was passed
+        ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
+        (
+            'a',
+            'address',
+            '',
+            _('address to listen on (default: all interfaces)'),
+            _('ADDR'),
+        ),
+        (
+            '',
+            'prefix',
+            '',
+            _('prefix path to serve from (default: server root)'),
+            _('PREFIX'),
+        ),
+        (
+            'n',
+            'name',
+            '',
+            _('name to show in web pages (default: working directory)'),
+            _('NAME'),
+        ),
+        (
+            '',
+            'web-conf',
+            '',
+            _("name of the hgweb config file (see 'hg help hgweb')"),
+            _('FILE'),
+        ),
+        (
+            '',
+            'webdir-conf',
+            '',
+            _('name of the hgweb config file (DEPRECATED)'),
+            _('FILE'),
+        ),
+        (
+            '',
+            'pid-file',
+            '',
+            _('name of file to write process ID to'),
+            _('FILE'),
+        ),
+        ('', 'stdio', None, _('for remote clients (ADVANCED)')),
+        ('', 'cmdserver', '', _('for remote clients (ADVANCED)'), _('MODE')),
+        ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
+        ('', 'style', '', _('template style to use'), _('STYLE')),
+        ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
+        ('', 'certificate', '', _('SSL certificate file'), _('FILE')),
+        ('', 'print-url', None, _('start and print only the URL')),
+    ]
+    + subrepoopts,
     _('[OPTION]...'),
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
-    helpbasic=True, optionalrepo=True)
+    helpbasic=True,
+    optionalrepo=True,
+)
 def serve(ui, repo, **opts):
     """start stand-alone webserver
 
@@ -5327,46 +6311,69 @@
 
     if opts["stdio"]:
         if repo is None:
-            raise error.RepoError(_("there is no Mercurial repository here"
-                                    " (.hg not found)"))
+            raise error.RepoError(
+                _("there is no Mercurial repository here" " (.hg not found)")
+            )
         s = wireprotoserver.sshserver(ui, repo)
         s.serve_forever()
 
     service = server.createservice(ui, repo, opts)
     return server.runservice(opts, initfn=service.init, runfn=service.run)
 
-@command('shelve',
-         [('A', 'addremove', None,
-           _('mark new/missing files as added/removed before shelving')),
-          ('u', 'unknown', None,
-           _('store unknown files in the shelve')),
-          ('', 'cleanup', None,
-           _('delete all shelved changes')),
-          ('', 'date', '',
-           _('shelve with the specified commit date'), _('DATE')),
-          ('d', 'delete', None,
-           _('delete the named shelved change(s)')),
-          ('e', 'edit', False,
-           _('invoke editor on commit messages')),
-          ('k', 'keep', False,
-           _('shelve, but keep changes in the working directory')),
-          ('l', 'list', None,
-           _('list current shelves')),
-          ('m', 'message', '',
-           _('use text as shelve message'), _('TEXT')),
-          ('n', 'name', '',
-           _('use the given name for the shelved commit'), _('NAME')),
-          ('p', 'patch', None,
-           _('output patches for changes (provide the names of the shelved '
-             'changes as positional arguments)')),
-          ('i', 'interactive', None,
-           _('interactive mode')),
-          ('', 'stat', None,
-           _('output diffstat-style summary of changes (provide the names of '
-             'the shelved changes as positional arguments)')
-           )] + cmdutil.walkopts,
-         _('hg shelve [OPTION]... [FILE]...'),
-         helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+
+@command(
+    'shelve',
+    [
+        (
+            'A',
+            'addremove',
+            None,
+            _('mark new/missing files as added/removed before shelving'),
+        ),
+        ('u', 'unknown', None, _('store unknown files in the shelve')),
+        ('', 'cleanup', None, _('delete all shelved changes')),
+        ('', 'date', '', _('shelve with the specified commit date'), _('DATE')),
+        ('d', 'delete', None, _('delete the named shelved change(s)')),
+        ('e', 'edit', False, _('invoke editor on commit messages')),
+        (
+            'k',
+            'keep',
+            False,
+            _('shelve, but keep changes in the working directory'),
+        ),
+        ('l', 'list', None, _('list current shelves')),
+        ('m', 'message', '', _('use text as shelve message'), _('TEXT')),
+        (
+            'n',
+            'name',
+            '',
+            _('use the given name for the shelved commit'),
+            _('NAME'),
+        ),
+        (
+            'p',
+            'patch',
+            None,
+            _(
+                'output patches for changes (provide the names of the shelved '
+                'changes as positional arguments)'
+            ),
+        ),
+        ('i', 'interactive', None, _('interactive mode')),
+        (
+            '',
+            'stat',
+            None,
+            _(
+                'output diffstat-style summary of changes (provide the names of '
+                'the shelved changes as positional arguments)'
+            ),
+        ),
+    ]
+    + cmdutil.walkopts,
+    _('hg shelve [OPTION]... [FILE]...'),
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def shelve(ui, repo, *pats, **opts):
     '''save and set aside changes from the working directory
 
@@ -5402,10 +6409,10 @@
     '''
     opts = pycompat.byteskwargs(opts)
     allowables = [
-        ('addremove', {'create'}), # 'create' is pseudo action
+        ('addremove', {'create'}),  # 'create' is pseudo action
         ('unknown', {'create'}),
         ('cleanup', {'cleanup'}),
-#       ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
+        #       ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
         ('delete', {'delete'}),
         ('edit', {'create'}),
         ('keep', {'create'}),
@@ -5415,13 +6422,20 @@
         ('patch', {'patch', 'list'}),
         ('stat', {'stat', 'list'}),
     ]
+
     def checkopt(opt):
         if opts.get(opt):
             for i, allowable in allowables:
                 if opts[i] and opt not in allowable:
-                    raise error.Abort(_("options '--%s' and '--%s' may not be "
-                                       "used together") % (opt, i))
+                    raise error.Abort(
+                        _(
+                            "options '--%s' and '--%s' may not be "
+                            "used together"
+                        )
+                        % (opt, i)
+                    )
             return True
+
     if checkopt('cleanup'):
         if pats:
             raise error.Abort(_("cannot specify names when using '--cleanup'"))
@@ -5435,28 +6449,37 @@
     else:
         return shelvemod.createcmd(ui, repo, pats, opts)
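
The `allowables` table plus `checkopt` above is a compact mutual-exclusion dispatcher: each flag declares the set of shelve modes it may appear in, the first matching mode flag selects the branch, and any co-set flag outside that mode's allowed set aborts. The core of the pattern, with illustrative names rather than Mercurial API:

    allowables = [('delete', {'delete'}), ('patch', {'patch', 'list'})]

    def checkopt_sketch(opts, opt):
        # True when `opt` selects the mode; raises if another set flag
        # is not allowed to combine with it.
        if opts.get(opt):
            for name, allowed in allowables:
                if opts.get(name) and opt not in allowed:
                    raise ValueError(
                        "options '--%s' and '--%s' may not be used together"
                        % (opt, name)
                    )
            return True

    assert checkopt_sketch({'delete': True}, 'delete')
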
 
+
 _NOTTERSE = 'nothing'
 
-@command('status|st',
-    [('A', 'all', None, _('show status of all files')),
-    ('m', 'modified', None, _('show only modified files')),
-    ('a', 'added', None, _('show only added files')),
-    ('r', 'removed', None, _('show only removed files')),
-    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
-    ('c', 'clean', None, _('show only files without changes')),
-    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
-    ('i', 'ignored', None, _('show only ignored files')),
-    ('n', 'no-status', None, _('hide status prefix')),
-    ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
-    ('C', 'copies', None, _('show source of copied files')),
-    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
-    ('', 'rev', [], _('show difference from revision'), _('REV')),
-    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
-    ] + walkopts + subrepoopts + formatteropts,
+
+@command(
+    'status|st',
+    [
+        ('A', 'all', None, _('show status of all files')),
+        ('m', 'modified', None, _('show only modified files')),
+        ('a', 'added', None, _('show only added files')),
+        ('r', 'removed', None, _('show only removed files')),
+        ('d', 'deleted', None, _('show only deleted (but tracked) files')),
+        ('c', 'clean', None, _('show only files without changes')),
+        ('u', 'unknown', None, _('show only unknown (not tracked) files')),
+        ('i', 'ignored', None, _('show only ignored files')),
+        ('n', 'no-status', None, _('hide status prefix')),
+        ('t', 'terse', _NOTTERSE, _('show the terse output (EXPERIMENTAL)')),
+        ('C', 'copies', None, _('show source of copied files')),
+        ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
+        ('', 'rev', [], _('show difference from revision'), _('REV')),
+        ('', 'change', '', _('list the changed files of a revision'), _('REV')),
+    ]
+    + walkopts
+    + subrepoopts
+    + formatteropts,
     _('[OPTION]... [FILE]...'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    helpbasic=True, inferrepo=True,
-    intents={INTENT_READONLY})
+    helpbasic=True,
+    inferrepo=True,
+    intents={INTENT_READONLY},
+)
 def status(ui, repo, *pats, **opts):
     """show changed files in the working directory
 
@@ -5579,8 +6602,11 @@
     forcerelativevalue = None
     if ui.hasconfig('commands', 'status.relative'):
         forcerelativevalue = ui.configbool('commands', 'status.relative')
-    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=bool(pats),
-                                   forcerelativevalue=forcerelativevalue)
+    uipathfn = scmutil.getuipathfn(
+        repo,
+        legacyrelativevalue=bool(pats),
+        forcerelativevalue=forcerelativevalue,
+    )
 
     if opts.get('print0'):
         end = '\0'
@@ -5601,21 +6627,35 @@
     m = scmutil.match(ctx2, pats, opts)
     if terse:
         # we need to compute clean and unknown to terse
-        stat = repo.status(ctx1.node(), ctx2.node(), m,
-                           'ignored' in show or 'i' in terse,
-                            clean=True, unknown=True,
-                            listsubrepos=opts.get('subrepos'))
+        stat = repo.status(
+            ctx1.node(),
+            ctx2.node(),
+            m,
+            'ignored' in show or 'i' in terse,
+            clean=True,
+            unknown=True,
+            listsubrepos=opts.get('subrepos'),
+        )
 
         stat = cmdutil.tersedir(stat, terse)
     else:
-        stat = repo.status(ctx1.node(), ctx2.node(), m,
-                           'ignored' in show, 'clean' in show,
-                           'unknown' in show, opts.get('subrepos'))
+        stat = repo.status(
+            ctx1.node(),
+            ctx2.node(),
+            m,
+            'ignored' in show,
+            'clean' in show,
+            'unknown' in show,
+            opts.get('subrepos'),
+        )
 
     changestates = zip(states, pycompat.iterbytestr('MAR!?IC'), stat)
 
-    if (opts.get('all') or opts.get('copies')
-        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
+    if (
+        opts.get('all')
+        or opts.get('copies')
+        or ui.configbool('ui', 'statuscopies')
+    ) and not opts.get('no_status'):
         copy = copies.pathcopies(ctx1, ctx2, m)
 
     ui.pager('status')
@@ -5634,20 +6674,26 @@
                 fm.plain(fmt % uipathfn(f), label=label)
                 if f in copy:
                     fm.data(source=copy[f])
-                    fm.plain(('  %s' + end) % uipathfn(copy[f]),
-                             label='status.copied')
-
-    if ((ui.verbose or ui.configbool('commands', 'status.verbose'))
-        and not ui.plain()):
+                    fm.plain(
+                        ('  %s' + end) % uipathfn(copy[f]),
+                        label='status.copied',
+                    )
+
+    if (
+        ui.verbose or ui.configbool('commands', 'status.verbose')
+    ) and not ui.plain():
         cmdutil.morestatus(repo, fm)
     fm.end()
 
-@command('summary|sum',
+
+@command(
+    'summary|sum',
     [('', 'remote', None, _('check for push and pull'))],
     '[--remote]',
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
     helpbasic=True,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def summary(ui, repo, **opts):
     """summarize working directory state
 
@@ -5672,7 +6718,8 @@
     except error.UnsupportedMergeRecords as e:
         s = ' '.join(e.recordtypes)
         ui.warn(
-            _('warning: merge state has unsupported record types: %s\n') % s)
+            _('warning: merge state has unsupported record types: %s\n') % s
+        )
         unresolved = []
     else:
         unresolved = list(ms.unresolved())
@@ -5681,8 +6728,10 @@
         # label with log.changeset (instead of log.parent) since this
         # shows a working directory parent *changeset*:
         # i18n: column positioning for "hg summary"
-        ui.write(_('parent: %d:%s ') % (p.rev(), p),
-                 label=logcmdutil.changesetlabels(p))
+        ui.write(
+            _('parent: %d:%s ') % (p.rev(), p),
+            label=logcmdutil.changesetlabels(p),
+        )
         ui.write(' '.join(p.tags()), label='log.tag')
         if p.bookmarks():
             marks.extend(p.bookmarks())
@@ -5694,15 +6743,17 @@
         if p.obsolete():
             ui.write(_(' (obsolete)'))
         if p.isunstable():
-            instabilities = (ui.label(instability, 'trouble.%s' % instability)
-                             for instability in p.instabilities())
-            ui.write(' ('
-                     + ', '.join(instabilities)
-                     + ')')
+            instabilities = (
+                ui.label(instability, 'trouble.%s' % instability)
+                for instability in p.instabilities()
+            )
+            ui.write(' (' + ', '.join(instabilities) + ')')
         ui.write('\n')
         if p.description():
-            ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
-                      label='log.summary')
+            ui.status(
+                ' ' + p.description().splitlines()[0].strip() + '\n',
+                label='log.summary',
+            )
 
     branch = ctx.branch()
     bheads = repo.branchheads(branch)
@@ -5742,15 +6793,17 @@
 
     subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
 
-    labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
-              (ui.label(_('%d added'), 'status.added'), status.added),
-              (ui.label(_('%d removed'), 'status.removed'), status.removed),
-              (ui.label(_('%d renamed'), 'status.copied'), renamed),
-              (ui.label(_('%d copied'), 'status.copied'), copied),
-              (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
-              (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
-              (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
-              (ui.label(_('%d subrepos'), 'status.modified'), subs)]
+    labels = [
+        (ui.label(_('%d modified'), 'status.modified'), status.modified),
+        (ui.label(_('%d added'), 'status.added'), status.added),
+        (ui.label(_('%d removed'), 'status.removed'), status.removed),
+        (ui.label(_('%d renamed'), 'status.copied'), renamed),
+        (ui.label(_('%d copied'), 'status.copied'), copied),
+        (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
+        (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
+        (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
+        (ui.label(_('%d subrepos'), 'status.modified'), subs),
+    ]
     t = []
     for l, s in labels:
         if s:
@@ -5767,11 +6820,18 @@
         t += _(' (merge)')
     elif branch != parents[0].branch():
         t += _(' (new branch)')
-    elif (parents[0].closesbranch() and
-          pnode in repo.branchheads(branch, closed=True)):
+    elif parents[0].closesbranch() and pnode in repo.branchheads(
+        branch, closed=True
+    ):
         t += _(' (head closed)')
-    elif not (status.modified or status.added or status.removed or renamed or
-              copied or subs):
+    elif not (
+        status.modified
+        or status.added
+        or status.removed
+        or renamed
+        or copied
+        or subs
+    ):
         t += _(' (clean)')
         cleanworkdir = True
     elif pnode not in bheads:
@@ -5793,8 +6853,9 @@
         ui.write(_('commit: %s\n') % t.strip())
 
     # all ancestors of branch heads - all ancestors of parent = new csets
-    new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
-                                         bheads))
+    new = len(
+        repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)
+    )
 
     if new == 0:
         # i18n: column positioning for "hg summary"
@@ -5804,8 +6865,10 @@
         ui.write(_('update: %d new changesets (update)\n') % new)
     else:
         # i18n: column positioning for "hg summary"
-        ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
-                 (new, len(bheads)))
+        ui.write(
+            _('update: %d new changesets, %d branch heads (merge)\n')
+            % (new, len(bheads))
+        )
 
     t = []
     draft = len(repo.revs('draft()'))
@@ -5823,9 +6886,9 @@
             numtrouble = len(repo.revs(trouble + "()"))
             # We write all the possibilities to ease translation
             troublemsg = {
-               "orphan": _("orphan: %d changesets"),
-               "contentdivergent": _("content-divergent: %d changesets"),
-               "phasedivergent": _("phase-divergent: %d changesets"),
+                "orphan": _("orphan: %d changesets"),
+                "contentdivergent": _("content-divergent: %d changesets"),
+                "phasedivergent": _("phase-divergent: %d changesets"),
             }
             if numtrouble > 0:
                 ui.status(troublemsg[trouble] % numtrouble + "\n")
@@ -5884,15 +6947,16 @@
             return dest, dbranch, None, None
         else:
             dother = sother
-        if (source != dest or (sbranch is not None and sbranch != dbranch)):
+        if source != dest or (sbranch is not None and sbranch != dbranch):
             common = None
         else:
             common = commoninc
         if revs:
             revs = [repo.lookup(rev) for rev in revs]
         repo.ui.pushbuffer()
-        outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
-                                                commoninc=common)
+        outgoing = discovery.findcommonoutgoing(
+            repo, dother, onlyheads=revs, commoninc=common
+        )
         repo.ui.popbuffer()
         return dest, dbranch, dother, outgoing
 
@@ -5923,21 +6987,32 @@
             # i18n: column positioning for "hg summary"
             ui.status(_('remote: (synced)\n'))
 
-    cmdutil.summaryremotehooks(ui, repo, opts,
-                               ((source, sbranch, sother, commoninc),
-                                (dest, dbranch, dother, outgoing)))
-
-@command('tag',
-    [('f', 'force', None, _('force tag')),
-    ('l', 'local', None, _('make the tag local')),
-    ('r', 'rev', '', _('revision to tag'), _('REV')),
-    ('', 'remove', None, _('remove a tag')),
-    # -l/--local is already there, commitopts cannot be used
-    ('e', 'edit', None, _('invoke editor on commit messages')),
-    ('m', 'message', '', _('use text as commit message'), _('TEXT')),
-    ] + commitopts2,
+    cmdutil.summaryremotehooks(
+        ui,
+        repo,
+        opts,
+        (
+            (source, sbranch, sother, commoninc),
+            (dest, dbranch, dother, outgoing),
+        ),
+    )
+
+
+@command(
+    'tag',
+    [
+        ('f', 'force', None, _('force tag')),
+        ('l', 'local', None, _('make the tag local')),
+        ('r', 'rev', '', _('revision to tag'), _('REV')),
+        ('', 'remove', None, _('remove a tag')),
+        # -l/--local is already there, commitopts cannot be used
+        ('e', 'edit', None, _('invoke editor on commit messages')),
+        ('m', 'message', '', _('use text as commit message'), _('TEXT')),
+    ]
+    + commitopts2,
     _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'),
-    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
+    helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
+)
 def tag(ui, repo, name1, *names, **opts):
     """add one or more tags for the current or given revision
 
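Aside: every flag in the @command tables being rewrapped throughout this file
is a fancyopts-style tuple. A minimal sketch with illustrative values (not
taken from the patch):

    # (short name, long name, default, help text[, value placeholder])
    opt = ('r', 'rev', '', 'revision to tag', 'REV')
    short, longname, default, helptext, placeholder = opt

The optional fifth element is the placeholder shown in help output, e.g. REV
or TEXT in the tables above; black's rewrapping changes only how these tuples
are laid out, not their shape.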
@@ -5979,8 +7054,9 @@
         for n in names:
             scmutil.checknewlabel(repo, n, 'tag')
             if not n:
-                raise error.Abort(_('tag names cannot consist entirely of '
-                                   'whitespace'))
+                raise error.Abort(
+                    _('tag names cannot consist entirely of ' 'whitespace')
+                )
         if opts.get('rev') and opts.get('remove'):
             raise error.Abort(_("--rev and --remove are incompatible"))
         if opts.get('rev'):
@@ -6011,22 +7087,29 @@
         elif not opts.get('force'):
             for n in names:
                 if n in repo.tags():
-                    raise error.Abort(_("tag '%s' already exists "
-                                       "(use -f to force)") % n)
+                    raise error.Abort(
+                        _("tag '%s' already exists " "(use -f to force)") % n
+                    )
         if not opts.get('local'):
             p1, p2 = repo.dirstate.parents()
             if p2 != nullid:
                 raise error.Abort(_('uncommitted merge'))
             bheads = repo.branchheads()
             if not opts.get('force') and bheads and p1 not in bheads:
-                raise error.Abort(_('working directory is not at a branch head '
-                                    '(use -f to force)'))
+                raise error.Abort(
+                    _(
+                        'working directory is not at a branch head '
+                        '(use -f to force)'
+                    )
+                )
         node = scmutil.revsingle(repo, rev_).node()
 
         if not message:
             # we don't translate commit messages
-            message = ('Added tag %s for changeset %s' %
-                       (', '.join(names), short(node)))
+            message = 'Added tag %s for changeset %s' % (
+                ', '.join(names),
+                short(node),
+            )
 
         date = opts.get('date')
         if date:
@@ -6036,21 +7119,36 @@
             editform = 'tag.remove'
         else:
             editform = 'tag.add'
-        editor = cmdutil.getcommiteditor(editform=editform,
-                                         **pycompat.strkwargs(opts))
+        editor = cmdutil.getcommiteditor(
+            editform=editform, **pycompat.strkwargs(opts)
+        )
 
         # don't allow tagging the null rev
-        if (not opts.get('remove') and
-            scmutil.revsingle(repo, rev_).rev() == nullrev):
+        if (
+            not opts.get('remove')
+            and scmutil.revsingle(repo, rev_).rev() == nullrev
+        ):
             raise error.Abort(_("cannot tag null revision"))
 
-        tagsmod.tag(repo, names, node, message, opts.get('local'),
-                    opts.get('user'), date, editor=editor)
+        tagsmod.tag(
+            repo,
+            names,
+            node,
+            message,
+            opts.get('local'),
+            opts.get('user'),
+            date,
+            editor=editor,
+        )
+
 
 @command(
-    'tags', formatteropts, '',
+    'tags',
+    formatteropts,
+    '',
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
-    intents={INTENT_READONLY})
+    intents={INTENT_READONLY},
+)
 def tags(ui, repo, **opts):
     """list repository tags
 
@@ -6088,19 +7186,31 @@
         fm.context(repo=repo)
         fm.write('tag', '%s', t, label=label)
         fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
-        fm.condwrite(not ui.quiet, 'rev node', fmt,
-                     repo.changelog.rev(n), hn, label=label)
-        fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
-                     tagtype, label=label)
+        fm.condwrite(
+            not ui.quiet,
+            'rev node',
+            fmt,
+            repo.changelog.rev(n),
+            hn,
+            label=label,
+        )
+        fm.condwrite(
+            ui.verbose and tagtype, 'type', ' %s', tagtype, label=label
+        )
         fm.plain('\n')
     fm.end()
 
-@command('tip',
-    [('p', 'patch', None, _('show patch')),
-    ('g', 'git', None, _('use git extended diff format')),
-    ] + templateopts,
+
+@command(
+    'tip',
+    [
+        ('p', 'patch', None, _('show patch')),
+        ('g', 'git', None, _('use git extended diff format')),
+    ]
+    + templateopts,
     _('[-p] [-g]'),
-    helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
+    helpcategory=command.CATEGORY_CHANGE_NAVIGATION,
+)
 def tip(ui, repo, **opts):
     """show the tip revision (DEPRECATED)
 
@@ -6122,11 +7232,20 @@
     displayer.show(repo['tip'])
     displayer.close()
 
-@command('unbundle',
-    [('u', 'update', None,
-     _('update to new branch head if changesets were unbundled'))],
+
+@command(
+    'unbundle',
+    [
+        (
+            'u',
+            'update',
+            None,
+            _('update to new branch head if changesets were unbundled'),
+        )
+    ],
     _('[-u] FILE...'),
-    helpcategory=command.CATEGORY_IMPORT_EXPORT)
+    helpcategory=command.CATEGORY_IMPORT_EXPORT,
+)
 def unbundle(ui, repo, fname1, *fnames, **opts):
     """apply one or more bundle files
 
@@ -6142,43 +7261,58 @@
             gen = exchange.readbundle(ui, f, fname)
             if isinstance(gen, streamclone.streamcloneapplier):
                 raise error.Abort(
-                        _('packed bundles cannot be applied with '
-                          '"hg unbundle"'),
-                        hint=_('use "hg debugapplystreamclonebundle"'))
+                    _('packed bundles cannot be applied with ' '"hg unbundle"'),
+                    hint=_('use "hg debugapplystreamclonebundle"'),
+                )
             url = 'bundle:' + fname
             try:
                 txnname = 'unbundle'
                 if not isinstance(gen, bundle2.unbundle20):
                     txnname = 'unbundle\n%s' % util.hidepassword(url)
                 with repo.transaction(txnname) as tr:
-                    op = bundle2.applybundle(repo, gen, tr, source='unbundle',
-                                             url=url)
+                    op = bundle2.applybundle(
+                        repo, gen, tr, source='unbundle', url=url
+                    )
             except error.BundleUnknownFeatureError as exc:
                 raise error.Abort(
                     _('%s: unknown bundle feature, %s') % (fname, exc),
-                    hint=_("see https://mercurial-scm.org/"
-                           "wiki/BundleFeature for more "
-                           "information"))
+                    hint=_(
+                        "see https://mercurial-scm.org/"
+                        "wiki/BundleFeature for more "
+                        "information"
+                    ),
+                )
             modheads = bundle2.combinechangegroupresults(op)
 
     return postincoming(ui, repo, modheads, opts.get(r'update'), None, None)
 
-@command('unshelve',
-         [('a', 'abort', None,
-           _('abort an incomplete unshelve operation')),
-          ('c', 'continue', None,
-           _('continue an incomplete unshelve operation')),
-          ('i', 'interactive', None,
-           _('use interactive mode (EXPERIMENTAL)')),
-          ('k', 'keep', None,
-           _('keep shelve after unshelving')),
-          ('n', 'name', '',
-           _('restore shelved change with given name'), _('NAME')),
-          ('t', 'tool', '', _('specify merge tool')),
-          ('', 'date', '',
-           _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
-         _('hg unshelve [OPTION]... [FILE]... [-n SHELVED]'),
-         helpcategory=command.CATEGORY_WORKING_DIRECTORY)
+
+@command(
+    'unshelve',
+    [
+        ('a', 'abort', None, _('abort an incomplete unshelve operation')),
+        ('c', 'continue', None, _('continue an incomplete unshelve operation')),
+        ('i', 'interactive', None, _('use interactive mode (EXPERIMENTAL)')),
+        ('k', 'keep', None, _('keep shelve after unshelving')),
+        (
+            'n',
+            'name',
+            '',
+            _('restore shelved change with given name'),
+            _('NAME'),
+        ),
+        ('t', 'tool', '', _('specify merge tool')),
+        (
+            '',
+            'date',
+            '',
+            _('set date for temporary commits (DEPRECATED)'),
+            _('DATE'),
+        ),
+    ],
+    _('hg unshelve [OPTION]... [FILE]... [-n SHELVED]'),
+    helpcategory=command.CATEGORY_WORKING_DIRECTORY,
+)
 def unshelve(ui, repo, *shelved, **opts):
     """restore a shelved change to the working directory
 
@@ -6226,23 +7360,31 @@
     with repo.wlock():
         return shelvemod.dounshelve(ui, repo, *shelved, **opts)
 
+
 statemod.addunfinished(
-    'unshelve', fname='shelvedstate', continueflag=True,
+    'unshelve',
+    fname='shelvedstate',
+    continueflag=True,
     abortfunc=shelvemod.hgabortunshelve,
     continuefunc=shelvemod.hgcontinueunshelve,
     cmdmsg=_('unshelve already in progress'),
 )
 
-@command('update|up|checkout|co',
-    [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
-    ('c', 'check', None, _('require clean working directory')),
-    ('m', 'merge', None, _('merge uncommitted changes')),
-    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
-    ('r', 'rev', '', _('revision'), _('REV'))
-     ] + mergetoolopts,
+
+@command(
+    'update|up|checkout|co',
+    [
+        ('C', 'clean', None, _('discard uncommitted changes (no backup)')),
+        ('c', 'check', None, _('require clean working directory')),
+        ('m', 'merge', None, _('merge uncommitted changes')),
+        ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
+        ('r', 'rev', '', _('revision'), _('REV')),
+    ]
+    + mergetoolopts,
     _('[-C|-c|-m] [-d DATE] [[-r] REV]'),
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
-    helpbasic=True)
+    helpbasic=True,
+)
 def update(ui, repo, node=None, **opts):
     """update working directory (or switch revisions)
 
@@ -6308,8 +7450,10 @@
 
     if ui.configbool('commands', 'update.requiredest'):
         if not node and not rev and not date:
-            raise error.Abort(_('you must specify a destination'),
-                              hint=_('for example: hg update ".::"'))
+            raise error.Abort(
+                _('you must specify a destination'),
+                hint=_('for example: hg update ".::"'),
+            )
 
     if rev is None or rev == '':
         rev = node
@@ -6318,8 +7462,12 @@
         raise error.Abort(_("you can't specify a revision and a date"))
 
     if len([x for x in (clean, check, merge) if x]) > 1:
-        raise error.Abort(_("can only specify one of -C/--clean, -c/--check, "
-                            "or -m/--merge"))
+        raise error.Abort(
+            _(
+                "can only specify one of -C/--clean, -c/--check, "
+                "or -m/--merge"
+            )
+        )
 
     updatecheck = None
     if check:
@@ -6341,8 +7489,9 @@
         hidden = ctx.hidden()
         overrides = {('ui', 'forcemerge'): opts.get(r'tool', '')}
         with ui.configoverride(overrides, 'update'):
-            ret = hg.updatetotally(ui, repo, rev, brev, clean=clean,
-                                   updatecheck=updatecheck)
+            ret = hg.updatetotally(
+                ui, repo, rev, brev, clean=clean, updatecheck=updatecheck
+            )
         if hidden:
             ctxstr = ctx.hex()[:12]
             ui.warn(_("updated to hidden changeset %s\n") % ctxstr)
@@ -6352,9 +7501,12 @@
                 ui.warn("(%s)\n" % obsfatemsg)
         return ret
 
-@command('verify',
-         [('', 'full', False, 'perform more checks (EXPERIMENTAL)')],
-         helpcategory=command.CATEGORY_MAINTENANCE)
+
+@command(
+    'verify',
+    [('', 'full', False, 'perform more checks (EXPERIMENTAL)')],
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
 def verify(ui, repo, **opts):
     """verify the integrity of the repository
 
@@ -6378,9 +7530,14 @@
         level = verifymod.VERIFY_FULL
     return hg.verify(repo, level)
 
+
 @command(
-    'version', [] + formatteropts, helpcategory=command.CATEGORY_HELP,
-    norepo=True, intents={INTENT_READONLY})
+    'version',
+    [] + formatteropts,
+    helpcategory=command.CATEGORY_HELP,
+    norepo=True,
+    intents={INTENT_READONLY},
+)
 def version_(ui, **opts):
     """output version and copyright information
 
@@ -6404,8 +7561,9 @@
         ui.pager('version')
     fm = ui.formatter("version", opts)
     fm.startitem()
-    fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
-             util.version())
+    fm.write(
+        "ver", _("Mercurial Distributed SCM (version %s)\n"), util.version()
+    )
     license = _(
         "(see https://mercurial-scm.org for more information)\n"
         "\nCopyright (C) 2005-2019 Matt Mackall and others\n"
@@ -6442,11 +7600,14 @@
     fn.end()
     fm.end()
 
+
 def loadcmdtable(ui, name, cmdtable):
     """Load command functions from specified cmdtable
     """
     overrides = [cmd for cmd in cmdtable if cmd in table]
     if overrides:
-        ui.warn(_("extension '%s' overrides commands: %s\n")
-                % (name, " ".join(overrides)))
+        ui.warn(
+            _("extension '%s' overrides commands: %s\n")
+            % (name, " ".join(overrides))
+        )
     table.update(cmdtable)
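
Aside: the override warning in loadcmdtable above reduces to an intersection
check between an extension's command table and the global one. A standalone
sketch with hypothetical table contents:

    table = {b'commit': None, b'log': None}    # stand-in for the core table
    cmdtable = {b'log': None, b'mycmd': None}  # stand-in for an extension's
    overrides = [cmd for cmd in cmdtable if cmd in table]
    assert overrides == [b'log']
    # a non-empty list triggers the "extension ... overrides commands" warning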
--- a/mercurial/commandserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/commandserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
 
 try:
     import selectors
+
     selectors.BaseSelector
 except ImportError:
     from .thirdparty import selectors2 as selectors
@@ -37,6 +38,7 @@
     procutil,
 )
 
+
 class channeledoutput(object):
     """
     Write data to out in the following format:
@@ -44,6 +46,7 @@
     data length (unsigned int),
     data
     """
+
     def __init__(self, out, channel):
         self.out = out
         self.channel = channel
@@ -64,6 +67,7 @@
             raise AttributeError(attr)
         return getattr(self.out, attr)
 
+
 class channeledmessage(object):
     """
     Write encoded message and metadata to out in the following format:
@@ -92,6 +96,7 @@
     def __getattr__(self, attr):
         return getattr(self._cout, attr)
 
+
 class channeledinput(object):
     """
     Read data from in_.
@@ -178,10 +183,12 @@
             raise AttributeError(attr)
         return getattr(self.in_, attr)
 
+
 _messageencoders = {
     b'cbor': lambda v: b''.join(cborutil.streamencode(v)),
 }
 
+
 def _selectmessageencoder(ui):
     # experimental config: cmdserver.message-encodings
     encnames = ui.configlist(b'cmdserver', b'message-encodings')
@@ -189,14 +196,17 @@
         f = _messageencoders.get(n)
         if f:
             return n, f
-    raise error.Abort(b'no supported message encodings: %s'
-                      % b' '.join(encnames))
+    raise error.Abort(
+        b'no supported message encodings: %s' % b' '.join(encnames)
+    )
+
 
 class server(object):
     """
     Listens for commands on fin, runs them and writes the output on a channel
     based stream to fout.
     """
+
     def __init__(self, ui, repo, fin, fout, prereposetups=None):
         self.cwd = encoding.getcwd()
 
@@ -282,7 +292,7 @@
             self.repo.baseui = copiedui
             # clone ui without using ui.copy because this is protected
             repoui = self.repoui.__class__(self.repoui)
-            repoui.copy = copiedui.copy # redo copy protection
+            repoui.copy = copiedui.copy  # redo copy protection
             uis.append(repoui)
             self.repo.ui = self.repo.dirstate._ui = repoui
             self.repo.invalidateall()
@@ -295,9 +305,16 @@
             if not util.safehasattr(self.cin, 'fileno'):
                 ui.setconfig('ui', 'nontty', 'true', 'commandserver')
 
-        req = dispatch.request(args[:], copiedui, self.repo, self.cin,
-                               self.cout, self.cerr, self.cmsg,
-                               prereposetups=self._prereposetups)
+        req = dispatch.request(
+            args[:],
+            copiedui,
+            self.repo,
+            self.cin,
+            self.cout,
+            self.cerr,
+            self.cmsg,
+            prereposetups=self._prereposetups,
+        )
 
         try:
             ret = dispatch.dispatch(req) & 255
@@ -324,8 +341,7 @@
 
         return cmd != ''
 
-    capabilities = {'runcommand': runcommand,
-                    'getencoding': getencoding}
+    capabilities = {'runcommand': runcommand, 'getencoding': getencoding}
 
     def serve(self):
         hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities))
@@ -352,6 +368,7 @@
 
         return 0
 
+
 def setuplogging(ui, repo=None, fp=None):
     """Set up server logging facility
 
@@ -377,8 +394,13 @@
         # developer config: cmdserver.max-log-size
         maxsize = ui.configbytes(b'cmdserver', b'max-log-size')
         vfs = vfsmod.vfs(os.path.dirname(logpath))
-        logger = loggingutil.filelogger(vfs, os.path.basename(logpath), tracked,
-                                        maxfiles=maxfiles, maxsize=maxsize)
+        logger = loggingutil.filelogger(
+            vfs,
+            os.path.basename(logpath),
+            tracked,
+            maxfiles=maxfiles,
+            maxsize=maxsize,
+        )
 
     targetuis = {ui}
     if repo:
@@ -387,6 +409,7 @@
     for u in targetuis:
         u.setlogger(b'cmdserver', logger)
 
+
 class pipeservice(object):
     def __init__(self, ui, repo, opts):
         self.ui = ui
@@ -406,6 +429,7 @@
             finally:
                 sv.cleanup()
 
+
 def _initworkerprocess():
     # use a different process group from the master process, in order to:
     # 1. make the current process group no longer "orphaned" (because the
@@ -423,6 +447,7 @@
     # same state inherited from parent.
     random.seed()
 
+
 def _serverequest(ui, repo, conn, createcmdserver, prereposetups):
     fin = conn.makefile(r'rb')
     fout = conn.makefile(r'wb')
@@ -442,7 +467,7 @@
             pass
         finally:
             sv.cleanup()
-    except: # re-raises
+    except:  # re-raises
         # also write traceback to error channel. otherwise client cannot
         # see it because it is written to server's stderr by default.
         if sv:
@@ -459,6 +484,7 @@
             if inst.errno != errno.EPIPE:
                 raise
 
+
 class unixservicehandler(object):
     """Set of pluggable operations for unix-mode services
 
@@ -492,6 +518,7 @@
         serves for the current connection"""
         return server(self.ui, repo, fin, fout, prereposetups)
 
+
 class unixforkingservice(object):
     """
     Listens on unix domain socket and forks server per connection
@@ -558,10 +585,12 @@
         exiting = False
         h = self._servicehandler
         selector = selectors.DefaultSelector()
-        selector.register(self._sock, selectors.EVENT_READ,
-                          self._acceptnewconnection)
-        selector.register(self._mainipc, selectors.EVENT_READ,
-                          self._handlemainipc)
+        selector.register(
+            self._sock, selectors.EVENT_READ, self._acceptnewconnection
+        )
+        selector.register(
+            self._mainipc, selectors.EVENT_READ, self._handlemainipc
+        )
         while True:
             if not exiting and h.shouldexit():
                 # clients can no longer connect() to the domain socket, so
@@ -605,8 +634,9 @@
         pid = os.fork()
         if pid:
             try:
-                self.ui.log(b'cmdserver', b'forked worker process (pid=%d)\n',
-                            pid)
+                self.ui.log(
+                    b'cmdserver', b'forked worker process (pid=%d)\n', pid
+                )
                 self._workerpids.add(pid)
                 h.newconnection()
             finally:
@@ -662,8 +692,13 @@
         _initworkerprocess()
         h = self._servicehandler
         try:
-            _serverequest(self.ui, self.repo, conn, h.createcmdserver,
-                          prereposetups=[self._reposetup])
+            _serverequest(
+                self.ui,
+                self.repo,
+                conn,
+                h.createcmdserver,
+                prereposetups=[self._reposetup],
+            )
         finally:
             gc.collect()  # trigger __del__ since worker process uses os._exit
 
@@ -677,8 +712,9 @@
                 try:
                     self._cmdserveripc.send(self.root)
                 except socket.error:
-                    self.ui.log(b'cmdserver',
-                                b'failed to send repo root to master\n')
+                    self.ui.log(
+                        b'cmdserver', b'failed to send repo root to master\n'
+                    )
 
         repo.__class__ = unixcmdserverrepo
         repo._cmdserveripc = self._workeripc
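
Aside: a minimal sketch of the channel framing that channeledoutput's
docstring earlier in this file describes — each write is the channel
character, then the data length as a big-endian unsigned int, then the
payload. This assumes the standard command server wire format and is
simplified from the class itself:

    import struct

    def frame(channel, data):
        # channel id (1 byte) + 4-byte big-endian length + payload
        return channel + struct.pack('>I', len(data)) + data

    assert frame(b'o', b'hi') == b'o\x00\x00\x00\x02hi'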
--- a/mercurial/config.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/config.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,6 +17,7 @@
     util,
 )
 
+
 class config(object):
     def __init__(self, data=None, includepaths=None):
         self._data = {}
@@ -28,17 +29,23 @@
             self._source = data._source.copy()
         else:
             self._source = util.cowdict()
+
     def copy(self):
         return config(self)
+
     def __contains__(self, section):
         return section in self._data
+
     def hasitem(self, section, item):
         return item in self._data.get(section, {})
+
     def __getitem__(self, section):
         return self._data.get(section, {})
+
     def __iter__(self):
         for d in self.sections():
             yield d
+
     def update(self, src):
         self._source = self._source.preparewrite()
         for s, n in src._unset:
@@ -55,6 +62,7 @@
                 self._data[s] = util.cowsortdict()
             self._data[s].update(src._data[s])
         self._source.update(src._source)
+
     def get(self, section, item, default=None):
         return self._data.get(section, {}).get(item, default)
 
@@ -72,18 +80,24 @@
 
     def source(self, section, item):
         return self._source.get((section, item), "")
+
     def sections(self):
         return sorted(self._data.keys())
+
     def items(self, section):
         return list(self._data.get(section, {}).iteritems())
+
     def set(self, section, item, value, source=""):
         if pycompat.ispy3:
-            assert not isinstance(section, str), (
-                'config section may not be unicode strings on Python 3')
-            assert not isinstance(item, str), (
-                'config item may not be unicode strings on Python 3')
-            assert not isinstance(value, str), (
-                'config values may not be unicode strings on Python 3')
+            assert not isinstance(
+                section, str
+            ), 'config section may not be unicode strings on Python 3'
+            assert not isinstance(
+                item, str
+            ), 'config item may not be unicode strings on Python 3'
+            assert not isinstance(
+                value, str
+            ), 'config values may not be unicode strings on Python 3'
         if section not in self:
             self._data[section] = util.cowsortdict()
         else:
@@ -156,9 +170,11 @@
                         break
                     except IOError as inst:
                         if inst.errno != errno.ENOENT:
-                            raise error.ParseError(_("cannot include %s (%s)")
-                                                   % (inc, inst.strerror),
-                                                   "%s:%d" % (src, line))
+                            raise error.ParseError(
+                                _("cannot include %s (%s)")
+                                % (inc, inst.strerror),
+                                "%s:%d" % (src, line),
+                            )
                 continue
             if emptyre.match(l):
                 continue
@@ -194,11 +210,16 @@
     def read(self, path, fp=None, sections=None, remap=None):
         if not fp:
             fp = util.posixfile(path, 'rb')
-        assert getattr(fp, 'mode', r'rb') == r'rb', (
-            'config files must be opened in binary mode, got fp=%r mode=%r' % (
-                fp, fp.mode))
-        self.parse(path, fp.read(),
-                   sections=sections, remap=remap, include=self.read)
+        assert (
+            getattr(fp, 'mode', r'rb') == r'rb'
+        ), 'config files must be opened in binary mode, got fp=%r mode=%r' % (
+            fp,
+            fp.mode,
+        )
+        self.parse(
+            path, fp.read(), sections=sections, remap=remap, include=self.read
+        )
+
 
 def parselist(value):
     """parse a configuration value as a list of comma/space separated strings
@@ -209,38 +230,44 @@
 
     def _parse_plain(parts, s, offset):
         whitespace = False
-        while offset < len(s) and (s[offset:offset + 1].isspace()
-                                   or s[offset:offset + 1] == ','):
+        while offset < len(s) and (
+            s[offset : offset + 1].isspace() or s[offset : offset + 1] == ','
+        ):
             whitespace = True
             offset += 1
         if offset >= len(s):
             return None, parts, offset
         if whitespace:
             parts.append('')
-        if s[offset:offset + 1] == '"' and not parts[-1]:
+        if s[offset : offset + 1] == '"' and not parts[-1]:
             return _parse_quote, parts, offset + 1
-        elif s[offset:offset + 1] == '"' and parts[-1][-1:] == '\\':
-            parts[-1] = parts[-1][:-1] + s[offset:offset + 1]
+        elif s[offset : offset + 1] == '"' and parts[-1][-1:] == '\\':
+            parts[-1] = parts[-1][:-1] + s[offset : offset + 1]
             return _parse_plain, parts, offset + 1
-        parts[-1] += s[offset:offset + 1]
+        parts[-1] += s[offset : offset + 1]
         return _parse_plain, parts, offset + 1
 
     def _parse_quote(parts, s, offset):
-        if offset < len(s) and s[offset:offset + 1] == '"': # ""
+        if offset < len(s) and s[offset : offset + 1] == '"':  # ""
             parts.append('')
             offset += 1
-            while offset < len(s) and (s[offset:offset + 1].isspace() or
-                    s[offset:offset + 1] == ','):
+            while offset < len(s) and (
+                s[offset : offset + 1].isspace()
+                or s[offset : offset + 1] == ','
+            ):
                 offset += 1
             return _parse_plain, parts, offset
 
-        while offset < len(s) and s[offset:offset + 1] != '"':
-            if (s[offset:offset + 1] == '\\' and offset + 1 < len(s)
-                    and s[offset + 1:offset + 2] == '"'):
+        while offset < len(s) and s[offset : offset + 1] != '"':
+            if (
+                s[offset : offset + 1] == '\\'
+                and offset + 1 < len(s)
+                and s[offset + 1 : offset + 2] == '"'
+            ):
                 offset += 1
                 parts[-1] += '"'
             else:
-                parts[-1] += s[offset:offset + 1]
+                parts[-1] += s[offset : offset + 1]
             offset += 1
 
         if offset >= len(s):
@@ -254,11 +281,11 @@
             return None, parts, offset
 
         offset += 1
-        while offset < len(s) and s[offset:offset + 1] in [' ', ',']:
+        while offset < len(s) and s[offset : offset + 1] in [' ', ',']:
             offset += 1
 
         if offset < len(s):
-            if offset + 1 == len(s) and s[offset:offset + 1] == '"':
+            if offset + 1 == len(s) and s[offset : offset + 1] == '"':
                 parts[-1] += '"'
                 offset += 1
             else:
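
Aside: a usage sketch for the parselist state machine above (assumes a
mercurial checkout is importable; byte-string results shown as on Python 3):

    from mercurial import config
    parts = config.parselist(b'this,is "a small" ,test')
    # parts == [b'this', b'is', b'a small', b'test']
    # commas/whitespace split values; double quotes group words

The `s[offset : offset + 1]` slices that black re-spaced here are the
bytes-safe way to read one "character" on Python 3, where indexing a bytes
object would yield an int instead.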
--- a/mercurial/configitems.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/configitems.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,6 +15,7 @@
     error,
 )
 
+
 def loadconfigtable(ui, extname, configtable):
     """update config item known to the ui with the extension ones"""
     for section, items in sorted(configtable.items()):
@@ -28,6 +29,7 @@
 
         knownitems.update(items)
 
+
 class configitem(object):
     """represent a known config item
 
@@ -38,8 +40,16 @@
     :generic: this is a generic definition, match name using regular expression.
     """
 
-    def __init__(self, section, name, default=None, alias=(),
-                 generic=False, priority=0, experimental=False):
+    def __init__(
+        self,
+        section,
+        name,
+        default=None,
+        alias=(),
+        generic=False,
+        priority=0,
+        experimental=False,
+    ):
         self.section = section
         self.name = name
         self.default = default
@@ -51,6 +61,7 @@
         if generic:
             self._re = re.compile(self.name)
 
+
 class itemregister(dict):
     """A specialized dictionary that can handle wild-card selection"""
 
@@ -91,8 +102,10 @@
 
         return None
 
+
 coreitems = {}
 
+
 def _register(configtable, *args, **kwargs):
     item = configitem(*args, **kwargs)
     section = configtable.setdefault(item.section, itemregister())
@@ -101,1428 +114,1418 @@
         raise error.ProgrammingError(msg % (item.section, item.name))
     section[item.name] = item
 
+
 # special value for case where the default is derived from other values
 dynamicdefault = object()
 
 # Registering actual config items
 
+
 def getitemregister(configtable):
     f = functools.partial(_register, configtable)
     # export pseudo enum as configitem.*
     f.dynamicdefault = dynamicdefault
     return f
 
+
 coreconfigitem = getitemregister(coreitems)
 
+
 def _registerdiffopts(section, configprefix=''):
-    coreconfigitem(section, configprefix + 'nodates',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'nodates', default=False,
     )
-    coreconfigitem(section, configprefix + 'showfunc',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'showfunc', default=False,
     )
-    coreconfigitem(section, configprefix + 'unified',
-        default=None,
+    coreconfigitem(
+        section, configprefix + 'unified', default=None,
     )
-    coreconfigitem(section, configprefix + 'git',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'git', default=False,
     )
-    coreconfigitem(section, configprefix + 'ignorews',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'ignorews', default=False,
     )
-    coreconfigitem(section, configprefix + 'ignorewsamount',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'ignorewsamount', default=False,
     )
-    coreconfigitem(section, configprefix + 'ignoreblanklines',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'ignoreblanklines', default=False,
     )
-    coreconfigitem(section, configprefix + 'ignorewseol',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'ignorewseol', default=False,
     )
-    coreconfigitem(section, configprefix + 'nobinary',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'nobinary', default=False,
     )
-    coreconfigitem(section, configprefix + 'noprefix',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'noprefix', default=False,
     )
-    coreconfigitem(section, configprefix + 'word-diff',
-        default=False,
+    coreconfigitem(
+        section, configprefix + 'word-diff', default=False,
     )
 
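Aside: the generic entries registered below, such as ('alias', '.*'), are
resolved by itemregister earlier in this file: exact names win, then generic
items are regex-matched. A miniature illustration only — the real class also
orders generic items by priority, elided here:

    import re

    class minireg(dict):
        def __init__(self):
            dict.__init__(self)
            self._generics = []

        def register(self, name, value, generic=False):
            self[name] = value
            if generic:
                self._generics.append((re.compile(name), value))

        def lookup(self, name):
            if name in self:
                return self[name]
            for pattern, value in self._generics:
                if pattern.match(name):
                    return value
            return None

    reg = minireg()
    reg.register('.*', 'fallback', generic=True)
    assert reg.lookup('anything') == 'fallback'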
-coreconfigitem('alias', '.*',
-    default=dynamicdefault,
-    generic=True,
+
+coreconfigitem(
+    'alias', '.*', default=dynamicdefault, generic=True,
 )
-coreconfigitem('auth', 'cookiefile',
-    default=None,
+coreconfigitem(
+    'auth', 'cookiefile', default=None,
 )
 _registerdiffopts(section='annotate')
 # bookmarks.pushing: internal hack for discovery
-coreconfigitem('bookmarks', 'pushing',
-    default=list,
+coreconfigitem(
+    'bookmarks', 'pushing', default=list,
 )
 # bundle.mainreporoot: internal hack for bundlerepo
-coreconfigitem('bundle', 'mainreporoot',
-    default='',
+coreconfigitem(
+    'bundle', 'mainreporoot', default='',
 )
-coreconfigitem('censor', 'policy',
-    default='abort',
-    experimental=True,
+coreconfigitem(
+    'censor', 'policy', default='abort', experimental=True,
 )
-coreconfigitem('chgserver', 'idletimeout',
-    default=3600,
+coreconfigitem(
+    'chgserver', 'idletimeout', default=3600,
 )
-coreconfigitem('chgserver', 'skiphash',
-    default=False,
+coreconfigitem(
+    'chgserver', 'skiphash', default=False,
 )
-coreconfigitem('cmdserver', 'log',
-    default=None,
+coreconfigitem(
+    'cmdserver', 'log', default=None,
 )
-coreconfigitem('cmdserver', 'max-log-files',
-    default=7,
+coreconfigitem(
+    'cmdserver', 'max-log-files', default=7,
 )
-coreconfigitem('cmdserver', 'max-log-size',
-    default='1 MB',
+coreconfigitem(
+    'cmdserver', 'max-log-size', default='1 MB',
 )
-coreconfigitem('cmdserver', 'max-repo-cache',
-    default=0,
-    experimental=True,
+coreconfigitem(
+    'cmdserver', 'max-repo-cache', default=0, experimental=True,
 )
-coreconfigitem('cmdserver', 'message-encodings',
-    default=list,
-    experimental=True,
+coreconfigitem(
+    'cmdserver', 'message-encodings', default=list, experimental=True,
 )
-coreconfigitem('cmdserver', 'track-log',
+coreconfigitem(
+    'cmdserver',
+    'track-log',
     default=lambda: ['chgserver', 'cmdserver', 'repocache'],
 )
-coreconfigitem('color', '.*',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'color', '.*', default=None, generic=True,
 )
-coreconfigitem('color', 'mode',
-    default='auto',
+coreconfigitem(
+    'color', 'mode', default='auto',
 )
-coreconfigitem('color', 'pagermode',
-    default=dynamicdefault,
+coreconfigitem(
+    'color', 'pagermode', default=dynamicdefault,
 )
 _registerdiffopts(section='commands', configprefix='commit.interactive.')
-coreconfigitem('commands', 'commit.post-status',
-    default=False,
+coreconfigitem(
+    'commands', 'commit.post-status', default=False,
 )
-coreconfigitem('commands', 'grep.all-files',
-    default=False,
-    experimental=True,
+coreconfigitem(
+    'commands', 'grep.all-files', default=False, experimental=True,
 )
-coreconfigitem('commands', 'resolve.confirm',
-    default=False,
+coreconfigitem(
+    'commands', 'resolve.confirm', default=False,
 )
-coreconfigitem('commands', 'resolve.explicit-re-merge',
-    default=False,
+coreconfigitem(
+    'commands', 'resolve.explicit-re-merge', default=False,
 )
-coreconfigitem('commands', 'resolve.mark-check',
-    default='none',
+coreconfigitem(
+    'commands', 'resolve.mark-check', default='none',
 )
 _registerdiffopts(section='commands', configprefix='revert.interactive.')
-coreconfigitem('commands', 'show.aliasprefix',
-    default=list,
+coreconfigitem(
+    'commands', 'show.aliasprefix', default=list,
+)
+coreconfigitem(
+    'commands', 'status.relative', default=False,
 )
-coreconfigitem('commands', 'status.relative',
-    default=False,
+coreconfigitem(
+    'commands', 'status.skipstates', default=[], experimental=True,
 )
-coreconfigitem('commands', 'status.skipstates',
-    default=[],
-    experimental=True,
+coreconfigitem(
+    'commands', 'status.terse', default='',
 )
-coreconfigitem('commands', 'status.terse',
-    default='',
+coreconfigitem(
+    'commands', 'status.verbose', default=False,
 )
-coreconfigitem('commands', 'status.verbose',
-    default=False,
+coreconfigitem(
+    'commands', 'update.check', default=None,
 )
-coreconfigitem('commands', 'update.check',
-    default=None,
+coreconfigitem(
+    'commands', 'update.requiredest', default=False,
 )
-coreconfigitem('commands', 'update.requiredest',
-    default=False,
+coreconfigitem(
+    'committemplate', '.*', default=None, generic=True,
 )
-coreconfigitem('committemplate', '.*',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'convert', 'bzr.saverev', default=True,
+)
+coreconfigitem(
+    'convert', 'cvsps.cache', default=True,
 )
-coreconfigitem('convert', 'bzr.saverev',
-    default=True,
+coreconfigitem(
+    'convert', 'cvsps.fuzz', default=60,
 )
-coreconfigitem('convert', 'cvsps.cache',
-    default=True,
+coreconfigitem(
+    'convert', 'cvsps.logencoding', default=None,
 )
-coreconfigitem('convert', 'cvsps.fuzz',
-    default=60,
+coreconfigitem(
+    'convert', 'cvsps.mergefrom', default=None,
 )
-coreconfigitem('convert', 'cvsps.logencoding',
-    default=None,
+coreconfigitem(
+    'convert', 'cvsps.mergeto', default=None,
 )
-coreconfigitem('convert', 'cvsps.mergefrom',
-    default=None,
+coreconfigitem(
+    'convert', 'git.committeractions', default=lambda: ['messagedifferent'],
 )
-coreconfigitem('convert', 'cvsps.mergeto',
-    default=None,
-)
-coreconfigitem('convert', 'git.committeractions',
-    default=lambda: ['messagedifferent'],
+coreconfigitem(
+    'convert', 'git.extrakeys', default=list,
 )
-coreconfigitem('convert', 'git.extrakeys',
-    default=list,
+coreconfigitem(
+    'convert', 'git.findcopiesharder', default=False,
 )
-coreconfigitem('convert', 'git.findcopiesharder',
-    default=False,
+coreconfigitem(
+    'convert', 'git.remoteprefix', default='remote',
 )
-coreconfigitem('convert', 'git.remoteprefix',
-    default='remote',
+coreconfigitem(
+    'convert', 'git.renamelimit', default=400,
 )
-coreconfigitem('convert', 'git.renamelimit',
-    default=400,
+coreconfigitem(
+    'convert', 'git.saverev', default=True,
 )
-coreconfigitem('convert', 'git.saverev',
-    default=True,
+coreconfigitem(
+    'convert', 'git.similarity', default=50,
 )
-coreconfigitem('convert', 'git.similarity',
-    default=50,
+coreconfigitem(
+    'convert', 'git.skipsubmodules', default=False,
 )
-coreconfigitem('convert', 'git.skipsubmodules',
-    default=False,
+coreconfigitem(
+    'convert', 'hg.clonebranches', default=False,
 )
-coreconfigitem('convert', 'hg.clonebranches',
-    default=False,
+coreconfigitem(
+    'convert', 'hg.ignoreerrors', default=False,
 )
-coreconfigitem('convert', 'hg.ignoreerrors',
-    default=False,
+coreconfigitem(
+    'convert', 'hg.preserve-hash', default=False,
 )
-coreconfigitem('convert', 'hg.preserve-hash',
-    default=False,
+coreconfigitem(
+    'convert', 'hg.revs', default=None,
 )
-coreconfigitem('convert', 'hg.revs',
-    default=None,
+coreconfigitem(
+    'convert', 'hg.saverev', default=False,
 )
-coreconfigitem('convert', 'hg.saverev',
-    default=False,
+coreconfigitem(
+    'convert', 'hg.sourcename', default=None,
 )
-coreconfigitem('convert', 'hg.sourcename',
-    default=None,
+coreconfigitem(
+    'convert', 'hg.startrev', default=None,
 )
-coreconfigitem('convert', 'hg.startrev',
-    default=None,
+coreconfigitem(
+    'convert', 'hg.tagsbranch', default='default',
 )
-coreconfigitem('convert', 'hg.tagsbranch',
-    default='default',
+coreconfigitem(
+    'convert', 'hg.usebranchnames', default=True,
 )
-coreconfigitem('convert', 'hg.usebranchnames',
-    default=True,
+coreconfigitem(
+    'convert', 'ignoreancestorcheck', default=False, experimental=True,
 )
-coreconfigitem('convert', 'ignoreancestorcheck',
-    default=False,
-    experimental=True,
+coreconfigitem(
+    'convert', 'localtimezone', default=False,
 )
-coreconfigitem('convert', 'localtimezone',
-    default=False,
+coreconfigitem(
+    'convert', 'p4.encoding', default=dynamicdefault,
 )
-coreconfigitem('convert', 'p4.encoding',
-    default=dynamicdefault,
+coreconfigitem(
+    'convert', 'p4.startrev', default=0,
 )
-coreconfigitem('convert', 'p4.startrev',
-    default=0,
+coreconfigitem(
+    'convert', 'skiptags', default=False,
 )
-coreconfigitem('convert', 'skiptags',
-    default=False,
+coreconfigitem(
+    'convert', 'svn.debugsvnlog', default=True,
 )
-coreconfigitem('convert', 'svn.debugsvnlog',
-    default=True,
+coreconfigitem(
+    'convert', 'svn.trunk', default=None,
 )
-coreconfigitem('convert', 'svn.trunk',
-    default=None,
+coreconfigitem(
+    'convert', 'svn.tags', default=None,
 )
-coreconfigitem('convert', 'svn.tags',
-    default=None,
+coreconfigitem(
+    'convert', 'svn.branches', default=None,
 )
-coreconfigitem('convert', 'svn.branches',
-    default=None,
+coreconfigitem(
+    'convert', 'svn.startrev', default=0,
 )
-coreconfigitem('convert', 'svn.startrev',
-    default=0,
+coreconfigitem(
+    'debug', 'dirstate.delaywrite', default=0,
+)
+coreconfigitem(
+    'defaults', '.*', default=None, generic=True,
 )
-coreconfigitem('debug', 'dirstate.delaywrite',
-    default=0,
+coreconfigitem(
+    'devel', 'all-warnings', default=False,
 )
-coreconfigitem('defaults', '.*',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'devel', 'bundle2.debug', default=False,
 )
-coreconfigitem('devel', 'all-warnings',
-    default=False,
+coreconfigitem(
+    'devel', 'bundle.delta', default='',
 )
-coreconfigitem('devel', 'bundle2.debug',
-    default=False,
+coreconfigitem(
+    'devel', 'cache-vfs', default=None,
 )
-coreconfigitem('devel', 'bundle.delta',
-    default='',
+coreconfigitem(
+    'devel', 'check-locks', default=False,
 )
-coreconfigitem('devel', 'cache-vfs',
-    default=None,
-)
-coreconfigitem('devel', 'check-locks',
-    default=False,
+coreconfigitem(
+    'devel', 'check-relroot', default=False,
 )
-coreconfigitem('devel', 'check-relroot',
-    default=False,
+coreconfigitem(
+    'devel', 'default-date', default=None,
 )
-coreconfigitem('devel', 'default-date',
-    default=None,
+coreconfigitem(
+    'devel', 'deprec-warn', default=False,
 )
-coreconfigitem('devel', 'deprec-warn',
-    default=False,
+coreconfigitem(
+    'devel', 'disableloaddefaultcerts', default=False,
 )
-coreconfigitem('devel', 'disableloaddefaultcerts',
-    default=False,
+coreconfigitem(
+    'devel', 'warn-empty-changegroup', default=False,
 )
-coreconfigitem('devel', 'warn-empty-changegroup',
-    default=False,
+coreconfigitem(
+    'devel', 'legacy.exchange', default=list,
 )
-coreconfigitem('devel', 'legacy.exchange',
-    default=list,
+coreconfigitem(
+    'devel', 'servercafile', default='',
 )
-coreconfigitem('devel', 'servercafile',
-    default='',
+coreconfigitem(
+    'devel', 'serverexactprotocol', default='',
 )
-coreconfigitem('devel', 'serverexactprotocol',
-    default='',
-)
-coreconfigitem('devel', 'serverrequirecert',
-    default=False,
+coreconfigitem(
+    'devel', 'serverrequirecert', default=False,
 )
-coreconfigitem('devel', 'strip-obsmarkers',
-    default=True,
+coreconfigitem(
+    'devel', 'strip-obsmarkers', default=True,
 )
-coreconfigitem('devel', 'warn-config',
-    default=None,
+coreconfigitem(
+    'devel', 'warn-config', default=None,
 )
-coreconfigitem('devel', 'warn-config-default',
-    default=None,
+coreconfigitem(
+    'devel', 'warn-config-default', default=None,
 )
-coreconfigitem('devel', 'user.obsmarker',
-    default=None,
+coreconfigitem(
+    'devel', 'user.obsmarker', default=None,
 )
-coreconfigitem('devel', 'warn-config-unknown',
-    default=None,
+coreconfigitem(
+    'devel', 'warn-config-unknown', default=None,
 )
-coreconfigitem('devel', 'debug.copies',
-    default=False,
+coreconfigitem(
+    'devel', 'debug.copies', default=False,
 )
-coreconfigitem('devel', 'debug.extensions',
-    default=False,
+coreconfigitem(
+    'devel', 'debug.extensions', default=False,
 )
-coreconfigitem('devel', 'debug.peer-request',
-    default=False,
+coreconfigitem(
+    'devel', 'debug.peer-request', default=False,
 )
-coreconfigitem('devel', 'discovery.randomize',
-    default=True,
+coreconfigitem(
+    'devel', 'discovery.randomize', default=True,
 )
 _registerdiffopts(section='diff')
-coreconfigitem('email', 'bcc',
-    default=None,
+coreconfigitem(
+    'email', 'bcc', default=None,
+)
+coreconfigitem(
+    'email', 'cc', default=None,
 )
-coreconfigitem('email', 'cc',
-    default=None,
+coreconfigitem(
+    'email', 'charsets', default=list,
+)
+coreconfigitem(
+    'email', 'from', default=None,
 )
-coreconfigitem('email', 'charsets',
-    default=list,
+coreconfigitem(
+    'email', 'method', default='smtp',
+)
+coreconfigitem(
+    'email', 'reply-to', default=None,
 )
-coreconfigitem('email', 'from',
-    default=None,
+coreconfigitem(
+    'email', 'to', default=None,
 )
-coreconfigitem('email', 'method',
-    default='smtp',
+coreconfigitem(
+    'experimental', 'archivemetatemplate', default=dynamicdefault,
 )
-coreconfigitem('email', 'reply-to',
-    default=None,
+coreconfigitem(
+    'experimental', 'auto-publish', default='publish',
+)
+coreconfigitem(
+    'experimental', 'bundle-phases', default=False,
 )
-coreconfigitem('email', 'to',
-    default=None,
+coreconfigitem(
+    'experimental', 'bundle2-advertise', default=True,
+)
+coreconfigitem(
+    'experimental', 'bundle2-output-capture', default=False,
 )
-coreconfigitem('experimental', 'archivemetatemplate',
-    default=dynamicdefault,
+coreconfigitem(
+    'experimental', 'bundle2.pushback', default=False,
+)
+coreconfigitem(
+    'experimental', 'bundle2lazylocking', default=False,
 )
-coreconfigitem('experimental', 'auto-publish',
-    default='publish',
+coreconfigitem(
+    'experimental', 'bundlecomplevel', default=None,
 )
-coreconfigitem('experimental', 'bundle-phases',
-    default=False,
+coreconfigitem(
+    'experimental', 'bundlecomplevel.bzip2', default=None,
 )
-coreconfigitem('experimental', 'bundle2-advertise',
-    default=True,
+coreconfigitem(
+    'experimental', 'bundlecomplevel.gzip', default=None,
+)
+coreconfigitem(
+    'experimental', 'bundlecomplevel.none', default=None,
 )
-coreconfigitem('experimental', 'bundle2-output-capture',
-    default=False,
+coreconfigitem(
+    'experimental', 'bundlecomplevel.zstd', default=None,
+)
+coreconfigitem(
+    'experimental', 'changegroup3', default=False,
 )
-coreconfigitem('experimental', 'bundle2.pushback',
-    default=False,
+coreconfigitem(
+    'experimental', 'cleanup-as-archived', default=False,
+)
+coreconfigitem(
+    'experimental', 'clientcompressionengines', default=list,
 )
-coreconfigitem('experimental', 'bundle2lazylocking',
-    default=False,
+coreconfigitem(
+    'experimental', 'copytrace', default='on',
 )
-coreconfigitem('experimental', 'bundlecomplevel',
-    default=None,
+coreconfigitem(
+    'experimental', 'copytrace.movecandidateslimit', default=100,
 )
-coreconfigitem('experimental', 'bundlecomplevel.bzip2',
-    default=None,
+coreconfigitem(
+    'experimental', 'copytrace.sourcecommitlimit', default=100,
+)
+coreconfigitem(
+    'experimental', 'copies.read-from', default="filelog-only",
 )
-coreconfigitem('experimental', 'bundlecomplevel.gzip',
-    default=None,
+coreconfigitem(
+    'experimental', 'copies.write-to', default='filelog-only',
+)
+coreconfigitem(
+    'experimental', 'crecordtest', default=None,
 )
-coreconfigitem('experimental', 'bundlecomplevel.none',
-    default=None,
+coreconfigitem(
+    'experimental', 'directaccess', default=False,
+)
+coreconfigitem(
+    'experimental', 'directaccess.revnums', default=False,
 )
-coreconfigitem('experimental', 'bundlecomplevel.zstd',
-    default=None,
+coreconfigitem(
+    'experimental', 'editortmpinhg', default=False,
 )
-coreconfigitem('experimental', 'changegroup3',
-    default=False,
+coreconfigitem(
+    'experimental', 'evolution', default=list,
 )
-coreconfigitem('experimental', 'cleanup-as-archived',
+coreconfigitem(
+    'experimental',
+    'evolution.allowdivergence',
     default=False,
-)
-coreconfigitem('experimental', 'clientcompressionengines',
-    default=list,
+    alias=[('experimental', 'allowdivergence')],
 )
-coreconfigitem('experimental', 'copytrace',
-    default='on',
-)
-coreconfigitem('experimental', 'copytrace.movecandidateslimit',
-    default=100,
-)
-coreconfigitem('experimental', 'copytrace.sourcecommitlimit',
-    default=100,
+coreconfigitem(
+    'experimental', 'evolution.allowunstable', default=None,
 )
-coreconfigitem('experimental', 'copies.read-from',
-    default="filelog-only",
-)
-coreconfigitem('experimental', 'copies.write-to',
-    default='filelog-only',
-)
-coreconfigitem('experimental', 'crecordtest',
-    default=None,
+coreconfigitem(
+    'experimental', 'evolution.createmarkers', default=None,
 )
-coreconfigitem('experimental', 'directaccess',
-    default=False,
-)
-coreconfigitem('experimental', 'directaccess.revnums',
-    default=False,
-)
-coreconfigitem('experimental', 'editortmpinhg',
-    default=False,
+coreconfigitem(
+    'experimental',
+    'evolution.effect-flags',
+    default=True,
+    alias=[('experimental', 'effect-flags')],
 )
-coreconfigitem('experimental', 'evolution',
-    default=list,
+coreconfigitem(
+    'experimental', 'evolution.exchange', default=None,
 )
-coreconfigitem('experimental', 'evolution.allowdivergence',
-    default=False,
-    alias=[('experimental', 'allowdivergence')]
-)
-coreconfigitem('experimental', 'evolution.allowunstable',
-    default=None,
-)
-coreconfigitem('experimental', 'evolution.createmarkers',
-    default=None,
+coreconfigitem(
+    'experimental', 'evolution.bundle-obsmarker', default=False,
 )
-coreconfigitem('experimental', 'evolution.effect-flags',
-    default=True,
-    alias=[('experimental', 'effect-flags')]
-)
-coreconfigitem('experimental', 'evolution.exchange',
-    default=None,
+coreconfigitem(
+    'experimental', 'log.topo', default=False,
 )
-coreconfigitem('experimental', 'evolution.bundle-obsmarker',
-    default=False,
-)
-coreconfigitem('experimental', 'log.topo',
-    default=False,
+coreconfigitem(
+    'experimental', 'evolution.report-instabilities', default=True,
 )
-coreconfigitem('experimental', 'evolution.report-instabilities',
-    default=True,
-)
-coreconfigitem('experimental', 'evolution.track-operation',
-    default=True,
+coreconfigitem(
+    'experimental', 'evolution.track-operation', default=True,
 )
 # repo-level config to exclude a revset visibility
 #
 # The target use case is to use `share` to expose different subset of the same
 # repository, especially server side. See also `server.view`.
-coreconfigitem('experimental', 'extra-filter-revs',
-    default=None,
+coreconfigitem(
+    'experimental', 'extra-filter-revs', default=None,
 )
-coreconfigitem('experimental', 'maxdeltachainspan',
-    default=-1,
+coreconfigitem(
+    'experimental', 'maxdeltachainspan', default=-1,
 )
-coreconfigitem('experimental', 'mergetempdirprefix',
-    default=None,
+coreconfigitem(
+    'experimental', 'mergetempdirprefix', default=None,
 )
-coreconfigitem('experimental', 'mmapindexthreshold',
-    default=None,
+coreconfigitem(
+    'experimental', 'mmapindexthreshold', default=None,
 )
-coreconfigitem('experimental', 'narrow',
-    default=False,
+coreconfigitem(
+    'experimental', 'narrow', default=False,
 )
-coreconfigitem('experimental', 'nonnormalparanoidcheck',
-    default=False,
+coreconfigitem(
+    'experimental', 'nonnormalparanoidcheck', default=False,
 )
-coreconfigitem('experimental', 'exportableenviron',
-    default=list,
+coreconfigitem(
+    'experimental', 'exportableenviron', default=list,
 )
-coreconfigitem('experimental', 'extendedheader.index',
-    default=None,
+coreconfigitem(
+    'experimental', 'extendedheader.index', default=None,
 )
-coreconfigitem('experimental', 'extendedheader.similarity',
-    default=False,
+coreconfigitem(
+    'experimental', 'extendedheader.similarity', default=False,
 )
-coreconfigitem('experimental', 'graphshorten',
-    default=False,
+coreconfigitem(
+    'experimental', 'graphshorten', default=False,
 )
-coreconfigitem('experimental', 'graphstyle.parent',
-    default=dynamicdefault,
+coreconfigitem(
+    'experimental', 'graphstyle.parent', default=dynamicdefault,
 )
-coreconfigitem('experimental', 'graphstyle.missing',
-    default=dynamicdefault,
+coreconfigitem(
+    'experimental', 'graphstyle.missing', default=dynamicdefault,
 )
-coreconfigitem('experimental', 'graphstyle.grandparent',
-    default=dynamicdefault,
+coreconfigitem(
+    'experimental', 'graphstyle.grandparent', default=dynamicdefault,
 )
-coreconfigitem('experimental', 'hook-track-tags',
-    default=False,
+coreconfigitem(
+    'experimental', 'hook-track-tags', default=False,
 )
-coreconfigitem('experimental', 'httppeer.advertise-v2',
-    default=False,
+coreconfigitem(
+    'experimental', 'httppeer.advertise-v2', default=False,
 )
-coreconfigitem('experimental', 'httppeer.v2-encoder-order',
-    default=None,
+coreconfigitem(
+    'experimental', 'httppeer.v2-encoder-order', default=None,
 )
-coreconfigitem('experimental', 'httppostargs',
-    default=False,
+coreconfigitem(
+    'experimental', 'httppostargs', default=False,
 )
-coreconfigitem('experimental', 'mergedriver',
-    default=None,
+coreconfigitem(
+    'experimental', 'mergedriver', default=None,
 )
 coreconfigitem('experimental', 'nointerrupt', default=False)
 coreconfigitem('experimental', 'nointerrupt-interactiveonly', default=True)
 
-coreconfigitem('experimental', 'obsmarkers-exchange-debug',
-    default=False,
+coreconfigitem(
+    'experimental', 'obsmarkers-exchange-debug', default=False,
 )
-coreconfigitem('experimental', 'remotenames',
-    default=False,
+coreconfigitem(
+    'experimental', 'remotenames', default=False,
 )
-coreconfigitem('experimental', 'removeemptydirs',
-    default=True,
+coreconfigitem(
+    'experimental', 'removeemptydirs', default=True,
 )
-coreconfigitem('experimental', 'revert.interactive.select-to-keep',
-    default=False,
+coreconfigitem(
+    'experimental', 'revert.interactive.select-to-keep', default=False,
 )
-coreconfigitem('experimental', 'revisions.prefixhexnode',
-    default=False,
+coreconfigitem(
+    'experimental', 'revisions.prefixhexnode', default=False,
 )
-coreconfigitem('experimental', 'revlogv2',
-    default=None,
+coreconfigitem(
+    'experimental', 'revlogv2', default=None,
 )
-coreconfigitem('experimental', 'revisions.disambiguatewithin',
-    default=None,
+coreconfigitem(
+    'experimental', 'revisions.disambiguatewithin', default=None,
 )
-coreconfigitem('experimental', 'server.filesdata.recommended-batch-size',
-    default=50000,
+coreconfigitem(
+    'experimental', 'server.filesdata.recommended-batch-size', default=50000,
 )
-coreconfigitem('experimental', 'server.manifestdata.recommended-batch-size',
+coreconfigitem(
+    'experimental',
+    'server.manifestdata.recommended-batch-size',
     default=100000,
 )
-coreconfigitem('experimental', 'server.stream-narrow-clones',
-    default=False,
-)
-coreconfigitem('experimental', 'single-head-per-branch',
-    default=False,
-)
-coreconfigitem('experimental', 'single-head-per-branch:account-closed-heads',
-    default=False,
-)
-coreconfigitem('experimental', 'sshserver.support-v2',
-    default=False,
-)
-coreconfigitem('experimental', 'sparse-read',
-    default=False,
-)
-coreconfigitem('experimental', 'sparse-read.density-threshold',
-    default=0.50,
-)
-coreconfigitem('experimental', 'sparse-read.min-gap-size',
-    default='65K',
-)
-coreconfigitem('experimental', 'treemanifest',
-    default=False,
-)
-coreconfigitem('experimental', 'update.atomic-file',
-    default=False,
+coreconfigitem(
+    'experimental', 'server.stream-narrow-clones', default=False,
 )
-coreconfigitem('experimental', 'sshpeer.advertise-v2',
-    default=False,
-)
-coreconfigitem('experimental', 'web.apiserver',
-    default=False,
-)
-coreconfigitem('experimental', 'web.api.http-v2',
-    default=False,
-)
-coreconfigitem('experimental', 'web.api.debugreflect',
-    default=False,
+coreconfigitem(
+    'experimental', 'single-head-per-branch', default=False,
 )
-coreconfigitem('experimental', 'worker.wdir-get-thread-safe',
-    default=False,
-)
-coreconfigitem('experimental', 'xdiff',
-    default=False,
-)
-coreconfigitem('extensions', '.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem('extdata', '.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem('format', 'bookmarks-in-store',
+coreconfigitem(
+    'experimental',
+    'single-head-per-branch:account-closed-heads',
     default=False,
 )
-coreconfigitem('format', 'chunkcachesize',
-    default=None,
-    experimental=True,
+coreconfigitem(
+    'experimental', 'sshserver.support-v2', default=False,
+)
+coreconfigitem(
+    'experimental', 'sparse-read', default=False,
 )
-coreconfigitem('format', 'dotencode',
-    default=True,
+coreconfigitem(
+    'experimental', 'sparse-read.density-threshold', default=0.50,
 )
-coreconfigitem('format', 'generaldelta',
-    default=False,
-    experimental=True,
+coreconfigitem(
+    'experimental', 'sparse-read.min-gap-size', default='65K',
+)
+coreconfigitem(
+    'experimental', 'treemanifest', default=False,
 )
-coreconfigitem('format', 'manifestcachesize',
-    default=None,
-    experimental=True,
+coreconfigitem(
+    'experimental', 'update.atomic-file', default=False,
+)
+coreconfigitem(
+    'experimental', 'sshpeer.advertise-v2', default=False,
 )
-coreconfigitem('format', 'maxchainlen',
-    default=dynamicdefault,
-    experimental=True,
+coreconfigitem(
+    'experimental', 'web.apiserver', default=False,
 )
-coreconfigitem('format', 'obsstore-version',
-    default=None,
+coreconfigitem(
+    'experimental', 'web.api.http-v2', default=False,
+)
+coreconfigitem(
+    'experimental', 'web.api.debugreflect', default=False,
 )
-coreconfigitem('format', 'sparse-revlog',
-    default=True,
+coreconfigitem(
+    'experimental', 'worker.wdir-get-thread-safe', default=False,
+)
+coreconfigitem(
+    'experimental', 'xdiff', default=False,
 )
-coreconfigitem('format', 'revlog-compression',
-    default='zlib',
-    alias=[('experimental', 'format.compression')]
+coreconfigitem(
+    'extensions', '.*', default=None, generic=True,
 )
-coreconfigitem('format', 'usefncache',
-    default=True,
+coreconfigitem(
+    'extdata', '.*', default=None, generic=True,
+)
+coreconfigitem(
+    'format', 'bookmarks-in-store', default=False,
 )
-coreconfigitem('format', 'usegeneraldelta',
-    default=True,
+coreconfigitem(
+    'format', 'chunkcachesize', default=None, experimental=True,
 )
-coreconfigitem('format', 'usestore',
-    default=True,
+coreconfigitem(
+    'format', 'dotencode', default=True,
+)
+coreconfigitem(
+    'format', 'generaldelta', default=False, experimental=True,
 )
-coreconfigitem('format', 'use-side-data',
-    default=False,
-    experimental=True,
+coreconfigitem(
+    'format', 'manifestcachesize', default=None, experimental=True,
 )
-coreconfigitem('format', 'internal-phase',
-    default=False,
-    experimental=True,
+coreconfigitem(
+    'format', 'maxchainlen', default=dynamicdefault, experimental=True,
+)
+coreconfigitem(
+    'format', 'obsstore-version', default=None,
 )
-coreconfigitem('fsmonitor', 'warn_when_unused',
-    default=True,
+coreconfigitem(
+    'format', 'sparse-revlog', default=True,
 )
-coreconfigitem('fsmonitor', 'warn_update_file_count',
-    default=50000,
+coreconfigitem(
+    'format',
+    'revlog-compression',
+    default='zlib',
+    alias=[('experimental', 'format.compression')],
 )
-coreconfigitem('help', br'hidden-command\..*',
-    default=False,
-    generic=True,
+coreconfigitem(
+    'format', 'usefncache', default=True,
+)
+coreconfigitem(
+    'format', 'usegeneraldelta', default=True,
 )
-coreconfigitem('help', br'hidden-topic\..*',
-    default=False,
-    generic=True,
+coreconfigitem(
+    'format', 'usestore', default=True,
+)
+coreconfigitem(
+    'format', 'use-side-data', default=False, experimental=True,
+)
+coreconfigitem(
+    'format', 'internal-phase', default=False, experimental=True,
 )
-coreconfigitem('hooks', '.*',
-    default=dynamicdefault,
-    generic=True,
+coreconfigitem(
+    'fsmonitor', 'warn_when_unused', default=True,
 )
-coreconfigitem('hgweb-paths', '.*',
-    default=list,
-    generic=True,
+coreconfigitem(
+    'fsmonitor', 'warn_update_file_count', default=50000,
+)
+coreconfigitem(
+    'help', br'hidden-command\..*', default=False, generic=True,
 )
-coreconfigitem('hostfingerprints', '.*',
-    default=list,
-    generic=True,
+coreconfigitem(
+    'help', br'hidden-topic\..*', default=False, generic=True,
+)
+coreconfigitem(
+    'hooks', '.*', default=dynamicdefault, generic=True,
 )
-coreconfigitem('hostsecurity', 'ciphers',
-    default=None,
+coreconfigitem(
+    'hgweb-paths', '.*', default=list, generic=True,
 )
-coreconfigitem('hostsecurity', 'disabletls10warning',
-    default=False,
+coreconfigitem(
+    'hostfingerprints', '.*', default=list, generic=True,
 )
-coreconfigitem('hostsecurity', 'minimumprotocol',
-    default=dynamicdefault,
+coreconfigitem(
+    'hostsecurity', 'ciphers', default=None,
 )
-coreconfigitem('hostsecurity', '.*:minimumprotocol$',
-    default=dynamicdefault,
-    generic=True,
+coreconfigitem(
+    'hostsecurity', 'disabletls10warning', default=False,
 )
-coreconfigitem('hostsecurity', '.*:ciphers$',
-    default=dynamicdefault,
-    generic=True,
+coreconfigitem(
+    'hostsecurity', 'minimumprotocol', default=dynamicdefault,
+)
+coreconfigitem(
+    'hostsecurity', '.*:minimumprotocol$', default=dynamicdefault, generic=True,
 )
-coreconfigitem('hostsecurity', '.*:fingerprints$',
-    default=list,
-    generic=True,
+coreconfigitem(
+    'hostsecurity', '.*:ciphers$', default=dynamicdefault, generic=True,
 )
-coreconfigitem('hostsecurity', '.*:verifycertsfile$',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'hostsecurity', '.*:fingerprints$', default=list, generic=True,
+)
+coreconfigitem(
+    'hostsecurity', '.*:verifycertsfile$', default=None, generic=True,
 )
 
-coreconfigitem('http_proxy', 'always',
-    default=False,
+coreconfigitem(
+    'http_proxy', 'always', default=False,
 )
-coreconfigitem('http_proxy', 'host',
-    default=None,
+coreconfigitem(
+    'http_proxy', 'host', default=None,
 )
-coreconfigitem('http_proxy', 'no',
-    default=list,
+coreconfigitem(
+    'http_proxy', 'no', default=list,
 )
-coreconfigitem('http_proxy', 'passwd',
-    default=None,
+coreconfigitem(
+    'http_proxy', 'passwd', default=None,
 )
-coreconfigitem('http_proxy', 'user',
-    default=None,
+coreconfigitem(
+    'http_proxy', 'user', default=None,
 )
 
-coreconfigitem('http', 'timeout',
-    default=None,
+coreconfigitem(
+    'http', 'timeout', default=None,
 )
 
-coreconfigitem('logtoprocess', 'commandexception',
-    default=None,
+coreconfigitem(
+    'logtoprocess', 'commandexception', default=None,
 )
-coreconfigitem('logtoprocess', 'commandfinish',
-    default=None,
+coreconfigitem(
+    'logtoprocess', 'commandfinish', default=None,
 )
-coreconfigitem('logtoprocess', 'command',
-    default=None,
+coreconfigitem(
+    'logtoprocess', 'command', default=None,
 )
-coreconfigitem('logtoprocess', 'develwarn',
-    default=None,
+coreconfigitem(
+    'logtoprocess', 'develwarn', default=None,
 )
-coreconfigitem('logtoprocess', 'uiblocked',
-    default=None,
+coreconfigitem(
+    'logtoprocess', 'uiblocked', default=None,
 )
-coreconfigitem('merge', 'checkunknown',
-    default='abort',
+coreconfigitem(
+    'merge', 'checkunknown', default='abort',
 )
-coreconfigitem('merge', 'checkignored',
-    default='abort',
+coreconfigitem(
+    'merge', 'checkignored', default='abort',
 )
-coreconfigitem('experimental', 'merge.checkpathconflicts',
-    default=False,
+coreconfigitem(
+    'experimental', 'merge.checkpathconflicts', default=False,
 )
-coreconfigitem('merge', 'followcopies',
-    default=True,
+coreconfigitem(
+    'merge', 'followcopies', default=True,
 )
-coreconfigitem('merge', 'on-failure',
-    default='continue',
+coreconfigitem(
+    'merge', 'on-failure', default='continue',
 )
-coreconfigitem('merge', 'preferancestor',
-        default=lambda: ['*'],
-        experimental=True,
+coreconfigitem(
+    'merge', 'preferancestor', default=lambda: ['*'], experimental=True,
+)
+coreconfigitem(
+    'merge', 'strict-capability-check', default=False,
 )
-coreconfigitem('merge', 'strict-capability-check',
-    default=False,
+coreconfigitem(
+    'merge-tools', '.*', default=None, generic=True,
 )
-coreconfigitem('merge-tools', '.*',
-    default=None,
-    generic=True,
-)
-coreconfigitem('merge-tools', br'.*\.args$',
+coreconfigitem(
+    'merge-tools',
+    br'.*\.args$',
     default="$local $base $other",
     generic=True,
     priority=-1,
 )
-coreconfigitem('merge-tools', br'.*\.binary$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.check$',
-    default=list,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.checkchanged$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.executable$',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.fixeol$',
-    default=False,
-    generic=True,
-    priority=-1,
+coreconfigitem(
+    'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
 )
-coreconfigitem('merge-tools', br'.*\.gui$',
-    default=False,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.mergemarkers$',
-    default='basic',
-    generic=True,
-    priority=-1,
+coreconfigitem(
+    'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
 )
-coreconfigitem('merge-tools', br'.*\.mergemarkertemplate$',
-    default=dynamicdefault,  # take from ui.mergemarkertemplate
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.priority$',
-    default=0,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.premerge$',
-    default=dynamicdefault,
-    generic=True,
-    priority=-1,
-)
-coreconfigitem('merge-tools', br'.*\.symlink$',
+coreconfigitem(
+    'merge-tools',
+    br'.*\.checkchanged$',
     default=False,
     generic=True,
     priority=-1,
 )
-coreconfigitem('pager', 'attend-.*',
+coreconfigitem(
+    'merge-tools',
+    br'.*\.executable$',
+    default=dynamicdefault,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
+)
+coreconfigitem(
+    'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
+)
+coreconfigitem(
+    'merge-tools',
+    br'.*\.mergemarkers$',
+    default='basic',
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    'merge-tools',
+    br'.*\.mergemarkertemplate$',
+    default=dynamicdefault,  # take from ui.mergemarkertemplate
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
+)
+coreconfigitem(
+    'merge-tools',
+    br'.*\.premerge$',
     default=dynamicdefault,
     generic=True,
+    priority=-1,
 )
-coreconfigitem('pager', 'ignore',
-    default=list,
+coreconfigitem(
+    'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
 )
-coreconfigitem('pager', 'pager',
-    default=dynamicdefault,
+coreconfigitem(
+    'pager', 'attend-.*', default=dynamicdefault, generic=True,
 )
-coreconfigitem('patch', 'eol',
-    default='strict',
+coreconfigitem(
+    'pager', 'ignore', default=list,
 )
-coreconfigitem('patch', 'fuzz',
-    default=2,
+coreconfigitem(
+    'pager', 'pager', default=dynamicdefault,
 )
-coreconfigitem('paths', 'default',
-    default=None,
+coreconfigitem(
+    'patch', 'eol', default='strict',
 )
-coreconfigitem('paths', 'default-push',
-    default=None,
+coreconfigitem(
+    'patch', 'fuzz', default=2,
 )
-coreconfigitem('paths', '.*',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'paths', 'default', default=None,
 )
-coreconfigitem('phases', 'checksubrepos',
-    default='follow',
+coreconfigitem(
+    'paths', 'default-push', default=None,
 )
-coreconfigitem('phases', 'new-commit',
-    default='draft',
+coreconfigitem(
+    'paths', '.*', default=None, generic=True,
 )
-coreconfigitem('phases', 'publish',
-    default=True,
+coreconfigitem(
+    'phases', 'checksubrepos', default='follow',
 )
-coreconfigitem('profiling', 'enabled',
-    default=False,
+coreconfigitem(
+    'phases', 'new-commit', default='draft',
 )
-coreconfigitem('profiling', 'format',
-    default='text',
+coreconfigitem(
+    'phases', 'publish', default=True,
 )
-coreconfigitem('profiling', 'freq',
-    default=1000,
+coreconfigitem(
+    'profiling', 'enabled', default=False,
 )
-coreconfigitem('profiling', 'limit',
-    default=30,
+coreconfigitem(
+    'profiling', 'format', default='text',
 )
-coreconfigitem('profiling', 'nested',
-    default=0,
+coreconfigitem(
+    'profiling', 'freq', default=1000,
+)
+coreconfigitem(
+    'profiling', 'limit', default=30,
 )
-coreconfigitem('profiling', 'output',
-    default=None,
+coreconfigitem(
+    'profiling', 'nested', default=0,
+)
+coreconfigitem(
+    'profiling', 'output', default=None,
 )
-coreconfigitem('profiling', 'showmax',
-    default=0.999,
+coreconfigitem(
+    'profiling', 'showmax', default=0.999,
 )
-coreconfigitem('profiling', 'showmin',
-    default=dynamicdefault,
+coreconfigitem(
+    'profiling', 'showmin', default=dynamicdefault,
 )
-coreconfigitem('profiling', 'showtime',
-    default=True,
+coreconfigitem(
+    'profiling', 'showtime', default=True,
 )
-coreconfigitem('profiling', 'sort',
-    default='inlinetime',
+coreconfigitem(
+    'profiling', 'sort', default='inlinetime',
 )
-coreconfigitem('profiling', 'statformat',
-    default='hotpath',
+coreconfigitem(
+    'profiling', 'statformat', default='hotpath',
 )
-coreconfigitem('profiling', 'time-track',
-    default=dynamicdefault,
+coreconfigitem(
+    'profiling', 'time-track', default=dynamicdefault,
 )
-coreconfigitem('profiling', 'type',
-    default='stat',
+coreconfigitem(
+    'profiling', 'type', default='stat',
 )
-coreconfigitem('progress', 'assume-tty',
-    default=False,
+coreconfigitem(
+    'progress', 'assume-tty', default=False,
 )
-coreconfigitem('progress', 'changedelay',
-    default=1,
+coreconfigitem(
+    'progress', 'changedelay', default=1,
 )
-coreconfigitem('progress', 'clear-complete',
-    default=True,
+coreconfigitem(
+    'progress', 'clear-complete', default=True,
 )
-coreconfigitem('progress', 'debug',
-    default=False,
+coreconfigitem(
+    'progress', 'debug', default=False,
 )
-coreconfigitem('progress', 'delay',
-    default=3,
+coreconfigitem(
+    'progress', 'delay', default=3,
 )
-coreconfigitem('progress', 'disable',
-    default=False,
+coreconfigitem(
+    'progress', 'disable', default=False,
 )
-coreconfigitem('progress', 'estimateinterval',
-    default=60.0,
+coreconfigitem(
+    'progress', 'estimateinterval', default=60.0,
 )
-coreconfigitem('progress', 'format',
+coreconfigitem(
+    'progress',
+    'format',
     default=lambda: ['topic', 'bar', 'number', 'estimate'],
 )
-coreconfigitem('progress', 'refresh',
-    default=0.1,
+coreconfigitem(
+    'progress', 'refresh', default=0.1,
+)
+coreconfigitem(
+    'progress', 'width', default=dynamicdefault,
 )
-coreconfigitem('progress', 'width',
-    default=dynamicdefault,
+coreconfigitem(
+    'push', 'pushvars.server', default=False,
 )
-coreconfigitem('push', 'pushvars.server',
-    default=False,
-)
-coreconfigitem('rewrite', 'backup-bundle',
+coreconfigitem(
+    'rewrite',
+    'backup-bundle',
     default=True,
     alias=[('ui', 'history-editing-backup')],
 )
-coreconfigitem('rewrite', 'update-timestamp',
-    default=False,
+coreconfigitem(
+    'rewrite', 'update-timestamp', default=False,
 )
-coreconfigitem('storage', 'new-repo-backend',
-    default='revlogv1',
-    experimental=True,
+coreconfigitem(
+    'storage', 'new-repo-backend', default='revlogv1', experimental=True,
 )
-coreconfigitem('storage', 'revlog.optimize-delta-parent-choice',
+coreconfigitem(
+    'storage',
+    'revlog.optimize-delta-parent-choice',
     default=True,
     alias=[('format', 'aggressivemergedeltas')],
 )
-coreconfigitem('storage', 'revlog.reuse-external-delta',
-    default=True,
+coreconfigitem(
+    'storage', 'revlog.reuse-external-delta', default=True,
 )
-coreconfigitem('storage', 'revlog.reuse-external-delta-parent',
-    default=None,
+coreconfigitem(
+    'storage', 'revlog.reuse-external-delta-parent', default=None,
 )
-coreconfigitem('storage', 'revlog.zlib.level',
-    default=None,
+coreconfigitem(
+    'storage', 'revlog.zlib.level', default=None,
 )
-coreconfigitem('storage', 'revlog.zstd.level',
-    default=None,
+coreconfigitem(
+    'storage', 'revlog.zstd.level', default=None,
 )
-coreconfigitem('server', 'bookmarks-pushkey-compat',
-    default=True,
+coreconfigitem(
+    'server', 'bookmarks-pushkey-compat', default=True,
 )
-coreconfigitem('server', 'bundle1',
-    default=True,
+coreconfigitem(
+    'server', 'bundle1', default=True,
 )
-coreconfigitem('server', 'bundle1gd',
-    default=None,
+coreconfigitem(
+    'server', 'bundle1gd', default=None,
 )
-coreconfigitem('server', 'bundle1.pull',
-    default=None,
+coreconfigitem(
+    'server', 'bundle1.pull', default=None,
+)
+coreconfigitem(
+    'server', 'bundle1gd.pull', default=None,
 )
-coreconfigitem('server', 'bundle1gd.pull',
-    default=None,
+coreconfigitem(
+    'server', 'bundle1.push', default=None,
 )
-coreconfigitem('server', 'bundle1.push',
-    default=None,
+coreconfigitem(
+    'server', 'bundle1gd.push', default=None,
 )
-coreconfigitem('server', 'bundle1gd.push',
-    default=None,
-)
-coreconfigitem('server', 'bundle2.stream',
+coreconfigitem(
+    'server',
+    'bundle2.stream',
     default=True,
-    alias=[('experimental', 'bundle2.stream')]
+    alias=[('experimental', 'bundle2.stream')],
 )
-coreconfigitem('server', 'compressionengines',
-    default=list,
+coreconfigitem(
+    'server', 'compressionengines', default=list,
 )
-coreconfigitem('server', 'concurrent-push-mode',
-    default='strict',
+coreconfigitem(
+    'server', 'concurrent-push-mode', default='strict',
 )
-coreconfigitem('server', 'disablefullbundle',
-    default=False,
+coreconfigitem(
+    'server', 'disablefullbundle', default=False,
 )
-coreconfigitem('server', 'maxhttpheaderlen',
-    default=1024,
+coreconfigitem(
+    'server', 'maxhttpheaderlen', default=1024,
 )
-coreconfigitem('server', 'pullbundle',
-    default=False,
+coreconfigitem(
+    'server', 'pullbundle', default=False,
 )
-coreconfigitem('server', 'preferuncompressed',
-    default=False,
+coreconfigitem(
+    'server', 'preferuncompressed', default=False,
 )
-coreconfigitem('server', 'streamunbundle',
-    default=False,
+coreconfigitem(
+    'server', 'streamunbundle', default=False,
 )
-coreconfigitem('server', 'uncompressed',
-    default=True,
+coreconfigitem(
+    'server', 'uncompressed', default=True,
 )
-coreconfigitem('server', 'uncompressedallowsecret',
-    default=False,
+coreconfigitem(
+    'server', 'uncompressedallowsecret', default=False,
 )
-coreconfigitem('server', 'view',
-    default='served',
+coreconfigitem(
+    'server', 'view', default='served',
 )
-coreconfigitem('server', 'validate',
-    default=False,
+coreconfigitem(
+    'server', 'validate', default=False,
 )
-coreconfigitem('server', 'zliblevel',
-    default=-1,
+coreconfigitem(
+    'server', 'zliblevel', default=-1,
 )
-coreconfigitem('server', 'zstdlevel',
-    default=3,
+coreconfigitem(
+    'server', 'zstdlevel', default=3,
 )
-coreconfigitem('share', 'pool',
-    default=None,
+coreconfigitem(
+    'share', 'pool', default=None,
 )
-coreconfigitem('share', 'poolnaming',
-    default='identity',
+coreconfigitem(
+    'share', 'poolnaming', default='identity',
 )
-coreconfigitem('shelve','maxbackups',
-    default=10,
+coreconfigitem(
+    'shelve', 'maxbackups', default=10,
 )
-coreconfigitem('smtp', 'host',
-    default=None,
+coreconfigitem(
+    'smtp', 'host', default=None,
 )
-coreconfigitem('smtp', 'local_hostname',
-    default=None,
+coreconfigitem(
+    'smtp', 'local_hostname', default=None,
 )
-coreconfigitem('smtp', 'password',
-    default=None,
+coreconfigitem(
+    'smtp', 'password', default=None,
 )
-coreconfigitem('smtp', 'port',
-    default=dynamicdefault,
+coreconfigitem(
+    'smtp', 'port', default=dynamicdefault,
 )
-coreconfigitem('smtp', 'tls',
-    default='none',
+coreconfigitem(
+    'smtp', 'tls', default='none',
 )
-coreconfigitem('smtp', 'username',
-    default=None,
+coreconfigitem(
+    'smtp', 'username', default=None,
 )
-coreconfigitem('sparse', 'missingwarning',
-    default=True,
-    experimental=True,
+coreconfigitem(
+    'sparse', 'missingwarning', default=True, experimental=True,
 )
-coreconfigitem('subrepos', 'allowed',
+coreconfigitem(
+    'subrepos',
+    'allowed',
     default=dynamicdefault,  # to make backporting simpler
 )
-coreconfigitem('subrepos', 'hg:allowed',
-    default=dynamicdefault,
+coreconfigitem(
+    'subrepos', 'hg:allowed', default=dynamicdefault,
 )
-coreconfigitem('subrepos', 'git:allowed',
-    default=dynamicdefault,
+coreconfigitem(
+    'subrepos', 'git:allowed', default=dynamicdefault,
 )
-coreconfigitem('subrepos', 'svn:allowed',
-    default=dynamicdefault,
+coreconfigitem(
+    'subrepos', 'svn:allowed', default=dynamicdefault,
 )
-coreconfigitem('templates', '.*',
-    default=None,
-    generic=True,
+coreconfigitem(
+    'templates', '.*', default=None, generic=True,
+)
+coreconfigitem(
+    'templateconfig', '.*', default=dynamicdefault, generic=True,
 )
-coreconfigitem('templateconfig', '.*',
-    default=dynamicdefault,
-    generic=True,
+coreconfigitem(
+    'trusted', 'groups', default=list,
 )
-coreconfigitem('trusted', 'groups',
-    default=list,
+coreconfigitem(
+    'trusted', 'users', default=list,
 )
-coreconfigitem('trusted', 'users',
-    default=list,
+coreconfigitem(
+    'ui', '_usedassubrepo', default=False,
 )
-coreconfigitem('ui', '_usedassubrepo',
-    default=False,
+coreconfigitem(
+    'ui', 'allowemptycommit', default=False,
 )
-coreconfigitem('ui', 'allowemptycommit',
-    default=False,
+coreconfigitem(
+    'ui', 'archivemeta', default=True,
 )
-coreconfigitem('ui', 'archivemeta',
-    default=True,
+coreconfigitem(
+    'ui', 'askusername', default=False,
 )
-coreconfigitem('ui', 'askusername',
-    default=False,
+coreconfigitem(
+    'ui', 'clonebundlefallback', default=False,
 )
-coreconfigitem('ui', 'clonebundlefallback',
-    default=False,
+coreconfigitem(
+    'ui', 'clonebundleprefers', default=list,
 )
-coreconfigitem('ui', 'clonebundleprefers',
-    default=list,
+coreconfigitem(
+    'ui', 'clonebundles', default=True,
 )
-coreconfigitem('ui', 'clonebundles',
-    default=True,
+coreconfigitem(
+    'ui', 'color', default='auto',
 )
-coreconfigitem('ui', 'color',
-    default='auto',
+coreconfigitem(
+    'ui', 'commitsubrepos', default=False,
 )
-coreconfigitem('ui', 'commitsubrepos',
-    default=False,
+coreconfigitem(
+    'ui', 'debug', default=False,
 )
-coreconfigitem('ui', 'debug',
-    default=False,
+coreconfigitem(
+    'ui', 'debugger', default=None,
 )
-coreconfigitem('ui', 'debugger',
-    default=None,
+coreconfigitem(
+    'ui', 'editor', default=dynamicdefault,
 )
-coreconfigitem('ui', 'editor',
-    default=dynamicdefault,
+coreconfigitem(
+    'ui', 'fallbackencoding', default=None,
 )
-coreconfigitem('ui', 'fallbackencoding',
-    default=None,
+coreconfigitem(
+    'ui', 'forcecwd', default=None,
 )
-coreconfigitem('ui', 'forcecwd',
-    default=None,
-)
-coreconfigitem('ui', 'forcemerge',
-    default=None,
+coreconfigitem(
+    'ui', 'forcemerge', default=None,
 )
-coreconfigitem('ui', 'formatdebug',
-    default=False,
+coreconfigitem(
+    'ui', 'formatdebug', default=False,
 )
-coreconfigitem('ui', 'formatjson',
-    default=False,
+coreconfigitem(
+    'ui', 'formatjson', default=False,
 )
-coreconfigitem('ui', 'formatted',
-    default=None,
+coreconfigitem(
+    'ui', 'formatted', default=None,
 )
-coreconfigitem('ui', 'graphnodetemplate',
-    default=None,
+coreconfigitem(
+    'ui', 'graphnodetemplate', default=None,
 )
-coreconfigitem('ui', 'interactive',
-    default=None,
+coreconfigitem(
+    'ui', 'interactive', default=None,
 )
-coreconfigitem('ui', 'interface',
-    default=None,
+coreconfigitem(
+    'ui', 'interface', default=None,
 )
-coreconfigitem('ui', 'interface.chunkselector',
-    default=None,
+coreconfigitem(
+    'ui', 'interface.chunkselector', default=None,
 )
-coreconfigitem('ui', 'large-file-limit',
-    default=10000000,
+coreconfigitem(
+    'ui', 'large-file-limit', default=10000000,
 )
-coreconfigitem('ui', 'logblockedtimes',
-    default=False,
+coreconfigitem(
+    'ui', 'logblockedtimes', default=False,
 )
-coreconfigitem('ui', 'logtemplate',
-    default=None,
+coreconfigitem(
+    'ui', 'logtemplate', default=None,
 )
-coreconfigitem('ui', 'merge',
-    default=None,
+coreconfigitem(
+    'ui', 'merge', default=None,
 )
-coreconfigitem('ui', 'mergemarkers',
-    default='basic',
+coreconfigitem(
+    'ui', 'mergemarkers', default='basic',
 )
-coreconfigitem('ui', 'mergemarkertemplate',
-    default=('{node|short} '
-            '{ifeq(tags, "tip", "", '
-            'ifeq(tags, "", "", "{tags} "))}'
-            '{if(bookmarks, "{bookmarks} ")}'
-            '{ifeq(branch, "default", "", "{branch} ")}'
-            '- {author|user}: {desc|firstline}')
-)
-coreconfigitem('ui', 'message-output',
-    default='stdio',
+coreconfigitem(
+    'ui',
+    'mergemarkertemplate',
+    default=(
+        '{node|short} '
+        '{ifeq(tags, "tip", "", '
+        'ifeq(tags, "", "", "{tags} "))}'
+        '{if(bookmarks, "{bookmarks} ")}'
+        '{ifeq(branch, "default", "", "{branch} ")}'
+        '- {author|user}: {desc|firstline}'
+    ),
 )
-coreconfigitem('ui', 'nontty',
-    default=False,
+coreconfigitem(
+    'ui', 'message-output', default='stdio',
 )
-coreconfigitem('ui', 'origbackuppath',
-    default=None,
+coreconfigitem(
+    'ui', 'nontty', default=False,
 )
-coreconfigitem('ui', 'paginate',
-    default=True,
+coreconfigitem(
+    'ui', 'origbackuppath', default=None,
 )
-coreconfigitem('ui', 'patch',
-    default=None,
+coreconfigitem(
+    'ui', 'paginate', default=True,
 )
-coreconfigitem('ui', 'pre-merge-tool-output-template',
-    default=None,
+coreconfigitem(
+    'ui', 'patch', default=None,
 )
-coreconfigitem('ui', 'portablefilenames',
-    default='warn',
+coreconfigitem(
+    'ui', 'pre-merge-tool-output-template', default=None,
 )
-coreconfigitem('ui', 'promptecho',
-    default=False,
+coreconfigitem(
+    'ui', 'portablefilenames', default='warn',
 )
-coreconfigitem('ui', 'quiet',
-    default=False,
+coreconfigitem(
+    'ui', 'promptecho', default=False,
 )
-coreconfigitem('ui', 'quietbookmarkmove',
-    default=False,
+coreconfigitem(
+    'ui', 'quiet', default=False,
 )
-coreconfigitem('ui', 'relative-paths',
-    default='legacy',
+coreconfigitem(
+    'ui', 'quietbookmarkmove', default=False,
 )
-coreconfigitem('ui', 'remotecmd',
-    default='hg',
+coreconfigitem(
+    'ui', 'relative-paths', default='legacy',
 )
-coreconfigitem('ui', 'report_untrusted',
-    default=True,
+coreconfigitem(
+    'ui', 'remotecmd', default='hg',
 )
-coreconfigitem('ui', 'rollback',
-    default=True,
+coreconfigitem(
+    'ui', 'report_untrusted', default=True,
 )
-coreconfigitem('ui', 'signal-safe-lock',
-    default=True,
+coreconfigitem(
+    'ui', 'rollback', default=True,
 )
-coreconfigitem('ui', 'slash',
-    default=False,
+coreconfigitem(
+    'ui', 'signal-safe-lock', default=True,
 )
-coreconfigitem('ui', 'ssh',
-    default='ssh',
+coreconfigitem(
+    'ui', 'slash', default=False,
 )
-coreconfigitem('ui', 'ssherrorhint',
-    default=None,
+coreconfigitem(
+    'ui', 'ssh', default='ssh',
 )
-coreconfigitem('ui', 'statuscopies',
-    default=False,
+coreconfigitem(
+    'ui', 'ssherrorhint', default=None,
 )
-coreconfigitem('ui', 'strict',
-    default=False,
+coreconfigitem(
+    'ui', 'statuscopies', default=False,
 )
-coreconfigitem('ui', 'style',
-    default='',
+coreconfigitem(
+    'ui', 'strict', default=False,
 )
-coreconfigitem('ui', 'supportcontact',
-    default=None,
+coreconfigitem(
+    'ui', 'style', default='',
 )
-coreconfigitem('ui', 'textwidth',
-    default=78,
+coreconfigitem(
+    'ui', 'supportcontact', default=None,
 )
-coreconfigitem('ui', 'timeout',
-    default='600',
+coreconfigitem(
+    'ui', 'textwidth', default=78,
 )
-coreconfigitem('ui', 'timeout.warn',
-    default=0,
+coreconfigitem(
+    'ui', 'timeout', default='600',
 )
-coreconfigitem('ui', 'traceback',
-    default=False,
+coreconfigitem(
+    'ui', 'timeout.warn', default=0,
 )
-coreconfigitem('ui', 'tweakdefaults',
-    default=False,
+coreconfigitem(
+    'ui', 'traceback', default=False,
 )
-coreconfigitem('ui', 'username',
-    alias=[('ui', 'user')]
+coreconfigitem(
+    'ui', 'tweakdefaults', default=False,
 )
-coreconfigitem('ui', 'verbose',
-    default=False,
+coreconfigitem('ui', 'username', alias=[('ui', 'user')])
+coreconfigitem(
+    'ui', 'verbose', default=False,
 )
-coreconfigitem('verify', 'skipflags',
-    default=None,
+coreconfigitem(
+    'verify', 'skipflags', default=None,
 )
-coreconfigitem('web', 'allowbz2',
-    default=False,
+coreconfigitem(
+    'web', 'allowbz2', default=False,
 )
-coreconfigitem('web', 'allowgz',
-    default=False,
+coreconfigitem(
+    'web', 'allowgz', default=False,
 )
-coreconfigitem('web', 'allow-pull',
-    alias=[('web', 'allowpull')],
-    default=True,
+coreconfigitem(
+    'web', 'allow-pull', alias=[('web', 'allowpull')], default=True,
 )
-coreconfigitem('web', 'allow-push',
-    alias=[('web', 'allow_push')],
-    default=list,
+coreconfigitem(
+    'web', 'allow-push', alias=[('web', 'allow_push')], default=list,
 )
-coreconfigitem('web', 'allowzip',
-    default=False,
+coreconfigitem(
+    'web', 'allowzip', default=False,
 )
-coreconfigitem('web', 'archivesubrepos',
-    default=False,
+coreconfigitem(
+    'web', 'archivesubrepos', default=False,
 )
-coreconfigitem('web', 'cache',
-    default=True,
+coreconfigitem(
+    'web', 'cache', default=True,
 )
-coreconfigitem('web', 'comparisoncontext',
-    default=5,
+coreconfigitem(
+    'web', 'comparisoncontext', default=5,
 )
-coreconfigitem('web', 'contact',
-    default=None,
+coreconfigitem(
+    'web', 'contact', default=None,
 )
-coreconfigitem('web', 'deny_push',
-    default=list,
+coreconfigitem(
+    'web', 'deny_push', default=list,
 )
-coreconfigitem('web', 'guessmime',
-    default=False,
+coreconfigitem(
+    'web', 'guessmime', default=False,
 )
-coreconfigitem('web', 'hidden',
-    default=False,
+coreconfigitem(
+    'web', 'hidden', default=False,
 )
-coreconfigitem('web', 'labels',
-    default=list,
+coreconfigitem(
+    'web', 'labels', default=list,
 )
-coreconfigitem('web', 'logoimg',
-    default='hglogo.png',
+coreconfigitem(
+    'web', 'logoimg', default='hglogo.png',
 )
-coreconfigitem('web', 'logourl',
-    default='https://mercurial-scm.org/',
+coreconfigitem(
+    'web', 'logourl', default='https://mercurial-scm.org/',
 )
-coreconfigitem('web', 'accesslog',
-    default='-',
+coreconfigitem(
+    'web', 'accesslog', default='-',
 )
-coreconfigitem('web', 'address',
-    default='',
+coreconfigitem(
+    'web', 'address', default='',
 )
-coreconfigitem('web', 'allow-archive',
-    alias=[('web', 'allow_archive')],
-    default=list,
+coreconfigitem(
+    'web', 'allow-archive', alias=[('web', 'allow_archive')], default=list,
 )
-coreconfigitem('web', 'allow_read',
-    default=list,
+coreconfigitem(
+    'web', 'allow_read', default=list,
 )
-coreconfigitem('web', 'baseurl',
-    default=None,
+coreconfigitem(
+    'web', 'baseurl', default=None,
 )
-coreconfigitem('web', 'cacerts',
-    default=None,
+coreconfigitem(
+    'web', 'cacerts', default=None,
 )
-coreconfigitem('web', 'certificate',
-    default=None,
+coreconfigitem(
+    'web', 'certificate', default=None,
 )
-coreconfigitem('web', 'collapse',
-    default=False,
+coreconfigitem(
+    'web', 'collapse', default=False,
 )
-coreconfigitem('web', 'csp',
-    default=None,
+coreconfigitem(
+    'web', 'csp', default=None,
 )
-coreconfigitem('web', 'deny_read',
-    default=list,
+coreconfigitem(
+    'web', 'deny_read', default=list,
 )
-coreconfigitem('web', 'descend',
-    default=True,
+coreconfigitem(
+    'web', 'descend', default=True,
 )
-coreconfigitem('web', 'description',
-    default="",
+coreconfigitem(
+    'web', 'description', default="",
 )
-coreconfigitem('web', 'encoding',
-    default=lambda: encoding.encoding,
+coreconfigitem(
+    'web', 'encoding', default=lambda: encoding.encoding,
 )
-coreconfigitem('web', 'errorlog',
-    default='-',
+coreconfigitem(
+    'web', 'errorlog', default='-',
 )
-coreconfigitem('web', 'ipv6',
-    default=False,
+coreconfigitem(
+    'web', 'ipv6', default=False,
 )
-coreconfigitem('web', 'maxchanges',
-    default=10,
+coreconfigitem(
+    'web', 'maxchanges', default=10,
 )
-coreconfigitem('web', 'maxfiles',
-    default=10,
+coreconfigitem(
+    'web', 'maxfiles', default=10,
 )
-coreconfigitem('web', 'maxshortchanges',
-    default=60,
+coreconfigitem(
+    'web', 'maxshortchanges', default=60,
 )
-coreconfigitem('web', 'motd',
-    default='',
+coreconfigitem(
+    'web', 'motd', default='',
 )
-coreconfigitem('web', 'name',
-    default=dynamicdefault,
+coreconfigitem(
+    'web', 'name', default=dynamicdefault,
 )
-coreconfigitem('web', 'port',
-    default=8000,
+coreconfigitem(
+    'web', 'port', default=8000,
 )
-coreconfigitem('web', 'prefix',
-    default='',
+coreconfigitem(
+    'web', 'prefix', default='',
 )
-coreconfigitem('web', 'push_ssl',
-    default=True,
+coreconfigitem(
+    'web', 'push_ssl', default=True,
 )
-coreconfigitem('web', 'refreshinterval',
-    default=20,
+coreconfigitem(
+    'web', 'refreshinterval', default=20,
 )
-coreconfigitem('web', 'server-header',
-    default=None,
+coreconfigitem(
+    'web', 'server-header', default=None,
 )
-coreconfigitem('web', 'static',
-    default=None,
+coreconfigitem(
+    'web', 'static', default=None,
 )
-coreconfigitem('web', 'staticurl',
-    default=None,
+coreconfigitem(
+    'web', 'staticurl', default=None,
 )
-coreconfigitem('web', 'stripes',
-    default=1,
+coreconfigitem(
+    'web', 'stripes', default=1,
 )
-coreconfigitem('web', 'style',
-    default='paper',
+coreconfigitem(
+    'web', 'style', default='paper',
 )
-coreconfigitem('web', 'templates',
-    default=None,
+coreconfigitem(
+    'web', 'templates', default=None,
 )
-coreconfigitem('web', 'view',
-    default='served',
-    experimental=True,
+coreconfigitem(
+    'web', 'view', default='served', experimental=True,
 )
-coreconfigitem('worker', 'backgroundclose',
-    default=dynamicdefault,
+coreconfigitem(
+    'worker', 'backgroundclose', default=dynamicdefault,
 )
 # Windows defaults to a limit of 512 open files. A buffer of 128
 # should give us enough headway.
-coreconfigitem('worker', 'backgroundclosemaxqueue',
-    default=384,
+coreconfigitem(
+    'worker', 'backgroundclosemaxqueue', default=384,
 )
-coreconfigitem('worker', 'backgroundcloseminfilecount',
-    default=2048,
+coreconfigitem(
+    'worker', 'backgroundcloseminfilecount', default=2048,
 )
-coreconfigitem('worker', 'backgroundclosethreadcount',
-    default=4,
+coreconfigitem(
+    'worker', 'backgroundclosethreadcount', default=4,
 )
-coreconfigitem('worker', 'enabled',
-    default=True,
+coreconfigitem(
+    'worker', 'enabled', default=True,
 )
-coreconfigitem('worker', 'numcpus',
-    default=None,
+coreconfigitem(
+    'worker', 'numcpus', default=None,
 )
 
 # Rebase related configuration moved to core because other extensions are doing
 # strange things. For example, shelve imports the extension to reuse some bits
 # without formally loading it.
-coreconfigitem('commands', 'rebase.requiredest',
-            default=False,
+coreconfigitem(
+    'commands', 'rebase.requiredest', default=False,
 )
-coreconfigitem('experimental', 'rebaseskipobsolete',
-    default=True,
+coreconfigitem(
+    'experimental', 'rebaseskipobsolete', default=True,
 )
-coreconfigitem('rebase', 'singletransaction',
-    default=False,
+coreconfigitem(
+    'rebase', 'singletransaction', default=False,
 )
-coreconfigitem('rebase', 'experimental.inmemory',
-    default=False,
+coreconfigitem(
+    'rebase', 'experimental.inmemory', default=False,
 )
--- a/mercurial/context.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/context.py	Sun Oct 06 09:45:02 2019 -0400
@@ -49,6 +49,7 @@
 
 propertycache = util.propertycache
 
+
 class basectx(object):
     """A basectx object represents the common logic for its children:
     changectx: read-only context that is already present in the repo,
@@ -98,8 +99,9 @@
         """
         return match
 
-    def _buildstatus(self, other, s, match, listignored, listclean,
-                     listunknown):
+    def _buildstatus(
+        self, other, s, match, listignored, listclean, listunknown
+    ):
         """build a status with respect to another context"""
         # Load earliest manifest first for caching reasons. More specifically,
         # if you have revisions 1000 and 1001, 1001 is probably stored as a
@@ -146,15 +148,22 @@
 
         if removed:
             # need to filter files if they are already reported as removed
-            unknown = [fn for fn in unknown if fn not in mf1 and
-                                               (not match or match(fn))]
-            ignored = [fn for fn in ignored if fn not in mf1 and
-                                               (not match or match(fn))]
+            unknown = [
+                fn
+                for fn in unknown
+                if fn not in mf1 and (not match or match(fn))
+            ]
+            ignored = [
+                fn
+                for fn in ignored
+                if fn not in mf1 and (not match or match(fn))
+            ]
             # if they're deleted, don't report them as removed
             removed = [fn for fn in removed if fn not in deletedset]
 
-        return scmutil.status(modified, added, removed, deleted, unknown,
-                              ignored, clean)
+        return scmutil.status(
+            modified, added, removed, deleted, unknown, ignored, clean
+        )
 
     @propertycache
     def substate(self):
@@ -165,18 +174,25 @@
 
     def rev(self):
         return self._rev
+
     def node(self):
         return self._node
+
     def hex(self):
         return hex(self.node())
+
     def manifest(self):
         return self._manifest
+
     def manifestctx(self):
         return self._manifestctx
+
     def repo(self):
         return self._repo
+
     def phasestr(self):
         return phases.phasenames[self.phase()]
+
     def mutable(self):
         return self.phase() > phases.public
 
@@ -249,18 +265,22 @@
             try:
                 return self._manifest[path], self._manifest.flags(path)
             except KeyError:
-                raise error.ManifestLookupError(self._node, path,
-                                                _('not found in manifest'))
+                raise error.ManifestLookupError(
+                    self._node, path, _('not found in manifest')
+                )
         if r'_manifestdelta' in self.__dict__ or path in self.files():
             if path in self._manifestdelta:
-                return (self._manifestdelta[path],
-                        self._manifestdelta.flags(path))
+                return (
+                    self._manifestdelta[path],
+                    self._manifestdelta.flags(path),
+                )
         mfl = self._repo.manifestlog
         try:
             node, flag = mfl[self._changeset.manifest].find(path)
         except KeyError:
-            raise error.ManifestLookupError(self._node, path,
-                                            _('not found in manifest'))
+            raise error.ManifestLookupError(
+                self._node, path, _('not found in manifest')
+            )
 
         return node, flag
 
@@ -276,8 +296,10 @@
     @propertycache
     def _copies(self):
         return copies.computechangesetcopies(self)
+
     def p1copies(self):
         return self._copies[0]
+
     def p2copies(self):
         return self._copies[1]
 
@@ -294,26 +316,59 @@
         '''
         return subrepo.subrepo(self, path, allowwdir=True)
 
-    def match(self, pats=None, include=None, exclude=None, default='glob',
-              listsubrepos=False, badfn=None):
+    def match(
+        self,
+        pats=None,
+        include=None,
+        exclude=None,
+        default='glob',
+        listsubrepos=False,
+        badfn=None,
+    ):
         r = self._repo
-        return matchmod.match(r.root, r.getcwd(), pats,
-                              include, exclude, default,
-                              auditor=r.nofsauditor, ctx=self,
-                              listsubrepos=listsubrepos, badfn=badfn)
-
-    def diff(self, ctx2=None, match=None, changes=None, opts=None,
-             losedatafn=None, pathfn=None, copy=None,
-             copysourcematch=None, hunksfilterfn=None):
+        return matchmod.match(
+            r.root,
+            r.getcwd(),
+            pats,
+            include,
+            exclude,
+            default,
+            auditor=r.nofsauditor,
+            ctx=self,
+            listsubrepos=listsubrepos,
+            badfn=badfn,
+        )
+
+    def diff(
+        self,
+        ctx2=None,
+        match=None,
+        changes=None,
+        opts=None,
+        losedatafn=None,
+        pathfn=None,
+        copy=None,
+        copysourcematch=None,
+        hunksfilterfn=None,
+    ):
         """Returns a diff generator for the given contexts and matcher"""
         if ctx2 is None:
             ctx2 = self.p1()
         if ctx2 is not None:
             ctx2 = self._repo[ctx2]
-        return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
-                          opts=opts, losedatafn=losedatafn, pathfn=pathfn,
-                          copy=copy, copysourcematch=copysourcematch,
-                          hunksfilterfn=hunksfilterfn)
+        return patch.diff(
+            self._repo,
+            ctx2,
+            self,
+            match=match,
+            changes=changes,
+            opts=opts,
+            losedatafn=losedatafn,
+            pathfn=pathfn,
+            copy=copy,
+            copysourcematch=copysourcematch,
+            hunksfilterfn=hunksfilterfn,
+        )
 
     def dirs(self):
         return self._manifest.dirs()
@@ -321,8 +376,15 @@
     def hasdir(self, dir):
         return self._manifest.hasdir(dir)
 
-    def status(self, other=None, match=None, listignored=False,
-               listclean=False, listunknown=False, listsubrepos=False):
+    def status(
+        self,
+        other=None,
+        match=None,
+        listignored=False,
+        listclean=False,
+        listunknown=False,
+        listsubrepos=False,
+    ):
         """return status of files between two nodes or node and working
         directory.
 
@@ -347,22 +409,23 @@
         # then we'd be done. But the special case of the above call means we
         # just copy the manifest of the parent.
         reversed = False
-        if (not isinstance(ctx1, changectx)
-            and isinstance(ctx2, changectx)):
+        if not isinstance(ctx1, changectx) and isinstance(ctx2, changectx):
             reversed = True
             ctx1, ctx2 = ctx2, ctx1
 
         match = self._repo.narrowmatch(match)
         match = ctx2._matchstatus(ctx1, match)
         r = scmutil.status([], [], [], [], [], [], [])
-        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
-                              listunknown)
+        r = ctx2._buildstatus(
+            ctx1, r, match, listignored, listclean, listunknown
+        )
 
         if reversed:
             # Reverse added and removed. Clear deleted, unknown and ignored as
             # these make no sense to reverse.
-            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
-                               r.clean)
+            r = scmutil.status(
+                r.modified, r.removed, r.added, [], [], [], r.clean
+            )
 
         if listsubrepos:
             for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
@@ -374,9 +437,14 @@
                     # won't contain that subpath. The best we can do is ignore it.
                     rev2 = None
                 submatch = matchmod.subdirmatcher(subpath, match)
-                s = sub.status(rev2, match=submatch, ignored=listignored,
-                               clean=listclean, unknown=listunknown,
-                               listsubrepos=True)
+                s = sub.status(
+                    rev2,
+                    match=submatch,
+                    ignored=listignored,
+                    clean=listclean,
+                    unknown=listunknown,
+                    listsubrepos=True,
+                )
                 for rfiles, sfiles in zip(r, s):
                     rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
 
@@ -385,10 +453,12 @@
 
         return r
 
+
 class changectx(basectx):
     """A changecontext object makes access to data related to a particular
     changeset convenient. It represents a read-only context already present in
     the repo."""
+
     def __init__(self, repo, rev, node):
         super(changectx, self).__init__(repo)
         self._rev = rev
@@ -439,15 +509,19 @@
             c.description,
             c.extra,
         )
+
     def manifestnode(self):
         return self._changeset.manifest
 
     def user(self):
         return self._changeset.user
+
     def date(self):
         return self._changeset.date
+
     def files(self):
         return self._changeset.files
+
     def filesmodified(self):
         modified = set(self.files())
         modified.difference_update(self.filesadded())
@@ -508,21 +582,28 @@
 
     def description(self):
         return self._changeset.description
+
     def branch(self):
         return encoding.tolocal(self._changeset.extra.get("branch"))
+
     def closesbranch(self):
         return 'close' in self._changeset.extra
+
     def extra(self):
         """Return a dict of extra information."""
         return self._changeset.extra
+
     def tags(self):
         """Return a list of byte tag names"""
         return self._repo.nodetags(self._node)
+
     def bookmarks(self):
         """Return a list of byte bookmark names."""
         return self._repo.nodebookmarks(self._node)
+
     def phase(self):
         return self._repo._phasecache.phase(self._repo, self._rev)
+
     def hidden(self):
         return self._rev in repoview.filterrevs(self._repo, 'visible')
 
@@ -554,8 +635,9 @@
         """get a file context from this changeset"""
         if fileid is None:
             fileid = self.filenode(path)
-        return filectx(self._repo, path, fileid=fileid,
-                       changectx=self, filelog=filelog)
+        return filectx(
+            self._repo, path, fileid=fileid, changectx=self, filelog=filelog
+        )
 
     def ancestor(self, c2, warn=False):
         """return the "best" ancestor context of self and c2
@@ -586,11 +668,20 @@
                 anc = self._repo.changelog.ancestor(self._node, n2)
             if warn:
                 self._repo.ui.status(
-                    (_("note: using %s as ancestor of %s and %s\n") %
-                     (short(anc), short(self._node), short(n2))) +
-                    ''.join(_("      alternatively, use --config "
-                              "merge.preferancestor=%s\n") %
-                            short(n) for n in sorted(cahs) if n != anc))
+                    (
+                        _("note: using %s as ancestor of %s and %s\n")
+                        % (short(anc), short(self._node), short(n2))
+                    )
+                    + ''.join(
+                        _(
+                            "      alternatively, use --config "
+                            "merge.preferancestor=%s\n"
+                        )
+                        % short(n)
+                        for n in sorted(cahs)
+                        if n != anc
+                    )
+                )
         return self._repo[anc]
 
     def isancestorof(self, other):
@@ -604,8 +695,7 @@
         def bad(fn, msg):
             # The manifest doesn't know about subrepos, so don't complain about
             # paths into valid subrepos.
-            if any(fn == s or fn.startswith(s + '/')
-                   for s in self.substate):
+            if any(fn == s or fn.startswith(s + '/') for s in self.substate):
                 return
             match.bad(fn, _('no such file in rev %s') % self)
 
@@ -615,6 +705,7 @@
     def matches(self, match):
         return self.walk(match)
 
+
 class basefilectx(object):
     """A filecontext object represents the common logic for its children:
     filectx: read-only access to a filerevision that is already present
@@ -623,6 +714,7 @@
                     directory,
     memfilectx: a filecontext that represents files in-memory,
     """
+
     @propertycache
     def _filelog(self):
         return self._repo.file(self._path)
@@ -682,8 +774,11 @@
 
     def __eq__(self, other):
         try:
-            return (type(self) == type(other) and self._path == other._path
-                    and self._filenode == other._filenode)
+            return (
+                type(self) == type(other)
+                and self._path == other._path
+                and self._filenode == other._filenode
+            )
         except AttributeError:
             return False
 
@@ -692,53 +787,77 @@
 
     def filerev(self):
         return self._filerev
+
     def filenode(self):
         return self._filenode
+
     @propertycache
     def _flags(self):
         return self._changectx.flags(self._path)
+
     def flags(self):
         return self._flags
+
     def filelog(self):
         return self._filelog
+
     def rev(self):
         return self._changeid
+
     def linkrev(self):
         return self._filelog.linkrev(self._filerev)
+
     def node(self):
         return self._changectx.node()
+
     def hex(self):
         return self._changectx.hex()
+
     def user(self):
         return self._changectx.user()
+
     def date(self):
         return self._changectx.date()
+
     def files(self):
         return self._changectx.files()
+
     def description(self):
         return self._changectx.description()
+
     def branch(self):
         return self._changectx.branch()
+
     def extra(self):
         return self._changectx.extra()
+
     def phase(self):
         return self._changectx.phase()
+
     def phasestr(self):
         return self._changectx.phasestr()
+
     def obsolete(self):
         return self._changectx.obsolete()
+
     def instabilities(self):
         return self._changectx.instabilities()
+
     def manifest(self):
         return self._changectx.manifest()
+
     def changectx(self):
         return self._changectx
+
     def renamed(self):
         return self._copied
+
     def copysource(self):
         return self._copied and self._copied[0]
+
     def repo(self):
         return self._repo
+
     def size(self):
         return len(self.data())
 
@@ -750,8 +869,10 @@
             return stringutil.binary(self.data())
         except IOError:
             return False
+
     def isexec(self):
         return 'x' in self.flags()
+
     def islink(self):
         return 'l' in self.flags()
 
@@ -763,6 +884,7 @@
         return False
 
     _customcmp = False
+
     def cmp(self, fctx):
         """compare with other file context
 
@@ -773,7 +895,8 @@
 
         if self._filenode is None:
             raise error.ProgrammingError(
-                'filectx.cmp() must be reimplemented if not backed by revlog')
+                'filectx.cmp() must be reimplemented if not backed by revlog'
+            )
 
         if fctx._filenode is None:
             if self._repo._encodefilterpats:
@@ -818,12 +941,11 @@
         if srcrev is None:
             # wctx case, used by workingfilectx during mergecopy
             revs = [p.rev() for p in self._repo[None].parents()]
-            inclusive = True # we skipped the real (revless) source
+            inclusive = True  # we skipped the real (revless) source
         else:
             revs = [srcrev]
         if memberanc is None:
-            memberanc = iteranc = cl.ancestors(revs, lkr,
-                                               inclusive=inclusive)
+            memberanc = iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
         # check if this linkrev is an ancestor of srcrev
         if lkr not in memberanc:
             if iteranc is None:
@@ -833,8 +955,8 @@
             for a in iteranc:
                 if stoprev is not None and a < stoprev:
                     return None
-                ac = cl.read(a) # get changeset data (we avoid object creation)
-                if path in ac[3]: # checking the 'files' field.
+                ac = cl.read(a)  # get changeset data (we avoid object creation)
+                if path in ac[3]:  # checking the 'files' field.
                     # The file has been touched, check if the content is
                     # similar to the one we search for.
                     if fnode == mfl[ac[0]].readfast().get(path):
@@ -985,14 +1107,16 @@
             if base.rev() is None:
                 # wctx is not inclusive, but works because _ancestrycontext
                 # is used to test filelog revisions
-                ac = cl.ancestors([p.rev() for p in base.parents()],
-                                  inclusive=True)
+                ac = cl.ancestors(
+                    [p.rev() for p in base.parents()], inclusive=True
+                )
             else:
                 ac = cl.ancestors([base.rev()], inclusive=True)
             base._ancestrycontext = ac
 
-        return dagop.annotate(base, parents, skiprevs=skiprevs,
-                              diffopts=diffopts)
+        return dagop.annotate(
+            base, parents, skiprevs=skiprevs, diffopts=diffopts
+        )
 
     def ancestors(self, followfirst=False):
         visit = {}
@@ -1017,21 +1141,32 @@
         """
         return self._repo.wwritedata(self.path(), self.data())
 
+
 class filectx(basefilectx):
     """A filecontext object makes access to data related to a particular
        filerevision convenient."""
-    def __init__(self, repo, path, changeid=None, fileid=None,
-                 filelog=None, changectx=None):
+
+    def __init__(
+        self,
+        repo,
+        path,
+        changeid=None,
+        fileid=None,
+        filelog=None,
+        changectx=None,
+    ):
         """changeid must be a revision number, if specified.
            fileid can be a file revision or node."""
         self._repo = repo
         self._path = path
 
-        assert (changeid is not None
-                or fileid is not None
-                or changectx is not None), (
-                    "bad args: changeid=%r, fileid=%r, changectx=%r"
-                    % (changeid, fileid, changectx))
+        assert (
+            changeid is not None or fileid is not None or changectx is not None
+        ), "bad args: changeid=%r, fileid=%r, changectx=%r" % (
+            changeid,
+            fileid,
+            changectx,
+        )
 
         if filelog is not None:
             self._filelog = filelog
@@ -1069,8 +1204,13 @@
     def filectx(self, fileid, changeid=None):
         '''opens an arbitrary revision of the file without
         opening a new filelog'''
-        return filectx(self._repo, self._path, fileid=fileid,
-                       filelog=self._filelog, changeid=changeid)
+        return filectx(
+            self._repo,
+            self._path,
+            fileid=fileid,
+            filelog=self._filelog,
+            changeid=changeid,
+        )
 
     def rawdata(self):
         return self._filelog.rawdata(self._filenode)
@@ -1085,8 +1225,10 @@
         except error.CensoredNodeError:
             if self._repo.ui.config("censor", "policy") == "ignore":
                 return ""
-            raise error.Abort(_("censored node: %s") % short(self._filenode),
-                             hint=_("set censor.policy to ignore errors"))
+            raise error.Abort(
+                _("censored node: %s") % short(self._filenode),
+                hint=_("set censor.policy to ignore errors"),
+            )
 
     def size(self):
         return self._filelog.size(self._filerev)
@@ -1120,14 +1262,26 @@
     def children(self):
         # hard for renames
         c = self._filelog.children(self._filenode)
-        return [filectx(self._repo, self._path, fileid=x,
-                        filelog=self._filelog) for x in c]
+        return [
+            filectx(self._repo, self._path, fileid=x, filelog=self._filelog)
+            for x in c
+        ]
+
 
 class committablectx(basectx):
     """A committablectx object provides common functionality for a context that
     wants the ability to commit, e.g. workingctx or memctx."""
-    def __init__(self, repo, text="", user=None, date=None, extra=None,
-                 changes=None, branch=None):
+
+    def __init__(
+        self,
+        repo,
+        text="",
+        user=None,
+        date=None,
+        extra=None,
+        changes=None,
+        branch=None,
+    ):
         super(committablectx, self).__init__(repo)
         self._rev = None
         self._node = None
@@ -1178,31 +1332,43 @@
 
     def manifestnode(self):
         return None
+
     def user(self):
         return self._user or self._repo.ui.username()
+
     def date(self):
         return self._date
+
     def description(self):
         return self._text
+
     def files(self):
-        return sorted(self._status.modified + self._status.added +
-                      self._status.removed)
+        return sorted(
+            self._status.modified + self._status.added + self._status.removed
+        )
+
     def modified(self):
         return self._status.modified
+
     def added(self):
         return self._status.added
+
     def removed(self):
         return self._status.removed
+
     def deleted(self):
         return self._status.deleted
+
     filesmodified = modified
     filesadded = added
     filesremoved = removed
 
     def branch(self):
         return encoding.tolocal(self._extra['branch'])
+
     def closesbranch(self):
         return 'close' in self._extra
+
     def extra(self):
         return self._extra
 
@@ -1219,7 +1385,7 @@
         return b
 
     def phase(self):
-        phase = phases.draft # default phase to draft
+        phase = phases.draft  # default phase to draft
         for p in self.parents():
             phase = max(phase, p.phase())
         return phase
@@ -1232,13 +1398,14 @@
 
     def ancestor(self, c2):
         """return the "best" ancestor context of self and c2"""
-        return self._parents[0].ancestor(c2) # punt on two parents for now
+        return self._parents[0].ancestor(c2)  # punt on two parents for now
 
     def ancestors(self):
         for p in self._parents:
             yield p
         for a in self._repo.changelog.ancestors(
-            [p.rev() for p in self._parents]):
+            [p.rev() for p in self._parents]
+        ):
             yield self._repo[a]
 
     def markcommitted(self, node):
@@ -1254,6 +1421,7 @@
     def dirty(self, missing=False, merge=True, branch=True):
         return False
 
+
 class workingctx(committablectx):
     """A workingctx object makes access to data related to
     the current working directory convenient.
@@ -1263,16 +1431,19 @@
     changes - a list of file lists as returned by localrepo.status()
                or None to use the repository status.
     """
-    def __init__(self, repo, text="", user=None, date=None, extra=None,
-                 changes=None):
+
+    def __init__(
+        self, repo, text="", user=None, date=None, extra=None, changes=None
+    ):
         branch = None
         if not extra or 'branch' not in extra:
             try:
                 branch = repo.dirstate.branch()
             except UnicodeDecodeError:
                 raise error.Abort(_('branch name not in UTF-8!'))
-        super(workingctx, self).__init__(repo, text, user, date, extra, changes,
-                                         branch=branch)
+        super(workingctx, self).__init__(
+            repo, text, user, date, extra, changes, branch=branch
+        )
 
     def __iter__(self):
         d = self._repo.dirstate
@@ -1309,9 +1480,11 @@
         if len(parents) < 2:
             # when we have one parent, it's easy: copy from parent
             man = parents[0].manifest()
+
             def func(f):
                 f = copiesget(f, f)
                 return man.flags(f)
+
         else:
             # merges are tricky: we try to reconstruct the unstored
             # result from the merge (issue1802)
@@ -1320,7 +1493,7 @@
             m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
 
             def func(f):
-                f = copiesget(f, f) # may be wrong for merges with copies
+                f = copiesget(f, f)  # may be wrong for merges with copies
                 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                 if fl1 == fl2:
                     return fl1
@@ -1328,7 +1501,7 @@
                     return fl2
                 if fl2 == fla:
                     return fl1
-                return '' # punt for conflicts
+                return ''  # punt for conflicts
 
         return func
 
@@ -1350,8 +1523,9 @@
 
     def filectx(self, path, filelog=None):
         """get a file context from the working directory"""
-        return workingfilectx(self._repo, path, workingctx=self,
-                              filelog=filelog)
+        return workingfilectx(
+            self._repo, path, workingctx=self, filelog=filelog
+        )
 
     def dirty(self, missing=False, merge=True, branch=True):
         "check whether a working directory is modified"
@@ -1360,10 +1534,14 @@
             if self.sub(s).dirty(missing=missing):
                 return True
         # check current working dir
-        return ((merge and self.p2()) or
-                (branch and self.branch() != self.p1().branch()) or
-                self.modified() or self.added() or self.removed() or
-                (missing and self.deleted()))
+        return (
+            (merge and self.p2())
+            or (branch and self.branch() != self.p1().branch())
+            or self.modified()
+            or self.added()
+            or self.removed()
+            or (missing and self.deleted())
+        )
 
     def add(self, list, prefix=""):
         with self._repo.wlock():
@@ -1384,14 +1562,23 @@
                     continue
                 limit = ui.configbytes('ui', 'large-file-limit')
                 if limit != 0 and st.st_size > limit:
-                    ui.warn(_("%s: up to %d MB of RAM may be required "
-                              "to manage this file\n"
-                              "(use 'hg revert %s' to cancel the "
-                              "pending addition)\n")
-                            % (f, 3 * st.st_size // 1000000, uipath(f)))
+                    ui.warn(
+                        _(
+                            "%s: up to %d MB of RAM may be required "
+                            "to manage this file\n"
+                            "(use 'hg revert %s' to cancel the "
+                            "pending addition)\n"
+                        )
+                        % (f, 3 * st.st_size // 1000000, uipath(f))
+                    )
                 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
-                    ui.warn(_("%s not added: only files and symlinks "
-                              "supported currently\n") % uipath(f))
+                    ui.warn(
+                        _(
+                            "%s not added: only files and symlinks "
+                            "supported currently\n"
+                        )
+                        % uipath(f)
+                    )
                     rejected.append(f)
                 elif ds[f] in 'amn':
                     ui.warn(_("%s already tracked!\n") % uipath(f))
@@ -1422,13 +1609,15 @@
         except OSError as err:
             if err.errno != errno.ENOENT:
                 raise
-            self._repo.ui.warn(_("%s does not exist!\n")
-                               % self._repo.dirstate.pathto(dest))
+            self._repo.ui.warn(
+                _("%s does not exist!\n") % self._repo.dirstate.pathto(dest)
+            )
             return
         if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
-            self._repo.ui.warn(_("copy failed: %s is not a file or a "
-                                 "symbolic link\n")
-                               % self._repo.dirstate.pathto(dest))
+            self._repo.ui.warn(
+                _("copy failed: %s is not a file or a " "symbolic link\n")
+                % self._repo.dirstate.pathto(dest)
+            )
         else:
             with self._repo.wlock():
                 ds = self._repo.dirstate
@@ -1438,17 +1627,33 @@
                     ds.normallookup(dest)
                 ds.copy(source, dest)
 
-    def match(self, pats=None, include=None, exclude=None, default='glob',
-              listsubrepos=False, badfn=None):
+    def match(
+        self,
+        pats=None,
+        include=None,
+        exclude=None,
+        default='glob',
+        listsubrepos=False,
+        badfn=None,
+    ):
         r = self._repo
 
         # Only a case insensitive filesystem needs magic to translate user input
         # to actual case in the filesystem.
         icasefs = not util.fscasesensitive(r.root)
-        return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
-                              default, auditor=r.auditor, ctx=self,
-                              listsubrepos=listsubrepos, badfn=badfn,
-                              icasefs=icasefs)
+        return matchmod.match(
+            r.root,
+            r.getcwd(),
+            pats,
+            include,
+            exclude,
+            default,
+            auditor=r.auditor,
+            ctx=self,
+            listsubrepos=listsubrepos,
+            badfn=badfn,
+            icasefs=icasefs,
+        )
 
     def _filtersuspectsymlink(self, files):
         if not files or self._repo.dirstate._checklink:
@@ -1462,10 +1667,15 @@
         for f in files:
             if self.flags(f) == 'l':
                 d = self[f].data()
-                if (d == '' or len(d) >= 1024 or '\n' in d
-                    or stringutil.binary(d)):
-                    self._repo.ui.debug('ignoring suspect symlink placeholder'
-                                        ' "%s"\n' % f)
+                if (
+                    d == ''
+                    or len(d) >= 1024
+                    or '\n' in d
+                    or stringutil.binary(d)
+                ):
+                    self._repo.ui.debug(
+                        'ignoring suspect symlink placeholder' ' "%s"\n' % f
+                    )
                     continue
             sane.append(f)
         return sane
@@ -1484,8 +1694,11 @@
             try:
                 # This will return True for a file that got replaced by a
                 # directory in the interim, but fixing that is pretty hard.
-                if (f not in pctx or self.flags(f) != pctx.flags(f)
-                    or pctx[f].cmp(self[f])):
+                if (
+                    f not in pctx
+                    or self.flags(f) != pctx.flags(f)
+                    or pctx[f].cmp(self[f])
+                ):
                     modified.append(f)
                 else:
                     fixup.append(f)
@@ -1532,8 +1745,9 @@
                         # consistency, because .hg/dirstate was
                         # already changed simultaneously after last
                         # caching (see also issue5584 for detail)
-                        self._repo.ui.debug('skip updating dirstate: '
-                                            'identity mismatch\n')
+                        self._repo.ui.debug(
+                            'skip updating dirstate: ' 'identity mismatch\n'
+                        )
             except error.LockError:
                 pass
             finally:
@@ -1545,8 +1759,9 @@
         subrepos = []
         if '.hgsub' in self:
             subrepos = sorted(self.substate)
-        cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
-                                            clean=clean, unknown=unknown)
+        cmp, s = self._repo.dirstate.status(
+            match, subrepos, ignored=ignored, clean=clean, unknown=unknown
+        )
 
         # check for any possibly clean files
         fixup = []
@@ -1564,8 +1779,9 @@
             # cache for performance
             if s.unknown or s.ignored or s.clean:
                 # "_status" is cached with list*=False in the normal route
-                self._status = scmutil.status(s.modified, s.added, s.removed,
-                                              s.deleted, [], [], [])
+                self._status = scmutil.status(
+                    s.modified, s.added, s.removed, s.deleted, [], [], []
+                )
             else:
                 self._status = s
 
@@ -1607,8 +1823,10 @@
         man = parents[0].manifest().copy()
 
         ff = self._flagfunc
-        for i, l in ((addednodeid, status.added),
-                     (modifiednodeid, status.modified)):
+        for i, l in (
+            (addednodeid, status.added),
+            (modifiednodeid, status.modified),
+        ):
             for f in l:
                 man[f] = i
                 try:
@@ -1622,8 +1840,9 @@
 
         return man
 
-    def _buildstatus(self, other, s, match, listignored, listclean,
-                     listunknown):
+    def _buildstatus(
+        self, other, s, match, listignored, listclean, listunknown
+    ):
         """build a status with respect to another context
 
         This includes logic for maintaining the fast path of status when
@@ -1637,9 +1856,9 @@
         # they are supposed to be linking to.
         s.modified[:] = self._filtersuspectsymlink(s.modified)
         if other != self._repo['.']:
-            s = super(workingctx, self)._buildstatus(other, s, match,
-                                                     listignored, listclean,
-                                                     listunknown)
+            s = super(workingctx, self)._buildstatus(
+                other, s, match, listignored, listclean, listunknown
+            )
         return s
 
     def _matchstatus(self, other, match):
@@ -1653,20 +1872,28 @@
         just use the default match object sent to us.
         """
         if other != self._repo['.']:
+
             def bad(f, msg):
                 # 'f' may be a directory pattern from 'match.files()',
                 # so 'f not in ctx1' is not enough
                 if f not in other and not other.hasdir(f):
-                    self._repo.ui.warn('%s: %s\n' %
-                                       (self._repo.dirstate.pathto(f), msg))
+                    self._repo.ui.warn(
+                        '%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
+                    )
+
             match.bad = bad
         return match
 
     def walk(self, match):
         '''Generates matching file names.'''
-        return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
-                                               subrepos=sorted(self.substate),
-                                               unknown=True, ignored=False))
+        return sorted(
+            self._repo.dirstate.walk(
+                self._repo.narrowmatch(match),
+                subrepos=sorted(self.substate),
+                unknown=True,
+                ignored=False,
+            )
+        )
 
     def matches(self, match):
         match = self._repo.narrowmatch(match)
@@ -1688,9 +1915,11 @@
 
         sparse.aftercommit(self._repo, node)
 
+
 class committablefilectx(basefilectx):
     """A committablefilectx provides common functionality for a file context
     that wants the ability to commit, e.g. workingfilectx or memfilectx."""
+
     def __init__(self, repo, path, filelog=None, ctx=None):
         self._repo = repo
         self._path = path
@@ -1719,6 +1948,7 @@
 
     def parents(self):
         '''return parent filectxs, following copies if necessary'''
+
         def filenode(ctx, path):
             return ctx._manifest.get(path, nullid)
 
@@ -1735,15 +1965,20 @@
         for pc in pcl[1:]:
             pl.append((path, filenode(pc, path), fl))
 
-        return [self._parentfilectx(p, fileid=n, filelog=l)
-                for p, n, l in pl if n != nullid]
+        return [
+            self._parentfilectx(p, fileid=n, filelog=l)
+            for p, n, l in pl
+            if n != nullid
+        ]
 
     def children(self):
         return []
 
+
 class workingfilectx(committablefilectx):
     """A workingfilectx object makes access to data related to a particular
        file in the working directory convenient."""
+
     def __init__(self, repo, path, filelog=None, workingctx=None):
         super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
 
@@ -1753,13 +1988,16 @@
 
     def data(self):
         return self._repo.wread(self._path)
+
     def copysource(self):
         return self._repo.dirstate.copied(self._path)
 
     def size(self):
         return self._repo.wvfs.lstat(self._path).st_size
+
     def lstat(self):
         return self._repo.wvfs.lstat(self._path)
+
     def date(self):
         t, tz = self._changectx.date()
         try:
@@ -1790,14 +2028,15 @@
     def remove(self, ignoremissing=False):
         """wraps unlink for a repo's working directory"""
         rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
-        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
-                                   rmdir=rmdir)
+        self._repo.wvfs.unlinkpath(
+            self._path, ignoremissing=ignoremissing, rmdir=rmdir
+        )
 
     def write(self, data, flags, backgroundclose=False, **kwargs):
         """wraps repo.wwrite"""
-        return self._repo.wwrite(self._path, data, flags,
-                                 backgroundclose=backgroundclose,
-                                 **kwargs)
+        return self._repo.wwrite(
+            self._path, data, flags, backgroundclose=backgroundclose, **kwargs
+        )
 
     def markcopied(self, src):
         """marks this file a copy of `src`"""
@@ -1827,6 +2066,7 @@
     def setflags(self, l, x):
         self._repo.wvfs.setflags(self._path, l, x)
 
+
 class overlayworkingctx(committablectx):
     """Wraps another mutable context with a write-back cache that can be
     converted into a commit context.
@@ -1863,8 +2103,9 @@
                     # Must fallback here, too, because we only set flags.
                     return self._wrappedctx[path].data()
             else:
-                raise error.ProgrammingError("No such file or directory: %s" %
-                                             path)
+                raise error.ProgrammingError(
+                    "No such file or directory: %s" % path
+                )
         else:
             return self._wrappedctx[path].data()
 
@@ -1888,22 +2129,32 @@
     def _flagfunc(self):
         def f(path):
             return self._cache[path]['flags']
+
         return f
 
     def files(self):
         return sorted(self.added() + self.modified() + self.removed())
 
     def modified(self):
-        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
-                self._existsinparent(f)]
+        return [
+            f
+            for f in self._cache.keys()
+            if self._cache[f]['exists'] and self._existsinparent(f)
+        ]
 
     def added(self):
-        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
-                not self._existsinparent(f)]
+        return [
+            f
+            for f in self._cache.keys()
+            if self._cache[f]['exists'] and not self._existsinparent(f)
+        ]
 
     def removed(self):
-        return [f for f in self._cache.keys() if
-                not self._cache[f]['exists'] and self._existsinparent(f)]
+        return [
+            f
+            for f in self._cache.keys()
+            if not self._cache[f]['exists'] and self._existsinparent(f)
+        ]
 
     def p1copies(self):
         copies = self._wrappedctx.p1copies().copy()
@@ -1911,7 +2162,7 @@
         for f in self._cache.keys():
             if not narrowmatch(f):
                 continue
-            copies.pop(f, None) # delete if it exists
+            copies.pop(f, None)  # delete if it exists
             source = self._cache[f]['copied']
             if source:
                 copies[f] = source
@@ -1923,7 +2174,7 @@
         for f in self._cache.keys():
             if not narrowmatch(f):
                 continue
-            copies.pop(f, None) # delete if it exists
+            copies.pop(f, None)  # delete if it exists
             source = self._cache[f]['copied']
             if source:
                 copies[f] = source
@@ -1939,8 +2190,13 @@
             return self._wrappedctx[path].date()
 
     def markcopied(self, path, origin):
-        self._markdirty(path, exists=True, date=self.filedate(path),
-                        flags=self.flags(path), copied=origin)
+        self._markdirty(
+            path,
+            exists=True,
+            date=self.filedate(path),
+            flags=self.flags(path),
+            copied=origin,
+        )
 
     def copydata(self, path):
         if self.isdirty(path):
@@ -1953,8 +2209,9 @@
             if self._cache[path]['exists']:
                 return self._cache[path]['flags']
             else:
-                raise error.ProgrammingError("No such file or directory: %s" %
-                                             self._path)
+                raise error.ProgrammingError(
+                    "No such file or directory: %s" % self._path
+                )
         else:
             return self._wrappedctx[path].flags()
 
@@ -1980,17 +2237,20 @@
         IMM, we'll never check that a path is actually writable -- e.g., because
         it adds `a/foo`, but `a` is actually a file in the other commit.
         """
+
         def fail(path, component):
             # p1() is the base and we're receiving "writes" for p2()'s
             # files.
             if 'l' in self.p1()[component].flags():
-                raise error.Abort("error: %s conflicts with symlink %s "
-                                  "in %d." % (path, component,
-                                              self.p1().rev()))
+                raise error.Abort(
+                    "error: %s conflicts with symlink %s "
+                    "in %d." % (path, component, self.p1().rev())
+                )
             else:
-                raise error.Abort("error: '%s' conflicts with file '%s' in "
-                                  "%d." % (path, component,
-                                           self.p1().rev()))
+                raise error.Abort(
+                    "error: '%s' conflicts with file '%s' in "
+                    "%d." % (path, component, self.p1().rev())
+                )
 
         # Test that each new directory to be created to write this path from p2
         # is not a file in p1.
@@ -2012,18 +2272,20 @@
             mfiles = [m for m in mfiles if m in self]
             if not mfiles:
                 return
-            raise error.Abort("error: file '%s' cannot be written because "
-                              " '%s/' is a directory in %s (containing %d "
-                              "entries: %s)"
-                              % (path, path, self.p1(), len(mfiles),
-                                 ', '.join(mfiles)))
+            raise error.Abort(
+                "error: file '%s' cannot be written because "
+                " '%s/' is a directory in %s (containing %d "
+                "entries: %s)"
+                % (path, path, self.p1(), len(mfiles), ', '.join(mfiles))
+            )
 
     def write(self, path, data, flags='', **kwargs):
         if data is None:
             raise error.ProgrammingError("data must be non-None")
         self._auditconflicts(path)
-        self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
-                        flags=flags)
+        self._markdirty(
+            path, exists=True, data=data, date=dateutil.makedate(), flags=flags
+        )
 
     def setflags(self, path, l, x):
         flag = ''
@@ -2031,8 +2293,7 @@
             flag = 'l'
         elif x:
             flag = 'x'
-        self._markdirty(path, exists=True, date=dateutil.makedate(),
-                        flags=flag)
+        self._markdirty(path, exists=True, date=dateutil.makedate(), flags=flag)
 
     def remove(self, path):
         self._markdirty(path, exists=False)
@@ -2044,8 +2305,10 @@
         if self.isdirty(path):
             # If this path exists and is a symlink, "follow" it by calling
             # exists on the destination path.
-            if (self._cache[path]['exists'] and
-                        'l' in self._cache[path]['flags']):
+            if (
+                self._cache[path]['exists']
+                and 'l' in self._cache[path]['flags']
+            ):
                 return self.exists(self._cache[path]['data'].strip())
             else:
                 return self._cache[path]['exists']
@@ -2064,12 +2327,21 @@
             if self._cache[path]['exists']:
                 return len(self._cache[path]['data'])
             else:
-                raise error.ProgrammingError("No such file or directory: %s" %
-                                             self._path)
+                raise error.ProgrammingError(
+                    "No such file or directory: %s" % self._path
+                )
         return self._wrappedctx[path].size()
 
-    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
-                 user=None, editor=None):
+    def tomemctx(
+        self,
+        text,
+        branch=None,
+        extra=None,
+        date=None,
+        parents=None,
+        user=None,
+        editor=None,
+    ):
         """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
         committed.
 
@@ -2089,19 +2361,35 @@
             parents = (self._repo[parents[0]], self._repo[parents[1]])
 
         files = self.files()
+
         def getfile(repo, memctx, path):
             if self._cache[path]['exists']:
-                return memfilectx(repo, memctx, path,
-                                  self._cache[path]['data'],
-                                  'l' in self._cache[path]['flags'],
-                                  'x' in self._cache[path]['flags'],
-                                  self._cache[path]['copied'])
+                return memfilectx(
+                    repo,
+                    memctx,
+                    path,
+                    self._cache[path]['data'],
+                    'l' in self._cache[path]['flags'],
+                    'x' in self._cache[path]['flags'],
+                    self._cache[path]['copied'],
+                )
             else:
                 # Returning None, but including the path in `files`, is
                 # necessary for memctx to register a deletion.
                 return None
-        return memctx(self._repo, parents, text, files, getfile, date=date,
-                      extra=extra, user=user, branch=branch, editor=editor)
+
+        return memctx(
+            self._repo,
+            parents,
+            text,
+            files,
+            getfile,
+            date=date,
+            extra=extra,
+            user=user,
+            branch=branch,
+            editor=editor,
+        )
 
     def isdirty(self, path):
         return path in self._cache
@@ -2126,15 +2414,19 @@
         # This won't be perfect, but can help performance significantly when
         # using things like remotefilelog.
         scmutil.prefetchfiles(
-            self.repo(), [self.p1().rev()],
-            scmutil.matchfiles(self.repo(), self._cache.keys()))
+            self.repo(),
+            [self.p1().rev()],
+            scmutil.matchfiles(self.repo(), self._cache.keys()),
+        )
 
         for path in self._cache.keys():
             cache = self._cache[path]
             try:
                 underlying = self._wrappedctx[path]
-                if (underlying.data() == cache['data'] and
-                            underlying.flags() == cache['flags']):
+                if (
+                    underlying.data() == cache['data']
+                    and underlying.flags() == cache['flags']
+                ):
                     keys.append(path)
             except error.ManifestLookupError:
                 # Path not in the underlying manifest (created).
@@ -2144,8 +2436,9 @@
             del self._cache[path]
         return keys
 
-    def _markdirty(self, path, exists, data=None, date=None, flags='',
-        copied=None):
+    def _markdirty(
+        self, path, exists, data=None, date=None, flags='', copied=None
+    ):
         # data not provided, let's see if we already have some; if not, let's
         # grab it from our underlying context, so that we always have data if
         # the file is marked as existing.
@@ -2164,16 +2457,17 @@
         }
 
     def filectx(self, path, filelog=None):
-        return overlayworkingfilectx(self._repo, path, parent=self,
-                                     filelog=filelog)
+        return overlayworkingfilectx(
+            self._repo, path, parent=self, filelog=filelog
+        )
+
 
 class overlayworkingfilectx(committablefilectx):
     """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
     cache, which can be flushed through later by calling ``flush()``."""
 
     def __init__(self, repo, path, filelog=None, parent=None):
-        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
-                                                    parent)
+        super(overlayworkingfilectx, self).__init__(repo, path, filelog, parent)
         self._repo = repo
         self._parent = parent
         self._path = path
@@ -2223,6 +2517,7 @@
     def clearunknown(self):
         pass
 
+
 class workingcommitctx(workingctx):
     """A workingcommitctx object makes access to data related to
     the revision being committed convenient.
@@ -2230,10 +2525,13 @@
     This hides changes in the working directory, if they aren't
     committed in this context.
     """
-    def __init__(self, repo, changes,
-                 text="", user=None, date=None, extra=None):
-        super(workingcommitctx, self).__init__(repo, text, user, date, extra,
-                                               changes)
+
+    def __init__(
+        self, repo, changes, text="", user=None, date=None, extra=None
+    ):
+        super(workingcommitctx, self).__init__(
+            repo, text, user, date, extra, changes
+        )
 
     def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
         """Return matched files only in ``self._status``
@@ -2245,10 +2543,15 @@
             clean = [f for f in self._manifest if f not in self._changedset]
         else:
             clean = []
-        return scmutil.status([f for f in self._status.modified if match(f)],
-                              [f for f in self._status.added if match(f)],
-                              [f for f in self._status.removed if match(f)],
-                              [], [], [], clean)
+        return scmutil.status(
+            [f for f in self._status.modified if match(f)],
+            [f for f in self._status.added if match(f)],
+            [f for f in self._status.removed if match(f)],
+            [],
+            [],
+            [],
+            clean,
+        )
 
     @propertycache
     def _changedset(self):
@@ -2259,6 +2562,7 @@
         changed.update(self._status.removed)
         return changed
 
+
 def makecachingfilectxfn(func):
     """Create a filectxfn that caches based on the path.
 
@@ -2275,36 +2579,54 @@
 
     return getfilectx
 
+
 def memfilefromctx(ctx):
     """Given a context return a memfilectx for ctx[path]
 
     This is a convenience method for building a memctx based on another
     context.
     """
+
     def getfilectx(repo, memctx, path):
         fctx = ctx[path]
         copysource = fctx.copysource()
-        return memfilectx(repo, memctx, path, fctx.data(),
-                          islink=fctx.islink(), isexec=fctx.isexec(),
-                          copysource=copysource)
+        return memfilectx(
+            repo,
+            memctx,
+            path,
+            fctx.data(),
+            islink=fctx.islink(),
+            isexec=fctx.isexec(),
+            copysource=copysource,
+        )
 
     return getfilectx
 
+
 def memfilefrompatch(patchstore):
     """Given a patch (e.g. patchstore object) return a memfilectx
 
     This is a convenience method for building a memctx based on a patchstore.
     """
+
     def getfilectx(repo, memctx, path):
         data, mode, copysource = patchstore.getfile(path)
         if data is None:
             return None
         islink, isexec = mode
-        return memfilectx(repo, memctx, path, data, islink=islink,
-                          isexec=isexec, copysource=copysource)
+        return memfilectx(
+            repo,
+            memctx,
+            path,
+            data,
+            islink=islink,
+            isexec=isexec,
+            copysource=copysource,
+        )
 
     return getfilectx
 
+
 class memctx(committablectx):
     """Use memctx to perform in-memory commits via localrepo.commitctx().
 
@@ -2338,10 +2660,22 @@
     # this field to determine what to do in filectxfn.
     _returnnoneformissingfiles = True
 
-    def __init__(self, repo, parents, text, files, filectxfn, user=None,
-                 date=None, extra=None, branch=None, editor=False):
-        super(memctx, self).__init__(repo, text, user, date, extra,
-                                     branch=branch)
+    def __init__(
+        self,
+        repo,
+        parents,
+        text,
+        files,
+        filectxfn,
+        user=None,
+        date=None,
+        extra=None,
+        branch=None,
+        editor=False,
+    ):
+        super(memctx, self).__init__(
+            repo, text, user, date, extra, branch=branch
+        )
         self._rev = None
         self._node = None
         parents = [(p or nullid) for p in parents]
@@ -2420,13 +2754,23 @@
 
         return scmutil.status(modified, added, removed, [], [], [], [])
 
+
 class memfilectx(committablefilectx):
     """memfilectx represents an in-memory file to commit.
 
     See memctx and committablefilectx for more details.
     """
-    def __init__(self, repo, changectx, path, data, islink=False,
-                 isexec=False, copysource=None):
+
+    def __init__(
+        self,
+        repo,
+        changectx,
+        path,
+        data,
+        islink=False,
+        isexec=False,
+        copysource=None,
+    ):
         """
         path is the normalized file path relative to repository root.
         data is the file content as a string.
@@ -2478,8 +2822,18 @@
     dateutil.parsedate() and defaults to current date, extra is a dictionary of
     metadata or is left empty.
     """
-    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
-                 date=None, extra=None, editor=False):
+
+    def __init__(
+        self,
+        repo,
+        originalctx,
+        parents=None,
+        text=None,
+        user=None,
+        date=None,
+        extra=None,
+        editor=False,
+    ):
         if text is None:
             text = originalctx.description()
         super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
@@ -2500,11 +2854,15 @@
         # manifests of our commit parents
         mp1, mp2 = self.manifestctx().parents
         if p1 != nullid and p1.manifestnode() != mp1:
-            raise RuntimeError(r"can't reuse the manifest: its p1 "
-                               r"doesn't match the new ctx p1")
+            raise RuntimeError(
+                r"can't reuse the manifest: its p1 "
+                r"doesn't match the new ctx p1"
+            )
         if p2 != nullid and p2.manifestnode() != mp2:
-            raise RuntimeError(r"can't reuse the manifest: "
-                               r"its p2 doesn't match the new ctx p2")
+            raise RuntimeError(
+                r"can't reuse the manifest: "
+                r"its p2 doesn't match the new ctx p2"
+            )
 
         self._files = originalctx.files()
         self.substate = {}
@@ -2558,10 +2916,12 @@
 
         return scmutil.status(modified, added, removed, [], [], [], [])
 
+
 class arbitraryfilectx(object):
     """Allows you to use filectx-like functions on a file in an arbitrary
     location on disk, possibly not in the working directory.
     """
+
     def __init__(self, path, repo=None):
         # Repo is optional because contrib/simplemerge uses this class.
         self._repo = repo
@@ -2570,7 +2930,7 @@
     def cmp(self, fctx):
         # filecmp follows symlinks whereas `cmp` should not, so skip the fast
         # path if either side is a symlink.
-        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
+        symlinks = 'l' in self.flags() or 'l' in fctx.flags()
         if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
             # Add a fast-path for merge if both sides are disk-backed.
             # Note that filecmp uses the opposite return values (True if same)
--- a/mercurial/copies.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/copies.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,9 +19,8 @@
     pathutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 def _findlimit(repo, ctxa, ctxb):
     """
@@ -62,7 +61,7 @@
     limit = node.wdirrev
 
     while interesting:
-        r = -heapq.heappop(visit)
+        r = -(heapq.heappop(visit))
         if r == node.wdirrev:
             parents = [pctx.rev() for pctx in wdirparents]
         else:
@@ -81,7 +80,7 @@
                 side[p] = 0
                 interesting -= 1
         if side[r]:
-            limit = r # lowest rev visited
+            limit = r  # lowest rev visited
             interesting -= 1
 
     # Consider the following flow (see test-commit-amend.t under issue4405):
@@ -107,6 +106,7 @@
     # This only occurs when a is a descendant of b or vice-versa.
     return min(limit, a, b)
 
+
 def _filter(src, dst, t):
     """filters out invalid copies after chaining"""
 
@@ -140,6 +140,7 @@
         elif k not in dst:
             del t[k]
 
+
 def _chain(a, b):
     """chain two sets of copies 'a' and 'b'"""
     t = a.copy()
@@ -150,6 +151,7 @@
             t[k] = v
     return t
 
+
 def _tracefile(fctx, am, basemf, limit):
     """return file context that is the ancestor of fctx present in ancestor
     manifest am, stopping after the first ancestor lower than limit"""
@@ -163,6 +165,7 @@
         if not f.isintroducedafter(limit):
             return None
 
+
 def _dirstatecopies(repo, match=None):
     ds = repo.dirstate
     c = ds.copies().copy()
@@ -171,6 +174,7 @@
             del c[k]
     return c
 
+
 def _computeforwardmissing(a, b, match=None):
     """Computes which files are in b but not a.
     This is its own function so extensions can easily wrap this call to see what
@@ -180,12 +184,14 @@
     mb = b.manifest()
     return mb.filesnotin(ma, match=match)
 
+
 def usechangesetcentricalgo(repo):
     """Checks if we should use changeset-centric copy algorithms"""
     readfrom = repo.ui.config('experimental', 'copies.read-from')
     changesetsource = ('changeset-only', 'compatibility')
     return readfrom in changesetsource
 
+
 def _committedforwardcopies(a, b, base, match):
     """Like _forwardcopies(), but b.rev() cannot be None (working copy)"""
     # files might have to be traced back to the fctx parent of the last
@@ -198,8 +204,7 @@
     debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
     dbg = repo.ui.debug
     if debug:
-        dbg('debug.copies:    looking into rename from %s to %s\n'
-            % (a, b))
+        dbg('debug.copies:    looking into rename from %s to %s\n' % (a, b))
     limit = _findlimit(repo, a, b)
     if debug:
         dbg('debug.copies:      search limit: %d\n' % limit)
@@ -242,10 +247,13 @@
                 dbg('debug.copies:          rename of: %s\n' % opath)
             cm[f] = opath
         if debug:
-            dbg('debug.copies:          time: %f seconds\n'
-                % (util.timer() - start))
+            dbg(
+                'debug.copies:          time: %f seconds\n'
+                % (util.timer() - start)
+            )
     return cm
 
+
 def _changesetforwardcopies(a, b, match):
     if a.rev() in (node.nullrev, b.rev()):
         return {}
@@ -300,8 +308,9 @@
                 parent = 2
                 childcopies = childctx.p2copies()
             if not alwaysmatch:
-                childcopies = {dst: src for dst, src in childcopies.items()
-                               if match(dst)}
+                childcopies = {
+                    dst: src for dst, src in childcopies.items() if match(dst)
+                }
             # Copy the dict only if later iterations will also need it
             if i != len(children[r]) - 1:
                 newcopies = copies.copy()
@@ -315,6 +324,7 @@
             heapq.heappush(work, (c, parent, newcopies))
     assert False
 
+
 def _forwardcopies(a, b, base=None, match=None):
     """find {dst@b: src@a} copy mapping where a is an ancestor of b"""
 
@@ -327,9 +337,10 @@
         # combine copies from dirstate if necessary
         copies = _chain(cm, _dirstatecopies(b._repo, match))
     else:
-        copies  = _committedforwardcopies(a, b, base, match)
+        copies = _committedforwardcopies(a, b, base, match)
     return copies
 
+
 def _backwardrenames(a, b, match):
     if a._repo.ui.config('experimental', 'copytrace') == 'off':
         return {}
@@ -351,13 +362,13 @@
         r[v] = k
     return r
 
+
 def pathcopies(x, y, match=None):
     """find {dst@y: src@x} copy mapping for directed compare"""
     repo = x._repo
     debug = repo.ui.debugflag and repo.ui.configbool('devel', 'debug.copies')
     if debug:
-        repo.ui.debug('debug.copies: searching copies from %s to %s\n'
-                      % (x, y))
+        repo.ui.debug('debug.copies: searching copies from %s to %s\n' % (x, y))
     if x == y or not x or not y:
         return {}
     a = y.ancestor(x)
@@ -378,11 +389,14 @@
         base = None
         if a.rev() != node.nullrev:
             base = x
-        copies = _chain(_backwardrenames(x, a, match=match),
-                        _forwardcopies(a, y, base, match=match))
+        copies = _chain(
+            _backwardrenames(x, a, match=match),
+            _forwardcopies(a, y, base, match=match),
+        )
     _filter(x, y, copies)
     return copies
 
+
 def mergecopies(repo, c1, c2, base):
     """
     Finds moves and copies between context c1 and c2 that are relevant for
@@ -462,6 +476,7 @@
     else:
         return _fullcopytracing(repo, c1, c2, base)
 
+
 def _isfullcopytraceable(repo, c1, base):
     """ Checks that if base, source and destination are all no-public branches,
     if yes let's use the full copytrace algorithm for increased capabilities
@@ -474,14 +489,17 @@
     if c1.rev() is None:
         c1 = c1.p1()
     if c1.mutable() and base.mutable():
-        sourcecommitlimit = repo.ui.configint('experimental',
-                                              'copytrace.sourcecommitlimit')
+        sourcecommitlimit = repo.ui.configint(
+            'experimental', 'copytrace.sourcecommitlimit'
+        )
         commits = len(repo.revs('%d::%d', base.rev(), c1.rev()))
         return commits < sourcecommitlimit
     return False
 
-def _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
-                           copy, renamedelete):
+
+def _checksinglesidecopies(
+    src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
+):
     if src not in m2:
         # deleted on side 2
         if src not in m1:
@@ -497,6 +515,7 @@
                 # "both created" case in manifestmerge otherwise)
                 copy[dst] = src
 
+
 def _fullcopytracing(repo, c1, c2, base):
     """ The full copytracing algorithm which finds all the new files that were
     added from merge base up to the top commit and for each file it checks if
@@ -537,7 +556,7 @@
                 # to 'b' and 'c' and deletes 'a', and side 2 copies 'a' to 'c'
                 # and 'd' and deletes 'a'.
                 if dsts1 & dsts2:
-                    for dst in (dsts1 & dsts2):
+                    for dst in dsts1 & dsts2:
                         copy[dst] = src
                 else:
                     diverge[src] = sorted(dsts1 | dsts2)
@@ -545,18 +564,20 @@
                 # copied on both sides
                 dsts1 = set(dsts1)
                 dsts2 = set(dsts2)
-                for dst in (dsts1 & dsts2):
+                for dst in dsts1 & dsts2:
                     copy[dst] = src
             # TODO: Handle cases where it was renamed on one side and copied
             # on the other side
         elif dsts1:
             # copied/renamed only on side 1
-            _checksinglesidecopies(src, dsts1, m1, m2, mb, c2, base,
-                                   copy, renamedelete)
+            _checksinglesidecopies(
+                src, dsts1, m1, m2, mb, c2, base, copy, renamedelete
+            )
         elif dsts2:
             # copied/renamed only on side 2
-            _checksinglesidecopies(src, dsts2, m2, m1, mb, c1, base,
-                                   copy, renamedelete)
+            _checksinglesidecopies(
+                src, dsts2, m2, m1, mb, c1, base, copy, renamedelete
+            )
 
     renamedeleteset = set()
     divergeset = set()
@@ -583,8 +604,10 @@
         return copy, {}, diverge, renamedelete, {}
 
     if repo.ui.debugflag:
-        repo.ui.debug("  all copies found (* = to merge, ! = divergent, "
-                      "% = renamed and deleted):\n")
+        repo.ui.debug(
+            "  all copies found (* = to merge, ! = divergent, "
+            "% = renamed and deleted):\n"
+        )
         for f in sorted(fullcopy):
             note = ""
             if f in copy:
@@ -593,8 +616,9 @@
                 note += "!"
             if f in renamedeleteset:
                 note += "%"
-            repo.ui.debug("   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f,
-                                                              note))
+            repo.ui.debug(
+                "   src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)
+            )
     del divergeset
 
     repo.ui.debug("  checking for directory renames\n")
@@ -635,8 +659,9 @@
     dirmove = {k + "/": v + "/" for k, v in dirmove.iteritems()}
 
     for d in dirmove:
-        repo.ui.debug("   discovered dir src: '%s' -> dst: '%s'\n" %
-                      (d, dirmove[d]))
+        repo.ui.debug(
+            "   discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])
+        )
 
     movewithdir = {}
     # check unaccounted nonoverlapping files against directory moves
@@ -645,15 +670,18 @@
             for d in dirmove:
                 if f.startswith(d):
                     # new file added in a directory that was moved, move it
-                    df = dirmove[d] + f[len(d):]
+                    df = dirmove[d] + f[len(d) :]
                     if df not in copy:
                         movewithdir[f] = df
-                        repo.ui.debug(("   pending file src: '%s' -> "
-                                       "dst: '%s'\n") % (f, df))
+                        repo.ui.debug(
+                            ("   pending file src: '%s' -> " "dst: '%s'\n")
+                            % (f, df)
+                        )
                     break
 
     return copy, movewithdir, diverge, renamedelete, dirmove
 
+
 def _heuristicscopytracing(repo, c1, c2, base):
     """ Fast copytracing using filename heuristics
 
@@ -690,8 +718,10 @@
     m1 = c1.manifest()
     if not repo.revs('%d::%d', base.rev(), c2.rev()):
         # If base is not in c2 branch, we switch to fullcopytracing
-        repo.ui.debug("switching to full copytracing as base is not "
-                      "an ancestor of c2\n")
+        repo.ui.debug(
+            "switching to full copytracing as base is not "
+            "an ancestor of c2\n"
+        )
         return _fullcopytracing(repo, c1, c2, base)
 
     ctx = c2
@@ -736,13 +766,18 @@
             f2 = c2.filectx(f)
             # we can have a lot of candidates, which can slow down the heuristics;
             # a config value limits the number of candidate moves to check
-            maxcandidates = repo.ui.configint('experimental',
-                                              'copytrace.movecandidateslimit')
+            maxcandidates = repo.ui.configint(
+                'experimental', 'copytrace.movecandidateslimit'
+            )
 
             if len(movecandidates) > maxcandidates:
-                repo.ui.status(_("skipping copytracing for '%s', more "
-                                 "candidates than the limit: %d\n")
-                               % (f, len(movecandidates)))
+                repo.ui.status(
+                    _(
+                        "skipping copytracing for '%s', more "
+                        "candidates than the limit: %d\n"
+                    )
+                    % (f, len(movecandidates))
+                )
                 continue
 
             for candidate in movecandidates:
@@ -755,6 +790,7 @@
 
     return copies, {}, {}, {}, {}
 
+
 def _related(f1, f2):
     """return True if f1 and f2 filectx have a common ancestor
 
@@ -765,7 +801,7 @@
     """
 
     if f1 == f2:
-        return True # a match
+        return True  # a match
 
     g1, g2 = f1.ancestors(), f2.ancestors()
     try:
@@ -782,11 +818,12 @@
                 f1 = next(g1)
             elif f2r > f1r:
                 f2 = next(g2)
-            else: # f1 and f2 point to files in the same linkrev
-                return f1 == f2 # true if they point to the same file
+            else:  # f1 and f2 point to files in the same linkrev
+                return f1 == f2  # true if they point to the same file
     except StopIteration:
         return False
 
+
 def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
     """reproduce copies from fromrev to rev in the dirstate
 
@@ -798,8 +835,9 @@
     exclude = {}
     ctraceconfig = repo.ui.config('experimental', 'copytrace')
     bctrace = stringutil.parsebool(ctraceconfig)
-    if (skiprev is not None and
-        (ctraceconfig == 'heuristics' or bctrace or bctrace is None)):
+    if skiprev is not None and (
+        ctraceconfig == 'heuristics' or bctrace or bctrace is None
+    ):
         # copytrace='off' skips this line, but not the entire function because
         # the line below is O(size of the repo) during a rebase, while the rest
         # of the function is much faster (and is required for carrying copy
@@ -811,6 +849,7 @@
         if dst in wctx:
             wctx[dst].markcopied(src)
 
+
 def computechangesetcopies(ctx):
     """return the copies data for a changeset
 
--- a/mercurial/crecord.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/crecord.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,49 +24,59 @@
     scmutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 stringio = util.stringio
 
 # patch comments based on the git one
-diffhelptext = _("""# To remove '-' lines, make them ' ' lines (context).
+diffhelptext = _(
+    """# To remove '-' lines, make them ' ' lines (context).
 # To remove '+' lines, delete them.
 # Lines starting with # will be removed from the patch.
-""")
+"""
+)
 
-hunkhelptext = _("""#
+hunkhelptext = _(
+    """#
 # If the patch applies cleanly, the edited hunk will immediately be
 # added to the record list. If it does not apply cleanly, a rejects file
 # will be generated. You can use that when you try again. If all lines
 # of the hunk are removed, then the edit is aborted and the hunk is left
 # unchanged.
-""")
+"""
+)
 
-patchhelptext = _("""#
+patchhelptext = _(
+    """#
 # If the patch applies cleanly, the edited patch will immediately
 # be finalised. If it does not apply cleanly, rejects files will be
 # generated. You can use those when you try again.
-""")
+"""
+)
 
 try:
     import curses
+
     curses.error
 except ImportError:
     # I have no idea if wcurses works with crecord...
     try:
         import wcurses as curses
+
         curses.error
     except ImportError:
         # wcurses is not shipped on Windows by default, or python is not
         # compiled with curses
         curses = False
 
+
 class fallbackerror(error.Abort):
     """Error that indicates the client should try to fallback to text mode."""
+
     # Inherits from error.Abort so that existing behavior is preserved if the
     # calling code does not know how to fall back.
 
+
 def checkcurses(ui):
     """Return True if the user wants to use curses
 
@@ -75,6 +85,7 @@
     """
     return curses and ui.interface("chunkselector") == "curses"
 
+
 class patchnode(object):
     """abstract class for patch graph nodes
     (i.e. patchroot, header, hunk, hunkline)
@@ -152,7 +163,7 @@
                 # else return grandparent's next sibling (or None)
                 return self.parentitem().parentitem().nextsibling()
 
-            except AttributeError: # parent and/or grandparent was None
+            except AttributeError:  # parent and/or grandparent was None
                 return None
 
     def previtem(self):
@@ -167,11 +178,11 @@
         prevsibling = self.prevsibling()
         if prevsibling is not None:
             prevsiblinglastchild = prevsibling.lastchild()
-            if ((prevsiblinglastchild is not None) and
-                not prevsibling.folded):
+            if (prevsiblinglastchild is not None) and not prevsibling.folded:
                 prevsiblinglclc = prevsiblinglastchild.lastchild()
-                if ((prevsiblinglclc is not None) and
-                    not prevsiblinglastchild.folded):
+                if (
+                    prevsiblinglclc is not None
+                ) and not prevsiblinglastchild.folded:
                     return prevsiblinglclc
                 else:
                     return prevsiblinglastchild
@@ -181,16 +192,19 @@
         # try parent (or None)
         return self.parentitem()
 
-class patch(patchnode, list): # todo: rename patchroot
+
+class patch(patchnode, list):  # todo: rename patchroot
     """
     list of header objects representing the patch.
     """
+
     def __init__(self, headerlist):
         self.extend(headerlist)
         # add parent patch object reference to each header
         for header in self:
             header.patch = self
 
+
 class uiheader(patchnode):
     """patch header
 
@@ -266,8 +280,10 @@
     def __getattr__(self, name):
         return getattr(self.nonuiheader, name)
 
+
 class uihunkline(patchnode):
     "represents a changed line in a hunk"
+
     def __init__(self, linetext, hunk):
         self.linetext = linetext
         self.applied = True
@@ -284,7 +300,7 @@
         numlinesinhunk = len(self.hunk.changedlines)
         indexofthisline = self.hunk.changedlines.index(self)
 
-        if (indexofthisline < numlinesinhunk - 1):
+        if indexofthisline < numlinesinhunk - 1:
             nextline = self.hunk.changedlines[indexofthisline + 1]
             return nextline
         else:
@@ -312,8 +328,10 @@
         # hunk-lines don't have children
         return None
 
+
 class uihunk(patchnode):
     """ui patch hunk, wraps a hunk and keep track of ui behavior """
+
     maxcontext = 3
 
     def __init__(self, hunk, header):
@@ -335,7 +353,7 @@
         numhunksinheader = len(self.header.hunks)
         indexofthishunk = self.header.hunks.index(self)
 
-        if (indexofthishunk < numhunksinheader - 1):
+        if indexofthishunk < numhunksinheader - 1:
             nexthunk = self.header.hunks[indexofthishunk + 1]
             return nexthunk
         else:
@@ -373,18 +391,29 @@
 
     def countchanges(self):
         """changedlines -> (n+,n-)"""
-        add = len([l for l in self.changedlines if l.applied
-                    and l.prettystr().startswith('+')])
-        rem = len([l for l in self.changedlines if l.applied
-                    and l.prettystr().startswith('-')])
+        add = len(
+            [
+                l
+                for l in self.changedlines
+                if l.applied and l.prettystr().startswith('+')
+            ]
+        )
+        rem = len(
+            [
+                l
+                for l in self.changedlines
+                if l.applied and l.prettystr().startswith('-')
+            ]
+        )
         return add, rem
 
     def getfromtoline(self):
         # calculate the number of removed lines converted to context lines
         removedconvertedtocontext = self.originalremoved - self.removed
 
-        contextlen = (len(self.before) + len(self.after) +
-                      removedconvertedtocontext)
+        contextlen = (
+            len(self.before) + len(self.after) + removedconvertedtocontext
+        )
         if self.after and self.after[-1] == '\\ No newline at end of file\n':
             contextlen -= 1
         fromlen = contextlen + self.removed
@@ -404,8 +433,12 @@
                 toline -= 1
 
         fromtoline = '@@ -%d,%d +%d,%d @@%s\n' % (
-            fromline, fromlen, toline, tolen,
-            self.proc and (' ' + self.proc))
+            fromline,
+            fromlen,
+            toline,
+            tolen,
+            self.proc and (' ' + self.proc),
+        )
         return fromtoline
 
     def write(self, fp):
@@ -477,8 +510,9 @@
                 adds.append(text[1:])
         hunk = ['-%s' % l for l in dels] + ['+%s' % l for l in adds]
         h = self._hunk
-        return patchmod.recordhunk(h.header, h.toline, h.fromline, h.proc,
-                                   h.before, hunk, h.after)
+        return patchmod.recordhunk(
+            h.header, h.toline, h.fromline, h.proc, h.before, hunk, h.after
+        )
 
     def __getattr__(self, name):
         return getattr(self._hunk, name)
@@ -486,6 +520,7 @@
     def __repr__(self):
         return r'<hunk %r@%d>' % (self.filename(), self.fromline)
 
+
 def filterpatch(ui, chunks, chunkselector, operation=None):
     """interactively filter patch chunks into applied-only chunks"""
     chunks = list(chunks)
@@ -502,8 +537,9 @@
     ret = chunkselector(ui, uiheaders, operation=operation)
     appliedhunklist = []
     for hdr in uiheaders:
-        if (hdr.applied and
-            (hdr.special() or len([h for h in hdr.hunks if h.applied]) > 0)):
+        if hdr.applied and (
+            hdr.special() or len([h for h in hdr.hunks if h.applied]) > 0
+        ):
             appliedhunklist.append(hdr)
             fixoffset = 0
             for hnk in hdr.hunks:
@@ -512,13 +548,14 @@
                     # adjust the 'to'-line offset of the hunk to be correct
                     # after de-activating some of the other hunks for this file
                     if fixoffset:
-                        #hnk = copy.copy(hnk) # necessary??
+                        # hnk = copy.copy(hnk) # necessary??
                         hnk.toline += fixoffset
                 else:
                     fixoffset += hnk.removed - hnk.added
 
     return (appliedhunklist, ret)
 
+
 def chunkselector(ui, headerlist, operation=None):
     """
     curses interface to get selection of chunks, and mark the applied flags
@@ -542,11 +579,14 @@
             signal.signal(signal.SIGTSTP, origsigtstp)
     return chunkselector.opts
 
+
 def testdecorator(testfn, f):
     def u(*args, **kwargs):
         return f(testfn, *args, **kwargs)
+
     return u
 
+
 def testchunkselector(testfn, ui, headerlist, operation=None):
     """
     test interface to get selection of chunks, and mark the applied flags
@@ -557,6 +597,7 @@
     class dummystdscr(object):
         def clear(self):
             pass
+
         def refresh(self):
             pass
 
@@ -570,13 +611,15 @@
                 break
     return chunkselector.opts
 
-_headermessages = { # {operation: text}
+
+_headermessages = {  # {operation: text}
     'apply': _('Select hunks to apply'),
     'discard': _('Select hunks to discard'),
     'keep': _('Select hunks to keep'),
     None: _('Select hunks to record'),
 }
 
+
 class curseschunkselector(object):
     def __init__(self, headerlist, ui, operation=None):
         # put the headers into a patch object
@@ -691,7 +734,7 @@
         a hunk is currently selected, then select the next hunk, if one exists,
         or if not, the next header if one exists.
         """
-        #self.startprintline += 1 #debug
+        # self.startprintline += 1 #debug
         currentitem = self.currentselecteditem
 
         nextitem = currentitem.nextitem()
@@ -872,19 +915,20 @@
             nosiblingsapplied = not (True in siblingappliedstatus)
 
             siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
-            somesiblingspartial = (True in siblingspartialstatus)
+            somesiblingspartial = True in siblingspartialstatus
 
-            #cases where applied or partial should be removed from header
+            # cases where applied or partial should be removed from header
 
             # if no 'sibling' hunks are applied (including this hunk)
             if nosiblingsapplied:
                 if not item.header.special():
                     item.header.applied = False
                     item.header.partial = False
-            else: # some/all parent siblings are applied
+            else:  # some/all parent siblings are applied
                 item.header.applied = True
-                item.header.partial = (somesiblingspartial or
-                                        not allsiblingsapplied)
+                item.header.partial = (
+                    somesiblingspartial or not allsiblingsapplied
+                )
 
         elif isinstance(item, uihunkline):
             siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
@@ -898,18 +942,20 @@
             elif allsiblingsapplied:
                 item.hunk.applied = True
                 item.hunk.partial = False
-            else: # some siblings applied
+            else:  # some siblings applied
                 item.hunk.applied = True
                 item.hunk.partial = True
 
-            parentsiblingsapplied = [hnk.applied for hnk
-                                     in item.hunk.header.hunks]
+            parentsiblingsapplied = [
+                hnk.applied for hnk in item.hunk.header.hunks
+            ]
             noparentsiblingsapplied = not (True in parentsiblingsapplied)
             allparentsiblingsapplied = not (False in parentsiblingsapplied)
 
-            parentsiblingspartial = [hnk.partial for hnk
-                                     in item.hunk.header.hunks]
-            someparentsiblingspartial = (True in parentsiblingspartial)
+            parentsiblingspartial = [
+                hnk.partial for hnk in item.hunk.header.hunks
+            ]
+            someparentsiblingspartial = True in parentsiblingspartial
 
             # if all parent hunks are not applied, un-apply header
             if noparentsiblingsapplied:
@@ -917,14 +963,15 @@
                     item.hunk.header.applied = False
                     item.hunk.header.partial = False
             # set the applied and partial status of the header if needed
-            else: # some/all parent siblings are applied
+            else:  # some/all parent siblings are applied
                 item.hunk.header.applied = True
-                item.hunk.header.partial = (someparentsiblingspartial or
-                                            not allparentsiblingsapplied)
+                item.hunk.header.partial = (
+                    someparentsiblingspartial or not allparentsiblingsapplied
+                )
 
     def toggleall(self):
         "toggle the applied flag of all items."
-        if self.waslasttoggleallapplied: # then unapply them this time
+        if self.waslasttoggleallapplied:  # then unapply them this time
             for item in self.headerlist:
                 if item.applied:
                     self.toggleapply(item)
@@ -936,8 +983,10 @@
 
     def toggleallbetween(self):
         "toggle applied on or off for all items in range [lastapplied,current]."
-        if (not self.lastapplieditem or
-            self.currentselecteditem == self.lastapplieditem):
+        if (
+            not self.lastapplieditem
+            or self.currentselecteditem == self.lastapplieditem
+        ):
             # Treat this like a normal 'x'/' '
             self.toggleapply()
             return
@@ -985,7 +1034,7 @@
                 item.neverunfolded = False
 
             # also fold any foldable children of the parent/current item
-            if isinstance(item, uiheader): # the original or 'new' item
+            if isinstance(item, uiheader):  # the original or 'new' item
                 for child in item.allchildren():
                     child.folded = not item.folded
 
@@ -1004,11 +1053,22 @@
         # turn tabs into spaces
         instr = instr.expandtabs(4)
         strwidth = encoding.colwidth(instr)
-        numspaces = (width - ((strwidth + xstart) % width))
+        numspaces = width - ((strwidth + xstart) % width)
         return instr + " " * numspaces
 
-    def printstring(self, window, text, fgcolor=None, bgcolor=None, pair=None,
-        pairname=None, attrlist=None, towin=True, align=True, showwhtspc=False):
+    def printstring(
+        self,
+        window,
+        text,
+        fgcolor=None,
+        bgcolor=None,
+        pair=None,
+        pairname=None,
+        attrlist=None,
+        towin=True,
+        align=True,
+        showwhtspc=False,
+    ):
         """
         print the string, text, with the specified colors and attributes, to
         the specified curses window object.
@@ -1030,8 +1090,11 @@
         # preprocess the text, converting tabs to spaces
         text = text.expandtabs(4)
         # strip \n, and convert control characters to ^[char] representation
-        text = re.sub(br'[\x00-\x08\x0a-\x1f]',
-                lambda m:'^' + chr(ord(m.group()) + 64), text.strip('\n'))
+        text = re.sub(
+            br'[\x00-\x08\x0a-\x1f]',
+            lambda m: '^' + chr(ord(m.group()) + 64),
+            text.strip('\n'),
+        )
 
         if pair is not None:
             colorpair = pair
@@ -1060,11 +1123,11 @@
                     colorpair |= textattr
 
         y, xstart = self.chunkpad.getyx()
-        t = "" # variable for counting lines printed
+        t = ""  # variable for counting lines printed
         # if requested, show trailing whitespace
         if showwhtspc:
             origlen = len(text)
-            text = text.rstrip(' \n') # tabs have already been expanded
+            text = text.rstrip(' \n')  # tabs have already been expanded
             strippedlen = len(text)
             numtrailingspaces = origlen - strippedlen
 
@@ -1073,11 +1136,11 @@
         t += text
 
         if showwhtspc:
-                wscolorpair = colorpair | curses.A_REVERSE
-                if towin:
-                    for i in range(numtrailingspaces):
-                        window.addch(curses.ACS_CKBOARD, wscolorpair)
-                t += " " * numtrailingspaces
+            wscolorpair = colorpair | curses.A_REVERSE
+            if towin:
+                for i in range(numtrailingspaces):
+                    window.addch(curses.ACS_CKBOARD, wscolorpair)
+            t += " " * numtrailingspaces
 
         if align:
             if towin:
@@ -1102,8 +1165,10 @@
         # Format the selected label into a place as long as the longer of the
         # two possible labels.  This may vary by language.
         spacelen = max(len(spaceselect), len(spacedeselect))
-        selectedlabel = '%-*s' % (spacelen,
-                                  spacedeselect if selected else spaceselect)
+        selectedlabel = '%-*s' % (
+            spacelen,
+            spacedeselect if selected else spaceselect,
+        )
         segments = [
             _headermessages[self.operation],
             '-',
@@ -1160,10 +1225,14 @@
         try:
             self.printitem()
             self.updatescroll()
-            self.chunkpad.refresh(self.firstlineofpadtoprint, 0,
-                                  self.numstatuslines, 0,
-                                  self.yscreensize - self.numstatuslines,
-                                  self.xscreensize)
+            self.chunkpad.refresh(
+                self.firstlineofpadtoprint,
+                0,
+                self.numstatuslines,
+                0,
+                self.yscreensize - self.numstatuslines,
+                self.xscreensize,
+            )
         except curses.error:
             pass
 
@@ -1195,13 +1264,14 @@
                 if isinstance(item, uiheader):
                     # add two more spaces for headers
                     checkbox += "  "
-        except AttributeError: # not foldable
+        except AttributeError:  # not foldable
             checkbox += "  "
 
         return checkbox
 
-    def printheader(self, header, selected=False, towin=True,
-                    ignorefolding=False):
+    def printheader(
+        self, header, selected=False, towin=True, ignorefolding=False
+    ):
         """
         print the header to the pad.  if towin is False, don't print
         anything, but just count the number of lines which would be printed.
@@ -1213,11 +1283,13 @@
 
         if chunkindex != 0 and not header.folded:
             # add separating line before headers
-            outstr += self.printstring(self.chunkpad, '_' * self.xscreensize,
-                                       towin=towin, align=False)
+            outstr += self.printstring(
+                self.chunkpad, '_' * self.xscreensize, towin=towin, align=False
+            )
         # select color-pair based on if the header is selected
-        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
-                                      attrlist=[curses.A_BOLD])
+        colorpair = self.getcolorpair(
+            name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]
+        )
 
         # print out each line of the chunk, expanding it to screen width
 
@@ -1229,19 +1301,22 @@
             linestr = checkbox + textlist[0]
         else:
             linestr = checkbox + header.filename()
-        outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
-                                   towin=towin)
+        outstr += self.printstring(
+            self.chunkpad, linestr, pair=colorpair, towin=towin
+        )
         if not header.folded or ignorefolding:
             if len(textlist) > 1:
                 for line in textlist[1:]:
-                    linestr = " "*(indentnumchars + len(checkbox)) + line
-                    outstr += self.printstring(self.chunkpad, linestr,
-                                               pair=colorpair, towin=towin)
+                    linestr = " " * (indentnumchars + len(checkbox)) + line
+                    outstr += self.printstring(
+                        self.chunkpad, linestr, pair=colorpair, towin=towin
+                    )
 
         return outstr
 
-    def printhunklinesbefore(self, hunk, selected=False, towin=True,
-                             ignorefolding=False):
+    def printhunklinesbefore(
+        self, hunk, selected=False, towin=True, ignorefolding=False
+    ):
         "includes start/end line indicator"
         outstr = ""
         # where hunk is in list of siblings
@@ -1249,22 +1324,26 @@
 
         if hunkindex != 0:
             # add separating line before headers
-            outstr += self.printstring(self.chunkpad, ' '*self.xscreensize,
-                                       towin=towin, align=False)
+            outstr += self.printstring(
+                self.chunkpad, ' ' * self.xscreensize, towin=towin, align=False
+            )
 
-        colorpair = self.getcolorpair(name=selected and "selected" or "normal",
-                                      attrlist=[curses.A_BOLD])
+        colorpair = self.getcolorpair(
+            name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]
+        )
 
         # print out from-to line with checkbox
         checkbox = self.getstatusprefixstring(hunk)
 
-        lineprefix = " "*self.hunkindentnumchars + checkbox
+        lineprefix = " " * self.hunkindentnumchars + checkbox
         frtoline = "   " + hunk.getfromtoline().strip("\n")
 
-        outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
-                                   align=False) # add uncolored checkbox/indent
-        outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair,
-                                   towin=towin)
+        outstr += self.printstring(
+            self.chunkpad, lineprefix, towin=towin, align=False
+        )  # add uncolored checkbox/indent
+        outstr += self.printstring(
+            self.chunkpad, frtoline, pair=colorpair, towin=towin
+        )
 
         if hunk.folded and not ignorefolding:
             # skip remainder of output
@@ -1272,7 +1351,7 @@
 
         # print out lines of the chunk preceding changed-lines
         for line in hunk.before:
-            linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
+            linestr = " " * (self.hunklineindentnumchars + len(checkbox)) + line
             outstr += self.printstring(self.chunkpad, linestr, towin=towin)
 
         return outstr
@@ -1285,7 +1364,7 @@
         # a bit superfluous, but to avoid hard-coding indent amount
         checkbox = self.getstatusprefixstring(hunk)
         for line in hunk.after:
-            linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line
+            linestr = " " * (self.hunklineindentnumchars + len(checkbox)) + line
             outstr += self.printstring(self.chunkpad, linestr, towin=towin)
 
         return outstr
@@ -1306,15 +1385,18 @@
         elif linestr.startswith("\\"):
             colorpair = self.getcolorpair(name="normal")
 
-        lineprefix = " "*self.hunklineindentnumchars + checkbox
-        outstr += self.printstring(self.chunkpad, lineprefix, towin=towin,
-                                   align=False) # add uncolored checkbox/indent
-        outstr += self.printstring(self.chunkpad, linestr, pair=colorpair,
-                                   towin=towin, showwhtspc=True)
+        lineprefix = " " * self.hunklineindentnumchars + checkbox
+        outstr += self.printstring(
+            self.chunkpad, lineprefix, towin=towin, align=False
+        )  # add uncolored checkbox/indent
+        outstr += self.printstring(
+            self.chunkpad, linestr, pair=colorpair, towin=towin, showwhtspc=True
+        )
         return outstr
 
-    def printitem(self, item=None, ignorefolding=False, recursechildren=True,
-                  towin=True):
+    def printitem(
+        self, item=None, ignorefolding=False, recursechildren=True, towin=True
+    ):
         """
         use __printitem() to print the specified item.
         if item is not specified, then print the entire patch.
@@ -1327,12 +1409,13 @@
             self.linesprintedtopadsofar = 0
 
         outstr = []
-        self.__printitem(item, ignorefolding, recursechildren, outstr,
-                                  towin=towin)
+        self.__printitem(
+            item, ignorefolding, recursechildren, outstr, towin=towin
+        )
         return ''.join(outstr)
 
     def outofdisplayedarea(self):
-        y, _ = self.chunkpad.getyx() # cursor location
+        y, _ = self.chunkpad.getyx()  # cursor location
         # * 2 here works but an optimization would be the max number of
         # consecutive non-selectable lines
         # i.e. the max number of context lines for any hunk in the patch
@@ -1341,18 +1424,21 @@
         return y < miny or y > maxy
 
     def handleselection(self, item, recursechildren):
-        selected = (item is self.currentselecteditem)
+        selected = item is self.currentselecteditem
         if selected and recursechildren:
             # assumes line numbering starting from line 0
             self.selecteditemstartline = self.linesprintedtopadsofar
-            selecteditemlines = self.getnumlinesdisplayed(item,
-                                                          recursechildren=False)
-            self.selecteditemendline = (self.selecteditemstartline +
-                                        selecteditemlines - 1)
+            selecteditemlines = self.getnumlinesdisplayed(
+                item, recursechildren=False
+            )
+            self.selecteditemendline = (
+                self.selecteditemstartline + selecteditemlines - 1
+            )
         return selected
 
-    def __printitem(self, item, ignorefolding, recursechildren, outstr,
-                    towin=True):
+    def __printitem(
+        self, item, ignorefolding, recursechildren, outstr, towin=True
+    ):
         """
         recursive method for printing out patch/header/hunk/hunk-line data to
         screen.  also returns a string with all of the content of the displayed
@@ -1373,36 +1459,52 @@
         if isinstance(item, patch):
             if recursechildren:
                 for hdr in item:
-                    self.__printitem(hdr, ignorefolding,
-                            recursechildren, outstr, towin)
+                    self.__printitem(
+                        hdr, ignorefolding, recursechildren, outstr, towin
+                    )
         # todo: eliminate all isinstance() calls
         if isinstance(item, uiheader):
-            outstr.append(self.printheader(item, selected, towin=towin,
-                                       ignorefolding=ignorefolding))
+            outstr.append(
+                self.printheader(
+                    item, selected, towin=towin, ignorefolding=ignorefolding
+                )
+            )
             if recursechildren:
                 for hnk in item.hunks:
-                    self.__printitem(hnk, ignorefolding,
-                            recursechildren, outstr, towin)
-        elif (isinstance(item, uihunk) and
-              ((not item.header.folded) or ignorefolding)):
+                    self.__printitem(
+                        hnk, ignorefolding, recursechildren, outstr, towin
+                    )
+        elif isinstance(item, uihunk) and (
+            (not item.header.folded) or ignorefolding
+        ):
             # print the hunk data which comes before the changed-lines
-            outstr.append(self.printhunklinesbefore(item, selected, towin=towin,
-                                                ignorefolding=ignorefolding))
+            outstr.append(
+                self.printhunklinesbefore(
+                    item, selected, towin=towin, ignorefolding=ignorefolding
+                )
+            )
             if recursechildren:
                 for l in item.changedlines:
-                    self.__printitem(l, ignorefolding,
-                            recursechildren, outstr, towin)
-                outstr.append(self.printhunklinesafter(item, towin=towin,
-                                                ignorefolding=ignorefolding))
-        elif (isinstance(item, uihunkline) and
-              ((not item.hunk.folded) or ignorefolding)):
-            outstr.append(self.printhunkchangedline(item, selected,
-                towin=towin))
+                    self.__printitem(
+                        l, ignorefolding, recursechildren, outstr, towin
+                    )
+                outstr.append(
+                    self.printhunklinesafter(
+                        item, towin=towin, ignorefolding=ignorefolding
+                    )
+                )
+        elif isinstance(item, uihunkline) and (
+            (not item.hunk.folded) or ignorefolding
+        ):
+            outstr.append(
+                self.printhunkchangedline(item, selected, towin=towin)
+            )
 
         return outstr
 
-    def getnumlinesdisplayed(self, item=None, ignorefolding=False,
-                             recursechildren=True):
+    def getnumlinesdisplayed(
+        self, item=None, ignorefolding=False, recursechildren=True
+    ):
         """
         return the number of lines which would be displayed if the item were
         to be printed to the display.  the item will not be printed to the
@@ -1413,8 +1515,9 @@
         """
 
         # temporarily disable printing to windows by printstring
-        patchdisplaystring = self.printitem(item, ignorefolding,
-                                            recursechildren, towin=False)
+        patchdisplaystring = self.printitem(
+            item, ignorefolding, recursechildren, towin=False
+        )
         numlines = len(patchdisplaystring) // self.xscreensize
         return numlines
 
@@ -1429,8 +1532,9 @@
         except curses.error:
             pass
 
-    def getcolorpair(self, fgcolor=None, bgcolor=None, name=None,
-                     attrlist=None):
+    def getcolorpair(
+        self, fgcolor=None, bgcolor=None, name=None, attrlist=None
+    ):
         """
         get a curses color pair, adding it to self.colorpairs if it is not
         already defined.  an optional string, name, can be passed as a shortcut
@@ -1460,8 +1564,9 @@
                 pairindex = len(self.colorpairs) + 1
                 if self.usecolor:
                     curses.init_pair(pairindex, fgcolor, bgcolor)
-                    colorpair = self.colorpairs[(fgcolor, bgcolor)] = (
-                        curses.color_pair(pairindex))
+                    colorpair = self.colorpairs[
+                        (fgcolor, bgcolor)
+                    ] = curses.color_pair(pairindex)
                     if name is not None:
                         self.colorpairnames[name] = curses.color_pair(pairindex)
                 else:
@@ -1521,12 +1626,14 @@
                       c : confirm selected changes
                       r : review/edit and confirm selected changes
                       q : quit without confirming (no changes will be made)
-                      ? : help (what you're currently reading)""")
+                      ? : help (what you're currently reading)"""
+        )
 
         helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
         helplines = helptext.split("\n")
-        helplines = helplines + [" "]*(
-            self.yscreensize - self.numstatuslines - len(helplines) - 1)
+        helplines = helplines + [" "] * (
+            self.yscreensize - self.numstatuslines - len(helplines) - 1
+        )
         try:
             for line in helplines:
                 self.printstring(helpwin, line, pairname="legend")
@@ -1548,7 +1655,7 @@
         self.commenttext = self.ui.edit(self.commenttext, self.ui.username())
         curses.cbreak()
         self.stdscr.refresh()
-        self.stdscr.keypad(1) # allow arrow-keys to continue to function
+        self.stdscr.keypad(1)  # allow arrow-keys to continue to function
 
     def handlefirstlineevent(self):
         """
@@ -1613,7 +1720,7 @@
         """ask for 'y' to be pressed to confirm selected. return True if
         confirmed."""
         confirmtext = _(
-"""If you answer yes to the following, your currently chosen patch chunks
+            """If you answer yes to the following, your currently chosen patch chunks
 will be loaded into an editor. To modify the patch, make the changes in your
 editor and save. To accept the current patch as-is, close the editor without
 saving.
@@ -1622,7 +1729,8 @@
       failing to follow this rule will result in the commit aborting.
 
 are you sure you want to review/edit and confirm the selected changes [yn]?
-""")
+"""
+        )
         with self.ui.timeblockedsection('crecord'):
             response = self.confirmationwindow(confirmtext)
         if response is None:
@@ -1642,15 +1750,19 @@
 
         if opts.get('amend') is None:
             opts['amend'] = True
-            msg = _("Amend option is turned on -- committing the currently "
-                    "selected changes will not create a new changeset, but "
-                    "instead update the most recently committed changeset.\n\n"
-                    "Press any key to continue.")
+            msg = _(
+                "Amend option is turned on -- committing the currently "
+                "selected changes will not create a new changeset, but "
+                "instead update the most recently committed changeset.\n\n"
+                "Press any key to continue."
+            )
         elif opts.get('amend') is True:
             opts['amend'] = None
-            msg = _("Amend option is turned off -- committing the currently "
-                    "selected changes will create a new changeset.\n\n"
-                    "Press any key to continue.")
+            msg = _(
+                "Amend option is turned off -- committing the currently "
+                "selected changes will create a new changeset.\n\n"
+                "Press any key to continue."
+            )
         if not test:
             self.confirmationwindow(msg)
 
@@ -1668,6 +1780,7 @@
         """
         edit the currently selected chunk
         """
+
         def updateui(self):
             self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
             self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
@@ -1703,8 +1816,11 @@
                 self.stdscr.refresh()
 
             # remove comment lines
-            patch = [line + '\n' for line in patch.splitlines()
-                     if not line.startswith('#')]
+            patch = [
+                line + '\n'
+                for line in patch.splitlines()
+                if not line.startswith('#')
+            ]
             return patchmod.parsepatch(patch)
 
         if item is None:
@@ -1728,7 +1844,7 @@
         header = item.header
         editedhunkindex = header.hunks.index(item)
         hunksbefore = header.hunks[:editedhunkindex]
-        hunksafter = header.hunks[editedhunkindex + 1:]
+        hunksafter = header.hunks[editedhunkindex + 1 :]
         newpatchheader = newpatches[0]
         newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
         newadded = sum([h.added for h in newhunks])
@@ -1831,8 +1947,7 @@
 
         origsigwinch = sentinel = object()
         if util.safehasattr(signal, 'SIGWINCH'):
-            origsigwinch = signal.signal(signal.SIGWINCH,
-                                         self.sigwinchhandler)
+            origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler)
         try:
             return self._main(stdscr)
         finally:
@@ -1867,14 +1982,15 @@
         # available colors: black, blue, cyan, green, magenta, white, yellow
         # init_pair(color_id, foreground_color, background_color)
         self.initcolorpair(None, None, name="normal")
-        self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_MAGENTA,
-                           name="selected")
+        self.initcolorpair(
+            curses.COLOR_WHITE, curses.COLOR_MAGENTA, name="selected"
+        )
         self.initcolorpair(curses.COLOR_RED, None, name="deletion")
         self.initcolorpair(curses.COLOR_GREEN, None, name="addition")
         self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend")
         # newwin([height, width,] begin_y, begin_x)
         self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
-        self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
+        self.statuswin.keypad(1)  # interpret arrow-key, etc. esc sequences
 
         # figure out how much space to allocate for the chunk-pad which is
         # used for displaying the patch
@@ -1889,11 +2005,13 @@
             self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
         except curses.error:
             self.initexc = fallbackerror(
-                _('this diff is too large to be displayed'))
+                _('this diff is too large to be displayed')
+            )
             return
         # initialize selecteditemendline (initial start-line is 0)
         self.selecteditemendline = self.getnumlinesdisplayed(
-            self.currentselecteditem, recursechildren=False)
+            self.currentselecteditem, recursechildren=False
+        )
 
         while True:
             self.updatescreen()
@@ -1909,7 +2027,8 @@
                 break
 
         if self.commenttext != "":
-            whitespaceremoved = re.sub(br"(?m)^\s.*(\n|$)", b"",
-                                       self.commenttext)
+            whitespaceremoved = re.sub(
+                br"(?m)^\s.*(\n|$)", b"", self.commenttext
+            )
             if whitespaceremoved != "":
                 self.opts['message'] = self.commenttext
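
None of the crecord.py hunks change behavior; black verifies this itself
by re-parsing its output and comparing syntax trees. The same property
can be spot-checked with the stdlib; a small illustrative sketch, not
part of the commit:

    import ast

    # Redundant parentheses (and comment spacing) never appear in the
    # AST, so the reformatted line parses to an identical tree.
    before = "selected = (item is self.currentselecteditem)\n"
    after = "selected = item is self.currentselecteditem\n"
    assert ast.dump(ast.parse(before)) == ast.dump(ast.parse(after))
    print("layout-only change")
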
--- a/mercurial/dagop.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/dagop.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,12 +9,8 @@
 
 import heapq
 
-from .node import (
-    nullrev,
-)
-from .thirdparty import (
-    attr,
-)
+from .node import nullrev
+from .thirdparty import attr
 from . import (
     error,
     mdiff,
@@ -30,6 +26,7 @@
 # possible maximum depth between null and wdir()
 maxlogdepth = 0x80000000
 
+
 def _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse):
     """Walk DAG using 'pfunc' from the given 'revs' nodes
 
@@ -72,7 +69,7 @@
                 heapq.heappush(pendingheap, (heapsign * inputrev, 0))
         # rescan parents until curdepth >= startdepth because queued entries
         # of the same revision are iterated from the lowest depth
-        foundnew = (currev != lastrev)
+        foundnew = currev != lastrev
         if foundnew and curdepth >= startdepth:
             lastrev = currev
             yield currev
@@ -82,6 +79,7 @@
                 if prev != node.nullrev:
                     heapq.heappush(pendingheap, (heapsign * prev, pdepth))
 
+
 def filectxancestors(fctxs, followfirst=False):
     """Like filectx.ancestors(), but can walk from multiple files/revisions,
     and includes the given fctxs themselves
@@ -90,6 +88,7 @@
     """
     visit = {}
     visitheap = []
+
     def addvisit(fctx):
         rev = fctx.rev()
         if rev not in visit:
@@ -105,7 +104,7 @@
     for c in fctxs:
         addvisit(c)
     while visit:
-        currev = -heapq.heappop(visitheap)
+        currev = -(heapq.heappop(visitheap))
         curfctxs = visit.pop(currev)
         yield currev, curfctxs
         for c in curfctxs:
@@ -113,6 +112,7 @@
                 addvisit(parent)
     assert not visitheap
 
+
 def filerevancestors(fctxs, followfirst=False):
     """Like filectx.ancestors(), but can walk from multiple files/revisions,
     and includes the given fctxs themselves
@@ -122,17 +122,20 @@
     gen = (rev for rev, _cs in filectxancestors(fctxs, followfirst))
     return generatorset(gen, iterasc=False)
 
+
 def _genrevancestors(repo, revs, followfirst, startdepth, stopdepth, cutfunc):
     if followfirst:
         cut = 1
     else:
         cut = None
     cl = repo.changelog
+
     def plainpfunc(rev):
         try:
             return cl.parentrevs(rev)[:cut]
         except error.WdirUnsupported:
             return (pctx.rev() for pctx in repo[rev].parents()[:cut])
+
     if cutfunc is None:
         pfunc = plainpfunc
     else:
@@ -140,8 +143,10 @@
         revs = revs.filter(lambda rev: not cutfunc(rev))
     return _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse=True)
 
-def revancestors(repo, revs, followfirst=False, startdepth=None,
-                 stopdepth=None, cutfunc=None):
+
+def revancestors(
+    repo, revs, followfirst=False, startdepth=None, stopdepth=None, cutfunc=None
+):
     r"""Like revlog.ancestors(), but supports additional options, includes
     the given revs themselves, and returns a smartset
 
@@ -163,10 +168,12 @@
         |/
         A
     """
-    gen = _genrevancestors(repo, revs, followfirst, startdepth, stopdepth,
-                           cutfunc)
+    gen = _genrevancestors(
+        repo, revs, followfirst, startdepth, stopdepth, cutfunc
+    )
     return generatorset(gen, iterasc=False)
 
+
 def _genrevdescendants(repo, revs, followfirst):
     if followfirst:
         cut = 1
@@ -194,6 +201,7 @@
                     yield i
                     break
 
+
 def _builddescendantsmap(repo, startrev, followfirst):
     """Build map of 'rev -> child revs', offset from startrev"""
     cl = repo.changelog
@@ -207,13 +215,17 @@
             descmap[p2rev - startrev].append(currev)
     return descmap
 
+
 def _genrevdescendantsofdepth(repo, revs, followfirst, startdepth, stopdepth):
     startrev = revs.min()
     descmap = _builddescendantsmap(repo, startrev, followfirst)
+
     def pfunc(rev):
         return descmap[rev - startrev]
+
     return _walkrevtree(pfunc, revs, startdepth, stopdepth, reverse=False)
 
+
 def revdescendants(repo, revs, followfirst, startdepth=None, stopdepth=None):
     """Like revlog.descendants() but supports additional options, includes
     the given revs themselves, and returns a smartset
@@ -224,10 +236,12 @@
     if startdepth is None and (stopdepth is None or stopdepth >= maxlogdepth):
         gen = _genrevdescendants(repo, revs, followfirst)
     else:
-        gen = _genrevdescendantsofdepth(repo, revs, followfirst,
-                                        startdepth, stopdepth)
+        gen = _genrevdescendantsofdepth(
+            repo, revs, followfirst, startdepth, stopdepth
+        )
     return generatorset(gen, iterasc=True)
 
+
 def descendantrevs(revs, revsfn, parentrevsfn):
     """Generate revision number descendants in revision order.
 
@@ -259,6 +273,7 @@
                 yield rev
                 break
 
+
 def _reachablerootspure(pfunc, minroot, roots, heads, includepath):
     """See revlog.reachableroots"""
     if not roots:
@@ -294,6 +309,7 @@
                 reached(rev)
     return reachable
 
+
 def reachableroots(repo, roots, heads, includepath=False):
     """See revlog.reachableroots"""
     if not roots:
@@ -306,6 +322,7 @@
     revs.sort()
     return revs
 
+
 def _changesrange(fctx1, fctx2, linerange2, diffopts):
     """Return `(diffinrange, linerange1)` where `diffinrange` is True
     if diff from fctx2 to fctx1 has changes in linerange2 and
@@ -316,6 +333,7 @@
     diffinrange = any(stype == '!' for _, stype in filteredblocks)
     return diffinrange, linerange1
 
+
 def blockancestors(fctx, fromline, toline, followfirst=False):
     """Yield ancestors of `fctx` with respect to the block of lines within
     `fromline`-`toline` range.
@@ -349,6 +367,7 @@
         if inrange:
             yield c, linerange2
 
+
 def blockdescendants(fctx, fromline, toline):
     """Yield descendants of `fctx` with respect to the block of lines within
     `fromline`-`toline` range.
@@ -388,6 +407,7 @@
         if inrange:
             yield c, linerange1
 
+
 @attr.s(slots=True, frozen=True)
 class annotateline(object):
     fctx = attr.ib()
@@ -396,6 +416,7 @@
     skip = attr.ib(default=False)
     text = attr.ib(default=None)
 
+
 @attr.s(slots=True, frozen=True)
 class _annotatedfile(object):
     # list indexed by lineno - 1
@@ -405,16 +426,19 @@
     # full file content
     text = attr.ib()
 
+
 def _countlines(text):
     if text.endswith("\n"):
         return text.count("\n")
     return text.count("\n") + int(bool(text))
 
+
 def _decoratelines(text, fctx):
     n = _countlines(text)
     linenos = pycompat.rangelist(1, n + 1)
     return _annotatedfile([fctx] * n, linenos, [False] * n, text)
 
+
 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
     r'''
     Given parent and child fctxes and annotate data for parents, for all lines
@@ -426,8 +450,10 @@
 
     See test-annotate.py for unit tests.
     '''
-    pblocks = [(parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
-               for parent in parents]
+    pblocks = [
+        (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
+        for parent in parents
+    ]
 
     if skipchild:
         # Need to iterate over the blocks twice -- make it a list
@@ -482,6 +508,7 @@
                         child.skips[bk] = True
     return child
 
+
 def annotate(base, parents, skiprevs=None, diffopts=None):
     """Core algorithm for filectx.annotate()
 
@@ -528,8 +555,9 @@
             skipchild = False
             if skiprevs is not None:
                 skipchild = f._changeid in skiprevs
-            curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
-                                 diffopts)
+            curr = _annotatepair(
+                [hist[p] for p in pl], f, curr, skipchild, diffopts
+            )
             for p in pl:
                 if needed[p] == 1:
                     del hist[p]
@@ -541,8 +569,11 @@
             del pcache[f]
 
     a = hist[base]
-    return [annotateline(*r) for r in zip(a.fctxs, a.linenos, a.skips,
-                                          mdiff.splitnewlines(a.text))]
+    return [
+        annotateline(*r)
+        for r in zip(a.fctxs, a.linenos, a.skips, mdiff.splitnewlines(a.text))
+    ]
+
 
 def toposort(revs, parentsfunc, firstbranch=()):
     """Yield revisions from heads to roots one (topo) branch at a time.
@@ -695,7 +726,7 @@
             #
             # we also update the <parents> set to include the parents of the
             # new nodes.
-            if rev == currentrev: # only display stuff in rev
+            if rev == currentrev:  # only display stuff in rev
                 gr[0].append(rev)
             gr[1].remove(rev)
             parents = [p for p in parentsfunc(rev) if p > node.nullrev]
@@ -742,6 +773,7 @@
         for r in g[0]:
             yield r
 
+
 def headrevs(revs, parentsfn):
     """Resolve the set of heads from a set of revisions.
 
@@ -764,6 +796,7 @@
     headrevs.difference_update(parents)
     return headrevs
 
+
 def headrevssubset(revsfn, parentrevsfn, startrev=None, stoprevs=None):
     """Returns the set of all revs that have no children with control.
 
@@ -800,6 +833,7 @@
 
     return heads
 
+
 def linearize(revs, parentsfn):
     """Linearize and topologically sort a list of revisions.
 
--- a/mercurial/dagparser.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/dagparser.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,8 @@
     error,
     pycompat,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 def parsedag(desc):
     '''parses a DAG from a concise textual description; generates events
@@ -267,7 +266,7 @@
             p1 = -1
             c = nextch()
         elif c == '\0':
-            return # in case it was preceded by whitespace
+            return  # in case it was preceded by whitespace
         else:
             s = ''
             i = 0
@@ -275,17 +274,21 @@
                 s += c
                 i += 1
                 c = nextch()
-            raise error.Abort(_('invalid character in dag description: '
-                               '%s...') % s)
+            raise error.Abort(
+                _('invalid character in dag description: ' '%s...') % s
+            )
+
 
-def dagtextlines(events,
-                 addspaces=True,
-                 wraplabels=False,
-                 wrapannotations=False,
-                 wrapcommands=False,
-                 wrapnonlinear=False,
-                 usedots=False,
-                 maxlinewidth=70):
+def dagtextlines(
+    events,
+    addspaces=True,
+    wraplabels=False,
+    wrapannotations=False,
+    wrapcommands=False,
+    wrapnonlinear=False,
+    usedots=False,
+    maxlinewidth=70,
+):
     '''generates single lines for dagtext()'''
 
     def wrapstring(text):
@@ -310,8 +313,13 @@
                 else:
                     for p in ps:
                         if p >= r:
-                            raise error.Abort(_("parent id %i is larger than "
-                                               "current id %i") % (p, r))
+                            raise error.Abort(
+                                _(
+                                    "parent id %i is larger than "
+                                    "current id %i"
+                                )
+                                % (p, r)
+                            )
                 wantr += 1
 
                 # new root?
@@ -372,10 +380,13 @@
                     yield '#' + data
                     yield '\n'
                 else:
-                    raise error.Abort(_("invalid event type in dag: "
-                                        "('%s', '%s')")
-                                      % (stringutil.escapestr(kind),
-                                         stringutil.escapestr(data)))
+                    raise error.Abort(
+                        _("invalid event type in dag: " "('%s', '%s')")
+                        % (
+                            stringutil.escapestr(kind),
+                            stringutil.escapestr(data),
+                        )
+                    )
         if run:
             yield '+%d' % run
 
@@ -395,14 +406,17 @@
     if line:
         yield line
 
-def dagtext(dag,
-            addspaces=True,
-            wraplabels=False,
-            wrapannotations=False,
-            wrapcommands=False,
-            wrapnonlinear=False,
-            usedots=False,
-            maxlinewidth=70):
+
+def dagtext(
+    dag,
+    addspaces=True,
+    wraplabels=False,
+    wrapannotations=False,
+    wrapcommands=False,
+    wrapnonlinear=False,
+    usedots=False,
+    maxlinewidth=70,
+):
     '''generates lines of a textual representation for a dag event stream
 
     events should generate what parsedag() does, so:
@@ -480,11 +494,15 @@
         '+1 :f +1 :p2 *f */p2'
 
     '''
-    return "\n".join(dagtextlines(dag,
-                                  addspaces,
-                                  wraplabels,
-                                  wrapannotations,
-                                  wrapcommands,
-                                  wrapnonlinear,
-                                  usedots,
-                                  maxlinewidth))
+    return "\n".join(
+        dagtextlines(
+            dag,
+            addspaces,
+            wraplabels,
+            wrapannotations,
+            wrapcommands,
+            wrapnonlinear,
+            usedots,
+            maxlinewidth,
+        )
+    )
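
A visible leftover of the mechanical rewrite in this file: messages that
the old style wrapped across two lines were implicit string
concatenations, and black does not merge adjacent literals, so pairs like
the one in the Abort call above now sit side by side on one line. The
concatenation still happens at compile time:

    msg = 'invalid character in dag description: ' '%s...'
    assert msg == 'invalid character in dag description: %s...'
    print(msg % 'x')
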
--- a/mercurial/debugcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/debugcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -88,14 +88,13 @@
     stringutil,
 )
 
-from .revlogutils import (
-    deltas as deltautil
-)
+from .revlogutils import deltas as deltautil
 
 release = lockmod.release
 
 command = registrar.command()
 
+
 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
 def debugancestor(ui, repo, *args):
     """find the ancestor revision of two revisions in a given index"""
@@ -105,8 +104,9 @@
         lookup = r.lookup
     elif len(args) == 2:
         if not repo:
-            raise error.Abort(_('there is no Mercurial repository here '
-                                '(.hg not found)'))
+            raise error.Abort(
+                _('there is no Mercurial repository here ' '(.hg not found)')
+            )
         rev1, rev2 = args
         r = repo.changelog
         lookup = repo.lookup
@@ -115,6 +115,7 @@
     a = r.ancestor(lookup(rev1), lookup(rev2))
     ui.write('%d:%s\n' % (r.rev(a), hex(a)))
 
+
 @command('debugapplystreamclonebundle', [], 'FILE')
 def debugapplystreamclonebundle(ui, repo, fname):
     """apply a stream clone bundle file"""
@@ -122,15 +123,29 @@
     gen = exchange.readbundle(ui, f, fname)
     gen.apply(repo)
 
-@command('debugbuilddag',
-    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
-    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
-    ('n', 'new-file', None, _('add new file at each rev'))],
-    _('[OPTION]... [TEXT]'))
-def debugbuilddag(ui, repo, text=None,
-                  mergeable_file=False,
-                  overwritten_file=False,
-                  new_file=False):
+
+@command(
+    'debugbuilddag',
+    [
+        ('m', 'mergeable-file', None, _('add single file mergeable changes')),
+        (
+            'o',
+            'overwritten-file',
+            None,
+            _('add single file all revs overwrite'),
+        ),
+        ('n', 'new-file', None, _('add new file at each rev')),
+    ],
+    _('[OPTION]... [TEXT]'),
+)
+def debugbuilddag(
+    ui,
+    repo,
+    text=None,
+    mergeable_file=False,
+    overwritten_file=False,
+    new_file=False,
+):
     """builds a repo with a given DAG from scratch in the current empty repo
 
     The description of the DAG is read from stdin if not given on the
@@ -180,13 +195,13 @@
     if mergeable_file:
         linesperrev = 2
         # make a file with k lines per rev
-        initialmergedlines = ['%d' % i
-                              for i in pycompat.xrange(0, total * linesperrev)]
+        initialmergedlines = [
+            '%d' % i for i in pycompat.xrange(0, total * linesperrev)
+        ]
         initialmergedlines.append("")
 
     tags = []
-    progress = ui.makeprogress(_('building'), unit=_('revisions'),
-                               total=total)
+    progress = ui.makeprogress(_('building'), unit=_('revisions'), total=total)
     with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
         at = -1
         atbranch = 'default'
@@ -208,8 +223,9 @@
                     if len(ps) > 1:
                         p2 = repo[ps[1]]
                         pa = p1.ancestor(p2)
-                        base, local, other = [x[fn].data() for x in (pa, p1,
-                                                                     p2)]
+                        base, local, other = [
+                            x[fn].data() for x in (pa, p1, p2)
+                        ]
                         m3 = simplemerge.Merge3Text(base, local, other)
                         ml = [l.strip() for l in m3.merge_lines()]
                         ml.append("")
@@ -241,8 +257,9 @@
 
                 def fctxfn(repo, cx, path):
                     if path in filecontent:
-                        return context.memfilectx(repo, cx, path,
-                                                  filecontent[path])
+                        return context.memfilectx(
+                            repo, cx, path, filecontent[path]
+                        )
                     return None
 
                 if len(ps) == 0 or ps[0] < 0:
@@ -251,10 +268,16 @@
                     pars = [nodeids[ps[0]], None]
                 else:
                     pars = [nodeids[p] for p in ps]
-                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
-                                    date=(id, 0),
-                                    user="debugbuilddag",
-                                    extra={'branch': atbranch})
+                cx = context.memctx(
+                    repo,
+                    pars,
+                    "r%i" % id,
+                    files,
+                    fctxfn,
+                    date=(id, 0),
+                    user="debugbuilddag",
+                    extra={'branch': atbranch},
+                )
                 nodeid = repo.commitctx(cx)
                 nodeids.append(nodeid)
                 at = id
@@ -270,19 +293,31 @@
         if tags:
             repo.vfs.write("localtags", "".join(tags))
 
+
 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
     indent_string = ' ' * indent
     if all:
-        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
-                 % indent_string)
+        ui.write(
+            "%sformat: id, p1, p2, cset, delta base, len(delta)\n"
+            % indent_string
+        )
 
         def showchunks(named):
             ui.write("\n%s%s\n" % (indent_string, named))
             for deltadata in gen.deltaiter():
                 node, p1, p2, cs, deltabase, delta, flags = deltadata
-                ui.write("%s%s %s %s %s %s %d\n" %
-                         (indent_string, hex(node), hex(p1), hex(p2),
-                          hex(cs), hex(deltabase), len(delta)))
+                ui.write(
+                    "%s%s %s %s %s %s %d\n"
+                    % (
+                        indent_string,
+                        hex(node),
+                        hex(p1),
+                        hex(p2),
+                        hex(cs),
+                        hex(deltabase),
+                        len(delta),
+                    )
+                )
 
         chunkdata = gen.changelogheader()
         showchunks("changelog")
@@ -299,6 +334,7 @@
             node, p1, p2, cs, deltabase, delta, flags = deltadata
             ui.write("%s%s\n" % (indent_string, hex(node)))
 
+
 def _debugobsmarkers(ui, part, indent=0, **opts):
     """display version and markers contained in 'data'"""
     opts = pycompat.byteskwargs(opts)
@@ -322,6 +358,7 @@
             cmdutil.showmarker(fm, m)
         fm.end()
 
+
 def _debugphaseheads(ui, data, indent=0):
     """display version and markers contained in 'data'"""
     indent_string = ' ' * indent
@@ -331,12 +368,15 @@
             ui.write(indent_string)
             ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
 
+
 def _quasirepr(thing):
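+    # repr()-like rendering, but with mapping keys sorted for stable output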
     if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
         return '{%s}' % (
-            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
+            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing))
+        )
     return pycompat.bytestr(repr(thing))
 
+
 def _debugbundle2(ui, gen, all=None, **opts):
     """lists the contents of a bundle2"""
     if not isinstance(gen, bundle2.unbundle20):
@@ -360,12 +400,17 @@
             if not ui.quiet:
                 _debugphaseheads(ui, part, indent=4)
 
-@command('debugbundle',
-        [('a', 'all', None, _('show all details')),
-         ('', 'part-type', [], _('show only the named part type')),
-         ('', 'spec', None, _('print the bundlespec of the bundle'))],
-        _('FILE'),
-        norepo=True)
+
+@command(
+    'debugbundle',
+    [
+        ('a', 'all', None, _('show all details')),
+        ('', 'part-type', [], _('show only the named part type')),
+        ('', 'spec', None, _('print the bundlespec of the bundle')),
+    ],
+    _('FILE'),
+    norepo=True,
+)
 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
     """lists the contents of a bundle"""
     with hg.openpath(ui, bundlepath) as f:
@@ -379,24 +424,24 @@
             return _debugbundle2(ui, gen, all=all, **opts)
         _debugchangegroup(ui, gen, all=all, **opts)
 
-@command('debugcapabilities',
-        [], _('PATH'),
-        norepo=True)
+
+@command('debugcapabilities', [], _('PATH'), norepo=True)
 def debugcapabilities(ui, path, **opts):
     """lists the capabilities of a remote peer"""
     opts = pycompat.byteskwargs(opts)
     peer = hg.peer(ui, opts, path)
     caps = peer.capabilities()
-    ui.write(('Main capabilities:\n'))
+    ui.write('Main capabilities:\n')
     for c in sorted(caps):
-        ui.write(('  %s\n') % c)
+        ui.write('  %s\n' % c)
     b2caps = bundle2.bundle2caps(peer)
     if b2caps:
-        ui.write(('Bundle2 capabilities:\n'))
+        ui.write('Bundle2 capabilities:\n')
         for key, values in sorted(b2caps.iteritems()):
-            ui.write(('  %s\n') % key)
+            ui.write('  %s\n' % key)
             for v in values:
-                ui.write(('    %s\n') % v)
+                ui.write('    %s\n' % v)
+
 
 @command('debugcheckstate', [], '')
 def debugcheckstate(ui, repo):
@@ -414,8 +459,9 @@
             ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
             errors += 1
         if state in "m" and f not in m1 and f not in m2:
-            ui.warn(_("%s in state %s, but not in either manifest\n") %
-                    (f, state))
+            ui.warn(
+                _("%s in state %s, but not in either manifest\n") % (f, state)
+            )
             errors += 1
     for f in m1:
         state = repo.dirstate[f]
@@ -426,17 +472,21 @@
         errstr = _(".hg/dirstate inconsistent with current parent's manifest")
         raise error.Abort(errstr)
 
-@command('debugcolor',
-        [('', 'style', None, _('show all configured styles'))],
-        'hg debugcolor')
+
+@command(
+    'debugcolor',
+    [('', 'style', None, _('show all configured styles'))],
+    'hg debugcolor',
+)
 def debugcolor(ui, repo, **opts):
     """show available color, effects or style"""
-    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
+    ui.write('color mode: %s\n' % stringutil.pprint(ui._colormode))
     if opts.get(r'style'):
         return _debugdisplaystyle(ui)
     else:
         return _debugdisplaycolor(ui)
 
+
 def _debugdisplaycolor(ui):
     ui = ui.copy()
     ui._styles.clear()
@@ -450,10 +500,10 @@
                 ui._styles[k] = k[9:]
     ui.write(_('available colors:\n'))
     # sort labels with a '_' after the others, to group the '_background' entries.
-    items = sorted(ui._styles.items(),
-                   key=lambda i: ('_' in i[0], i[0], i[1]))
+    items = sorted(ui._styles.items(), key=lambda i: ('_' in i[0], i[0], i[1]))
     for colorname, label in items:
-        ui.write(('%s\n') % colorname, label=label)
+        ui.write('%s\n' % colorname, label=label)
+
 
 def _debugdisplaystyle(ui):
     ui.write(_('available style:\n'))
@@ -469,6 +519,7 @@
             ui.write(', '.join(ui.label(e, e) for e in effects.split()))
         ui.write('\n')
 
+
 @command('debugcreatestreamclonebundle', [], 'FILE')
 def debugcreatestreamclonebundle(ui, repo, fname):
     """create a stream clone bundle file
@@ -479,21 +530,30 @@
     # TODO we may want to turn this into an abort when this functionality
     # is moved into `hg bundle`.
     if phases.hassecret(repo):
-        ui.warn(_('(warning: stream clone bundle will contain secret '
-                  'revisions)\n'))
+        ui.warn(
+            _(
+                '(warning: stream clone bundle will contain secret '
+                'revisions)\n'
+            )
+        )
 
     requirements, gen = streamclone.generatebundlev1(repo)
     changegroup.writechunks(ui, gen, fname)
 
     ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
 
-@command('debugdag',
-    [('t', 'tags', None, _('use tags as labels')),
-    ('b', 'branches', None, _('annotate with branch names')),
-    ('', 'dots', None, _('use dots for runs')),
-    ('s', 'spaces', None, _('separate elements by spaces'))],
+
+@command(
+    'debugdag',
+    [
+        ('t', 'tags', None, _('use tags as labels')),
+        ('b', 'branches', None, _('annotate with branch names')),
+        ('', 'dots', None, _('use dots for runs')),
+        ('s', 'spaces', None, _('separate elements by spaces')),
+    ],
     _('[OPTION]... [FILE [REV]...]'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugdag(ui, repo, file_=None, *revs, **opts):
     """format the changelog or an index DAG as a concise textual description
 
@@ -505,15 +565,15 @@
     spaces = opts.get(r'spaces')
     dots = opts.get(r'dots')
     if file_:
-        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
-                             file_)
+        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), file_)
         revs = set((int(r) for r in revs))
+
         def events():
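+            # yield 'n' (node) events with parent revs, plus 'l' (label) events for listed revs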
             for r in rlog:
-                yield 'n', (r, list(p for p in rlog.parentrevs(r)
-                                        if p != -1))
+                yield 'n', (r, list(p for p in rlog.parentrevs(r) if p != -1))
                 if r in revs:
                     yield 'l', (r, "r%i" % r)
+
     elif repo:
         cl = repo.changelog
         tags = opts.get(r'tags')
@@ -522,6 +582,7 @@
             labels = {}
             for l, n in repo.tags().items():
                 labels.setdefault(cl.rev(n), []).append(l)
+
         def events():
             b = "default"
             for r in cl:
@@ -530,26 +591,29 @@
                     if newb != b:
                         yield 'a', newb
                         b = newb
-                yield 'n', (r, list(p for p in cl.parentrevs(r)
-                                        if p != -1))
+                yield 'n', (r, list(p for p in cl.parentrevs(r) if p != -1))
                 if tags:
                     ls = labels.get(r)
                     if ls:
                         for l in ls:
                             yield 'l', (r, l)
+
     else:
         raise error.Abort(_('need repo for changelog dag'))
 
-    for line in dagparser.dagtextlines(events(),
-                                       addspaces=spaces,
-                                       wraplabels=True,
-                                       wrapannotations=True,
-                                       wrapnonlinear=dots,
-                                       usedots=dots,
-                                       maxlinewidth=70):
+    for line in dagparser.dagtextlines(
+        events(),
+        addspaces=spaces,
+        wraplabels=True,
+        wrapannotations=True,
+        wrapnonlinear=dots,
+        usedots=dots,
+        maxlinewidth=70,
+    ):
         ui.write(line)
         ui.write("\n")
 
+
 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
 def debugdata(ui, repo, file_, rev=None, **opts):
     """dump the contents of a data file revision"""
@@ -566,26 +630,33 @@
     except KeyError:
         raise error.Abort(_('invalid revision identifier %s') % rev)
 
-@command('debugdate',
+
+@command(
+    'debugdate',
     [('e', 'extended', None, _('try extended date formats'))],
     _('[-e] DATE [RANGE]'),
-    norepo=True, optionalrepo=True)
+    norepo=True,
+    optionalrepo=True,
+)
 def debugdate(ui, date, range=None, **opts):
     """parse and display a date"""
     if opts[r"extended"]:
         d = dateutil.parsedate(date, util.extendeddateformats)
     else:
         d = dateutil.parsedate(date)
-    ui.write(("internal: %d %d\n") % d)
-    ui.write(("standard: %s\n") % dateutil.datestr(d))
+    ui.write("internal: %d %d\n" % d)
+    ui.write("standard: %s\n" % dateutil.datestr(d))
     if range:
         m = dateutil.matchdate(range)
-        ui.write(("match: %s\n") % m(d[0]))
-
-@command('debugdeltachain',
+        ui.write("match: %s\n" % m(d[0]))
+
+
+@command(
+    'debugdeltachain',
     cmdutil.debugrevlogopts + cmdutil.formatteropts,
     _('-c|-m|FILE'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugdeltachain(ui, repo, file_=None, **opts):
     """dump information about delta chains in a revlog
 
@@ -661,9 +732,11 @@
 
     fm = ui.formatter('debugdeltachain', opts)
 
-    fm.plain('    rev  chain# chainlen     prev   delta       '
-             'size    rawsize  chainsize     ratio   lindist extradist '
-             'extraratio')
+    fm.plain(
+        '    rev  chain# chainlen     prev   delta       '
+        'size    rawsize  chainsize     ratio   lindist extradist '
+        'extraratio'
+    )
     if withsparseread:
         fm.plain('   readsize largestblk rddensity srchunks')
     fm.plain('\n')
@@ -693,18 +766,36 @@
             extraratio = extradist
 
         fm.startitem()
-        fm.write('rev chainid chainlen prevrev deltatype compsize '
-                 'uncompsize chainsize chainratio lindist extradist '
-                 'extraratio',
-                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
-                 rev, chainid, len(chain), prevrev, deltatype, comp,
-                 uncomp, chainsize, chainratio, lineardist, extradist,
-                 extraratio,
-                 rev=rev, chainid=chainid, chainlen=len(chain),
-                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
-                 uncompsize=uncomp, chainsize=chainsize,
-                 chainratio=chainratio, lindist=lineardist,
-                 extradist=extradist, extraratio=extraratio)
+        fm.write(
+            'rev chainid chainlen prevrev deltatype compsize '
+            'uncompsize chainsize chainratio lindist extradist '
+            'extraratio',
+            '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
+            rev,
+            chainid,
+            len(chain),
+            prevrev,
+            deltatype,
+            comp,
+            uncomp,
+            chainsize,
+            chainratio,
+            lineardist,
+            extradist,
+            extraratio,
+            rev=rev,
+            chainid=chainid,
+            chainlen=len(chain),
+            prevrev=prevrev,
+            deltatype=deltatype,
+            compsize=comp,
+            uncompsize=uncomp,
+            chainsize=chainsize,
+            chainratio=chainratio,
+            lindist=lineardist,
+            extradist=extradist,
+            extraratio=extraratio,
+        )
         if withsparseread:
             readsize = 0
             largestblock = 0
@@ -724,21 +815,33 @@
             else:
                 readdensity = 1
 
-            fm.write('readsize largestblock readdensity srchunks',
-                     ' %10d %10d %9.5f %8d',
-                     readsize, largestblock, readdensity, srchunks,
-                     readsize=readsize, largestblock=largestblock,
-                     readdensity=readdensity, srchunks=srchunks)
+            fm.write(
+                'readsize largestblock readdensity srchunks',
+                ' %10d %10d %9.5f %8d',
+                readsize,
+                largestblock,
+                readdensity,
+                srchunks,
+                readsize=readsize,
+                largestblock=largestblock,
+                readdensity=readdensity,
+                srchunks=srchunks,
+            )
 
         fm.plain('\n')
 
     fm.end()
 
-@command('debugdirstate|debugstate',
-    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
-     ('', 'dates', True, _('display the saved mtime')),
-     ('', 'datesort', None, _('sort by saved mtime'))],
-    _('[OPTION]...'))
+
+@command(
+    'debugdirstate|debugstate',
+    [
+        ('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
+        ('', 'dates', True, _('display the saved mtime')),
+        ('', 'datesort', None, _('sort by saved mtime')),
+    ],
+    _('[OPTION]...'),
+)
 def debugstate(ui, repo, **opts):
     """show the contents of the current dirstate"""
 
@@ -748,17 +851,18 @@
     datesort = opts.get(r'datesort')
 
     if datesort:
-        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
+        keyfunc = lambda x: (x[1][3], x[0])  # sort by mtime, then by filename
     else:
-        keyfunc = None # sort by filename
+        keyfunc = None  # sort by filename
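+    # each dirstate entry value is a (state, mode, size, mtime) tuple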
     for file_, ent in sorted(repo.dirstate.iteritems(), key=keyfunc):
         if ent[3] == -1:
             timestr = 'unset               '
         elif nodates:
             timestr = 'set                 '
         else:
-            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
-                                    time.localtime(ent[3]))
+            timestr = time.strftime(
+                r"%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])
+            )
             timestr = encoding.strtolocal(timestr)
         if ent[1] & 0o20000:
             mode = 'lnk'
@@ -768,14 +872,23 @@
     for f in repo.dirstate.copies():
         ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
 
-@command('debugdiscovery',
-    [('', 'old', None, _('use old-style discovery')),
-    ('', 'nonheads', None,
-     _('use old-style discovery with non-heads included')),
-    ('', 'rev', [], 'restrict discovery to this set of revs'),
-    ('', 'seed', '12323', 'specify the random seed use for discovery'),
-    ] + cmdutil.remoteopts,
-    _('[--rev REV] [OTHER]'))
+
+@command(
+    'debugdiscovery',
+    [
+        ('', 'old', None, _('use old-style discovery')),
+        (
+            '',
+            'nonheads',
+            None,
+            _('use old-style discovery with non-heads included'),
+        ),
+        ('', 'rev', [], 'restrict discovery to this set of revs'),
+        ('', 'seed', '12323', 'specify the random seed to use for discovery'),
+    ]
+    + cmdutil.remoteopts,
+    _('[--rev REV] [OTHER]'),
+)
 def debugdiscovery(ui, repo, remoteurl="default", **opts):
     """runs the changeset discovery protocol in isolation"""
     opts = pycompat.byteskwargs(opts)
@@ -786,32 +899,37 @@
     # make sure tests are repeatable
     random.seed(int(opts['seed']))
 
-
-
     if opts.get('old'):
+
         def doit(pushedrevs, remoteheads, remote=remote):
             if not util.safehasattr(remote, 'branches'):
                 # enable in-client legacy support
                 remote = localrepo.locallegacypeer(remote.local())
-            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
-                                                                force=True)
+            common, _in, hds = treediscovery.findcommonincoming(
+                repo, remote, force=True
+            )
             common = set(common)
             if not opts.get('nonheads'):
-                ui.write(("unpruned common: %s\n") %
-                         " ".join(sorted(short(n) for n in common)))
+                ui.write(
+                    "unpruned common: %s\n"
+                    % " ".join(sorted(short(n) for n in common))
+                )
 
                 clnode = repo.changelog.node
                 common = repo.revs('heads(::%ln)', common)
                 common = {clnode(r) for r in common}
             return common, hds
+
     else:
+
         def doit(pushedrevs, remoteheads, remote=remote):
             nodes = None
             if pushedrevs:
                 revs = scmutil.revrange(repo, pushedrevs)
                 nodes = [repo[r].node() for r in revs]
-            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
-                                                            ancestorsof=nodes)
+            common, any, hds = setdiscovery.findcommonheads(
+                ui, repo, remote, ancestorsof=nodes
+            )
             return common, hds
 
     remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
@@ -839,33 +957,32 @@
     data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']
 
     # display discovery summary
-    ui.write(("elapsed time:  %(elapsed)f seconds\n") % data)
-    ui.write(("heads summary:\n"))
-    ui.write(("  total common heads:  %(nb-common)9d\n") % data)
-    ui.write(("    also local heads:  %(nb-common-local)9d\n") % data)
-    ui.write(("    also remote heads: %(nb-common-remote)9d\n") % data)
-    ui.write(("    both:              %(nb-common-both)9d\n") % data)
-    ui.write(("  local heads:         %(nb-local)9d\n") % data)
-    ui.write(("    common:            %(nb-common-local)9d\n") % data)
-    ui.write(("    missing:           %(nb-local-missing)9d\n") % data)
-    ui.write(("  remote heads:        %(nb-remote)9d\n") % data)
-    ui.write(("    common:            %(nb-common-remote)9d\n") % data)
-    ui.write(("    unknown:           %(nb-remote-unknown)9d\n") % data)
-    ui.write(("local changesets:      %(nb-revs)9d\n") % data)
-    ui.write(("  common:              %(nb-revs-common)9d\n") % data)
-    ui.write(("  missing:             %(nb-revs-missing)9d\n") % data)
+    ui.write("elapsed time:  %(elapsed)f seconds\n" % data)
+    ui.write("heads summary:\n")
+    ui.write("  total common heads:  %(nb-common)9d\n" % data)
+    ui.write("    also local heads:  %(nb-common-local)9d\n" % data)
+    ui.write("    also remote heads: %(nb-common-remote)9d\n" % data)
+    ui.write("    both:              %(nb-common-both)9d\n" % data)
+    ui.write("  local heads:         %(nb-local)9d\n" % data)
+    ui.write("    common:            %(nb-common-local)9d\n" % data)
+    ui.write("    missing:           %(nb-local-missing)9d\n" % data)
+    ui.write("  remote heads:        %(nb-remote)9d\n" % data)
+    ui.write("    common:            %(nb-common-remote)9d\n" % data)
+    ui.write("    unknown:           %(nb-remote-unknown)9d\n" % data)
+    ui.write("local changesets:      %(nb-revs)9d\n" % data)
+    ui.write("  common:              %(nb-revs-common)9d\n" % data)
+    ui.write("  missing:             %(nb-revs-missing)9d\n" % data)
 
     if ui.verbose:
-        ui.write(("common heads: %s\n") %
-                 " ".join(sorted(short(n) for n in common)))
+        ui.write(
+            "common heads: %s\n" % " ".join(sorted(short(n) for n in common))
+        )
+
 
 _chunksize = 4 << 10
 
-@command('debugdownload',
-    [
-        ('o', 'output', '', _('path')),
-    ],
-    optionalrepo=True)
+
+@command('debugdownload', [('o', 'output', '', _('path')),], optionalrepo=True)
 def debugdownload(ui, repo, url, output=None, **opts):
     """download a resource using Mercurial logic and config
     """
@@ -883,6 +1000,7 @@
         if output:
             dest.close()
 
+
 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
 def debugextensions(ui, repo, **opts):
     '''show information about active extensions'''
@@ -913,35 +1031,65 @@
                 lasttestedversion = exttestedwith[-1]
                 fm.plain(' (%s!)\n' % lasttestedversion)
 
-        fm.condwrite(ui.verbose and extsource, 'source',
-                 _('  location: %s\n'), extsource or "")
+        fm.condwrite(
+            ui.verbose and extsource,
+            'source',
+            _('  location: %s\n'),
+            extsource or "",
+        )
 
         if ui.verbose:
             fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
         fm.data(bundled=isinternal)
 
-        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
-                     _('  tested with: %s\n'),
-                     fm.formatlist(exttestedwith, name='ver'))
-
-        fm.condwrite(ui.verbose and extbuglink, 'buglink',
-                 _('  bug reporting: %s\n'), extbuglink or "")
+        fm.condwrite(
+            ui.verbose and exttestedwith,
+            'testedwith',
+            _('  tested with: %s\n'),
+            fm.formatlist(exttestedwith, name='ver'),
+        )
+
+        fm.condwrite(
+            ui.verbose and extbuglink,
+            'buglink',
+            _('  bug reporting: %s\n'),
+            extbuglink or "",
+        )
 
     fm.end()
 
-@command('debugfileset',
-    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
-     ('', 'all-files', False,
-      _('test files from all revisions and working directory')),
-     ('s', 'show-matcher', None,
-      _('print internal representation of matcher')),
-     ('p', 'show-stage', [],
-      _('print parsed tree at the given stage'), _('NAME'))],
-    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
+
+@command(
+    'debugfileset',
+    [
+        ('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
+        (
+            '',
+            'all-files',
+            False,
+            _('test files from all revisions and working directory'),
+        ),
+        (
+            's',
+            'show-matcher',
+            None,
+            _('print internal representation of matcher'),
+        ),
+        (
+            'p',
+            'show-stage',
+            [],
+            _('print parsed tree at the given stage'),
+            _('NAME'),
+        ),
+    ],
+    _('[-r REV] [--all-files] [OPTION]... FILESPEC'),
+)
 def debugfileset(ui, repo, expr, **opts):
     '''parse and apply a fileset specification'''
     from . import fileset
-    fileset.symbols # force import of fileset so we have predicates to optimize
+
+    fileset.symbols  # force import of fileset so we have predicates to optimize
     opts = pycompat.byteskwargs(opts)
     ctx = scmutil.revsingle(repo, opts.get('rev'), None)
 
@@ -969,7 +1117,7 @@
         tree = f(tree)
         if n in showalways:
             if opts['show_stage'] or n != 'parsed':
-                ui.write(("* %s:\n") % n)
+                ui.write("* %s:\n" % n)
             ui.write(filesetlang.prettyformat(tree), "\n")
 
     files = set()
@@ -980,9 +1128,14 @@
             files.update(c.substate)
     if opts['all_files'] or ctx.rev() is None:
         wctx = repo[None]
-        files.update(repo.dirstate.walk(scmutil.matchall(repo),
-                                        subrepos=list(wctx.substate),
-                                        unknown=True, ignored=True))
+        files.update(
+            repo.dirstate.walk(
+                scmutil.matchall(repo),
+                subrepos=list(wctx.substate),
+                unknown=True,
+                ignored=True,
+            )
+        )
         files.update(wctx.substate)
     else:
         files.update(ctx.files())
@@ -990,14 +1143,14 @@
 
     m = ctx.matchfileset(expr)
     if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
-        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
+        ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n')
     for f in sorted(files):
         if not m(f):
             continue
         ui.write("%s\n" % f)
 
-@command('debugformat',
-         [] + cmdutil.formatteropts)
+
+@command('debugformat', [] + cmdutil.formatteropts)
 def debugformat(ui, repo, **opts):
     """display format information about the current repository
 
@@ -1012,6 +1165,7 @@
 
     fm = ui.formatter('debugformat', opts)
     if fm.isplain():
+
         def formatvalue(value):
             if util.safehasattr(value, 'startswith'):
                 return value
@@ -1019,6 +1173,7 @@
                 return 'yes'
             else:
                 return 'no'
+
     else:
         formatvalue = pycompat.identity
 
@@ -1043,44 +1198,58 @@
             namelabel = 'formatvariant.name.uptodate'
             repolabel = 'formatvariant.repo.uptodate'
 
-        fm.write('name', makeformatname(fv.name), fv.name,
-                 label=namelabel)
-        fm.write('repo', ' %3s', formatvalue(repovalue),
-                 label=repolabel)
+        fm.write('name', makeformatname(fv.name), fv.name, label=namelabel)
+        fm.write('repo', ' %3s', formatvalue(repovalue), label=repolabel)
         if fv.default != configvalue:
             configlabel = 'formatvariant.config.special'
         else:
             configlabel = 'formatvariant.config.default'
-        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
-                     label=configlabel)
-        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
-                     label='formatvariant.default')
+        fm.condwrite(
+            ui.verbose,
+            'config',
+            ' %6s',
+            formatvalue(configvalue),
+            label=configlabel,
+        )
+        fm.condwrite(
+            ui.verbose,
+            'default',
+            ' %7s',
+            formatvalue(fv.default),
+            label='formatvariant.default',
+        )
         fm.plain('\n')
     fm.end()
 
+
 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
 def debugfsinfo(ui, path="."):
     """show information detected about current filesystem"""
-    ui.write(('path: %s\n') % path)
-    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
-    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
-    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
-    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
-    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
+    ui.write('path: %s\n' % path)
+    ui.write('mounted on: %s\n' % (util.getfsmountpoint(path) or '(unknown)'))
+    ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
+    ui.write('fstype: %s\n' % (util.getfstype(path) or '(unknown)'))
+    ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
+    ui.write('hardlink: %s\n' % (util.checknlink(path) and 'yes' or 'no'))
     casesensitive = '(unknown)'
     try:
         with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
             casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
     except OSError:
         pass
-    ui.write(('case-sensitive: %s\n') % casesensitive)
-
-@command('debuggetbundle',
-    [('H', 'head', [], _('id of head node'), _('ID')),
-    ('C', 'common', [], _('id of common node'), _('ID')),
-    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
+    ui.write('case-sensitive: %s\n' % casesensitive)
+
+
+@command(
+    'debuggetbundle',
+    [
+        ('H', 'head', [], _('id of head node'), _('ID')),
+        ('C', 'common', [], _('id of common node'), _('ID')),
+        ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
+    ],
     _('REPO FILE [-H|-C ID]...'),
-    norepo=True)
+    norepo=True,
+)
 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
     """retrieves a bundle from a repo
 
@@ -1101,15 +1270,18 @@
     bundle = repo.getbundle('debug', **args)
 
     bundletype = opts.get('type', 'bzip2').lower()
-    btypes = {'none': 'HG10UN',
-              'bzip2': 'HG10BZ',
-              'gzip': 'HG10GZ',
-              'bundle2': 'HG20'}
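+    # map user-facing compression names to internal bundle type identifiers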
+    btypes = {
+        'none': 'HG10UN',
+        'bzip2': 'HG10BZ',
+        'gzip': 'HG10GZ',
+        'bundle2': 'HG20',
+    }
     bundletype = btypes.get(bundletype)
     if bundletype not in bundle2.bundletypes:
         raise error.Abort(_('unknown bundle type specified with --type'))
     bundle2.writebundle(ui, bundle, bundlepath, bundletype)
 
+
 @command('debugignore', [], '[FILE]')
 def debugignore(ui, repo, *files, **opts):
     """display the combined ignore pattern and information about ignored files
@@ -1144,17 +1316,27 @@
                 if ignored == nf:
                     ui.write(_("%s is ignored\n") % uipathfn(f))
                 else:
-                    ui.write(_("%s is ignored because of "
-                               "containing directory %s\n")
-                             % (uipathfn(f), ignored))
+                    ui.write(
+                        _(
+                            "%s is ignored because of "
+                            "containing directory %s\n"
+                        )
+                        % (uipathfn(f), ignored)
+                    )
                 ignorefile, lineno, line = ignoredata
-                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
-                         % (ignorefile, lineno, line))
+                ui.write(
+                    _("(ignore rule in %s, line %d: '%s')\n")
+                    % (ignorefile, lineno, line)
+                )
             else:
                 ui.write(_("%s is not ignored\n") % uipathfn(f))
 
-@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
-         _('-c|-m|FILE'))
+
+@command(
+    'debugindex',
+    cmdutil.debugrevlogopts + cmdutil.formatteropts,
+    _('-c|-m|FILE'),
+)
 def debugindex(ui, repo, file_=None, **opts):
     """dump index data for a storage primitive"""
     opts = pycompat.byteskwargs(opts)
@@ -1171,9 +1353,10 @@
         break
 
     fm = ui.formatter('debugindex', opts)
-    fm.plain(b'   rev linkrev %s %s p2\n' % (
-        b'nodeid'.ljust(idlen),
-        b'p1'.ljust(idlen)))
+    fm.plain(
+        b'   rev linkrev %s %s p2\n'
+        % (b'nodeid'.ljust(idlen), b'p1'.ljust(idlen))
+    )
 
     for rev in store:
         node = store.node(rev)
@@ -1189,13 +1372,15 @@
 
     fm.end()
 
-@command('debugindexdot', cmdutil.debugrevlogopts,
-    _('-c|-m|FILE'), optionalrepo=True)
+
+@command(
+    'debugindexdot', cmdutil.debugrevlogopts, _('-c|-m|FILE'), optionalrepo=True
+)
 def debugindexdot(ui, repo, file_=None, **opts):
     """dump an index DAG as a graphviz dot file"""
     opts = pycompat.byteskwargs(opts)
     r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
-    ui.write(("digraph G {\n"))
+    ui.write("digraph G {\n")
     for i in r:
         node = r.node(i)
         pp = r.parents(node)
@@ -1204,6 +1389,7 @@
             ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
     ui.write("}\n")
 
+
 @command('debugindexstats', [])
 def debugindexstats(ui, repo):
     """show stats related to the changelog index"""
@@ -1214,6 +1400,7 @@
     for k, v in sorted(index.stats().items()):
         ui.write('%s: %d\n' % (k, v))
 
+
 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
 def debuginstall(ui, **opts):
     '''test Mercurial installation
@@ -1235,48 +1422,79 @@
     except LookupError as inst:
         err = stringutil.forcebytestr(inst)
         problems += 1
-    fm.condwrite(err, 'encodingerror', _(" %s\n"
-                 " (check that your locale is properly set)\n"), err)
+    fm.condwrite(
+        err,
+        'encodingerror',
+        _(" %s\n" " (check that your locale is properly set)\n"),
+        err,
+    )
 
     # Python
-    fm.write('pythonexe', _("checking Python executable (%s)\n"),
-             pycompat.sysexecutable or _("unknown"))
-    fm.write('pythonver', _("checking Python version (%s)\n"),
-             ("%d.%d.%d" % sys.version_info[:3]))
-    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
-             os.path.dirname(pycompat.fsencode(os.__file__)))
+    fm.write(
+        'pythonexe',
+        _("checking Python executable (%s)\n"),
+        pycompat.sysexecutable or _("unknown"),
+    )
+    fm.write(
+        'pythonver',
+        _("checking Python version (%s)\n"),
+        ("%d.%d.%d" % sys.version_info[:3]),
+    )
+    fm.write(
+        'pythonlib',
+        _("checking Python lib (%s)...\n"),
+        os.path.dirname(pycompat.fsencode(os.__file__)),
+    )
 
     security = set(sslutil.supportedprotocols)
     if sslutil.hassni:
         security.add('sni')
 
-    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
-             fm.formatlist(sorted(security), name='protocol',
-                           fmt='%s', sep=','))
+    fm.write(
+        'pythonsecurity',
+        _("checking Python security support (%s)\n"),
+        fm.formatlist(sorted(security), name='protocol', fmt='%s', sep=','),
+    )
 
     # These are warnings, not errors. So don't increment problem count. This
     # may change in the future.
     if 'tls1.2' not in security:
-        fm.plain(_('  TLS 1.2 not supported by Python install; '
-                   'network connections lack modern security\n'))
+        fm.plain(
+            _(
+                '  TLS 1.2 not supported by Python install; '
+                'network connections lack modern security\n'
+            )
+        )
     if 'sni' not in security:
-        fm.plain(_('  SNI not supported by Python install; may have '
-                   'connectivity issues with some servers\n'))
+        fm.plain(
+            _(
+                '  SNI not supported by Python install; may have '
+                'connectivity issues with some servers\n'
+            )
+        )
 
     # TODO print CA cert info
 
     # hg version
     hgver = util.version()
-    fm.write('hgver', _("checking Mercurial version (%s)\n"),
-             hgver.split('+')[0])
-    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
-             '+'.join(hgver.split('+')[1:]))
+    fm.write(
+        'hgver', _("checking Mercurial version (%s)\n"), hgver.split('+')[0]
+    )
+    fm.write(
+        'hgverextra',
+        _("checking Mercurial custom build (%s)\n"),
+        '+'.join(hgver.split('+')[1:]),
+    )
 
     # compiled modules
-    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
-             policy.policy)
-    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
-             os.path.dirname(pycompat.fsencode(__file__)))
+    fm.write(
+        'hgmodulepolicy', _("checking module policy (%s)\n"), policy.policy
+    )
+    fm.write(
+        'hgmodules',
+        _("checking installed modules (%s)...\n"),
+        os.path.dirname(pycompat.fsencode(__file__)),
+    )
 
     rustandc = policy.policy in ('rust+c', 'rust+c-allow')
     rustext = rustandc  # for now, that's the only case
@@ -1292,6 +1510,7 @@
                     mpatch,
                     osutil,
                 )
+
                 # quiet pyflakes
                 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
             if rustext:
@@ -1299,28 +1518,47 @@
                     ancestor,
                     dirstate,
                 )
-                dir(ancestor), dir(dirstate) # quiet pyflakes
+
+                dir(ancestor), dir(dirstate)  # quiet pyflakes
         except Exception as inst:
             err = stringutil.forcebytestr(inst)
             problems += 1
         fm.condwrite(err, 'extensionserror', " %s\n", err)
 
     compengines = util.compengines._engines.values()
-    fm.write('compengines', _('checking registered compression engines (%s)\n'),
-             fm.formatlist(sorted(e.name() for e in compengines),
-                           name='compengine', fmt='%s', sep=', '))
-    fm.write('compenginesavail', _('checking available compression engines '
-                                   '(%s)\n'),
-             fm.formatlist(sorted(e.name() for e in compengines
-                                  if e.available()),
-                           name='compengine', fmt='%s', sep=', '))
+    fm.write(
+        'compengines',
+        _('checking registered compression engines (%s)\n'),
+        fm.formatlist(
+            sorted(e.name() for e in compengines),
+            name='compengine',
+            fmt='%s',
+            sep=', ',
+        ),
+    )
+    fm.write(
+        'compenginesavail',
+        _('checking available compression engines ' '(%s)\n'),
+        fm.formatlist(
+            sorted(e.name() for e in compengines if e.available()),
+            name='compengine',
+            fmt='%s',
+            sep=', ',
+        ),
+    )
     wirecompengines = compression.compengines.supportedwireengines(
-        compression.SERVERROLE)
-    fm.write('compenginesserver', _('checking available compression engines '
-                                    'for wire protocol (%s)\n'),
-             fm.formatlist([e.name() for e in wirecompengines
-                            if e.wireprotosupport()],
-                           name='compengine', fmt='%s', sep=', '))
+        compression.SERVERROLE
+    )
+    fm.write(
+        'compenginesserver',
+        _('checking available compression engines ' 'for wire protocol (%s)\n'),
+        fm.formatlist(
+            [e.name() for e in wirecompengines if e.wireprotosupport()],
+            name='compengine',
+            fmt='%s',
+            sep=', ',
+        ),
+    )
     re2 = 'missing'
     if util._re2:
         re2 = 'available'
@@ -1344,14 +1582,20 @@
             fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
         else:
             p = None
-        fm.condwrite(p, 'defaulttemplate',
-                     _("checking default template (%s)\n"), m)
-        fm.condwrite(not m, 'defaulttemplatenotfound',
-                     _(" template '%s' not found\n"), "default")
+        fm.condwrite(
+            p, 'defaulttemplate', _("checking default template (%s)\n"), m
+        )
+        fm.condwrite(
+            not m,
+            'defaulttemplatenotfound',
+            _(" template '%s' not found\n"),
+            "default",
+        )
     if not p:
         problems += 1
-    fm.condwrite(not p, '',
-                 _(" (templates seem to have been installed incorrectly)\n"))
+    fm.condwrite(
+        not p, '', _(" (templates seem to have been installed incorrectly)\n")
+    )
 
     # editor
     editor = ui.geteditor()
@@ -1359,14 +1603,26 @@
     editorbin = procutil.shellsplit(editor)[0]
     fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
     cmdpath = procutil.findexe(editorbin)
-    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
-                 _(" No commit editor set and can't find %s in PATH\n"
-                   " (specify a commit editor in your configuration"
-                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
-    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
-                 _(" Can't find editor '%s' in PATH\n"
-                   " (specify a commit editor in your configuration"
-                   " file)\n"), not cmdpath and editorbin)
+    fm.condwrite(
+        not cmdpath and editor == 'vi',
+        'vinotfound',
+        _(
+            " No commit editor set and can't find %s in PATH\n"
+            " (specify a commit editor in your configuration"
+            " file)\n"
+        ),
+        not cmdpath and editor == 'vi' and editorbin,
+    )
+    fm.condwrite(
+        not cmdpath and editor != 'vi',
+        'editornotfound',
+        _(
+            " Can't find editor '%s' in PATH\n"
+            " (specify a commit editor in your configuration"
+            " file)\n"
+        ),
+        not cmdpath and editorbin,
+    )
     if not cmdpath and editor != 'vi':
         problems += 1
 
@@ -1379,26 +1635,36 @@
         err = stringutil.forcebytestr(e)
         problems += 1
 
-    fm.condwrite(username, 'username',  _("checking username (%s)\n"), username)
-    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
-        " (specify a username in your configuration file)\n"), err)
+    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
+    fm.condwrite(
+        err,
+        'usernameerror',
+        _(
+            "checking username...\n %s\n"
+            " (specify a username in your configuration file)\n"
+        ),
+        err,
+    )
 
     for name, mod in extensions.extensions():
         handler = getattr(mod, 'debuginstall', None)
         if handler is not None:
             problems += handler(ui, fm)
 
-    fm.condwrite(not problems, '',
-                 _("no problems detected\n"))
+    fm.condwrite(not problems, '', _("no problems detected\n"))
     if not problems:
         fm.data(problems=problems)
-    fm.condwrite(problems, 'problems',
-                 _("%d problems detected,"
-                   " please check your install!\n"), problems)
+    fm.condwrite(
+        problems,
+        'problems',
+        _("%d problems detected," " please check your install!\n"),
+        problems,
+    )
     fm.end()
 
     return problems
 
+
 @command('debugknown', [], _('REPO ID...'), norepo=True)
 def debugknown(ui, repopath, *ids, **opts):
     """test whether node ids are known to a repo
@@ -1413,19 +1679,28 @@
     flags = repo.known([bin(s) for s in ids])
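+    # one digit per requested node: 1 if the repo knows it, 0 otherwise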
     ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
 
+
 @command('debuglabelcomplete', [], _('LABEL...'))
 def debuglabelcomplete(ui, repo, *args):
     '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
     debugnamecomplete(ui, repo, *args)
 
-@command('debuglocks',
-         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
-          ('W', 'force-wlock', None,
-           _('free the working state lock (DANGEROUS)')),
-          ('s', 'set-lock', None, _('set the store lock until stopped')),
-          ('S', 'set-wlock', None,
-           _('set the working state lock until stopped'))],
-         _('[OPTION]...'))
+
+@command(
+    'debuglocks',
+    [
+        ('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
+        (
+            'W',
+            'force-wlock',
+            None,
+            _('free the working state lock (DANGEROUS)'),
+        ),
+        ('s', 'set-lock', None, _('set the store lock until stopped')),
+        ('S', 'set-wlock', None, _('set the working state lock until stopped')),
+    ],
+    _('[OPTION]...'),
+)
 def debuglocks(ui, repo, **opts):
     """show or modify state of locks
 
@@ -1499,15 +1774,18 @@
                     if host == socket.gethostname():
                         locker = 'user %s, process %s' % (user or b'None', pid)
                     else:
-                        locker = ('user %s, process %s, host %s'
-                                  % (user or b'None', pid, host))
-                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
+                        locker = 'user %s, process %s, host %s' % (
+                            user or b'None',
+                            pid,
+                            host,
+                        )
+                ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age))
                 return 1
             except OSError as e:
                 if e.errno != errno.ENOENT:
                     raise
 
-        ui.write(("%-6s free\n") % (name + ":"))
+        ui.write("%-6s free\n" % (name + ":"))
         return 0
 
     held += report(repo.svfs, "lock", repo.lock)
@@ -1515,11 +1793,21 @@
 
     return held
 
-@command('debugmanifestfulltextcache', [
+
+@command(
+    'debugmanifestfulltextcache',
+    [
         ('', 'clear', False, _('clear the cache')),
-        ('a', 'add', [], _('add the given manifest nodes to the cache'),
-         _('NODE'))
-    ], '')
+        (
+            'a',
+            'add',
+            [],
+            _('add the given manifest nodes to the cache'),
+            _('NODE'),
+        ),
+    ],
+    '',
+)
 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
     """show, clear or amend the contents of the manifest fulltext cache"""
 
@@ -1528,8 +1816,10 @@
         try:
             return r._fulltextcache
         except AttributeError:
-            msg = _("Current revlog implementation doesn't appear to have a "
-                    "manifest fulltext cache\n")
+            msg = _(
+                "Current revlog implementation doesn't appear to have a "
+                "manifest fulltext cache\n"
+            )
             raise error.Abort(msg)
 
     if opts.get(r'clear'):
@@ -1555,28 +1845,35 @@
         ui.write(_('cache empty\n'))
     else:
         ui.write(
-            _('cache contains %d manifest entries, in order of most to '
-              'least recent:\n') % (len(cache),))
+            _(
+                'cache contains %d manifest entries, in order of most to '
+                'least recent:\n'
+            )
+            % (len(cache),)
+        )
         totalsize = 0
         for nodeid in cache:
             # use cache.peek() so we do not update the LRU order
             data = cache.peek(nodeid)
             size = len(data)
-            totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
-            ui.write(_('id: %s, size %s\n') % (
-                hex(nodeid), util.bytecount(size)))
+            totalsize += size + 24  # 20 bytes nodeid, 4 bytes size
+            ui.write(
+                _('id: %s, size %s\n') % (hex(nodeid), util.bytecount(size))
+            )
         ondisk = cache._opener.stat('manifestfulltextcache').st_size
         ui.write(
-            _('total cache data size %s, on-disk %s\n') % (
-                util.bytecount(totalsize), util.bytecount(ondisk))
+            _('total cache data size %s, on-disk %s\n')
+            % (util.bytecount(totalsize), util.bytecount(ondisk))
         )
 
+
 @command('debugmergestate', [], '')
 def debugmergestate(ui, repo, *args):
     """print merge state
 
     Use --verbose to print out information about whether v1 or v2 merge state
     was chosen."""
+
     def _hashornull(h):
         if h == nullhex:
             return 'null'
@@ -1584,7 +1881,7 @@
             return h
 
     def printrecords(version):
-        ui.write(('* version %d records\n') % version)
+        ui.write('* version %d records\n' % version)
         if version == 1:
             records = v1records
         else:
@@ -1593,13 +1890,12 @@
         for rtype, record in records:
             # pretty print some record types
             if rtype == 'L':
-                ui.write(('local: %s\n') % record)
+                ui.write('local: %s\n' % record)
             elif rtype == 'O':
-                ui.write(('other: %s\n') % record)
+                ui.write('other: %s\n' % record)
             elif rtype == 'm':
                 driver, mdstate = record.split('\0', 1)
-                ui.write(('merge driver: %s (state "%s")\n')
-                         % (driver, mdstate))
+                ui.write('merge driver: %s (state "%s")\n' % (driver, mdstate))
             elif rtype in 'FDC':
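+                # file records carry NUL-separated fields describing one conflicted file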
                 r = record.split('\0')
                 f, state, hash, lfile, afile, anode, ofile = r[0:7]
@@ -1608,13 +1904,18 @@
                     flags = r[7]
                 else:
                     onode, flags = r[7:9]
-                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
-                         % (f, rtype, state, _hashornull(hash)))
-                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
-                ui.write(('  ancestor path: %s (node %s)\n')
-                         % (afile, _hashornull(anode)))
-                ui.write(('  other path: %s (node %s)\n')
-                         % (ofile, _hashornull(onode)))
+                ui.write(
+                    'file: %s (record type "%s", state "%s", hash %s)\n'
+                    % (f, rtype, state, _hashornull(hash))
+                )
+                ui.write('  local path: %s (flags "%s")\n' % (lfile, flags))
+                ui.write(
+                    '  ancestor path: %s (node %s)\n'
+                    % (afile, _hashornull(anode))
+                )
+                ui.write(
+                    '  other path: %s (node %s)\n' % (ofile, _hashornull(onode))
+                )
             elif rtype == 'f':
                 filename, rawextras = record.split('\0', 1)
                 extras = rawextras.split('\0')
@@ -1624,19 +1925,23 @@
                     extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                     i += 2
 
-                ui.write(('file extras: %s (%s)\n')
-                         % (filename, ', '.join(extrastrings)))
+                ui.write(
+                    'file extras: %s (%s)\n'
+                    % (filename, ', '.join(extrastrings))
+                )
             elif rtype == 'l':
                 labels = record.split('\0', 2)
                 labels = [l for l in labels if len(l) > 0]
-                ui.write(('labels:\n'))
+                ui.write('labels:\n')
                 ui.write(('  local: %s\n' % labels[0]))
                 ui.write(('  other: %s\n' % labels[1]))
                 if len(labels) > 2:
                     ui.write(('  base:  %s\n' % labels[2]))
             else:
-                ui.write(('unrecognized entry: %s\t%s\n')
-                         % (rtype, record.replace('\0', '\t')))
+                ui.write(
+                    'unrecognized entry: %s\t%s\n'
+                    % (rtype, record.replace('\0', '\t'))
+                )
 
     # Avoid mergestate.read() since it may raise an exception for unsupported
     # merge state records. We shouldn't be doing this, but this is OK since this
@@ -1647,29 +1952,32 @@
     v1records = ms._readrecordsv1()
     v2records = ms._readrecordsv2()
     order = 'LOml'
+
     def key(r):
         idx = order.find(r[0])
         if idx == -1:
             return (1, r[1])
         else:
             return (0, idx)
+
     v1records.sort(key=key)
     v2records.sort(key=key)
 
     if not v1records and not v2records:
-        ui.write(('no merge state found\n'))
+        ui.write('no merge state found\n')
     elif not v2records:
-        ui.note(('no version 2 merge state\n'))
+        ui.note('no version 2 merge state\n')
         printrecords(1)
     elif ms._v1v2match(v1records, v2records):
-        ui.note(('v1 and v2 states match: using v2\n'))
+        ui.note('v1 and v2 states match: using v2\n')
         printrecords(2)
     else:
-        ui.note(('v1 and v2 states mismatch: using v1\n'))
+        ui.note('v1 and v2 states mismatch: using v1\n')
         printrecords(1)
         if ui.verbose:
             printrecords(2)
 
+
 @command('debugnamecomplete', [], _('NAME...'))
 def debugnamecomplete(ui, repo, *args):
     '''complete "names" - tags, open branch names, bookmark names'''
@@ -1680,8 +1988,11 @@
     for name, ns in repo.names.iteritems():
         if name != 'branches':
             names.update(ns.listnames(repo))
-    names.update(tag for (tag, heads, tip, closed)
-                 in repo.branchmap().iterbranches() if not closed)
+    names.update(
+        tag
+        for (tag, heads, tip, closed) in repo.branchmap().iterbranches()
+        if not closed
+    )
     completions = set()
     if not args:
         args = ['']
@@ -1690,17 +2001,31 @@
     ui.write('\n'.join(sorted(completions)))
     ui.write('\n')
 
-@command('debugobsolete',
-        [('', 'flags', 0, _('markers flag')),
-         ('', 'record-parents', False,
-          _('record parent information for the precursor')),
-         ('r', 'rev', [], _('display markers relevant to REV')),
-         ('', 'exclusive', False, _('restrict display to markers only '
-                                    'relevant to REV')),
-         ('', 'index', False, _('display index of the marker')),
-         ('', 'delete', [], _('delete markers specified by indices')),
-        ] + cmdutil.commitopts2 + cmdutil.formatteropts,
-         _('[OBSOLETED [REPLACEMENT ...]]'))
+
+@command(
+    'debugobsolete',
+    [
+        ('', 'flags', 0, _('markers flag')),
+        (
+            '',
+            'record-parents',
+            False,
+            _('record parent information for the precursor'),
+        ),
+        ('r', 'rev', [], _('display markers relevant to REV')),
+        (
+            '',
+            'exclusive',
+            False,
+            _('restrict display to markers only ' 'relevant to REV'),
+        ),
+        ('', 'index', False, _('display index of the marker')),
+        ('', 'delete', [], _('delete markers specified by indices')),
+    ]
+    + cmdutil.commitopts2
+    + cmdutil.formatteropts,
+    _('[OBSOLETED [REPLACEMENT ...]]'),
+)
 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
     """create arbitrary obsolete marker
 
@@ -1718,8 +2043,10 @@
                 raise TypeError()
             return n
         except TypeError:
-            raise error.Abort('changeset references must be full hexadecimal '
-                             'node identifiers')
+            raise error.Abort(
+                'changeset references must be full hexadecimal '
+                'node identifiers'
+            )
 
     if opts.get('delete'):
         indices = []
@@ -1727,12 +2054,15 @@
             try:
                 indices.append(int(v))
             except ValueError:
-                raise error.Abort(_('invalid index value: %r') % v,
-                                  hint=_('use integers for indices'))
+                raise error.Abort(
+                    _('invalid index value: %r') % v,
+                    hint=_('use integers for indices'),
+                )
 
         if repo.currenttransaction():
-            raise error.Abort(_('cannot delete obsmarkers in the middle '
-                                'of transaction.'))
+            raise error.Abort(
+                _('cannot delete obsmarkers in the middle ' 'of transaction.')
+            )
 
         with repo.lock():
             n = repair.deleteobsmarkers(repo.obsstore, indices)
@@ -1759,17 +2089,27 @@
                 parents = None
                 if opts['record_parents']:
                     if prec not in repo.unfiltered():
-                        raise error.Abort('cannot used --record-parents on '
-                                         'unknown changesets')
+                        raise error.Abort(
+                            'cannot use --record-parents on '
+                            'unknown changesets'
+                        )
                     parents = repo.unfiltered()[prec].parents()
                     parents = tuple(p.node() for p in parents)
-                repo.obsstore.create(tr, prec, succs, opts['flags'],
-                                     parents=parents, date=date,
-                                     metadata=metadata, ui=ui)
+                repo.obsstore.create(
+                    tr,
+                    prec,
+                    succs,
+                    opts['flags'],
+                    parents=parents,
+                    date=date,
+                    metadata=metadata,
+                    ui=ui,
+                )
                 tr.close()
             except ValueError as exc:
-                raise error.Abort(_('bad obsmarker input: %s') %
-                                  pycompat.bytestr(exc))
+                raise error.Abort(
+                    _('bad obsmarker input: %s') % pycompat.bytestr(exc)
+                )
             finally:
                 tr.release()
         finally:
@@ -1778,8 +2118,11 @@
         if opts['rev']:
             revs = scmutil.revrange(repo, opts['rev'])
             nodes = [repo[r].node() for r in revs]
-            markers = list(obsutil.getmarkers(repo, nodes=nodes,
-                                               exclusive=opts['exclusive']))
+            markers = list(
+                obsutil.getmarkers(
+                    repo, nodes=nodes, exclusive=opts['exclusive']
+                )
+            )
             markers.sort(key=lambda x: x._data)
         else:
             markers = obsutil.getmarkers(repo)
@@ -1807,9 +2150,12 @@
             cmdutil.showmarker(fm, m, index=ind)
         fm.end()
 
-@command('debugp1copies',
-         [('r', 'rev', '', _('revision to debug'), _('REV'))],
-         _('[-r REV]'))
+
+@command(
+    'debugp1copies',
+    [('r', 'rev', '', _('revision to debug'), _('REV'))],
+    _('[-r REV]'),
+)
 def debugp1copies(ui, repo, **opts):
     """dump copy information compared to p1"""
 
@@ -1818,9 +2164,12 @@
     for dst, src in ctx.p1copies().items():
         ui.write('%s -> %s\n' % (src, dst))
 
-@command('debugp2copies',
-         [('r', 'rev', '', _('revision to debug'), _('REV'))],
-         _('[-r REV]'))
+
+@command(
+    'debugp2copies',
+    [('r', 'rev', '', _('revision to debug'), _('REV'))],
+    _('[-r REV]'),
+)
 def debugp2copies(ui, repo, **opts):
     """dump copy information compared to p2"""
 
@@ -1829,12 +2178,17 @@
     for dst, src in ctx.p2copies().items():
         ui.write('%s -> %s\n' % (src, dst))
 
-@command('debugpathcomplete',
-         [('f', 'full', None, _('complete an entire path')),
-          ('n', 'normal', None, _('show only normal files')),
-          ('a', 'added', None, _('show only added files')),
-          ('r', 'removed', None, _('show only removed files'))],
-         _('FILESPEC...'))
+
+@command(
+    'debugpathcomplete',
+    [
+        ('f', 'full', None, _('complete an entire path')),
+        ('n', 'normal', None, _('show only normal files')),
+        ('a', 'added', None, _('show only added files')),
+        ('r', 'removed', None, _('show only removed files')),
+    ],
+    _('FILESPEC...'),
+)
 def debugpathcomplete(ui, repo, *specs, **opts):
     '''complete part or all of a tracked path
 
@@ -1852,7 +2206,7 @@
             return [], []
         if os.path.isdir(spec):
             spec += '/'
-        spec = spec[len(rootdir):]
+        spec = spec[len(rootdir) :]
         fixpaths = pycompat.ossep != '/'
         if fixpaths:
             spec = spec.replace(pycompat.ossep, '/')
@@ -1894,10 +2248,13 @@
     ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
     ui.write('\n')
 
-@command('debugpathcopies',
-         cmdutil.walkopts,
-         'hg debugpathcopies REV1 REV2 [FILE]',
-         inferrepo=True)
+
+@command(
+    'debugpathcopies',
+    cmdutil.walkopts,
+    'hg debugpathcopies REV1 REV2 [FILE]',
+    inferrepo=True,
+)
 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
     """show copies between two revisions"""
     ctx1 = scmutil.revsingle(repo, rev1)
@@ -1906,6 +2263,7 @@
     for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
         ui.write('%s -> %s\n' % (src, dst))
 
+
 @command('debugpeer', [], _('PATH'), norepo=True)
 def debugpeer(ui, path):
     """establish a connection to a peer repository"""
@@ -1925,12 +2283,18 @@
         ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
         ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
 
-@command('debugpickmergetool',
-        [('r', 'rev', '', _('check for files in this revision'), _('REV')),
-         ('', 'changedelete', None, _('emulate merging change and delete')),
-        ] + cmdutil.walkopts + cmdutil.mergetoolopts,
-        _('[PATTERN]...'),
-        inferrepo=True)
+
+@command(
+    'debugpickmergetool',
+    [
+        ('r', 'rev', '', _('check for files in this revision'), _('REV')),
+        ('', 'changedelete', None, _('emulate merging change and delete')),
+    ]
+    + cmdutil.walkopts
+    + cmdutil.mergetoolopts,
+    _('[PATTERN]...'),
+    inferrepo=True,
+)
 def debugpickmergetool(ui, repo, *pats, **opts):
     """examine which merge tool is chosen for specified file
 
@@ -1977,15 +2341,15 @@
     overrides = {}
     if opts['tool']:
         overrides[('ui', 'forcemerge')] = opts['tool']
-        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
+        ui.note('with --tool %r\n' % (pycompat.bytestr(opts['tool'])))
 
     with ui.configoverride(overrides, 'debugmergepatterns'):
         hgmerge = encoding.environ.get("HGMERGE")
         if hgmerge is not None:
-            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
+            ui.note('with HGMERGE=%r\n' % (pycompat.bytestr(hgmerge)))
         uimerge = ui.config("ui", "merge")
         if uimerge:
-            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
+            ui.note('with ui.merge=%r\n' % (pycompat.bytestr(uimerge)))
 
         ctx = scmutil.revsingle(repo, opts.get('rev'))
         m = scmutil.match(ctx, pats, opts)
@@ -1995,14 +2359,19 @@
             try:
                 if not ui.debugflag:
                     ui.pushbuffer(error=True)
-                tool, toolpath = filemerge._picktool(repo, ui, path,
-                                                     fctx.isbinary(),
-                                                     'l' in fctx.flags(),
-                                                     changedelete)
+                tool, toolpath = filemerge._picktool(
+                    repo,
+                    ui,
+                    path,
+                    fctx.isbinary(),
+                    'l' in fctx.flags(),
+                    changedelete,
+                )
             finally:
                 if not ui.debugflag:
                     ui.popbuffer()
-            ui.write(('%s = %s\n') % (path, tool))
+            ui.write('%s = %s\n' % (path, tool))
+
 
 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
@@ -2018,19 +2387,19 @@
     if keyinfo:
         key, old, new = keyinfo
         with target.commandexecutor() as e:
-            r = e.callcommand('pushkey', {
-                'namespace': namespace,
-                'key': key,
-                'old': old,
-                'new': new,
-            }).result()
+            r = e.callcommand(
+                'pushkey',
+                {'namespace': namespace, 'key': key, 'old': old, 'new': new},
+            ).result()
 
         ui.status(pycompat.bytestr(r) + '\n')
         return not r
     else:
         for k, v in sorted(target.listkeys(namespace).iteritems()):
-            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
-                                   stringutil.escapestr(v)))
+            ui.write(
+                "%s\t%s\n" % (stringutil.escapestr(k), stringutil.escapestr(v))
+            )
+
 
 @command('debugpvec', [], _('A B'))
 def debugpvec(ui, repo, a, b=None):
@@ -2049,16 +2418,33 @@
     ui.write(_("a: %s\n") % pa)
     ui.write(_("b: %s\n") % pb)
     ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
-    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
-             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
-              pa.distance(pb), rel))
-
-@command('debugrebuilddirstate|debugrebuildstate',
-    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
-     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
-                             'the working copy parent')),
+    ui.write(
+        _("delta: %d hdist: %d distance: %d relation: %s\n")
+        % (
+            abs(pa._depth - pb._depth),
+            pvec._hamming(pa._vec, pb._vec),
+            pa.distance(pb),
+            rel,
+        )
+    )
+
+
+@command(
+    'debugrebuilddirstate|debugrebuildstate',
+    [
+        ('r', 'rev', '', _('revision to rebuild to'), _('REV')),
+        (
+            '',
+            'minimal',
+            None,
+            _(
+                'only rebuild files that are inconsistent with '
+                'the working copy parent'
+            ),
+        ),
     ],
-    _('[-r REV]'))
+    _('[-r REV]'),
+)
 def debugrebuilddirstate(ui, repo, rev, **opts):
     """rebuild the dirstate as it would look like for the given revision
 
@@ -2091,14 +2477,18 @@
 
         dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
 
+
 @command('debugrebuildfncache', [], '')
 def debugrebuildfncache(ui, repo):
     """rebuild the fncache file"""
     repair.rebuildfncache(ui, repo)
 
-@command('debugrename',
+
+@command(
+    'debugrename',
     [('r', 'rev', '', _('revision to debug'), _('REV'))],
-    _('[-r REV] [FILE]...'))
+    _('[-r REV] [FILE]...'),
+)
 def debugrename(ui, repo, *pats, **opts):
     """dump rename information"""
 
@@ -2114,10 +2504,13 @@
         else:
             ui.write(_("%s not renamed\n") % rel)
 
-@command('debugrevlog', cmdutil.debugrevlogopts +
-    [('d', 'dump', False, _('dump index data'))],
+
+@command(
+    'debugrevlog',
+    cmdutil.debugrevlogopts + [('d', 'dump', False, _('dump index data'))],
     _('-c|-m|FILE'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugrevlog(ui, repo, file_=None, **opts):
     """show data and statistics about a revlog"""
     opts = pycompat.byteskwargs(opts)
@@ -2125,8 +2518,12 @@
 
     if opts.get("dump"):
         numrevs = len(r)
-        ui.write(("# rev p1rev p2rev start   end deltastart base   p1   p2"
-                 " rawsize totalsize compression heads chainlen\n"))
+        ui.write(
+            (
+                "# rev p1rev p2rev start   end deltastart base   p1   p2"
+                " rawsize totalsize compression heads chainlen\n"
+            )
+        )
         ts = 0
         heads = set()
 
@@ -2145,12 +2542,26 @@
                 compression = ts / r.end(rev)
             except ZeroDivisionError:
                 compression = 0
-            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
-                     "%11d %5d %8d\n" %
-                     (rev, p1, p2, r.start(rev), r.end(rev),
-                      r.start(dbase), r.start(cbase),
-                      r.start(p1), r.start(p2),
-                      rs, ts, compression, len(heads), clen))
+            ui.write(
+                "%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
+                "%11d %5d %8d\n"
+                % (
+                    rev,
+                    p1,
+                    p2,
+                    r.start(rev),
+                    r.end(rev),
+                    r.start(dbase),
+                    r.start(cbase),
+                    r.start(p1),
+                    r.start(p2),
+                    rs,
+                    ts,
+                    compression,
+                    len(heads),
+                    clen,
+                )
+            )
         return 0
 
     v = r.version
@@ -2319,6 +2730,7 @@
 
     def dfmtstr(max):
         return basedfmtstr % len(str(max))
+
     def pcfmtstr(max, padding=0):
         return basepcfmtstr % (len(str(max)), ' ' * padding)
 
@@ -2328,33 +2740,40 @@
         else:
             return value, 100.0
 
-    ui.write(('format : %d\n') % format)
-    ui.write(('flags  : %s\n') % ', '.join(flags))
+    ui.write('format : %d\n' % format)
+    ui.write('flags  : %s\n' % ', '.join(flags))
 
     ui.write('\n')
     fmt = pcfmtstr(totalsize)
     fmt2 = dfmtstr(totalsize)
-    ui.write(('revisions     : ') + fmt2 % numrevs)
-    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
-    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
-    ui.write(('revisions     : ') + fmt2 % numrevs)
-    ui.write(('    empty     : ') + fmt % pcfmt(numempty, numrevs))
-    ui.write(('                   text  : ')
-             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
-    ui.write(('                   delta : ')
-             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
-    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
+    ui.write('revisions     : ' + fmt2 % numrevs)
+    ui.write('    merges    : ' + fmt % pcfmt(nummerges, numrevs))
+    ui.write('    normal    : ' + fmt % pcfmt(numrevs - nummerges, numrevs))
+    ui.write('revisions     : ' + fmt2 % numrevs)
+    ui.write('    empty     : ' + fmt % pcfmt(numempty, numrevs))
+    ui.write(
+        '                   text  : '
+        + fmt % pcfmt(numemptytext, numemptytext + numemptydelta)
+    )
+    ui.write(
+        '                   delta : '
+        + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta)
+    )
+    ui.write('    snapshot  : ' + fmt % pcfmt(numfull + numsemi, numrevs))
     for depth in sorted(numsnapdepth):
-        ui.write(('      lvl-%-3d :       ' % depth)
-                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
-    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
-    ui.write(('revision size : ') + fmt2 % totalsize)
-    ui.write(('    snapshot  : ')
-             + fmt % pcfmt(fulltotal + semitotal, totalsize))
+        ui.write(
+            ('      lvl-%-3d :       ' % depth)
+            + fmt % pcfmt(numsnapdepth[depth], numrevs)
+        )
+    ui.write('    deltas    : ' + fmt % pcfmt(numdeltas, numrevs))
+    ui.write('revision size : ' + fmt2 % totalsize)
+    ui.write('    snapshot  : ' + fmt % pcfmt(fulltotal + semitotal, totalsize))
     for depth in sorted(numsnapdepth):
-        ui.write(('      lvl-%-3d :       ' % depth)
-                 + fmt % pcfmt(snaptotal[depth], totalsize))
-    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))
+        ui.write(
+            ('      lvl-%-3d :       ' % depth)
+            + fmt % pcfmt(snaptotal[depth], totalsize)
+        )
+    ui.write('    deltas    : ' + fmt % pcfmt(deltatotal, totalsize))
 
     def fmtchunktype(chunktype):
         if chunktype == 'empty':
@@ -2365,62 +2784,78 @@
             return '    0x%s      : ' % hex(chunktype)
 
     ui.write('\n')
-    ui.write(('chunks        : ') + fmt2 % numrevs)
+    ui.write('chunks        : ' + fmt2 % numrevs)
     for chunktype in sorted(chunktypecounts):
         ui.write(fmtchunktype(chunktype))
         ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
-    ui.write(('chunks size   : ') + fmt2 % totalsize)
+    ui.write('chunks size   : ' + fmt2 % totalsize)
     for chunktype in sorted(chunktypecounts):
         ui.write(fmtchunktype(chunktype))
         ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
 
     ui.write('\n')
     fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
-    ui.write(('avg chain length  : ') + fmt % avgchainlen)
-    ui.write(('max chain length  : ') + fmt % maxchainlen)
-    ui.write(('max chain reach   : ') + fmt % maxchainspan)
-    ui.write(('compression ratio : ') + fmt % compratio)
+    ui.write('avg chain length  : ' + fmt % avgchainlen)
+    ui.write('max chain length  : ' + fmt % maxchainlen)
+    ui.write('max chain reach   : ' + fmt % maxchainspan)
+    ui.write('compression ratio : ' + fmt % compratio)
 
     if format > 0:
         ui.write('\n')
-        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
-                 % tuple(datasize))
-    ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
-             % tuple(fullsize))
-    ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
-             % tuple(semisize))
+        ui.write(
+            'uncompressed data size (min/max/avg) : %d / %d / %d\n'
+            % tuple(datasize)
+        )
+    ui.write(
+        'full revision size (min/max/avg)     : %d / %d / %d\n'
+        % tuple(fullsize)
+    )
+    ui.write(
+        'inter-snapshot size (min/max/avg)    : %d / %d / %d\n'
+        % tuple(semisize)
+    )
     for depth in sorted(snapsizedepth):
         if depth == 0:
             continue
-        ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
-                 % ((depth,) + tuple(snapsizedepth[depth])))
-    ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
-             % tuple(deltasize))
+        ui.write(
+            '    level-%-3d (min/max/avg)          : %d / %d / %d\n'
+            % ((depth,) + tuple(snapsizedepth[depth]))
+        )
+    ui.write(
+        'delta size (min/max/avg)             : %d / %d / %d\n'
+        % tuple(deltasize)
+    )
 
     if numdeltas > 0:
         ui.write('\n')
         fmt = pcfmtstr(numdeltas)
         fmt2 = pcfmtstr(numdeltas, 4)
-        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
+        ui.write('deltas against prev  : ' + fmt % pcfmt(numprev, numdeltas))
         if numprev > 0:
-            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
-                                                              numprev))
-            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
-                                                              numprev))
-            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
-                                                              numprev))
+            ui.write(
+                '    where prev = p1  : ' + fmt2 % pcfmt(nump1prev, numprev)
+            )
+            ui.write(
+                '    where prev = p2  : ' + fmt2 % pcfmt(nump2prev, numprev)
+            )
+            ui.write(
+                '    other            : ' + fmt2 % pcfmt(numoprev, numprev)
+            )
         if gdelta:
-            ui.write(('deltas against p1    : ')
-                     + fmt % pcfmt(nump1, numdeltas))
-            ui.write(('deltas against p2    : ')
-                     + fmt % pcfmt(nump2, numdeltas))
-            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
-                                                             numdeltas))
-
-@command('debugrevlogindex', cmdutil.debugrevlogopts +
-    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
+            ui.write('deltas against p1    : ' + fmt % pcfmt(nump1, numdeltas))
+            ui.write('deltas against p2    : ' + fmt % pcfmt(nump2, numdeltas))
+            ui.write(
+                'deltas against other : ' + fmt % pcfmt(numother, numdeltas)
+            )
+
+
+@command(
+    'debugrevlogindex',
+    cmdutil.debugrevlogopts
+    + [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
     _('[-f FORMAT] -c|-m|FILE'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugrevlogindex(ui, repo, file_=None, **opts):
     """dump the contents of a revlog index"""
     opts = pycompat.byteskwargs(opts)
@@ -2442,19 +2877,29 @@
 
     if format == 0:
         if ui.verbose:
-            ui.write(("   rev    offset  length linkrev"
-                     " %s %s p2\n") % ("nodeid".ljust(idlen),
-                                       "p1".ljust(idlen)))
+            ui.write(
+                ("   rev    offset  length linkrev" " %s %s p2\n")
+                % ("nodeid".ljust(idlen), "p1".ljust(idlen))
+            )
         else:
-            ui.write(("   rev linkrev %s %s p2\n") % (
-                "nodeid".ljust(idlen), "p1".ljust(idlen)))
+            ui.write(
+                "   rev linkrev %s %s p2\n"
+                % ("nodeid".ljust(idlen), "p1".ljust(idlen))
+            )
     elif format == 1:
         if ui.verbose:
-            ui.write(("   rev flag   offset   length     size   link     p1"
-                      "     p2 %s\n") % "nodeid".rjust(idlen))
+            ui.write(
+                (
+                    "   rev flag   offset   length     size   link     p1"
+                    "     p2 %s\n"
+                )
+                % "nodeid".rjust(idlen)
+            )
         else:
-            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
-                     "nodeid".rjust(idlen))
+            ui.write(
+                "   rev flag     size   link     p1     p2 %s\n"
+                % "nodeid".rjust(idlen)
+            )
 
     for i in r:
         node = r.node(i)
@@ -2464,35 +2909,89 @@
             except Exception:
                 pp = [nullid, nullid]
             if ui.verbose:
-                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
-                        i, r.start(i), r.length(i), r.linkrev(i),
-                        shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
+                ui.write(
+                    "% 6d % 9d % 7d % 7d %s %s %s\n"
+                    % (
+                        i,
+                        r.start(i),
+                        r.length(i),
+                        r.linkrev(i),
+                        shortfn(node),
+                        shortfn(pp[0]),
+                        shortfn(pp[1]),
+                    )
+                )
             else:
-                ui.write("% 6d % 7d %s %s %s\n" % (
-                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
-                    shortfn(pp[1])))
+                ui.write(
+                    "% 6d % 7d %s %s %s\n"
+                    % (
+                        i,
+                        r.linkrev(i),
+                        shortfn(node),
+                        shortfn(pp[0]),
+                        shortfn(pp[1]),
+                    )
+                )
         elif format == 1:
             pr = r.parentrevs(i)
             if ui.verbose:
-                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
-                        i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
-                        r.linkrev(i), pr[0], pr[1], shortfn(node)))
+                ui.write(
+                    "% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n"
+                    % (
+                        i,
+                        r.flags(i),
+                        r.start(i),
+                        r.length(i),
+                        r.rawsize(i),
+                        r.linkrev(i),
+                        pr[0],
+                        pr[1],
+                        shortfn(node),
+                    )
+                )
             else:
-                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
-                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
-                    shortfn(node)))
-
-@command('debugrevspec',
-    [('', 'optimize', None,
-      _('print parsed tree after optimizing (DEPRECATED)')),
-     ('', 'show-revs', True, _('print list of result revisions (default)')),
-     ('s', 'show-set', None, _('print internal representation of result set')),
-     ('p', 'show-stage', [],
-      _('print parsed tree at the given stage'), _('NAME')),
-     ('', 'no-optimized', False, _('evaluate tree without optimization')),
-     ('', 'verify-optimized', False, _('verify optimized result')),
-     ],
-    ('REVSPEC'))
+                ui.write(
+                    "% 6d %04x % 8d % 6d % 6d % 6d %s\n"
+                    % (
+                        i,
+                        r.flags(i),
+                        r.rawsize(i),
+                        r.linkrev(i),
+                        pr[0],
+                        pr[1],
+                        shortfn(node),
+                    )
+                )
+
+
+@command(
+    'debugrevspec',
+    [
+        (
+            '',
+            'optimize',
+            None,
+            _('print parsed tree after optimizing (DEPRECATED)'),
+        ),
+        ('', 'show-revs', True, _('print list of result revisions (default)')),
+        (
+            's',
+            'show-set',
+            None,
+            _('print internal representation of result set'),
+        ),
+        (
+            'p',
+            'show-stage',
+            [],
+            _('print parsed tree at the given stage'),
+            _('NAME'),
+        ),
+        ('', 'no-optimized', False, _('evaluate tree without optimization')),
+        ('', 'verify-optimized', False, _('verify optimized result')),
+    ],
+    'REVSPEC',
+)
 def debugrevspec(ui, repo, expr, **opts):
     """parse and apply a revision specification
 
@@ -2509,8 +3008,10 @@
     aliases = ui.configitems('revsetalias')
     stages = [
         ('parsed', lambda tree: tree),
-        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
-                                                           ui.warn)),
+        (
+            'expanded',
+            lambda tree: revsetlang.expandaliases(tree, aliases, ui.warn),
+        ),
         ('concatenated', revsetlang.foldconcat),
         ('analyzed', revsetlang.analyze),
         ('optimized', revsetlang.optimize),
@@ -2518,8 +3019,9 @@
     if opts['no_optimized']:
         stages = stages[:-1]
     if opts['verify_optimized'] and opts['no_optimized']:
-        raise error.Abort(_('cannot use --verify-optimized with '
-                            '--no-optimized'))
+        raise error.Abort(
+            _('cannot use --verify-optimized with --no-optimized')
+        )
     stagenames = set(n for n, f in stages)
 
     showalways = set()
@@ -2547,7 +3049,7 @@
         treebystage[n] = tree = f(tree)
         if n in showalways or (n in showchanged and tree != printedtree):
             if opts['show_stage'] or n != 'parsed':
-                ui.write(("* %s:\n") % n)
+                ui.write("* %s:\n" % n)
             ui.write(revsetlang.prettyformat(tree), "\n")
             printedtree = tree
 
@@ -2555,14 +3057,14 @@
         arevs = revset.makematcher(treebystage['analyzed'])(repo)
         brevs = revset.makematcher(treebystage['optimized'])(repo)
         if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
-            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
-            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
+            ui.write("* analyzed set:\n", stringutil.prettyrepr(arevs), "\n")
+            ui.write("* optimized set:\n", stringutil.prettyrepr(brevs), "\n")
         arevs = list(arevs)
         brevs = list(brevs)
         if arevs == brevs:
             return 0
-        ui.write(('--- analyzed\n'), label='diff.file_a')
-        ui.write(('+++ optimized\n'), label='diff.file_b')
+        ui.write('--- analyzed\n', label='diff.file_a')
+        ui.write('+++ optimized\n', label='diff.file_b')
         sm = difflib.SequenceMatcher(None, arevs, brevs)
         for tag, alo, ahi, blo, bhi in sm.get_opcodes():
             if tag in (r'delete', r'replace'):
@@ -2579,17 +3081,27 @@
     func = revset.makematcher(tree)
     revs = func(repo)
     if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
-        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
+        ui.write("* set:\n", stringutil.prettyrepr(revs), "\n")
     if not opts['show_revs']:
         return
     for c in revs:
         ui.write("%d\n" % c)
 
-@command('debugserve', [
-    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
-    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
-    ('', 'logiofile', '', _('file to log server I/O to')),
-], '')
+
+@command(
+    'debugserve',
+    [
+        (
+            '',
+            'sshstdio',
+            False,
+            _('run an SSH server bound to process handles'),
+        ),
+        ('', 'logiofd', '', _('file descriptor to log server I/O to')),
+        ('', 'logiofile', '', _('file to log server I/O to')),
+    ],
+    '',
+)
 def debugserve(ui, repo, **opts):
     """run a server with advanced settings
 
@@ -2622,6 +3134,7 @@
     s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
     s.serve_forever()
 
+
 @command('debugsetparents', [], _('REV1 [REV2]'))
 def debugsetparents(ui, repo, rev1, rev2=None):
     """manually set the parents of the current working directory
@@ -2640,6 +3153,7 @@
     with repo.wlock():
         repo.setparents(node1, node2)
 
+
 @command('debugsidedata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
 def debugsidedata(ui, repo, file_, rev=None, **opts):
     """dump the side data for a cl/manifest/file revision"""
@@ -2665,6 +3179,7 @@
             if ui.verbose:
                 ui.write(('  %s\n' % stringutil.pprint(value)))
 
+
 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
 def debugssl(ui, repo, source=None, **opts):
     '''test a secure connection to a server
@@ -2680,13 +3195,18 @@
     of the SSL error is likely another issue.
     '''
     if not pycompat.iswindows:
-        raise error.Abort(_('certificate chain building is only possible on '
-                            'Windows'))
+        raise error.Abort(
+            _('certificate chain building is only possible on Windows')
+        )
 
     if not source:
         if not repo:
-            raise error.Abort(_("there is no Mercurial repository here, and no "
-                                "server specified"))
+            raise error.Abort(
+                _(
+                    "there is no Mercurial repository here, and no "
+                    "server specified"
+                )
+            )
         source = "default"
 
     source, branches = hg.parseurl(ui.expandpath(source))
@@ -2703,8 +3223,12 @@
 
     from . import win32
 
-    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
-                        cert_reqs=ssl.CERT_NONE, ca_certs=None)
+    s = ssl.wrap_socket(
+        socket.socket(),
+        ssl_version=ssl.PROTOCOL_TLS,
+        cert_reqs=ssl.CERT_NONE,
+        ca_certs=None,
+    )
 
     try:
         s.connect(addr)
@@ -2726,20 +3250,25 @@
     finally:
         s.close()
 
-@command('debugsub',
-    [('r', 'rev', '',
-     _('revision to check'), _('REV'))],
-    _('[-r REV] [REV]'))
+
+@command(
+    'debugsub',
+    [('r', 'rev', '', _('revision to check'), _('REV'))],
+    _('[-r REV] [REV]'),
+)
 def debugsub(ui, repo, rev=None):
     ctx = scmutil.revsingle(repo, rev, None)
     for k, v in sorted(ctx.substate.items()):
-        ui.write(('path %s\n') % k)
-        ui.write((' source   %s\n') % v[0])
-        ui.write((' revision %s\n') % v[1])
-
-@command('debugsuccessorssets',
+        ui.write('path %s\n' % k)
+        ui.write(' source   %s\n' % v[0])
+        ui.write(' revision %s\n' % v[1])
+
+
+@command(
+    'debugsuccessorssets',
     [('', 'closest', False, _('return closest successors sets only'))],
-    _('[REV]'))
+    _('[REV]'),
+)
 def debugsuccessorssets(ui, repo, *revs, **opts):
     """show set of successors for revision
 
@@ -2778,10 +3307,10 @@
     node2str = short
     for rev in scmutil.revrange(repo, revs):
         ctx = repo[rev]
-        ui.write('%s\n'% ctx2str(ctx))
-        for succsset in obsutil.successorssets(repo, ctx.node(),
-                                                closest=opts[r'closest'],
-                                                cache=cache):
+        ui.write('%s\n' % ctx2str(ctx))
+        for succsset in obsutil.successorssets(
+            repo, ctx.node(), closest=opts[r'closest'], cache=cache
+        ):
             if succsset:
                 ui.write('    ')
                 ui.write(node2str(succsset[0]))
@@ -2790,11 +3319,16 @@
                     ui.write(node2str(node))
             ui.write('\n')
 
-@command('debugtemplate',
-    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
-     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
+
+@command(
+    'debugtemplate',
+    [
+        ('r', 'rev', [], _('apply template on changesets'), _('REV')),
+        ('D', 'define', [], _('define template keyword'), _('KEY=VALUE')),
+    ],
     _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugtemplate(ui, repo, tmpl, **opts):
     """parse and apply a template
 
@@ -2807,8 +3341,9 @@
     revs = None
     if opts[r'rev']:
         if repo is None:
-            raise error.RepoError(_('there is no Mercurial repository here '
-                                    '(.hg not found)'))
+            raise error.RepoError(
+                _('there is no Mercurial repository here (.hg not found)')
+            )
         revs = scmutil.revrange(repo, opts[r'rev'])
 
     props = {}
@@ -2827,41 +3362,50 @@
         ui.note(templater.prettyformat(tree), '\n')
         newtree = templater.expandaliases(tree, aliases)
         if newtree != tree:
-            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
+            ui.note("* expanded:\n", templater.prettyformat(newtree), '\n')
 
     if revs is None:
         tres = formatter.templateresources(ui, repo)
         t = formatter.maketemplater(ui, tmpl, resources=tres)
         if ui.verbose:
             kwds, funcs = t.symbolsuseddefault()
-            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
-            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
+            ui.write("* keywords: %s\n" % ', '.join(sorted(kwds)))
+            ui.write("* functions: %s\n" % ', '.join(sorted(funcs)))
         ui.write(t.renderdefault(props))
     else:
         displayer = logcmdutil.maketemplater(ui, repo, tmpl)
         if ui.verbose:
             kwds, funcs = displayer.t.symbolsuseddefault()
-            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
-            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
+            ui.write("* keywords: %s\n" % ', '.join(sorted(kwds)))
+            ui.write("* functions: %s\n" % ', '.join(sorted(funcs)))
         for r in revs:
             displayer.show(repo[r], **pycompat.strkwargs(props))
         displayer.close()
 
-@command('debuguigetpass', [
-    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
-], _('[-p TEXT]'), norepo=True)
+
+@command(
+    'debuguigetpass',
+    [('p', 'prompt', '', _('prompt text'), _('TEXT'))],
+    _('[-p TEXT]'),
+    norepo=True,
+)
 def debuguigetpass(ui, prompt=''):
     """show prompt to type password"""
     r = ui.getpass(prompt)
-    ui.write(('respose: %s\n') % r)
-
-@command('debuguiprompt', [
-    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
-], _('[-p TEXT]'), norepo=True)
+    ui.write('response: %s\n' % r)
+
+
+@command(
+    'debuguiprompt',
+    [('p', 'prompt', '', _('prompt text'), _('TEXT'))],
+    _('[-p TEXT]'),
+    norepo=True,
+)
 def debuguiprompt(ui, prompt=''):
     """show plain prompt"""
     r = ui.prompt(prompt)
-    ui.write(('response: %s\n') % r)
+    ui.write('response: %s\n' % r)
+
 
 @command('debugupdatecaches', [])
 def debugupdatecaches(ui, repo, *pats, **opts):
@@ -2869,13 +3413,17 @@
     with repo.wlock(), repo.lock():
         repo.updatecaches(full=True)
 
-@command('debugupgraderepo', [
-    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
-    ('', 'run', False, _('performs an upgrade')),
-    ('', 'backup', True, _('keep the old repository content around')),
-    ('', 'changelog', None, _('select the changelog for upgrade')),
-    ('', 'manifest', None, _('select the manifest for upgrade')),
-])
+
+@command(
+    'debugupgraderepo',
+    [
+        ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
+        ('', 'run', False, _('performs an upgrade')),
+        ('', 'backup', True, _('keep the old repository content around')),
+        ('', 'changelog', None, _('select the changelog for upgrade')),
+        ('', 'manifest', None, _('select the manifest for upgrade')),
+    ],
+)
 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
     """upgrade a repository to use different features
 
@@ -2903,17 +3451,20 @@
       * `--changelog`: optimize the changelog only
       * `--no-changelog --no-manifest`: optimize filelogs only
     """
-    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
-                               backup=backup, **opts)
-
-@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
-         inferrepo=True)
+    return upgrade.upgraderepo(
+        ui, repo, run=run, optimize=optimize, backup=backup, **opts
+    )
+
+
+@command(
+    'debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'), inferrepo=True
+)
 def debugwalk(ui, repo, *pats, **opts):
     """show how files match on given patterns"""
     opts = pycompat.byteskwargs(opts)
     m = scmutil.match(repo[None], pats, opts)
     if ui.verbose:
-        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
+        ui.write('* matcher:\n', stringutil.prettyrepr(m), '\n')
     items = list(repo[None].walk(m))
     if not items:
         return
@@ -2922,29 +3473,43 @@
         f = lambda fn: util.normpath(fn)
     fmt = 'f  %%-%ds  %%-%ds  %%s' % (
         max([len(abs) for abs in items]),
-        max([len(repo.pathto(abs)) for abs in items]))
+        max([len(repo.pathto(abs)) for abs in items]),
+    )
     for abs in items:
         line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
         ui.write("%s\n" % line.rstrip())
 
+
 @command('debugwhyunstable', [], _('REV'))
 def debugwhyunstable(ui, repo, rev):
     """explain instabilities of a changeset"""
     for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
         dnodes = ''
         if entry.get('divergentnodes'):
-            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
-                              for ctx in entry['divergentnodes']) + ' '
-        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
-                                    entry['reason'], entry['node']))
-
-@command('debugwireargs',
-    [('', 'three', '', 'three'),
-    ('', 'four', '', 'four'),
-    ('', 'five', '', 'five'),
-    ] + cmdutil.remoteopts,
+            dnodes = (
+                ' '.join(
+                    '%s (%s)' % (ctx.hex(), ctx.phasestr())
+                    for ctx in entry['divergentnodes']
+                )
+                + ' '
+            )
+        ui.write(
+            '%s: %s%s %s\n'
+            % (entry['instability'], dnodes, entry['reason'], entry['node'])
+        )
+
+
+@command(
+    'debugwireargs',
+    [
+        ('', 'three', '', 'three'),
+        ('', 'four', '', 'four'),
+        ('', 'five', '', 'five'),
+    ]
+    + cmdutil.remoteopts,
     _('REPO [OPTIONS]... [ONE [TWO]]'),
-    norepo=True)
+    norepo=True,
+)
 def debugwireargs(ui, repopath, *vals, **opts):
     opts = pycompat.byteskwargs(opts)
     repo = hg.peer(ui, opts, repopath)
@@ -2962,6 +3527,7 @@
     if res1 != res2:
         ui.warn("%s\n" % res2)
 
+
 def _parsewirelangblocks(fh):
     activeaction = None
     blocklines = []
@@ -3003,16 +3569,24 @@
     if activeaction:
         yield activeaction, blocklines
 
-@command('debugwireproto',
+
+@command(
+    'debugwireproto',
     [
         ('', 'localssh', False, _('start an SSH server for this repo')),
         ('', 'peer', '', _('construct a specific version of the peer')),
         ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
-        ('', 'nologhandshake', False,
-         _('do not log I/O related to the peer handshake')),
-    ] + cmdutil.remoteopts,
+        (
+            '',
+            'nologhandshake',
+            False,
+            _('do not log I/O related to the peer handshake'),
+        ),
+    ]
+    + cmdutil.remoteopts,
     _('[PATH]'),
-    optionalrepo=True)
+    optionalrepo=True,
+)
 def debugwireproto(ui, repo, path=None, **opts):
     """send wire protocol commands to a server
 
@@ -3200,12 +3774,15 @@
         raise error.Abort(_('--localssh requires a repository'))
 
     if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
-        raise error.Abort(_('invalid value for --peer'),
-                          hint=_('valid values are "raw", "ssh1", and "ssh2"'))
+        raise error.Abort(
+            _('invalid value for --peer'),
+            hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'),
+        )
 
     if path and opts['localssh']:
-        raise error.Abort(_('cannot specify --localssh with an explicit '
-                            'path'))
+        raise error.Abort(
+            _('cannot specify --localssh with an explicit path')
+        )
 
     if ui.interactive():
         ui.write(_('(waiting for commands on stdin)\n'))
@@ -3223,13 +3800,18 @@
         # separation. This prevents a whole class of potential bugs around
         # shared state from interfering with server operation.
         args = procutil.hgcmd() + [
-            '-R', repo.root,
-            'debugserve', '--sshstdio',
+            '-R',
+            repo.root,
+            'debugserve',
+            '--sshstdio',
         ]
-        proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
-                                stdin=subprocess.PIPE,
-                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                                bufsize=0)
+        proc = subprocess.Popen(
+            pycompat.rapply(procutil.tonativestr, args),
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            bufsize=0,
+        )
 
         stdin = proc.stdin
         stdout = proc.stdout
@@ -3237,12 +3819,15 @@
 
         # We turn the pipes into observers so we can log I/O.
         if ui.verbose or opts['peer'] == 'raw':
-            stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
-                                               logdata=True)
-            stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
-                                                logdata=True)
-            stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
-                                                logdata=True)
+            stdin = util.makeloggingfileobject(
+                ui, proc.stdin, b'i', logdata=True
+            )
+            stdout = util.makeloggingfileobject(
+                ui, proc.stdout, b'o', logdata=True
+            )
+            stderr = util.makeloggingfileobject(
+                ui, proc.stderr, b'e', logdata=True
+            )
 
         # --localssh also implies the peer connection settings.
 
@@ -3251,19 +3836,42 @@
 
         if opts['peer'] == 'ssh1':
             ui.write(_('creating ssh peer for wire protocol version 1\n'))
-            peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
-                                     None, autoreadstderr=autoreadstderr)
+            peer = sshpeer.sshv1peer(
+                ui,
+                url,
+                proc,
+                stdin,
+                stdout,
+                stderr,
+                None,
+                autoreadstderr=autoreadstderr,
+            )
         elif opts['peer'] == 'ssh2':
             ui.write(_('creating ssh peer for wire protocol version 2\n'))
-            peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
-                                     None, autoreadstderr=autoreadstderr)
+            peer = sshpeer.sshv2peer(
+                ui,
+                url,
+                proc,
+                stdin,
+                stdout,
+                stderr,
+                None,
+                autoreadstderr=autoreadstderr,
+            )
         elif opts['peer'] == 'raw':
             ui.write(_('using raw connection to peer\n'))
             peer = None
         else:
             ui.write(_('creating ssh peer from handshake results\n'))
-            peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
-                                    autoreadstderr=autoreadstderr)
+            peer = sshpeer.makepeer(
+                ui,
+                url,
+                proc,
+                stdin,
+                stdout,
+                stderr,
+                autoreadstderr=autoreadstderr,
+            )
 
     elif path:
         # We bypass hg.peer() so we can proxy the sockets.
@@ -3280,14 +3888,13 @@
 
         # Turn pipes/sockets into observers so we can log I/O.
         if ui.verbose:
-            openerargs.update({
-                r'loggingfh': ui,
-                r'loggingname': b's',
-                r'loggingopts': {
-                    r'logdata': True,
-                    r'logdataapis': False,
-                },
-            })
+            openerargs.update(
+                {
+                    r'loggingfh': ui,
+                    r'loggingname': b's',
+                    r'loggingopts': {r'logdata': True, r'logdataapis': False},
+                }
+            )
 
         if ui.debugflag:
             openerargs[r'loggingopts'][r'logdataapis'] = True
@@ -3304,8 +3911,9 @@
             ui.write(_('creating http peer for wire protocol version 2\n'))
             # We go through makepeer() because we need an API descriptor for
             # the peer instance to be useful.
-            with ui.configoverride({
-                ('experimental', 'httppeer.advertise-v2'): True}):
+            with ui.configoverride(
+                {('experimental', 'httppeer.advertise-v2'): True}
+            ):
                 if opts['nologhandshake']:
                     ui.pushbuffer()
 
@@ -3315,18 +3923,25 @@
                     ui.popbuffer()
 
             if not isinstance(peer, httppeer.httpv2peer):
-                raise error.Abort(_('could not instantiate HTTP peer for '
-                                    'wire protocol version 2'),
-                                  hint=_('the server may not have the feature '
-                                         'enabled or is not allowing this '
-                                         'client version'))
+                raise error.Abort(
+                    _(
+                        'could not instantiate HTTP peer for '
+                        'wire protocol version 2'
+                    ),
+                    hint=_(
+                        'the server may not have the feature '
+                        'enabled or is not allowing this '
+                        'client version'
+                    ),
+                )
 
         elif opts['peer'] == 'raw':
             ui.write(_('using raw connection to peer\n'))
             peer = None
         elif opts['peer']:
-            raise error.Abort(_('--peer %s not supported with HTTP peers') %
-                              opts['peer'])
+            raise error.Abort(
+                _('--peer %s not supported with HTTP peers') % opts['peer']
+            )
         else:
             peer = httppeer.makepeer(ui, path, opener=opener)
 
@@ -3355,8 +3970,12 @@
             stdin.flush()
         elif action.startswith('command'):
             if not peer:
-                raise error.Abort(_('cannot send commands unless peer instance '
-                                    'is available'))
+                raise error.Abort(
+                    _(
+                        'cannot send commands unless peer instance '
+                        'is available'
+                    )
+                )
 
             command = action.split(' ', 1)[1]
 
@@ -3386,22 +4005,28 @@
             if 'PUSHFILE' in args:
                 with open(args['PUSHFILE'], r'rb') as fh:
                     del args['PUSHFILE']
-                    res, output = peer._callpush(command, fh,
-                                                 **pycompat.strkwargs(args))
+                    res, output = peer._callpush(
+                        command, fh, **pycompat.strkwargs(args)
+                    )
                     ui.status(_('result: %s\n') % stringutil.escapestr(res))
-                    ui.status(_('remote output: %s\n') %
-                              stringutil.escapestr(output))
+                    ui.status(
+                        _('remote output: %s\n') % stringutil.escapestr(output)
+                    )
             else:
                 with peer.commandexecutor() as e:
                     res = e.callcommand(command, args).result()
 
                 if isinstance(res, wireprotov2peer.commandresponse):
                     val = res.objects()
-                    ui.status(_('response: %s\n') %
-                              stringutil.pprint(val, bprefix=True, indent=2))
+                    ui.status(
+                        _('response: %s\n')
+                        % stringutil.pprint(val, bprefix=True, indent=2)
+                    )
                 else:
-                    ui.status(_('response: %s\n') %
-                              stringutil.pprint(res, bprefix=True, indent=2))
+                    ui.status(
+                        _('response: %s\n')
+                        % stringutil.pprint(res, bprefix=True, indent=2)
+                    )
 
         elif action == 'batchbegin':
             if batchedcommands is not None:
@@ -3412,23 +4037,30 @@
             # There is a batching API we could go through. But it would be
             # difficult to normalize requests into function calls. It is easier
             # to bypass this layer and normalize to commands + args.
-            ui.status(_('sending batch with %d sub-commands\n') %
-                      len(batchedcommands))
+            ui.status(
+                _('sending batch with %d sub-commands\n') % len(batchedcommands)
+            )
             for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
-                ui.status(_('response #%d: %s\n') %
-                          (i, stringutil.escapestr(chunk)))
+                ui.status(
+                    _('response #%d: %s\n') % (i, stringutil.escapestr(chunk))
+                )
 
             batchedcommands = None
 
         elif action.startswith('httprequest '):
             if not opener:
-                raise error.Abort(_('cannot use httprequest without an HTTP '
-                                    'peer'))
+                raise error.Abort(
+                    _('cannot use httprequest without an HTTP peer')
+                )
 
             request = action.split(' ', 2)
             if len(request) != 3:
-                raise error.Abort(_('invalid httprequest: expected format is '
-                                    '"httprequest <method> <path>'))
+                raise error.Abort(
+                    _(
+                        'invalid httprequest: expected format is '
+                        '"httprequest <method> <path>'
+                    )
+                )
 
             method, httppath = request[1:]
             headers = {}
@@ -3449,12 +4081,14 @@
                         body = fh.read()
                 elif line.startswith(b'frame '):
                     frame = wireprotoframing.makeframefromhumanstring(
-                        line[len(b'frame '):])
+                        line[len(b'frame ') :]
+                    )
 
                     frames.append(frame)
                 else:
-                    raise error.Abort(_('unknown argument to httprequest: %s') %
-                                      line)
+                    raise error.Abort(
+                        _('unknown argument to httprequest: %s') % line
+                    )
 
             url = path + httppath
 
@@ -3478,10 +4112,12 @@
 
             ct = res.headers.get(r'Content-Type')
             if ct == r'application/mercurial-cbor':
-                ui.write(_('cbor> %s\n') %
-                         stringutil.pprint(cborutil.decodeall(body),
-                                           bprefix=True,
-                                           indent=2))
+                ui.write(
+                    _('cbor> %s\n')
+                    % stringutil.pprint(
+                        cborutil.decodeall(body), bprefix=True, indent=2
+                    )
+                )
 
         elif action == 'close':
             peer.close()
--- a/mercurial/destutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/destutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,13 +8,8 @@
 from __future__ import absolute_import
 
 from .i18n import _
-from . import (
-    bookmarks,
-    error,
-    obsutil,
-    scmutil,
-    stack
-)
+from . import bookmarks, error, obsutil, scmutil, stack
+
 
 def orphanpossibledestination(repo, rev):
     """Return all changesets that may be a new parent for orphan `rev`.
@@ -49,6 +44,7 @@
                         dest.add(dr)
     return dest
 
+
 def _destupdateobs(repo, clean):
     """decide of an update destination from obsolescence markers"""
     node = None
@@ -85,6 +81,7 @@
                 movemark = repo['.'].node()
     return node, movemark, None
 
+
 def _destupdatebook(repo, clean):
     """decide on an update destination from active bookmark"""
     # we also move the active bookmark, if any
@@ -94,6 +91,7 @@
         node = repo._bookmarks[activemark]
     return node, movemark, activemark
 
+
 def _destupdatebranch(repo, clean):
     """decide on an update destination from current branch
 
@@ -120,6 +118,7 @@
         node = repo['.'].node()
     return node, movemark, None
 
+
 def _destupdatebranchfallback(repo, clean):
     """decide on an update destination from closed heads in current branch"""
     wc = repo[None]
@@ -130,8 +129,9 @@
         heads = repo.branchheads(currentbranch, closed=True)
         assert heads, "any branch has at least one head"
         node = repo.revs('max(.::(%ln))', heads).first()
-        assert node is not None, ("any revision has at least "
-                                  "one descendant branch head")
+        assert node is not None, (
+            "any revision has at least " "one descendant branch head"
+        )
         if bookmarks.isactivewdirparent(repo):
             movemark = repo['.'].node()
     else:
@@ -140,15 +140,18 @@
         assert node is not None, "'tip' exists even in empty repository"
     return node, movemark, None
 
+
 # order in which each step should be evaluated
 # steps are run until one finds a destination
 destupdatesteps = ['evolution', 'bookmark', 'branch', 'branchfallback']
 # mapping to ease extension overriding steps.
-destupdatestepmap = {'evolution': _destupdateobs,
-                     'bookmark': _destupdatebook,
-                     'branch': _destupdatebranch,
-                     'branchfallback': _destupdatebranchfallback,
-                     }
+destupdatestepmap = {
+    'evolution': _destupdateobs,
+    'bookmark': _destupdatebook,
+    'branch': _destupdatebranch,
+    'branchfallback': _destupdatebranchfallback,
+}
+
 
 def destupdate(repo, clean=False):
     """destination for bare update operation
@@ -170,100 +173,109 @@
 
     return rev, movemark, activemark
 
+
 msgdestmerge = {
     # too many matching divergent bookmarks
-    'toomanybookmarks':
-        {'merge':
-            (_("multiple matching bookmarks to merge -"
-               " please merge with an explicit rev or bookmark"),
-             _("run 'hg heads' to see all heads")),
-         'rebase':
-            (_("multiple matching bookmarks to rebase -"
-               " please rebase to an explicit rev or bookmark"),
-             _("run 'hg heads' to see all heads")),
-        },
+    'toomanybookmarks': {
+        'merge': (
+            _(
+                "multiple matching bookmarks to merge -"
+                " please merge with an explicit rev or bookmark"
+            ),
+            _("run 'hg heads' to see all heads"),
+        ),
+        'rebase': (
+            _(
+                "multiple matching bookmarks to rebase -"
+                " please rebase to an explicit rev or bookmark"
+            ),
+            _("run 'hg heads' to see all heads"),
+        ),
+    },
     # no other matching divergent bookmark
-    'nootherbookmarks':
-        {'merge':
-            (_("no matching bookmark to merge - "
-               "please merge with an explicit rev or bookmark"),
-             _("run 'hg heads' to see all heads")),
-         'rebase':
-            (_("no matching bookmark to rebase - "
-               "please rebase to an explicit rev or bookmark"),
-             _("run 'hg heads' to see all heads")),
-        },
+    'nootherbookmarks': {
+        'merge': (
+            _(
+                "no matching bookmark to merge - "
+                "please merge with an explicit rev or bookmark"
+            ),
+            _("run 'hg heads' to see all heads"),
+        ),
+        'rebase': (
+            _(
+                "no matching bookmark to rebase - "
+                "please rebase to an explicit rev or bookmark"
+            ),
+            _("run 'hg heads' to see all heads"),
+        ),
+    },
     # branch has too many unbookmarked heads, no obvious destination
-    'toomanyheads':
-        {'merge':
-            (_("branch '%s' has %d heads - please merge with an explicit rev"),
-             _("run 'hg heads .' to see heads")),
-         'rebase':
-            (_("branch '%s' has %d heads - please rebase to an explicit rev"),
-             _("run 'hg heads .' to see heads")),
-        },
+    'toomanyheads': {
+        'merge': (
+            _("branch '%s' has %d heads - please merge with an explicit rev"),
+            _("run 'hg heads .' to see heads"),
+        ),
+        'rebase': (
+            _("branch '%s' has %d heads - please rebase to an explicit rev"),
+            _("run 'hg heads .' to see heads"),
+        ),
+    },
     # branch has no other unbookmarked heads
-    'bookmarkedheads':
-        {'merge':
-            (_("heads are bookmarked - please merge with an explicit rev"),
-             _("run 'hg heads' to see all heads")),
-         'rebase':
-            (_("heads are bookmarked - please rebase to an explicit rev"),
-             _("run 'hg heads' to see all heads")),
-        },
+    'bookmarkedheads': {
+        'merge': (
+            _("heads are bookmarked - please merge with an explicit rev"),
+            _("run 'hg heads' to see all heads"),
+        ),
+        'rebase': (
+            _("heads are bookmarked - please rebase to an explicit rev"),
+            _("run 'hg heads' to see all heads"),
+        ),
+    },
     # branch has just a single head, but there are other branches
-    'nootherbranchheads':
-        {'merge':
-            (_("branch '%s' has one head - please merge with an explicit rev"),
-             _("run 'hg heads' to see all heads")),
-         'rebase':
-            (_("branch '%s' has one head - please rebase to an explicit rev"),
-             _("run 'hg heads' to see all heads")),
-        },
+    'nootherbranchheads': {
+        'merge': (
+            _("branch '%s' has one head - please merge with an explicit rev"),
+            _("run 'hg heads' to see all heads"),
+        ),
+        'rebase': (
+            _("branch '%s' has one head - please rebase to an explicit rev"),
+            _("run 'hg heads' to see all heads"),
+        ),
+    },
     # repository has a single head
-    'nootherheads':
-        {'merge':
-            (_('nothing to merge'),
-            None),
-         'rebase':
-            (_('nothing to rebase'),
-            None),
-        },
+    'nootherheads': {
+        'merge': (_('nothing to merge'), None),
+        'rebase': (_('nothing to rebase'), None),
+    },
     # repository has a single head and we are not on it
-    'nootherheadsbehind':
-        {'merge':
-            (_('nothing to merge'),
-             _("use 'hg update' instead")),
-         'rebase':
-            (_('nothing to rebase'),
-             _("use 'hg update' instead")),
-        },
+    'nootherheadsbehind': {
+        'merge': (_('nothing to merge'), _("use 'hg update' instead")),
+        'rebase': (_('nothing to rebase'), _("use 'hg update' instead")),
+    },
     # We are not on a head
-    'notatheads':
-        {'merge':
-            (_('working directory not at a head revision'),
-             _("use 'hg update' or merge with an explicit revision")),
-         'rebase':
-            (_('working directory not at a head revision'),
-             _("use 'hg update' or rebase to an explicit revision"))
-        },
-    'emptysourceset':
-        {'merge':
-            (_('source set is empty'),
-             None),
-         'rebase':
-            (_('source set is empty'),
-             None),
-        },
-    'multiplebranchessourceset':
-        {'merge':
-            (_('source set is rooted in multiple branches'),
-             None),
-         'rebase':
-            (_('rebaseset is rooted in multiple named branches'),
-             _('specify an explicit destination with --dest')),
-        },
-    }
+    'notatheads': {
+        'merge': (
+            _('working directory not at a head revision'),
+            _("use 'hg update' or merge with an explicit revision"),
+        ),
+        'rebase': (
+            _('working directory not at a head revision'),
+            _("use 'hg update' or rebase to an explicit revision"),
+        ),
+    },
+    'emptysourceset': {
+        'merge': (_('source set is empty'), None),
+        'rebase': (_('source set is empty'), None),
+    },
+    'multiplebranchessourceset': {
+        'merge': (_('source set is rooted in multiple branches'), None),
+        'rebase': (
+            _('rebaseset is rooted in multiple named branches'),
+            _('specify an explicit destination with --dest'),
+        ),
+    },
+}
+
 
 def _destmergebook(repo, action='merge', sourceset=None, destspace=None):
     """find merge destination in the active bookmark case"""
@@ -284,8 +296,10 @@
     assert node is not None
     return node
 
-def _destmergebranch(repo, action='merge', sourceset=None, onheadcheck=True,
-                     destspace=None):
+
+def _destmergebranch(
+    repo, action='merge', sourceset=None, onheadcheck=True, destspace=None
+):
     """find merge destination based on branch heads"""
     node = None
 
@@ -355,8 +369,10 @@
     assert node is not None
     return node
 
-def destmerge(repo, action='merge', sourceset=None, onheadcheck=True,
-              destspace=None):
+
+def destmerge(
+    repo, action='merge', sourceset=None, onheadcheck=True, destspace=None
+):
     """return the default destination for a merge
 
     (or raise exception about why it can't pick one)
@@ -366,13 +382,20 @@
     # destspace is here to work around issues with `hg pull --rebase` see
     # issue5214 for details
     if repo._activebookmark:
-        node = _destmergebook(repo, action=action, sourceset=sourceset,
-                              destspace=destspace)
+        node = _destmergebook(
+            repo, action=action, sourceset=sourceset, destspace=destspace
+        )
     else:
-        node = _destmergebranch(repo, action=action, sourceset=sourceset,
-                                onheadcheck=onheadcheck, destspace=destspace)
+        node = _destmergebranch(
+            repo,
+            action=action,
+            sourceset=sourceset,
+            onheadcheck=onheadcheck,
+            destspace=destspace,
+        )
     return repo[node].rev()
 
+
 def desthistedit(ui, repo):
     """Default base revision to edit for `hg histedit`."""
     default = ui.config('histedit', 'defaultrev')
@@ -390,10 +413,12 @@
 
     return None
 
+
 def stackbase(ui, repo):
     revs = stack.getstack(repo)
     return revs.first() if revs else None
 
+
 def _statusotherbook(ui, repo):
     bmheads = bookmarks.headsforactive(repo)
     curhead = repo._bookmarks[repo._activebookmark]
@@ -404,6 +429,7 @@
             msg = _('%i other divergent bookmarks for "%s"\n')
             ui.status(msg % (len(bmheads), repo._activebookmark))
 
+
 def _statusotherbranchheads(ui, repo):
     currentbranch = repo.dirstate.branch()
     allheads = repo.branchheads(currentbranch, closed=True)
@@ -420,22 +446,36 @@
         #  ========= ==========
         otherheads = repo.revs('%ln - parents()', heads)
         if repo['.'].closesbranch():
-            ui.warn(_('no open descendant heads on branch "%s", '
-                        'updating to a closed head\n') %
-                      (currentbranch))
+            ui.warn(
+                _(
+                    'no open descendant heads on branch "%s", '
+                    'updating to a closed head\n'
+                )
+                % currentbranch
+            )
             if otherheads:
-                ui.warn(_("(committing will reopen the head, "
-                            "use 'hg heads .' to see %i other heads)\n") %
-                          (len(otherheads)))
+                ui.warn(
+                    _(
+                        "(committing will reopen the head, "
+                        "use 'hg heads .' to see %i other heads)\n"
+                    )
+                    % (len(otherheads))
+                )
             else:
-                ui.warn(_('(committing will reopen branch "%s")\n') %
-                          (currentbranch))
+                ui.warn(
+                    _('(committing will reopen branch "%s")\n') % currentbranch
+                )
         elif otherheads:
             curhead = repo['.']
-            ui.status(_('updated to "%s: %s"\n') % (curhead,
-                                    curhead.description().split('\n')[0]))
-            ui.status(_('%i other heads for branch "%s"\n') %
-                      (len(otherheads), currentbranch))
+            ui.status(
+                _('updated to "%s: %s"\n')
+                % (curhead, curhead.description().split('\n')[0])
+            )
+            ui.status(
+                _('%i other heads for branch "%s"\n')
+                % (len(otherheads), currentbranch)
+            )
+
 
 def statusotherdests(ui, repo):
     """Print message about other head"""
--- a/mercurial/diffutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/diffutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,15 +16,33 @@
     pycompat,
 )
 
-def diffallopts(ui, opts=None, untrusted=False, section='diff',
-                configprefix=''):
+
+def diffallopts(
+    ui, opts=None, untrusted=False, section='diff', configprefix=''
+):
     '''return diffopts with all features supported and parsed'''
-    return difffeatureopts(ui, opts=opts, untrusted=untrusted, section=section,
-                           git=True, whitespace=True, formatchanging=True,
-                           configprefix=configprefix)
+    return difffeatureopts(
+        ui,
+        opts=opts,
+        untrusted=untrusted,
+        section=section,
+        git=True,
+        whitespace=True,
+        formatchanging=True,
+        configprefix=configprefix,
+    )
 
-def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False,
-                    whitespace=False, formatchanging=False, configprefix=''):
+
+def difffeatureopts(
+    ui,
+    opts=None,
+    untrusted=False,
+    section='diff',
+    git=False,
+    whitespace=False,
+    formatchanging=False,
+    configprefix='',
+):
     '''return diffopts with only opted-in features parsed
 
     Features:
@@ -33,6 +51,7 @@
     - formatchanging: options that will likely break or cause correctness issues
       with most diff parsers
     '''
+
     def get(key, name=None, getter=ui.configbool, forceplain=None):
         if opts:
             v = opts.get(key)
@@ -47,8 +66,9 @@
                 return v
         if forceplain is not None and ui.plain():
             return forceplain
-        return getter(section, configprefix + (name or key),
-                      untrusted=untrusted)
+        return getter(
+            section, configprefix + (name or key), untrusted=untrusted
+        )
 
     # core options, expected to be understood by every diff parser
     buildopts = {
@@ -63,8 +83,9 @@
 
         # since this is in the experimental section, we need to call
         # ui.configbool directly
-        buildopts['showsimilarity'] = ui.configbool('experimental',
-                                                    'extendedheader.similarity')
+        buildopts['showsimilarity'] = ui.configbool(
+            'experimental', 'extendedheader.similarity'
+        )
 
         # need to inspect the ui object instead of using get() since we want to
         # test for an int
@@ -92,16 +113,21 @@
 
     if whitespace:
         buildopts['ignorews'] = get('ignore_all_space', 'ignorews')
-        buildopts['ignorewsamount'] = get('ignore_space_change',
-                                          'ignorewsamount')
-        buildopts['ignoreblanklines'] = get('ignore_blank_lines',
-                                            'ignoreblanklines')
+        buildopts['ignorewsamount'] = get(
+            'ignore_space_change', 'ignorewsamount'
+        )
+        buildopts['ignoreblanklines'] = get(
+            'ignore_blank_lines', 'ignoreblanklines'
+        )
         buildopts['ignorewseol'] = get('ignore_space_at_eol', 'ignorewseol')
     if formatchanging:
         buildopts['text'] = opts and opts.get('text')
         binary = None if opts is None else opts.get('binary')
-        buildopts['nobinary'] = (not binary if binary is not None
-                                 else get('nobinary', forceplain=False))
+        buildopts['nobinary'] = (
+            not binary
+            if binary is not None
+            else get('nobinary', forceplain=False)
+        )
         buildopts['noprefix'] = get('noprefix', forceplain=False)
         buildopts['worddiff'] = get('word_diff', 'word-diff', forceplain=False)
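
A minimal usage sketch for the two entry points above, assuming `ui` is a loaded mercurial.ui.ui instance: diffallopts() parses every feature group, while difffeatureopts() parses only the groups that are explicitly opted in.

from mercurial import diffutil, ui as uimod

ui = uimod.ui.load()

# every feature group parsed (git, whitespace, formatchanging)
allopts = diffutil.diffallopts(ui)

# whitespace options only; the other groups keep their defaults
wsopts = diffutil.difffeatureopts(ui, whitespace=True)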
 
--- a/mercurial/dirstate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/dirstate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,20 +37,25 @@
 
 propertycache = util.propertycache
 filecache = scmutil.filecache
-_rangemask = 0x7fffffff
+_rangemask = 0x7FFFFFFF
 
 dirstatetuple = parsers.dirstatetuple
 
+
 class repocache(filecache):
     """filecache for files in .hg/"""
+
     def join(self, obj, fname):
         return obj._opener.join(fname)
 
+
 class rootcache(filecache):
     """filecache for files in the repository root"""
+
     def join(self, obj, fname):
         return obj._join(fname)
 
+
 def _getfsnow(vfs):
     '''Get "now" timestamp on filesystem'''
     tmpfd, tmpname = vfs.mkstemp()
@@ -60,9 +65,9 @@
         os.close(tmpfd)
         vfs.unlink(tmpname)
 
+
 @interfaceutil.implementer(intdirstate.idirstate)
 class dirstate(object):
-
     def __init__(self, opener, ui, root, validate, sparsematchfn):
         '''Create a new dirstate object.
 
@@ -183,6 +188,7 @@
 
     def flagfunc(self, buildfallback):
         if self._checklink and self._checkexec:
+
             def f(x):
                 try:
                     st = os.lstat(self._join(x))
@@ -193,24 +199,29 @@
                 except OSError:
                     pass
                 return ''
+
             return f
 
         fallback = buildfallback()
         if self._checklink:
+
             def f(x):
                 if os.path.islink(self._join(x)):
                     return 'l'
                 if 'x' in fallback(x):
                     return 'x'
                 return ''
+
             return f
         if self._checkexec:
+
             def f(x):
                 if 'l' in fallback(x):
                     return 'l'
                 if util.isexec(self._join(x)):
                     return 'x'
                 return ''
+
             return f
         else:
             return fallback
@@ -238,7 +249,7 @@
         if not util.endswithsep(rootsep):
             rootsep += pycompat.ossep
         if cwd.startswith(rootsep):
-            return cwd[len(rootsep):]
+            return cwd[len(rootsep) :]
         else:
             # we're outside the repo. return an absolute path.
             return cwd
@@ -296,8 +307,10 @@
         See localrepo.setparents()
         """
         if self._parentwriters == 0:
-            raise ValueError("cannot set dirstate parent outside of "
-                             "dirstate.parentchange context manager")
+            raise ValueError(
+                "cannot set dirstate parent outside of "
+                "dirstate.parentchange context manager"
+            )
 
         self._dirty = True
         oldp2 = self._pl[1]
@@ -307,7 +320,8 @@
         copies = {}
         if oldp2 != nullid and p2 == nullid:
             candidatefiles = self._map.nonnormalset.union(
-                                self._map.otherparentset)
+                self._map.otherparentset
+            )
             for f in candidatefiles:
                 s = self._map.get(f)
                 if s is None:
@@ -339,7 +353,7 @@
             ce = self._filecache['_branch']
             if ce:
                 ce.refresh()
-        except: # re-raises
+        except:  # re-raises
             f.discard()
             raise
 
@@ -382,8 +396,9 @@
         if state == 'a' or oldstate == 'r':
             scmutil.checkfilename(f)
             if self._map.hastrackeddir(f):
-                raise error.Abort(_('directory %r already in dirstate') %
-                                  pycompat.bytestr(f))
+                raise error.Abort(
+                    _('directory %r already in dirstate') % pycompat.bytestr(f)
+                )
             # shadows
             for d in util.finddirs(f):
                 if self._map.hastrackeddir(d):
@@ -391,8 +406,9 @@
                 entry = self._map.get(d)
                 if entry is not None and entry[0] != 'r':
                     raise error.Abort(
-                        _('file %r in dirstate clashes with %r') %
-                        (pycompat.bytestr(d), pycompat.bytestr(f)))
+                        _('file %r in dirstate clashes with %r')
+                        % (pycompat.bytestr(d), pycompat.bytestr(f))
+                    )
         self._dirty = True
         self._updatedfiles.add(f)
         self._map.addfile(f, oldstate, state, mode, size, mtime)
@@ -449,8 +465,9 @@
     def otherparent(self, f):
         '''Mark as coming from the other parent, always dirty.'''
         if self._pl[1] == nullid:
-            raise error.Abort(_("setting %r to other parent "
-                               "only allowed in merges") % f)
+            raise error.Abort(
+                _("setting %r to other parent " "only allowed in merges") % f
+            )
         if f in self and self[f] == 'n':
             # merge-like
             self._addpath(f, 'm', 0, -2, -1)
@@ -473,9 +490,9 @@
             entry = self._map.get(f)
             if entry is not None:
                 # backup the previous state
-                if entry[0] == 'm': # merge
+                if entry[0] == 'm':  # merge
                     size = -1
-                elif entry[0] == 'n' and entry[2] == -2: # other parent
+                elif entry[0] == 'n' and entry[2] == -2:  # other parent
                     size = -2
                     self._map.otherparentset.add(f)
         self._updatedfiles.add(f)
@@ -530,8 +547,9 @@
             if isknown:
                 folded = path
             else:
-                folded = self._discoverpath(path, normed, ignoremissing, exists,
-                                            self._map.filefoldmap)
+                folded = self._discoverpath(
+                    path, normed, ignoremissing, exists, self._map.filefoldmap
+                )
         return folded
 
     def _normalize(self, path, isknown, ignoremissing=False, exists=None):
@@ -545,8 +563,9 @@
             else:
                 # store discovered result in dirfoldmap so that future
                 # normalizefile calls don't start matching directories
-                folded = self._discoverpath(path, normed, ignoremissing, exists,
-                                            self._map.dirfoldmap)
+                folded = self._discoverpath(
+                    path, normed, ignoremissing, exists, self._map.dirfoldmap
+                )
         return folded
 
     def normalize(self, path, isknown=False, ignoremissing=False):
@@ -625,8 +644,12 @@
             self._updatedfiles.clear()
 
             # delay writing in-memory changes out
-            tr.addfilegenerator('dirstate', (self._filename,),
-                                self._writedirstate, location='plain')
+            tr.addfilegenerator(
+                'dirstate',
+                (self._filename,),
+                self._writedirstate,
+                location='plain',
+            )
             return
 
         st = self._opener(filename, "w", atomictemp=True, checkambig=True)
@@ -661,14 +684,15 @@
             items = self._map.iteritems()
             for f, e in items:
                 if e[0] == 'n' and e[3] == now:
-                    import time # to avoid useless import
+                    import time  # to avoid useless import
+
                     # rather than sleep n seconds, sleep until the next
                     # multiple of n seconds
                     clock = time.time()
                     start = int(clock) - (int(clock) % delaywrite)
                     end = start + delaywrite
                     time.sleep(end - clock)
-                    now = end # trust our estimate that the end is near now
+                    now = end  # trust our estimate that the end is near now
                     break
             # since the iterator is potentially not deleted,
             # delete the iterator to release the reference for the Rust
@@ -705,16 +729,18 @@
         visited = set()
         while files:
             i = files.popleft()
-            patterns = matchmod.readpatternfile(i, self._ui.warn,
-                                                sourceinfo=True)
+            patterns = matchmod.readpatternfile(
+                i, self._ui.warn, sourceinfo=True
+            )
             for pattern, lineno, line in patterns:
                 kind, p = matchmod._patsplit(pattern, 'glob')
                 if kind == "subinclude":
                     if p not in visited:
                         files.append(p)
                     continue
-                m = matchmod.match(self._root, '', [], [pattern],
-                                   warn=self._ui.warn)
+                m = matchmod.match(
+                    self._root, '', [], [pattern], warn=self._ui.warn
+                )
                 if m(f):
                     return (i, lineno, line)
             visited.add(i)
@@ -807,10 +833,10 @@
                     badfn(ff, badtype(kind))
                     if nf in dmap:
                         results[nf] = None
-            except OSError as inst: # nf not found on disk - it is dirstate only
-                if nf in dmap: # does it exactly match a missing file?
+            except OSError as inst:  # nf not found on disk - it is dirstate only
+                if nf in dmap:  # does it exactly match a missing file?
                     results[nf] = None
-                else: # does it match a missing directory?
+                else:  # does it match a missing directory?
                     if self._map.hasdir(nf):
                         if matchedir:
                             matchedir(nf)
@@ -852,8 +878,9 @@
             for norm, paths in normed.iteritems():
                 if len(paths) > 1:
                     for path in paths:
-                        folded = self._discoverpath(path, norm, True, None,
-                                                    self._map.dirfoldmap)
+                        folded = self._discoverpath(
+                            path, norm, True, None, self._map.dirfoldmap
+                        )
                         if path != folded:
                             results[path] = None
 
@@ -897,10 +924,10 @@
         join = self._join
 
         exact = skipstep3 = False
-        if match.isexact(): # match.exact
+        if match.isexact():  # match.exact
             exact = True
-            dirignore = util.always # skip step 2
-        elif match.prefix(): # match.match, no patterns
+            dirignore = util.always  # skip step 2
+        elif match.prefix():  # match.match, no patterns
             skipstep3 = True
 
         if not exact and self._checkcase:
@@ -934,8 +961,9 @@
                     entries = listdir(join(nd), stat=True, skip=skip)
                 except OSError as inst:
                     if inst.errno in (errno.EACCES, errno.ENOENT):
-                        match.bad(self.pathto(nd),
-                                  encoding.strtolocal(inst.strerror))
+                        match.bad(
+                            self.pathto(nd), encoding.strtolocal(inst.strerror)
+                        )
                         continue
                     raise
                 for f, kind, st in entries:
@@ -953,8 +981,9 @@
                         # even though f might be a directory, we're only
                         # interested in comparing it to files currently in the
                         # dmap -- therefore normalizefile is enough
-                        nf = normalizefile(nd and (nd + "/" + f) or f, True,
-                                           True)
+                        nf = normalizefile(
+                            nd and (nd + "/" + f) or f, True, True
+                        )
                     else:
                         nf = nd and (nd + "/" + f) or f
                     if nf not in results:
@@ -969,8 +998,9 @@
                             if nf in dmap:
                                 if matchalways or matchfn(nf):
                                     results[nf] = st
-                            elif ((matchalways or matchfn(nf))
-                                  and not ignore(nf)):
+                            elif (matchalways or matchfn(nf)) and not ignore(
+                                nf
+                            ):
                                 # unknown file -- normalize if necessary
                                 if not alreadynormed:
                                     nf = normalize(nf, False, True)
@@ -1011,8 +1041,10 @@
                     # different case, don't add one for this, since that would
                     # make it appear as if the file exists under both names
                     # on disk.
-                    if (normalizefile and
-                        normalizefile(nf, True, True) in results):
+                    if (
+                        normalizefile
+                        and normalizefile(nf, True, True) in results
+                    ):
                         results[nf] = None
                     # Report ignored items in the dmap as long as they are not
                     # under a symlink directory.
@@ -1059,7 +1091,7 @@
         dmap.preload()
         dcontains = dmap.__contains__
         dget = dmap.__getitem__
-        ladd = lookup.append            # aka "unsure"
+        ladd = lookup.append  # aka "unsure"
         madd = modified.append
         aadd = added.append
         uadd = unknown.append
@@ -1078,8 +1110,9 @@
         # - match.traversedir does something, because match.traversedir should
         #   be called for every dir in the working dir
         full = listclean or match.traversedir is not None
-        for fn, st in self.walk(match, subrepos, listunknown, listignored,
-                                full=full).iteritems():
+        for fn, st in self.walk(
+            match, subrepos, listunknown, listignored, full=full
+        ).iteritems():
             if not dcontains(fn):
                 if (listignored or mexact(fn)) and dirignore(fn):
                     if listignored:
@@ -1104,14 +1137,20 @@
             if not st and state in "nma":
                 dadd(fn)
             elif state == 'n':
-                if (size >= 0 and
-                    ((size != st.st_size and size != st.st_size & _rangemask)
-                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
-                    or size == -2 # other parent
-                    or fn in copymap):
+                if (
+                    size >= 0
+                    and (
+                        (size != st.st_size and size != st.st_size & _rangemask)
+                        or ((mode ^ st.st_mode) & 0o100 and checkexec)
+                    )
+                    or size == -2  # other parent
+                    or fn in copymap
+                ):
                     madd(fn)
-                elif (time != st[stat.ST_MTIME]
-                      and time != st[stat.ST_MTIME] & _rangemask):
+                elif (
+                    time != st[stat.ST_MTIME]
+                    and time != st[stat.ST_MTIME] & _rangemask
+                ):
                     ladd(fn)
                 elif st[stat.ST_MTIME] == lastnormaltime:
                     # fn may have just been marked as normal and it may have
@@ -1128,8 +1167,12 @@
             elif state == 'r':
                 radd(fn)
 
-        return (lookup, scmutil.status(modified, added, removed, deleted,
-                                       unknown, ignored, clean))
+        return (
+            lookup,
+            scmutil.status(
+                modified, added, removed, deleted, unknown, ignored, clean
+            ),
+        )
 
     def matches(self, match):
         '''
@@ -1164,15 +1207,20 @@
         # because the latter omits writing out if transaction is running.
         # output file will be used to create backup of dirstate at this point.
         if self._dirty or not self._opener.exists(filename):
-            self._writedirstate(self._opener(filename, "w", atomictemp=True,
-                                             checkambig=True))
+            self._writedirstate(
+                self._opener(filename, "w", atomictemp=True, checkambig=True)
+            )
 
         if tr:
             # ensure that subsequent tr.writepending returns True for
             # changes written out above, even if dirstate is never
             # changed after this
-            tr.addfilegenerator('dirstate', (self._filename,),
-                                self._writedirstate, location='plain')
+            tr.addfilegenerator(
+                'dirstate',
+                (self._filename,),
+                self._writedirstate,
+                location='plain',
+            )
 
             # ensure that pending file written above is unlinked at
             # failure, even if tr.writepending isn't invoked until the
@@ -1182,8 +1230,11 @@
         self._opener.tryunlink(backupname)
         # hardlink backup is okay because _writedirstate is always called
         # with an "atomictemp=True" file.
-        util.copyfile(self._opener.join(filename),
-                      self._opener.join(backupname), hardlink=True)
+        util.copyfile(
+            self._opener.join(filename),
+            self._opener.join(backupname),
+            hardlink=True,
+        )
 
     def restorebackup(self, tr, backupname):
         '''Restore dirstate by backup file'''
@@ -1201,6 +1252,7 @@
         '''Clear backup file'''
         self._opener.unlink(backupname)
 
+
 class dirstatemap(object):
     """Map encapsulating the dirstate's contents.
 
@@ -1376,15 +1428,16 @@
         except AttributeError:
             pass
         else:
-            return makefilefoldmap(self._map, util.normcasespec,
-                                   util.normcasefallback)
+            return makefilefoldmap(
+                self._map, util.normcasespec, util.normcasefallback
+            )
 
         f = {}
         normcase = util.normcase
         for name, s in self._map.iteritems():
             if s[0] != 'r':
                 f[normcase(name)] = name
-        f['.'] = '.' # prevents useless util.fspath() invocation
+        f['.'] = '.'  # prevents useless util.fspath() invocation
         return f
 
     def hastrackeddir(self, d):
@@ -1413,8 +1466,9 @@
         fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
         if self._pendingmode is not None and self._pendingmode != mode:
             fp.close()
-            raise error.Abort(_('working directory state may be '
-                                'changed parallelly'))
+            raise error.Abort(
+                _('working directory state may be ' 'changed parallelly')
+            )
         self._pendingmode = mode
         return fp
 
@@ -1436,8 +1490,9 @@
             elif l == 0:
                 self._parents = (nullid, nullid)
             else:
-                raise error.Abort(_('working directory state appears '
-                                    'damaged!'))
+                raise error.Abort(
+                    _('working directory state appears ' 'damaged!')
+                )
 
         return self._parents
 
@@ -1448,7 +1503,8 @@
     def read(self):
         # ignore HG_PENDING because identity is used only for writing
         self.identity = util.filestat.frompath(
-            self._opener.join(self._filename))
+            self._opener.join(self._filename)
+        )
 
         try:
             fp = self._opendirstatefile()
@@ -1499,8 +1555,9 @@
         self.get = self._map.get
 
     def write(self, st, now):
-        st.write(parsers.pack_dirstate(self._map, self.copymap,
-                                       self.parents(), now))
+        st.write(
+            parsers.pack_dirstate(self._map, self.copymap, self.parents(), now)
+        )
         st.close()
         self._dirtyparents = False
         self.nonnormalset, self.otherparentset = self.nonnormalentries()
@@ -1532,6 +1589,7 @@
 
 
 if rustmod is not None:
+
     class dirstatemap(object):
         def __init__(self, ui, opener, root):
             self._ui = ui
@@ -1604,12 +1662,14 @@
         iteritems = items
 
         def _opendirstatefile(self):
-            fp, mode = txnutil.trypending(self._root, self._opener,
-                                          self._filename)
+            fp, mode = txnutil.trypending(
+                self._root, self._opener, self._filename
+            )
             if self._pendingmode is not None and self._pendingmode != mode:
                 fp.close()
-                raise error.Abort(_('working directory state may be '
-                                    'changed parallelly'))
+                raise error.Abort(
+                    _('working directory state may be ' 'changed parallelly')
+                )
             self._pendingmode = mode
             return fp
 
@@ -1633,15 +1693,17 @@
                 try:
                     self._parents = self._rustmap.parents(st)
                 except ValueError:
-                    raise error.Abort(_('working directory state appears '
-                                        'damaged!'))
+                    raise error.Abort(
+                        _('working directory state appears ' 'damaged!')
+                    )
 
             return self._parents
 
         def read(self):
             # ignore HG_PENDING because identity is used only for writing
             self.identity = util.filestat.frompath(
-                self._opener.join(self._filename))
+                self._opener.join(self._filename)
+            )
 
             try:
                 fp = self._opendirstatefile()
@@ -1675,11 +1737,11 @@
             return self._rustmap.filefoldmapasdict()
 
         def hastrackeddir(self, d):
-            self._dirs # Trigger Python's propertycache
+            self._dirs  # Trigger Python's propertycache
             return self._rustmap.hastrackeddir(d)
 
         def hasdir(self, d):
-            self._dirs # Trigger Python's propertycache
+            self._dirs  # Trigger Python's propertycache
             return self._rustmap.hasdir(d)
 
         @propertycache
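
The dirstate code above stores size and mtime as 31-bit values (`_rangemask = 0x7FFFFFFF`), so status() treats a file as unchanged when either the raw or the masked value matches. A minimal sketch of that comparison; the helper name is hypothetical:

_rangemask = 0x7FFFFFFF  # dirstate keeps size/mtime in 31 bits

def sizematches(recorded, st_size):
    # hypothetical helper mirroring the status() check above: a file is
    # unchanged if the recorded size equals the on-disk size, raw or
    # masked to 31 bits
    return recorded == st_size or recorded == st_size & _rangemask
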
--- a/mercurial/discovery.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/discovery.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,6 +27,7 @@
     util,
 )
 
+
 def findcommonincoming(repo, remote, heads=None, force=False, ancestorsof=None):
     """Return a tuple (common, anyincoming, heads) used to identify the common
     subset of nodes between repo and remote.
@@ -53,16 +54,21 @@
         return treediscovery.findcommonincoming(repo, remote, heads, force)
 
     if heads:
-        knownnode = repo.changelog.hasnode # no nodemap until it is filtered
+        knownnode = repo.changelog.hasnode  # no nodemap until it is filtered
         if all(knownnode(h) for h in heads):
             return (heads, False, heads)
 
-    res = setdiscovery.findcommonheads(repo.ui, repo, remote,
-                                       abortwhenunrelated=not force,
-                                       ancestorsof=ancestorsof)
+    res = setdiscovery.findcommonheads(
+        repo.ui,
+        repo,
+        remote,
+        abortwhenunrelated=not force,
+        ancestorsof=ancestorsof,
+    )
     common, anyinc, srvheads = res
     return (list(common), anyinc, heads or list(srvheads))
 
+
 class outgoing(object):
     '''Represents the set of nodes present in a local repo but not in a
     (possibly) remote one.
@@ -78,8 +84,9 @@
     The sets are computed on demand from the heads, unless provided upfront
     by discovery.'''
 
-    def __init__(self, repo, commonheads=None, missingheads=None,
-                 missingroots=None):
+    def __init__(
+        self, repo, commonheads=None, missingheads=None, missingroots=None
+    ):
         # at least one of them must not be set
         assert None in (commonheads, missingroots)
         cl = repo.changelog
@@ -106,8 +113,9 @@
         self.excluded = []
 
     def _computecommonmissing(self):
-        sets = self._revlog.findcommonmissing(self.commonheads,
-                                              self.missingheads)
+        sets = self._revlog.findcommonmissing(
+            self.commonheads, self.missingheads
+        )
         self._common, self._missing = sets
 
     @util.propertycache
@@ -122,8 +130,10 @@
             self._computecommonmissing()
         return self._missing
 
-def findcommonoutgoing(repo, other, onlyheads=None, force=False,
-                       commoninc=None, portable=False):
+
+def findcommonoutgoing(
+    repo, other, onlyheads=None, force=False, commoninc=None, portable=False
+):
     '''Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
@@ -141,12 +151,13 @@
 
     # get common set if not provided
     if commoninc is None:
-        commoninc = findcommonincoming(repo, other, force=force,
-                                       ancestorsof=onlyheads)
+        commoninc = findcommonincoming(
+            repo, other, force=force, ancestorsof=onlyheads
+        )
     og.commonheads, _any, _hds = commoninc
 
     # compute outgoing
-    mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
+    mayexclude = repo._phasecache.phaseroots[phases.secret] or repo.obsstore
     if not mayexclude:
         og.missingheads = onlyheads or repo.heads()
     elif onlyheads is None:
@@ -167,7 +178,7 @@
                 missing.append(node)
         if len(missing) == len(allmissing):
             missingheads = onlyheads
-        else: # update missing heads
+        else:  # update missing heads
             missingheads = phases.newheads(repo, onlyheads, excluded)
         og.missingheads = missingheads
     if portable:
@@ -183,6 +194,7 @@
 
     return og
 
+
 def _headssummary(pushop):
     """compute a summary of branch and heads status before and after push
 
@@ -212,7 +224,7 @@
     with remote.commandexecutor() as e:
         remotemap = e.callcommand('branchmap', {}).result()
 
-    knownnode = cl.hasnode # do not use nodemap until it is filtered
+    knownnode = cl.hasnode  # do not use nodemap until it is filtered
     # A. register remote heads of branches which are in outgoing set
     for branch, heads in remotemap.iteritems():
         # don't add head info about branches which we don't have locally
@@ -234,9 +246,11 @@
 
     # C. Update newmap with outgoing changes.
     # This will possibly add new heads and remove existing ones.
-    newmap = branchmap.remotebranchcache((branch, heads[1])
-                                 for branch, heads in headssum.iteritems()
-                                 if heads[0] is not None)
+    newmap = branchmap.remotebranchcache(
+        (branch, heads[1])
+        for branch, heads in headssum.iteritems()
+        if heads[0] is not None
+    )
     newmap.update(repo, (ctx.rev() for ctx in missingctx))
     for branch, newheads in newmap.iteritems():
         headssum[branch][1][:] = newheads
@@ -255,17 +269,22 @@
         for branch, heads in sorted(headssum.iteritems()):
             remoteheads, newheads, unsyncedheads, placeholder = heads
             result = _postprocessobsolete(pushop, allfuturecommon, newheads)
-            headssum[branch] = (remoteheads, sorted(result[0]), unsyncedheads,
-                                sorted(result[1]))
+            headssum[branch] = (
+                remoteheads,
+                sorted(result[0]),
+                unsyncedheads,
+                sorted(result[1]),
+            )
     return headssum
 
+
 def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
     """Compute branchmapsummary for repo without branchmap support"""
 
     # 1-4b. old servers: Check for new topological heads.
     # Construct {old,new}map with branch = None (topological branch).
     # (code based on update)
-    knownnode = repo.changelog.hasnode # no nodemap until it is filtered
+    knownnode = repo.changelog.hasnode  # no nodemap until it is filtered
     oldheads = sorted(h for h in remoteheads if knownnode(h))
     # all nodes in outgoing.missing are children of either:
     # - an element of oldheads
@@ -281,6 +300,7 @@
         unsynced = []
     return {None: (oldheads, newheads, unsynced, [])}
 
+
 def _nowarnheads(pushop):
     # Compute newly pushed bookmarks. We don't warn about bookmarked heads.
     repo = pushop.repo.unfiltered()
@@ -288,15 +308,17 @@
     localbookmarks = repo._bookmarks
 
     with remote.commandexecutor() as e:
-        remotebookmarks = e.callcommand('listkeys', {
-            'namespace': 'bookmarks',
-        }).result()
+        remotebookmarks = e.callcommand(
+            'listkeys', {'namespace': 'bookmarks',}
+        ).result()
 
     bookmarkedheads = set()
 
     # internal config: bookmarks.pushing
-    newbookmarks = [localbookmarks.expandname(b)
-                    for b in pushop.ui.configlist('bookmarks', 'pushing')]
+    newbookmarks = [
+        localbookmarks.expandname(b)
+        for b in pushop.ui.configlist('bookmarks', 'pushing')
+    ]
 
     for bm in localbookmarks:
         rnode = remotebookmarks.get(bm)
@@ -310,6 +332,7 @@
 
     return bookmarkedheads
 
+
 def checkheads(pushop):
     """Check that a push won't add any outgoing head
 
@@ -338,8 +361,9 @@
     else:
         headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
     pushop.pushbranchmap = headssum
-    newbranches = [branch for branch, heads in headssum.iteritems()
-                   if heads[0] is None]
+    newbranches = [
+        branch for branch, heads in headssum.iteritems() if heads[0] is None
+    ]
     # 1. Check for new branches on the remote.
     if newbranches and not newbranch:  # new branch requires --new-branch
         branchnames = ', '.join(sorted(newbranches))
@@ -348,13 +372,15 @@
         for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
             if isclosed:
                 closedbranches.add(tag)
-        closedbranches = (closedbranches & set(newbranches))
+        closedbranches = closedbranches & set(newbranches)
         if closedbranches:
-            errmsg = (_("push creates new remote branches: %s (%d closed)!")
-                        % (branchnames, len(closedbranches)))
+            errmsg = _("push creates new remote branches: %s (%d closed)!") % (
+                branchnames,
+                len(closedbranches),
+            )
         else:
-            errmsg = (_("push creates new remote branches: %s!")% branchnames)
-        hint=_("use 'hg push --new-branch' to create new remote branches")
+            errmsg = _("push creates new remote branches: %s!") % branchnames
+        hint = _("use 'hg push --new-branch' to create new remote branches")
         raise error.Abort(errmsg, hint=hint)
 
     # 2. Find heads that we need not warn about
@@ -372,7 +398,7 @@
         else:
             oldhs = set(remoteheads)
         oldhs.update(unsyncedheads)
-        dhs = None # delta heads, the new heads on branch
+        dhs = None  # delta heads, the new heads on branch
         newhs = set(newheads)
         newhs.update(unsyncedheads)
         if unsyncedheads:
@@ -382,57 +408,74 @@
             else:
                 heads = scmutil.nodesummaries(repo, unsyncedheads)
             if heads is None:
-                repo.ui.status(_("remote has heads that are "
-                                 "not known locally\n"))
+                repo.ui.status(
+                    _("remote has heads that are " "not known locally\n")
+                )
             elif branch is None:
-                repo.ui.status(_("remote has heads that are "
-                                 "not known locally: %s\n") % heads)
+                repo.ui.status(
+                    _("remote has heads that are " "not known locally: %s\n")
+                    % heads
+                )
             else:
-                repo.ui.status(_("remote has heads on branch '%s' that are "
-                                 "not known locally: %s\n") % (branch, heads))
+                repo.ui.status(
+                    _(
+                        "remote has heads on branch '%s' that are "
+                        "not known locally: %s\n"
+                    )
+                    % (branch, heads)
+                )
         if remoteheads is None:
             if len(newhs) > 1:
                 dhs = list(newhs)
                 if errormsg is None:
                     errormsg = (
-                        _("push creates new branch '%s' with multiple heads") %
-                        branch
+                        _("push creates new branch '%s' with multiple heads")
+                        % branch
                     )
-                    hint = _("merge or"
-                             " see 'hg help push' for details about"
-                             " pushing new heads")
+                    hint = _(
+                        "merge or"
+                        " see 'hg help push' for details about"
+                        " pushing new heads"
+                    )
         elif len(newhs) > len(oldhs):
             # remove bookmarked or existing remote heads from the new heads list
             dhs = sorted(newhs - nowarnheads - oldhs)
         if dhs:
             if errormsg is None:
                 if branch not in ('default', None):
-                    errormsg = _("push creates new remote head %s "
-                                 "on branch '%s'!") % (short(dhs[0]), branch)
+                    errormsg = _(
+                        "push creates new remote head %s " "on branch '%s'!"
+                    ) % (short(dhs[0]), branch)
                 elif repo[dhs[0]].bookmarks():
-                    errormsg = _("push creates new remote head %s "
-                                 "with bookmark '%s'!") % (
-                                 short(dhs[0]), repo[dhs[0]].bookmarks()[0])
+                    errormsg = _(
+                        "push creates new remote head %s " "with bookmark '%s'!"
+                    ) % (short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                 else:
-                    errormsg = _("push creates new remote head %s!"
-                                 ) % short(dhs[0])
+                    errormsg = _("push creates new remote head %s!") % short(
+                        dhs[0]
+                    )
                 if unsyncedheads:
-                    hint = _("pull and merge or"
-                             " see 'hg help push' for details about"
-                             " pushing new heads")
+                    hint = _(
+                        "pull and merge or"
+                        " see 'hg help push' for details about"
+                        " pushing new heads"
+                    )
                 else:
-                    hint = _("merge or"
-                             " see 'hg help push' for details about"
-                             " pushing new heads")
+                    hint = _(
+                        "merge or"
+                        " see 'hg help push' for details about"
+                        " pushing new heads"
+                    )
             if branch is None:
                 repo.ui.note(_("new remote heads:\n"))
             else:
                 repo.ui.note(_("new remote heads on branch '%s':\n") % branch)
             for h in dhs:
-                repo.ui.note((" %s\n") % short(h))
+                repo.ui.note(" %s\n" % short(h))
     if errormsg:
         raise error.Abort(errormsg, hint=hint)
 
+
 def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
     """post process the list of new heads with obsolescence information
 
@@ -455,22 +498,24 @@
     torev = unfi.changelog.nodemap.get
     public = phases.public
     getphase = unfi._phasecache.phase
-    ispublic = (lambda r: getphase(unfi, r) == public)
-    ispushed = (lambda n: torev(n) in futurecommon)
+    ispublic = lambda r: getphase(unfi, r) == public
+    ispushed = lambda n: torev(n) in futurecommon
     hasoutmarker = functools.partial(pushingmarkerfor, unfi.obsstore, ispushed)
     successorsmarkers = unfi.obsstore.successors
-    newhs = set() # final set of new heads
-    discarded = set() # new head of fully replaced branch
+    newhs = set()  # final set of new heads
+    discarded = set()  # new head of fully replaced branch
 
-    localcandidate = set() # candidate heads known locally
-    unknownheads = set() # candidate heads unknown locally
+    localcandidate = set()  # candidate heads known locally
+    unknownheads = set()  # candidate heads unknown locally
     for h in candidate_newhs:
         if h in unfi:
             localcandidate.add(h)
         else:
             if successorsmarkers.get(h) is not None:
-                msg = ('checkheads: remote head unknown locally has'
-                       ' local marker: %s\n')
+                msg = (
+                    'checkheads: remote head unknown locally has'
+                    ' local marker: %s\n'
+                )
                 repo.ui.debug(msg % hex(h))
             unknownheads.add(h)
 
@@ -482,23 +527,24 @@
     while localcandidate:
         nh = localcandidate.pop()
         # run this check early to skip the evaluation of the whole branch
-        if (torev(nh) in futurecommon or ispublic(torev(nh))):
+        if torev(nh) in futurecommon or ispublic(torev(nh)):
             newhs.add(nh)
             continue
 
         # Get all revs/nodes on the branch exclusive to this head
         # (already filtered heads are "ignored"))
-        branchrevs = unfi.revs('only(%n, (%ln+%ln))',
-                               nh, localcandidate, newhs)
+        branchrevs = unfi.revs('only(%n, (%ln+%ln))', nh, localcandidate, newhs)
         branchnodes = [tonode(r) for r in branchrevs]
 
         # The branch won't be hidden on the remote if
         # * any part of it is public,
         # * any part of it is considered part of the result by previous logic,
         # * if we have no markers to push to obsolete it.
-        if (any(ispublic(r) for r in branchrevs)
-                or any(torev(n) in futurecommon for n in branchnodes)
-                or any(not hasoutmarker(n) for n in branchnodes)):
+        if (
+            any(ispublic(r) for r in branchrevs)
+            or any(torev(n) in futurecommon for n in branchnodes)
+            or any(not hasoutmarker(n) for n in branchnodes)
+        ):
             newhs.add(nh)
         else:
             # note: there is a corner case if there is a merge in the branch.
@@ -509,6 +555,7 @@
     newhs |= unknownheads
     return newhs, discarded
 
+
 def pushingmarkerfor(obsstore, ispushed, node):
     """true if some markers are to be pushed for node
 
@@ -530,9 +577,9 @@
         markers = successorsmarkers.get(current, ())
         # markers fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents')
         for m in markers:
-            nexts = m[1] # successors
-            if not nexts: # this is a prune marker
-                nexts = m[5] or () # parents
+            nexts = m[1]  # successors
+            if not nexts:  # this is a prune marker
+                nexts = m[5] or ()  # parents
             for n in nexts:
                 if n not in seen:
                     seen.add(n)
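
A minimal sketch of driving the discovery helpers above during a push, assuming `repo` is a local repository object and `other` a connected remote peer; the wrapper function is hypothetical:

from mercurial import discovery
from mercurial.node import hex

def summarizepush(repo, other):
    # hypothetical wrapper: find what the remote is missing
    og = discovery.findcommonoutgoing(repo, other)
    # og.missing and og.common are computed lazily from the heads
    return len(og.missing), [hex(n)[:12] for n in og.missingheads]
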
--- a/mercurial/dispatch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/dispatch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -48,9 +48,19 @@
     stringutil,
 )
 
+
 class request(object):
-    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
-                 ferr=None, fmsg=None, prereposetups=None):
+    def __init__(
+        self,
+        args,
+        ui=None,
+        repo=None,
+        fin=None,
+        fout=None,
+        ferr=None,
+        fmsg=None,
+        prereposetups=None,
+    ):
         self.args = args
         self.ui = ui
         self.repo = repo
@@ -80,15 +90,16 @@
                 func, args, kwargs = handlers.pop()
                 try:
                     func(*args, **kwargs)
-                except: # re-raises below
+                except:  # re-raises below
                     if exc is None:
                         exc = sys.exc_info()[1]
-                    self.ui.warn(('error in exit handlers:\n'))
+                    self.ui.warn('error in exit handlers:\n')
                     self.ui.traceback(force=True)
         finally:
             if exc is not None:
                 raise exc
 
+
 def run():
     "run the command in sys.argv"
     initstdio()
@@ -112,8 +123,9 @@
     if util.safehasattr(req.ui, 'ferr'):
         try:
             if err is not None and err.errno != errno.EPIPE:
-                req.ui.ferr.write('abort: %s\n' %
-                                  encoding.strtolocal(err.strerror))
+                req.ui.ferr.write(
+                    'abort: %s\n' % encoding.strtolocal(err.strerror)
+                )
             req.ui.ferr.flush()
         # There's not much we can do about an I/O error here. So (possibly)
         # change the status code and move on.
@@ -123,7 +135,9 @@
     _silencestdio()
     sys.exit(status & 255)
 
+
 if pycompat.ispy3:
+
     def initstdio():
         pass
 
@@ -143,7 +157,10 @@
                 fp.close()
             except IOError:
                 pass
+
+
 else:
+
     def initstdio():
         for fp in (sys.stdin, sys.stdout, sys.stderr):
             procutil.setbinary(fp)
@@ -151,12 +168,14 @@
     def _silencestdio():
         pass
 
+
 def _getsimilar(symbols, value):
     sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
     # The cutoff for similarity here is pretty arbitrary. It should
     # probably be investigated and tweaked.
     return [s for s in symbols if sim(s) > 0.6]
 
+
 def _reportsimilar(write, similar):
     if len(similar) == 1:
         write(_("(did you mean %s?)\n") % similar[0])
@@ -164,14 +183,17 @@
         ss = ", ".join(sorted(similar))
         write(_("(did you mean one of %s?)\n") % ss)
 
+
 def _formatparse(write, inst):
     similar = []
     if isinstance(inst, error.UnknownIdentifier):
         # make sure to check fileset first, as revset can invoke fileset
         similar = _getsimilar(inst.symbols, inst.function)
     if len(inst.args) > 1:
-        write(_("hg: parse error at %s: %s\n") %
-              (pycompat.bytestr(inst.args[1]), inst.args[0]))
+        write(
+            _("hg: parse error at %s: %s\n")
+            % (pycompat.bytestr(inst.args[1]), inst.args[0])
+        )
         if inst.args[0].startswith(' '):
             write(_("unexpected leading whitespace\n"))
     else:
@@ -180,9 +202,11 @@
     if inst.hint:
         write(_("(%s)\n") % inst.hint)
 
+
 def _formatargs(args):
     return ' '.join(procutil.shellquote(a) for a in args)
 
+
 def dispatch(req):
     """run the command specified in req.args; returns an integer status code"""
     with tracing.log('dispatch.dispatch'):
@@ -248,8 +272,11 @@
             req.ui.flush()
             if req.ui.logblockedtimes:
                 req.ui._blockedtimes['command_duration'] = duration * 1000
-                req.ui.log('uiblocked', 'ui blocked ms\n',
-                           **pycompat.strkwargs(req.ui._blockedtimes))
+                req.ui.log(
+                    'uiblocked',
+                    'ui blocked ms\n',
+                    **pycompat.strkwargs(req.ui._blockedtimes)
+                )
             return_code = ret & 255
             req.ui.log(
                 "commandfinish",
@@ -263,12 +290,14 @@
             )
             try:
                 req._runexithandlers()
-            except: # exiting, so no re-raises
+            except:  # exiting, so no re-raises
                 ret = ret or -1
         return ret
 
+
 def _runcatch(req):
     with tracing.log('dispatch._runcatch'):
+
         def catchterm(*args):
             raise error.SignalInterrupt
 
@@ -279,18 +308,23 @@
                 if num:
                     signal.signal(num, catchterm)
         except ValueError:
-            pass # happens if called in a thread
+            pass  # happens if called in a thread
 
         def _runcatchfunc():
             realcmd = None
             try:
                 cmdargs = fancyopts.fancyopts(
-                    req.args[:], commands.globalopts, {})
+                    req.args[:], commands.globalopts, {}
+                )
                 cmd = cmdargs[0]
                 aliases, entry = cmdutil.findcmd(cmd, commands.table, False)
                 realcmd = aliases[0]
-            except (error.UnknownCommand, error.AmbiguousCommand,
-                    IndexError, getopt.GetoptError):
+            except (
+                error.UnknownCommand,
+                error.AmbiguousCommand,
+                IndexError,
+                getopt.GetoptError,
+            ):
                 # Don't handle this here. We know the command is
                 # invalid, but all we're worried about for now is that
                 # it's not a command that server operators expect to
@@ -305,23 +339,22 @@
                 # shenanigans wherein a user does something like pass
                 # --debugger or --config=ui.debugger=1 as a repo
                 # name. This used to actually run the debugger.
-                if (len(req.args) != 4 or
-                    req.args[0] != '-R' or
-                    req.args[1].startswith('--') or
-                    req.args[2] != 'serve' or
-                    req.args[3] != '--stdio'):
+                if (
+                    len(req.args) != 4
+                    or req.args[0] != '-R'
+                    or req.args[1].startswith('--')
+                    or req.args[2] != 'serve'
+                    or req.args[3] != '--stdio'
+                ):
                     raise error.Abort(
-                        _('potentially unsafe serve --stdio invocation: %s') %
-                        (stringutil.pprint(req.args),))
+                        _('potentially unsafe serve --stdio invocation: %s')
+                        % (stringutil.pprint(req.args),)
+                    )
 
             try:
                 debugger = 'pdb'
-                debugtrace = {
-                    'pdb': pdb.set_trace
-                }
-                debugmortem = {
-                    'pdb': pdb.post_mortem
-                }
+                debugtrace = {'pdb': pdb.set_trace}
+                debugmortem = {'pdb': pdb.post_mortem}
 
                 # read --config before doing anything else
                 # (e.g. to change trust settings for reading .hg/hgrc)
@@ -347,34 +380,47 @@
                         try:
                             debugmod = __import__(debugger)
                         except ImportError:
-                            pass # Leave debugmod = pdb
+                            pass  # Leave debugmod = pdb
 
                 debugtrace[debugger] = debugmod.set_trace
                 debugmortem[debugger] = debugmod.post_mortem
 
                 # enter the debugger before command execution
                 if req.earlyoptions['debugger']:
-                    ui.warn(_("entering debugger - "
-                            "type c to continue starting hg or h for help\n"))
+                    ui.warn(
+                        _(
+                            "entering debugger - "
+                            "type c to continue starting hg or h for help\n"
+                        )
+                    )
 
-                    if (debugger != 'pdb' and
-                        debugtrace[debugger] == debugtrace['pdb']):
-                        ui.warn(_("%s debugger specified "
-                                  "but its module was not found\n") % debugger)
+                    if (
+                        debugger != 'pdb'
+                        and debugtrace[debugger] == debugtrace['pdb']
+                    ):
+                        ui.warn(
+                            _(
+                                "%s debugger specified "
+                                "but its module was not found\n"
+                            )
+                            % debugger
+                        )
                     with demandimport.deactivated():
                         debugtrace[debugger]()
                 try:
                     return _dispatch(req)
                 finally:
                     ui.flush()
-            except: # re-raises
+            except:  # re-raises
                 # enter the debugger when we hit an exception
                 if req.earlyoptions['debugger']:
                     traceback.print_exc()
                     debugmortem[debugger](sys.exc_info()[2])
                 raise
+
         return _callcatch(ui, _runcatchfunc)
 
+
 def _callcatch(ui, func):
     """like scmutil.callcatch but handles more high-level exceptions about
     config parsing and commands. besides, use handlecommandexception to handle
@@ -383,8 +429,10 @@
     try:
         return scmutil.callcatch(ui, func)
     except error.AmbiguousCommand as inst:
-        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
-                (inst.args[0], " ".join(inst.args[1])))
+        ui.warn(
+            _("hg: command '%s' is ambiguous:\n    %s\n")
+            % (inst.args[0], " ".join(inst.args[1]))
+        )
     except error.CommandError as inst:
         if inst.args[0]:
             ui.pager('help')
@@ -402,8 +450,9 @@
         try:
             # check if the command is in a disabled extension
             # (but don't check for extensions themselves)
-            formatted = help.formattedhelp(ui, commands, inst.args[0],
-                                           unknowncmd=True)
+            formatted = help.formattedhelp(
+                ui, commands, inst.args[0], unknowncmd=True
+            )
             ui.warn(nocmdmsg)
             ui.write(formatted)
         except (error.UnknownCommand, error.Abort):
@@ -427,6 +476,7 @@
 
     return -1
 
+
 def aliasargs(fn, givenargs):
     args = []
     # only care about alias 'args', ignore 'args' set by extensions.wrapfunction
@@ -436,18 +486,20 @@
         cmd = ' '.join(map(procutil.shellquote, args))
 
         nums = []
+
         def replacer(m):
             num = int(m.group(1)) - 1
             nums.append(num)
             if num < len(givenargs):
                 return givenargs[num]
             raise error.Abort(_('too few arguments for command alias'))
+
         cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
-        givenargs = [x for i, x in enumerate(givenargs)
-                     if i not in nums]
+        givenargs = [x for i, x in enumerate(givenargs) if i not in nums]
         args = pycompat.shlexsplit(cmd)
     return args + givenargs
 
+
 def aliasinterpolate(name, args, cmd):
     '''interpolate args into cmd for shell aliases
 
@@ -469,6 +521,7 @@
     r = re.compile(regex)
     return r.sub(lambda x: replacemap[x.group()], cmd)
 
+
 class cmdalias(object):
     def __init__(self, ui, name, definition, cmdtable, source):
         self.name = self.cmd = name
@@ -499,22 +552,29 @@
         if self.definition.startswith('!'):
             shdef = self.definition[1:]
             self.shell = True
+
             def fn(ui, *args):
                 env = {'HG_ARGS': ' '.join((self.name,) + args)}
+
                 def _checkvar(m):
                     if m.groups()[0] == '$':
                         return m.group()
                     elif int(m.groups()[0]) <= len(args):
                         return m.group()
                     else:
-                        ui.debug("No argument found for substitution "
-                                 "of %i variable in alias '%s' definition.\n"
-                                 % (int(m.groups()[0]), self.name))
+                        ui.debug(
+                            "No argument found for substitution "
+                            "of %i variable in alias '%s' definition.\n"
+                            % (int(m.groups()[0]), self.name)
+                        )
                         return ''
+
                 cmd = re.sub(br'\$(\d+|\$)', _checkvar, shdef)
                 cmd = aliasinterpolate(self.name, args, cmd)
-                return ui.system(cmd, environ=env,
-                                 blockedtag='alias_%s' % self.name)
+                return ui.system(
+                    cmd, environ=env, blockedtag='alias_%s' % self.name
+                )
+
             self.fn = fn
             self.alias = True
             self._populatehelp(ui, name, shdef, self.fn)
@@ -523,15 +583,17 @@
         try:
             args = pycompat.shlexsplit(self.definition)
         except ValueError as inst:
-            self.badalias = (_("error in definition for alias '%s': %s")
-                             % (self.name, stringutil.forcebytestr(inst)))
+            self.badalias = _("error in definition for alias '%s': %s") % (
+                self.name,
+                stringutil.forcebytestr(inst),
+            )
             return
         earlyopts, args = _earlysplitopts(args)
         if earlyopts:
-            self.badalias = (_("error in definition for alias '%s': %s may "
-                               "only be given on the command line")
-                             % (self.name, '/'.join(pycompat.ziplist(*earlyopts)
-                                                    [0])))
+            self.badalias = _(
+                "error in definition for alias '%s': %s may "
+                "only be given on the command line"
+            ) % (self.name, '/'.join(pycompat.ziplist(*earlyopts)[0]))
             return
         self.cmdname = cmd = args.pop(0)
         self.givenargs = args
@@ -548,12 +610,15 @@
             self._populatehelp(ui, name, cmd, self.fn, cmdhelp)
 
         except error.UnknownCommand:
-            self.badalias = (_("alias '%s' resolves to unknown command '%s'")
-                             % (self.name, cmd))
+            self.badalias = _("alias '%s' resolves to unknown command '%s'") % (
+                self.name,
+                cmd,
+            )
             self.unknowncmd = True
         except error.AmbiguousCommand:
-            self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
-                             % (self.name, cmd))
+            self.badalias = _(
+                "alias '%s' resolves to ambiguous command '%s'"
+            ) % (self.name, cmd)
 
     def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
         # confine strings to be passed to i18n.gettext()
@@ -563,15 +628,16 @@
             if v is None:
                 continue
             if not encoding.isasciistr(v):
-                self.badalias = (_("non-ASCII character in alias definition "
-                                   "'%s:%s'") % (name, k))
+                self.badalias = _(
+                    "non-ASCII character in alias definition " "'%s:%s'"
+                ) % (name, k)
                 return
             cfg[k] = v
 
         self.help = cfg.get('help', defaulthelp or '')
         if self.help and self.help.startswith("hg " + cmd):
             # drop prefix in old-style help lines so hg shows the alias
-            self.help = self.help[4 + len(cmd):]
+            self.help = self.help[4 + len(cmd) :]
 
         self.owndoc = 'doc' in cfg
         doc = cfg.get('doc', pycompat.getdoc(fn))
@@ -587,8 +653,12 @@
         return aliasargs(self.fn, args)
 
     def __getattr__(self, name):
-        adefaults = {r'norepo': True, r'intents': set(),
-                     r'optionalrepo': False, r'inferrepo': False}
+        adefaults = {
+            r'norepo': True,
+            r'intents': set(),
+            r'optionalrepo': False,
+            r'inferrepo': False,
+        }
         if name not in adefaults:
             raise AttributeError(name)
         if self.badalias or util.safehasattr(self, 'shell'):
@@ -607,11 +677,16 @@
                     pass
             raise error.Abort(self.badalias, hint=hint)
         if self.shadows:
-            ui.debug("alias '%s' shadows command '%s'\n" %
-                     (self.name, self.cmdname))
+            ui.debug(
+                "alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)
+            )
 
-        ui.log('commandalias', "alias '%s' expands to '%s'\n",
-               self.name, self.definition)
+        ui.log(
+            'commandalias',
+            "alias '%s' expands to '%s'\n",
+            self.name,
+            self.definition,
+        )
         if util.safehasattr(self, 'shell'):
             return self.fn(ui, *args, **opts)
         else:
@@ -622,6 +697,7 @@
                 ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                 raise
 
+
 class lazyaliasentry(object):
     """like a typical command entry (func, opts, help), but is lazy"""
 
@@ -635,8 +711,9 @@
 
     @util.propertycache
     def _aliasdef(self):
-        return cmdalias(self.ui, self.name, self.definition, self.cmdtable,
-                        self.source)
+        return cmdalias(
+            self.ui, self.name, self.definition, self.cmdtable, self.source
+        )
 
     def __getitem__(self, n):
         aliasdef = self._aliasdef
@@ -656,6 +733,7 @@
     def __len__(self):
         return 3
 
+
 def addaliases(ui, cmdtable):
     # aliases are processed after extensions have been loaded, so they
     # may use extension commands. Aliases can also use other alias definitions,
@@ -672,6 +750,7 @@
         entry = lazyaliasentry(ui, alias, definition, cmdtable, source)
         cmdtable[alias] = entry
 
+
 def _parse(ui, args):
     options = {}
     cmdoptions = {}
@@ -683,14 +762,17 @@
 
     if args:
         cmd, args = args[0], args[1:]
-        aliases, entry = cmdutil.findcmd(cmd, commands.table,
-                                         ui.configbool("ui", "strict"))
+        aliases, entry = cmdutil.findcmd(
+            cmd, commands.table, ui.configbool("ui", "strict")
+        )
         cmd = aliases[0]
         args = aliasargs(entry[0], args)
         defaults = ui.config("defaults", cmd)
         if defaults:
-            args = pycompat.maplist(
-                util.expandpath, pycompat.shlexsplit(defaults)) + args
+            args = (
+                pycompat.maplist(util.expandpath, pycompat.shlexsplit(defaults))
+                + args
+            )
         c = list(entry[1])
     else:
         cmd = None
@@ -713,57 +795,93 @@
 
     return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
 
+
 def _parseconfig(ui, config):
     """parse the --config options from the command line"""
     configs = []
 
     for cfg in config:
         try:
-            name, value = [cfgelem.strip()
-                           for cfgelem in cfg.split('=', 1)]
+            name, value = [cfgelem.strip() for cfgelem in cfg.split('=', 1)]
             section, name = name.split('.', 1)
             if not section or not name:
                 raise IndexError
             ui.setconfig(section, name, value, '--config')
             configs.append((section, name, value))
         except (IndexError, ValueError):
-            raise error.Abort(_('malformed --config option: %r '
-                                '(use --config section.name=value)')
-                              % pycompat.bytestr(cfg))
+            raise error.Abort(
+                _(
+                    'malformed --config option: %r '
+                    '(use --config section.name=value)'
+                )
+                % pycompat.bytestr(cfg)
+            )
 
     return configs
 
+
 def _earlyparseopts(ui, args):
     options = {}
-    fancyopts.fancyopts(args, commands.globalopts, options,
-                        gnu=not ui.plain('strictflags'), early=True,
-                        optaliases={'repository': ['repo']})
+    fancyopts.fancyopts(
+        args,
+        commands.globalopts,
+        options,
+        gnu=not ui.plain('strictflags'),
+        early=True,
+        optaliases={'repository': ['repo']},
+    )
     return options
 
+
 def _earlysplitopts(args):
     """Split args into a list of possible early options and remainder args"""
     shortoptions = 'R:'
     # TODO: perhaps 'debugger' should be included
     longoptions = ['cwd=', 'repository=', 'repo=', 'config=']
-    return fancyopts.earlygetopt(args, shortoptions, longoptions,
-                                 gnu=True, keepsep=True)
+    return fancyopts.earlygetopt(
+        args, shortoptions, longoptions, gnu=True, keepsep=True
+    )
+
 
 def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
     # run pre-hook, and abort if it fails
-    hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
-              pats=cmdpats, opts=cmdoptions)
+    hook.hook(
+        lui,
+        repo,
+        "pre-%s" % cmd,
+        True,
+        args=" ".join(fullargs),
+        pats=cmdpats,
+        opts=cmdoptions,
+    )
     try:
         ret = _runcommand(ui, options, cmd, d)
         # run post-hook, passing command result
-        hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
-                  result=ret, pats=cmdpats, opts=cmdoptions)
+        hook.hook(
+            lui,
+            repo,
+            "post-%s" % cmd,
+            False,
+            args=" ".join(fullargs),
+            result=ret,
+            pats=cmdpats,
+            opts=cmdoptions,
+        )
     except Exception:
         # run failure hook and re-raise
-        hook.hook(lui, repo, "fail-%s" % cmd, False, args=" ".join(fullargs),
-                  pats=cmdpats, opts=cmdoptions)
+        hook.hook(
+            lui,
+            repo,
+            "fail-%s" % cmd,
+            False,
+            args=" ".join(fullargs),
+            pats=cmdpats,
+            opts=cmdoptions,
+        )
         raise
     return ret
 
+
 def _getlocal(ui, rpath, wd=None):
     """Return (path, local ui object) for the given target path.
 
@@ -773,8 +891,10 @@
         try:
             wd = encoding.getcwd()
         except OSError as e:
-            raise error.Abort(_("error getting current working directory: %s") %
-                              encoding.strtolocal(e.strerror))
+            raise error.Abort(
+                _("error getting current working directory: %s")
+                % encoding.strtolocal(e.strerror)
+            )
     path = cmdutil.findrepo(wd) or ""
     if not path:
         lui = ui
@@ -789,6 +909,7 @@
 
     return path, lui
 
+
 def _checkshellalias(lui, ui, args):
     """Return the function to run the shell alias, if it is required"""
     options = {}
@@ -817,8 +938,10 @@
         # shell alias shouldn't receive early options which are consumed by hg
         _earlyopts, args = _earlysplitopts(args)
         d = lambda: fn(ui, *args[1:])
-        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
-                                  [], {})
+        return lambda: runcommand(
+            lui, None, cmd, args[:1], ui, options, d, [], {}
+        )
+
 
 def _dispatch(req):
     args = req.args
@@ -837,8 +960,11 @@
     if req.repo:
         uis.add(req.repo.ui)
 
-    if (req.earlyoptions['verbose'] or req.earlyoptions['debug']
-            or req.earlyoptions['quiet']):
+    if (
+        req.earlyoptions['verbose']
+        or req.earlyoptions['debug']
+        or req.earlyoptions['quiet']
+    ):
         for opt in ('verbose', 'debug', 'quiet'):
             val = pycompat.bytestr(bool(req.earlyoptions[opt]))
             for ui_ in uis:
@@ -887,9 +1013,12 @@
         if options["cwd"] != req.earlyoptions["cwd"]:
             raise error.Abort(_("option --cwd may not be abbreviated!"))
         if options["repository"] != req.earlyoptions["repository"]:
-            raise error.Abort(_(
-                "option -R has to be separated from other options (e.g. not "
-                "-qR) and --repository may only be abbreviated as --repo!"))
+            raise error.Abort(
+                _(
+                    "option -R has to be separated from other options (e.g. not "
+                    "-qR) and --repository may only be abbreviated as --repo!"
+                )
+            )
         if options["debugger"] != req.earlyoptions["debugger"]:
             raise error.Abort(_("option --debugger may not be abbreviated!"))
         # don't validate --profile/--traceback, which can be enabled from now
@@ -899,18 +1028,29 @@
         if options["encodingmode"]:
             encoding.encodingmode = options["encodingmode"]
         if options["time"]:
+
             def get_times():
                 t = os.times()
                 if t[4] == 0.0:
                     # Windows leaves this as zero, so use time.clock()
                     t = (t[0], t[1], t[2], t[3], time.clock())
                 return t
+
             s = get_times()
+
             def print_time():
                 t = get_times()
                 ui.warn(
-                    _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
-                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
+                    _("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n")
+                    % (
+                        t[4] - s[4],
+                        t[0] - s[0],
+                        t[2] - s[2],
+                        t[1] - s[1],
+                        t[3] - s[3],
+                    )
+                )
+
             ui.atexit(print_time)
         if options["profile"]:
             profiler.start()
@@ -977,18 +1117,23 @@
                 repo.ui.fmsg = ui.fmsg
             else:
                 try:
-                    repo = hg.repository(ui, path=path,
-                                         presetupfuncs=req.prereposetups,
-                                         intents=func.intents)
+                    repo = hg.repository(
+                        ui,
+                        path=path,
+                        presetupfuncs=req.prereposetups,
+                        intents=func.intents,
+                    )
                     if not repo.local():
-                        raise error.Abort(_("repository '%s' is not local")
-                                          % path)
-                    repo.ui.setconfig("bundle", "mainreporoot", repo.root,
-                                      'repo')
+                        raise error.Abort(
+                            _("repository '%s' is not local") % path
+                        )
+                    repo.ui.setconfig(
+                        "bundle", "mainreporoot", repo.root, 'repo'
+                    )
                 except error.RequirementError:
                     raise
                 except error.RepoError:
-                    if rpath: # invalid -R path
+                    if rpath:  # invalid -R path
                         raise
                     if not func.optionalrepo:
                         if func.inferrepo and args and not path:
@@ -1000,9 +1145,13 @@
                                 req.earlyoptions['repository'] = guess
                                 return _dispatch(req)
                         if not path:
-                            raise error.RepoError(_("no repository found in"
-                                                    " '%s' (.hg not found)")
-                                                  % encoding.getcwd())
+                            raise error.RepoError(
+                                _(
+                                    "no repository found in"
+                                    " '%s' (.hg not found)"
+                                )
+                                % encoding.getcwd()
+                            )
                         raise
             if repo:
                 ui = repo.ui
@@ -1017,12 +1166,14 @@
         strcmdopt = pycompat.strkwargs(cmdoptions)
         d = lambda: util.checksignature(func)(ui, *args, **strcmdopt)
         try:
-            return runcommand(lui, repo, cmd, fullargs, ui, options, d,
-                              cmdpats, cmdoptions)
+            return runcommand(
+                lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions
+            )
         finally:
             if repo and repo != req.repo:
                 repo.close()
 
+
 def _runcommand(ui, options, cmd, cmdfunc):
     """Run a command function, possibly with profiling enabled."""
     try:
@@ -1031,6 +1182,7 @@
     except error.SignatureError:
         raise error.CommandError(cmd, _('invalid arguments'))
 
+
 def _exceptionwarning(ui):
     """Produce a warning message for the current active exception"""
 
@@ -1069,28 +1221,40 @@
     if worst[0] is not None:
         name, testedwith, report = worst
         if not isinstance(testedwith, (bytes, str)):
-            testedwith = '.'.join([stringutil.forcebytestr(c)
-                                   for c in testedwith])
-        warning = (_('** Unknown exception encountered with '
-                     'possibly-broken third-party extension %s\n'
-                     '** which supports versions %s of Mercurial.\n'
-                     '** Please disable %s and try your action again.\n'
-                     '** If that fixes the bug please report it to %s\n')
-                   % (name, testedwith, name, stringutil.forcebytestr(report)))
+            testedwith = '.'.join(
+                [stringutil.forcebytestr(c) for c in testedwith]
+            )
+        warning = _(
+            '** Unknown exception encountered with '
+            'possibly-broken third-party extension %s\n'
+            '** which supports versions %s of Mercurial.\n'
+            '** Please disable %s and try your action again.\n'
+            '** If that fixes the bug please report it to %s\n'
+        ) % (name, testedwith, name, stringutil.forcebytestr(report))
     else:
         bugtracker = ui.config('ui', 'supportcontact')
         if bugtracker is None:
             bugtracker = _("https://mercurial-scm.org/wiki/BugTracker")
-        warning = (_("** unknown exception encountered, "
-                     "please report by visiting\n** ") + bugtracker + '\n')
+        warning = (
+            _(
+                "** unknown exception encountered, "
+                "please report by visiting\n** "
+            )
+            + bugtracker
+            + '\n'
+        )
     sysversion = pycompat.sysbytes(sys.version).replace('\n', '')
-    warning += ((_("** Python %s\n") % sysversion) +
-                (_("** Mercurial Distributed SCM (version %s)\n") %
-                 util.version()) +
-                (_("** Extensions loaded: %s\n") %
-                 ", ".join([x[0] for x in extensions.extensions()])))
+    warning += (
+        (_("** Python %s\n") % sysversion)
+        + (_("** Mercurial Distributed SCM (version %s)\n") % util.version())
+        + (
+            _("** Extensions loaded: %s\n")
+            % ", ".join([x[0] for x in extensions.extensions()])
+        )
+    )
     return warning
 
+
 def handlecommandexception(ui):
     """Produce a warning message for broken commands
 
@@ -1098,7 +1262,11 @@
     this function returns False, ignored otherwise.
     """
     warning = _exceptionwarning(ui)
-    ui.log("commandexception", "%s\n%s\n", warning,
-           pycompat.sysbytes(traceback.format_exc()))
+    ui.log(
+        "commandexception",
+        "%s\n%s\n",
+        warning,
+        pycompat.sysbytes(traceback.format_exc()),
+    )
     ui.warn(warning)
     return False  # re-raise the exception
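
Most of the dispatch.py hunks above are one transformation: a call or
condition that overflows black's line-length limit is exploded to one
element per line under a hanging indent, with the closing bracket
dedented and a trailing comma added. A minimal sketch of the shape; the
log() function and its arguments are hypothetical stand-ins, not
Mercurial APIs:

    # Hypothetical stand-in illustrating black's hanging-indent style.
    def log(event, fmt, *args):
        print(fmt % args)

    # A call that no longer fits on one line becomes one argument per
    # line, with a trailing comma and the closing paren on its own line.
    log(
        'commandalias',
        "alias '%s' expands to '%s'\n",
        'ci',
        'commit',
    )

    # Long boolean conditions get the same shape, with each operator
    # starting a continuation line.
    argv = ['-R', 'repo', 'serve', '--stdio']
    unsafe = (
        len(argv) != 4
        or argv[0] != '-R'
        or argv[2] != 'serve'
    )
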
--- a/mercurial/encoding.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/encoding.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,9 +17,7 @@
     pycompat,
 )
 
-from .pure import (
-    charencode as charencodepure,
-)
+from .pure import charencode as charencodepure
 
 charencode = policy.importmod(r'charencode')
 
@@ -36,12 +34,15 @@
 # These unicode characters are ignored by HFS+ (Apple Technote 1150,
 # "Unicode Subtleties"), so we need to ignore them in some places for
 # sanity.
-_ignore = [unichr(int(x, 16)).encode("utf-8") for x in
-           "200c 200d 200e 200f 202a 202b 202c 202d 202e "
-           "206a 206b 206c 206d 206e 206f feff".split()]
+_ignore = [
+    unichr(int(x, 16)).encode("utf-8")
+    for x in "200c 200d 200e 200f 202a 202b 202c 202d 202e "
+    "206a 206b 206c 206d 206e 206f feff".split()
+]
 # verify the next function will work
 assert all(i.startswith(("\xe2", "\xef")) for i in _ignore)
 
+
 def hfsignoreclean(s):
     """Remove codepoints ignored by HFS+ from s.
 
@@ -55,9 +56,10 @@
             s = s.replace(c, '')
     return s
 
+
 # encoding.environ is provided read-only, which may not be used to modify
 # the process environment
-_nativeenviron = (not pycompat.ispy3 or os.supports_bytes_environ)
+_nativeenviron = not pycompat.ispy3 or os.supports_bytes_environ
 if not pycompat.ispy3:
     environ = os.environ  # re-exports
 elif _nativeenviron:
@@ -65,8 +67,10 @@
 else:
     # preferred encoding isn't known yet; use utf-8 to avoid unicode error
     # and recreate it once encoding is settled
-    environ = dict((k.encode(r'utf-8'), v.encode(r'utf-8'))
-                   for k, v in os.environ.items())  # re-exports
+    environ = dict(
+        (k.encode(r'utf-8'), v.encode(r'utf-8'))
+        for k, v in os.environ.items()  # re-exports
+    )
 
 _encodingrewrites = {
     '646': 'ascii',
@@ -88,15 +92,19 @@
 encodingmode = environ.get("HGENCODINGMODE", "strict")
 fallbackencoding = 'ISO-8859-1'
 
+
 class localstr(bytes):
     '''This class allows strings that are unmodified to be
     round-tripped to the local encoding and back'''
+
     def __new__(cls, u, l):
         s = bytes.__new__(cls, l)
         s._utf8 = u
         return s
+
     def __hash__(self):
-        return hash(self._utf8) # avoid collisions in local string space
+        return hash(self._utf8)  # avoid collisions in local string space
+
 
 class safelocalstr(bytes):
     """Tagged string denoting it was previously an internal UTF-8 string,
@@ -108,6 +116,7 @@
     >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
     """
 
+
 def tolocal(s):
     """
     Convert a string from internal UTF-8 to local encoding
@@ -167,12 +176,13 @@
                     return safelocalstr(r)
                 return localstr(u.encode('UTF-8'), r)
             except UnicodeDecodeError:
-                u = s.decode("utf-8", "replace") # last ditch
+                u = s.decode("utf-8", "replace")  # last ditch
                 # can't round-trip
                 return u.encode(_sysstr(encoding), r"replace")
     except LookupError as k:
         raise error.Abort(k, hint="please check your locale settings")
 
+
 def fromlocal(s):
     """
     Convert a string from the local character encoding to UTF-8
@@ -194,27 +204,34 @@
         u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
         return u.encode("utf-8")
     except UnicodeDecodeError as inst:
-        sub = s[max(0, inst.start - 10):inst.start + 10]
-        raise error.Abort("decoding near '%s': %s!"
-                          % (sub, pycompat.bytestr(inst)))
+        sub = s[max(0, inst.start - 10) : inst.start + 10]
+        raise error.Abort(
+            "decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
+        )
     except LookupError as k:
         raise error.Abort(k, hint="please check your locale settings")
 
+
 def unitolocal(u):
     """Convert a unicode string to a byte string of local encoding"""
     return tolocal(u.encode('utf-8'))
 
+
 def unifromlocal(s):
     """Convert a byte string of local encoding to a unicode string"""
     return fromlocal(s).decode('utf-8')
 
+
 def unimethod(bytesfunc):
     """Create a proxy method that forwards __unicode__() and __str__() of
     Python 3 to __bytes__()"""
+
     def unifunc(obj):
         return unifromlocal(bytesfunc(obj))
+
     return unifunc
 
+
 # converter functions between native str and byte string. use these if the
 # character encoding is not aware (e.g. exception message) or is known to
 # be locale dependent (e.g. date formatting.)
@@ -230,8 +247,10 @@
 if not _nativeenviron:
     # now encoding and helper functions are available, recreate the environ
     # dict to be exported to other modules
-    environ = dict((tolocal(k.encode(r'utf-8')), tolocal(v.encode(r'utf-8')))
-                   for k, v in os.environ.items())  # re-exports
+    environ = dict(
+        (tolocal(k.encode(r'utf-8')), tolocal(v.encode(r'utf-8')))
+        for k, v in os.environ.items()  # re-exports
+    )
 
 if pycompat.ispy3:
     # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
@@ -246,13 +265,16 @@
     getcwd = os.getcwd  # re-exports
 
 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
-_wide = _sysstr(environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
-                and "WFA" or "WF")
+_wide = _sysstr(
+    environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide" and "WFA" or "WF"
+)
+
 
 def colwidth(s):
     "Find the column width of a string for display in the local encoding"
     return ucolwidth(s.decode(_sysstr(encoding), r'replace'))
 
+
 def ucolwidth(d):
     "Find the column width of a Unicode string for display"
     eaw = getattr(unicodedata, 'east_asian_width', None)
@@ -260,6 +282,7 @@
         return sum([eaw(c) in _wide and 2 or 1 for c in d])
     return len(d)
 
+
 def getcols(s, start, c):
     '''Use colwidth to find a c-column substring of s starting at byte
     index start'''
@@ -268,6 +291,7 @@
         if colwidth(t) == c:
             return t
 
+
 def trim(s, width, ellipsis='', leftside=False):
     """Trim string 's' to at most 'width' columns (including 'ellipsis').
 
@@ -336,21 +360,21 @@
     try:
         u = s.decode(_sysstr(encoding))
     except UnicodeDecodeError:
-        if len(s) <= width: # trimming is not needed
+        if len(s) <= width:  # trimming is not needed
             return s
         width -= len(ellipsis)
-        if width <= 0: # no enough room even for ellipsis
-            return ellipsis[:width + len(ellipsis)]
+        if width <= 0:  # not enough room even for ellipsis
+            return ellipsis[: width + len(ellipsis)]
         if leftside:
             return ellipsis + s[-width:]
         return s[:width] + ellipsis
 
-    if ucolwidth(u) <= width: # trimming is not needed
+    if ucolwidth(u) <= width:  # trimming is not needed
         return s
 
     width -= len(ellipsis)
-    if width <= 0: # no enough room even for ellipsis
-        return ellipsis[:width + len(ellipsis)]
+    if width <= 0:  # not enough room even for ellipsis
+        return ellipsis[: width + len(ellipsis)]
 
     if leftside:
         uslice = lambda i: u[i:]
@@ -362,7 +386,8 @@
         usub = uslice(i)
         if ucolwidth(usub) <= width:
             return concat(usub.encode(_sysstr(encoding)))
-    return ellipsis # no enough room for multi-column characters
+    return ellipsis  # not enough room for multi-column characters
+
 
 def lower(s):
     "best-effort encoding-aware case-folding of local string s"
@@ -378,13 +403,14 @@
 
         lu = u.lower()
         if u == lu:
-            return s # preserve localstring
+            return s  # preserve localstring
         return lu.encode(_sysstr(encoding))
     except UnicodeError:
-        return s.lower() # we don't know how to fold this except in ASCII
+        return s.lower()  # we don't know how to fold this except in ASCII
     except LookupError as k:
         raise error.Abort(k, hint="please check your locale settings")
 
+
 def upper(s):
     "best-effort encoding-aware case-folding of local string s"
     try:
@@ -392,6 +418,7 @@
     except UnicodeDecodeError:
         return upperfallback(s)
 
+
 def upperfallback(s):
     try:
         if isinstance(s, localstr):
@@ -401,13 +428,14 @@
 
         uu = u.upper()
         if u == uu:
-            return s # preserve localstring
+            return s  # preserve localstring
         return uu.encode(_sysstr(encoding))
     except UnicodeError:
-        return s.upper() # we don't know how to fold this except in ASCII
+        return s.upper()  # we don't know how to fold this except in ASCII
     except LookupError as k:
         raise error.Abort(k, hint="please check your locale settings")
 
+
 class normcasespecs(object):
     '''what a platform's normcase does to ASCII strings
 
@@ -419,10 +447,12 @@
     other: the fallback function should always be called
 
     This should be kept in sync with normcase_spec in util.h.'''
+
     lower = -1
     upper = 1
     other = 0
 
+
 def jsonescape(s, paranoid=False):
     '''returns a string suitable for JSON
 
@@ -475,6 +505,7 @@
         pass
     return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
 
+
 # We need to decode/encode U+DCxx codes transparently since invalid UTF-8
 # bytes are mapped to that range.
 if pycompat.ispy3:
@@ -484,6 +515,7 @@
 
 _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
 
+
 def getutf8char(s, pos):
     '''get the next full utf-8 character in the given string, starting at pos
 
@@ -492,15 +524,16 @@
     '''
 
     # find how many bytes to attempt decoding from first nibble
-    l = _utf8len[ord(s[pos:pos + 1]) >> 4]
-    if not l: # ascii
-        return s[pos:pos + 1]
+    l = _utf8len[ord(s[pos : pos + 1]) >> 4]
+    if not l:  # ascii
+        return s[pos : pos + 1]
 
-    c = s[pos:pos + l]
+    c = s[pos : pos + l]
     # validate with attempted decode
     c.decode("utf-8", _utf8strict)
     return c
 
+
 def toutf8b(s):
     '''convert a local, possibly-binary string into UTF-8b
 
@@ -558,16 +591,17 @@
             c = getutf8char(s, pos)
             if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
                 # have to re-escape existing U+DCxx characters
-                c = unichr(0xdc00 + ord(s[pos])).encode('utf-8', _utf8strict)
+                c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
                 pos += 1
             else:
                 pos += len(c)
         except UnicodeDecodeError:
-            c = unichr(0xdc00 + ord(s[pos])).encode('utf-8', _utf8strict)
+            c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
             pos += 1
         r += c
     return r
 
+
 def fromutf8b(s):
     '''Given a UTF-8b string, return a local, possibly-binary string.
 
@@ -611,6 +645,6 @@
         pos += len(c)
         # unescape U+DCxx characters
         if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf":
-            c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xff)
+            c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
         r += c
     return r
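
Two smaller rules recur in the encoding.py hunks: a slice bound that is
a compound expression gains symmetric spaces around the colon (PEP 8
treats the slice colon as a binary operator in that case), and the
digits of hex literals are uppercased. A minimal sketch with stand-in
values:

    # Hypothetical values; only the formatting is the point.
    data = b'example'
    pos, width = 1, 3

    head = data[:width]              # simple bound: no spaces
    chunk = data[pos : pos + width]  # compound bound: spaced colon

    mask = 0xFF  # hex digits uppercased, the 0x prefix stays lowercase
    assert chunk == b'xam' and mask == 255
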
--- a/mercurial/error.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/error.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,6 +16,7 @@
 # Do not import anything but pycompat here, please
 from . import pycompat
 
+
 def _tobytes(exc):
     """Byte-stringify exception in the same way as BaseException_str()"""
     if not exc.args:
@@ -24,36 +25,43 @@
         return pycompat.bytestr(exc.args[0])
     return b'(%s)' % b', '.join(b"'%s'" % pycompat.bytestr(a) for a in exc.args)
 
+
 class Hint(object):
     """Mix-in to provide a hint of an error
 
     This should come first in the inheritance list to consume a hint and
     pass remaining arguments to the exception class.
     """
+
     def __init__(self, *args, **kw):
         self.hint = kw.pop(r'hint', None)
         super(Hint, self).__init__(*args, **kw)
 
+
 class StorageError(Hint, Exception):
     """Raised when an error occurs in a storage layer.
 
     Usually subclassed by a storage-specific exception.
     """
+
     __bytes__ = _tobytes
 
+
 class RevlogError(StorageError):
     __bytes__ = _tobytes
 
+
 class SidedataHashError(RevlogError):
-
     def __init__(self, key, expected, got):
         self.sidedatakey = key
         self.expecteddigest = expected
         self.actualdigest = got
 
+
 class FilteredIndexError(IndexError):
     __bytes__ = _tobytes
 
+
 class LookupError(RevlogError, KeyError):
     def __init__(self, name, index, message):
         self.name = name
@@ -63,6 +71,7 @@
         self.lookupmessage = message
         if isinstance(name, bytes) and len(name) == 20:
             from .node import short
+
             name = short(name)
         RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
 
@@ -72,93 +81,125 @@
     def __str__(self):
         return RevlogError.__str__(self)
 
+
 class AmbiguousPrefixLookupError(LookupError):
     pass
 
+
 class FilteredLookupError(LookupError):
     pass
 
+
 class ManifestLookupError(LookupError):
     pass
 
+
 class CommandError(Exception):
     """Exception raised on errors in parsing the command line."""
+
     __bytes__ = _tobytes
 
+
 class InterventionRequired(Hint, Exception):
     """Exception raised when a command requires human intervention."""
+
     __bytes__ = _tobytes
 
+
 class Abort(Hint, Exception):
     """Raised if a command needs to print an error and exit."""
+
     __bytes__ = _tobytes
 
+
 class HookLoadError(Abort):
     """raised when loading a hook fails, aborting an operation
 
     Exists to allow more specialized catching."""
 
+
 class HookAbort(Abort):
     """raised when a validation hook fails, aborting an operation
 
     Exists to allow more specialized catching."""
 
+
 class ConfigError(Abort):
     """Exception raised when parsing config files"""
 
+
 class UpdateAbort(Abort):
     """Raised when an update is aborted for destination issue"""
 
+
 class MergeDestAbort(Abort):
     """Raised when an update is aborted for destination issues"""
 
+
 class NoMergeDestAbort(MergeDestAbort):
     """Raised when an update is aborted because there is nothing to merge"""
 
+
 class ManyMergeDestAbort(MergeDestAbort):
     """Raised when an update is aborted because destination is ambiguous"""
 
+
 class ResponseExpected(Abort):
     """Raised when an EOF is received for a prompt"""
+
     def __init__(self):
         from .i18n import _
+
         Abort.__init__(self, _('response expected'))
 
+
 class OutOfBandError(Hint, Exception):
     """Exception raised when a remote repo reports failure"""
+
     __bytes__ = _tobytes
 
+
 class ParseError(Hint, Exception):
     """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
+
     __bytes__ = _tobytes
 
+
 class PatchError(Exception):
     __bytes__ = _tobytes
 
+
 class UnknownIdentifier(ParseError):
     """Exception raised when a {rev,file}set references an unknown identifier"""
 
     def __init__(self, function, symbols):
         from .i18n import _
+
         ParseError.__init__(self, _("unknown identifier: %s") % function)
         self.function = function
         self.symbols = symbols
 
+
 class RepoError(Hint, Exception):
     __bytes__ = _tobytes
 
+
 class RepoLookupError(RepoError):
     pass
 
+
 class FilteredRepoLookupError(RepoLookupError):
     pass
 
+
 class CapabilityError(RepoError):
     pass
 
+
 class RequirementError(RepoError):
     """Exception raised if .hg/requires has an unknown entry."""
 
+
 class StdioError(IOError):
     """Raised if I/O to stdout or stderr fails"""
 
@@ -167,15 +208,22 @@
 
     # no __bytes__() because error message is derived from the standard IOError
 
+
 class UnsupportedMergeRecords(Abort):
     def __init__(self, recordtypes):
         from .i18n import _
+
         self.recordtypes = sorted(recordtypes)
         s = ' '.join(self.recordtypes)
         Abort.__init__(
-            self, _('unsupported merge state records: %s') % s,
-            hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for '
-                   'more information'))
+            self,
+            _('unsupported merge state records: %s') % s,
+            hint=_(
+                'see https://mercurial-scm.org/wiki/MergeStateRecords for '
+                'more information'
+            ),
+        )
+
 
 class UnknownVersion(Abort):
     """generic exception for aborting from an encounter with an unknown version
@@ -185,6 +233,7 @@
         self.version = version
         super(UnknownVersion, self).__init__(msg, hint=hint)
 
+
 class LockError(IOError):
     def __init__(self, errno, strerror, filename, desc):
         IOError.__init__(self, errno, strerror, filename)
@@ -192,41 +241,55 @@
 
     # no __bytes__() because error message is derived from the standard IOError
 
+
 class LockHeld(LockError):
     def __init__(self, errno, filename, desc, locker):
         LockError.__init__(self, errno, 'Lock held', filename, desc)
         self.locker = locker
 
+
 class LockUnavailable(LockError):
     pass
 
+
 # LockError is for errors while acquiring the lock -- this is unrelated
 class LockInheritanceContractViolation(RuntimeError):
     __bytes__ = _tobytes
 
+
 class ResponseError(Exception):
     """Raised to print an error with part of output and exit."""
+
     __bytes__ = _tobytes
 
+
 class UnknownCommand(Exception):
     """Exception raised if command is not in the command table."""
+
     __bytes__ = _tobytes
 
+
 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""
+
     __bytes__ = _tobytes
 
+
 # derived from KeyboardInterrupt to simplify some breakout code
 class SignalInterrupt(KeyboardInterrupt):
     """Exception raised on SIGTERM and SIGHUP."""
 
+
 class SignatureError(Exception):
     __bytes__ = _tobytes
 
+
 class PushRaced(RuntimeError):
     """An exception raised during unbundling that indicate a push race"""
+
     __bytes__ = _tobytes
 
+
 class ProgrammingError(Hint, RuntimeError):
     """Raised if a mercurial (core or extension) developer made a mistake"""
 
@@ -239,15 +302,20 @@
 
     __bytes__ = _tobytes
 
+
 class WdirUnsupported(Exception):
     """An exception which is raised when 'wdir()' is not supported"""
+
     __bytes__ = _tobytes
 
+
 # bundle2 related errors
 class BundleValueError(ValueError):
     """error raised when bundle2 cannot be processed"""
+
     __bytes__ = _tobytes
 
+
 class BundleUnknownFeatureError(BundleValueError):
     def __init__(self, parttype=None, params=(), values=()):
         self.parttype = parttype
@@ -271,15 +339,19 @@
             msg = '%s - %s' % (msg, ', '.join(entries))
         ValueError.__init__(self, msg)
 
+
 class ReadOnlyPartError(RuntimeError):
     """error raised when code tries to alter a part being generated"""
+
     __bytes__ = _tobytes
 
+
 class PushkeyFailed(Abort):
     """error raised when a pushkey part failed to update a value"""
 
-    def __init__(self, partid, namespace=None, key=None, new=None, old=None,
-                 ret=None):
+    def __init__(
+        self, partid, namespace=None, key=None, new=None, old=None, ret=None
+    ):
         self.partid = partid
         self.namespace = namespace
         self.key = key
@@ -287,8 +359,10 @@
         self.old = old
         self.ret = ret
         # no i18n expected to be processed into a better message
-        Abort.__init__(self, 'failed to update value for "%s/%s"'
-                       % (namespace, key))
+        Abort.__init__(
+            self, 'failed to update value for "%s/%s"' % (namespace, key)
+        )
+
 
 class CensoredNodeError(StorageError):
     """error raised when content verification fails on a censored node
@@ -298,9 +372,11 @@
 
     def __init__(self, filename, node, tombstone):
         from .node import short
+
         StorageError.__init__(self, '%s:%s' % (filename, short(node)))
         self.tombstone = tombstone
 
+
 class CensoredBaseError(StorageError):
     """error raised when a delta is rejected because its base is censored
 
@@ -309,28 +385,38 @@
     the delta may be applied by clones which have not censored the base.
     """
 
+
 class InvalidBundleSpecification(Exception):
     """error raised when a bundle specification is invalid.
 
     This is used for syntax errors as opposed to support errors.
     """
+
     __bytes__ = _tobytes
 
+
 class UnsupportedBundleSpecification(Exception):
     """error raised when a bundle specification is not supported."""
+
     __bytes__ = _tobytes
 
+
 class CorruptedState(Exception):
     """error raised when a command is not able to read its state from file"""
+
     __bytes__ = _tobytes
 
+
 class PeerTransportError(Abort):
     """Transport-level I/O error when communicating with a peer repo."""
 
+
 class InMemoryMergeConflictsError(Exception):
     """Exception raised when merge conflicts arose during an in-memory merge."""
+
     __bytes__ = _tobytes
 
+
 class WireprotoCommandError(Exception):
     """Represents an error during execution of a wire protocol command.
 
@@ -338,6 +424,7 @@
 
     The error is a formatter string and an optional iterable of arguments.
     """
+
     def __init__(self, message, args=None):
         self.message = message
         self.messageargs = args
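
Nearly every error.py hunk applies the same pair of blank-line rules:
two blank lines between top-level definitions, and one blank line
between a class docstring and the first statement of the class body. A
minimal sketch using a hypothetical exception hierarchy:

    class BaseFailure(Exception):
        """Hypothetical root class.

        A blank line now separates this docstring from the body below.
        """

        exit_code = 1


    class RetryableFailure(BaseFailure):
        """Two blank lines above separate top-level definitions."""
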
--- a/mercurial/exchange.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/exchange.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,9 +16,7 @@
     nullid,
     nullrev,
 )
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from . import (
     bookmarks as bookmod,
     bundle2,
@@ -40,12 +38,8 @@
     util,
     wireprototypes,
 )
-from .interfaces import (
-    repository,
-)
-from .utils import (
-    stringutil,
-)
+from .interfaces import repository
+from .utils import stringutil
 
 urlerr = util.urlerr
 urlreq = util.urlreq
@@ -53,11 +47,12 @@
 _NARROWACL_SECTION = 'narrowacl'
 
 # Maps bundle version human names to changegroup versions.
-_bundlespeccgversions = {'v1': '01',
-                         'v2': '02',
-                         'packed1': 's1',
-                         'bundle2': '02', #legacy
-                        }
+_bundlespeccgversions = {
+    'v1': '01',
+    'v2': '02',
+    'packed1': 's1',
+    'bundle2': '02',  # legacy
+}
 
 # Maps bundle version with content opts to choose which part to bundle
 _bundlespeccontentopts = {
@@ -67,7 +62,7 @@
         'obsolescence': False,
         'phases': False,
         'tagsfnodescache': False,
-        'revbranchcache': False
+        'revbranchcache': False,
     },
     'v2': {
         'changegroup': True,
@@ -75,21 +70,25 @@
         'obsolescence': False,
         'phases': False,
         'tagsfnodescache': True,
-        'revbranchcache': True
+        'revbranchcache': True,
     },
-    'packed1' : {
-        'cg.version': 's1'
+    'packed1': {'cg.version': 's1'},
+}
+_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
+
+_bundlespecvariants = {
+    "streamv2": {
+        "changegroup": False,
+        "streamv2": True,
+        "tagsfnodescache": False,
+        "revbranchcache": False,
     }
 }
-_bundlespeccontentopts['bundle2'] = _bundlespeccontentopts['v2']
-
-_bundlespecvariants = {"streamv2": {"changegroup": False, "streamv2": True,
-                                    "tagsfnodescache": False,
-                                    "revbranchcache": False}}
 
 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
 _bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
 
+
 @attr.s
 class bundlespec(object):
     compression = attr.ib()
@@ -99,6 +98,7 @@
     params = attr.ib()
     contentopts = attr.ib()
 
+
 def parsebundlespec(repo, spec, strict=True):
     """Parse a bundle string specification into parts.
 
@@ -132,6 +132,7 @@
     Note: this function will likely eventually return a more complex data
     structure, including bundle2 part information.
     """
+
     def parseparams(s):
         if ';' not in s:
             return s, {}
@@ -142,8 +143,12 @@
         for p in paramstr.split(';'):
             if '=' not in p:
                 raise error.InvalidBundleSpecification(
-                    _('invalid bundle specification: '
-                      'missing "=" in parameter: %s') % p)
+                    _(
+                        'invalid bundle specification: '
+                        'missing "=" in parameter: %s'
+                    )
+                    % p
+                )
 
             key, value = p.split('=', 1)
             key = urlreq.unquote(key)
@@ -152,24 +157,29 @@
 
         return version, params
 
-
     if strict and '-' not in spec:
         raise error.InvalidBundleSpecification(
-                _('invalid bundle specification; '
-                  'must be prefixed with compression: %s') % spec)
+            _(
+                'invalid bundle specification; '
+                'must be prefixed with compression: %s'
+            )
+            % spec
+        )
 
     if '-' in spec:
         compression, version = spec.split('-', 1)
 
         if compression not in util.compengines.supportedbundlenames:
             raise error.UnsupportedBundleSpecification(
-                    _('%s compression is not supported') % compression)
+                _('%s compression is not supported') % compression
+            )
 
         version, params = parseparams(version)
 
         if version not in _bundlespeccgversions:
             raise error.UnsupportedBundleSpecification(
-                    _('%s is not a recognized bundle version') % version)
+                _('%s is not a recognized bundle version') % version
+            )
     else:
         # Value could be just the compression or just the version, in which
         # case some defaults are assumed (but only when not in strict mode).
@@ -194,13 +204,15 @@
             version = spec
         else:
             raise error.UnsupportedBundleSpecification(
-                    _('%s is not a recognized bundle specification') % spec)
+                _('%s is not a recognized bundle specification') % spec
+            )
 
     # Bundle version 1 only supports a known set of compression engines.
     if version == 'v1' and compression not in _bundlespecv1compengines:
         raise error.UnsupportedBundleSpecification(
-            _('compression engine %s is not supported on v1 bundles') %
-            compression)
+            _('compression engine %s is not supported on v1 bundles')
+            % compression
+        )
 
     # The specification for packed1 can optionally declare the data formats
     # required to apply it. If we see this metadata, compare against what the
@@ -210,8 +222,9 @@
         missingreqs = requirements - repo.supportedformats
         if missingreqs:
             raise error.UnsupportedBundleSpecification(
-                    _('missing support for repository features: %s') %
-                      ', '.join(sorted(missingreqs)))
+                _('missing support for repository features: %s')
+                % ', '.join(sorted(missingreqs))
+            )
 
     # Compute contentopts based on the version
     contentopts = _bundlespeccontentopts.get(version, {}).copy()
@@ -225,8 +238,10 @@
     compression, wirecompression = engine.bundletype()
     wireversion = _bundlespeccgversions[version]
 
-    return bundlespec(compression, wirecompression, version, wireversion,
-                      params, contentopts)
+    return bundlespec(
+        compression, wirecompression, version, wireversion, params, contentopts
+    )
+
 
 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)
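
The parsebundlespec hunks above show how black wraps long %-formatted
messages: it breaks before the binary operator, so the continuation
line starts with %, parenthesizing the expression as needed. A short
sketch; the message text is a stand-in:

    # Hypothetical sketch of the wrapped %-expression shape.
    spec = 'v9'
    message = (
        '%s is not a recognized bundle version'
        % spec
    )
    assert message.startswith('v9')
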
@@ -256,12 +271,14 @@
     else:
         raise error.Abort(_('%s: unknown bundle version %s') % (fname, version))
 
+
 def getbundlespec(ui, fh):
     """Infer the bundlespec from a bundle file handle.
 
     The input file handle is seeked and the original seek position is not
     restored.
     """
+
     def speccompression(alg):
         try:
             return util.compengines.forbundletype(alg).bundletype()[0]
@@ -292,10 +309,14 @@
                 if version in ('01', '02'):
                     version = 'v2'
                 else:
-                    raise error.Abort(_('changegroup version %s does not have '
-                                        'a known bundlespec') % version,
-                                      hint=_('try upgrading your Mercurial '
-                                              'client'))
+                    raise error.Abort(
+                        _(
+                            'changegroup version %s does not have '
+                            'a known bundlespec'
+                        )
+                        % version,
+                        hint=_('try upgrading your Mercurial ' 'client'),
+                    )
             elif part.type == 'stream2' and version is None:
                 # A stream2 part requires to be part of a v2 bundle
                 requirements = urlreq.unquote(part.params['requirements'])
@@ -304,8 +325,9 @@
                 return 'none-v2;stream=v2;%s' % params
 
         if not version:
-            raise error.Abort(_('could not identify changegroup version in '
-                                'bundle'))
+            raise error.Abort(
+                _('could not identify changegroup version in ' 'bundle')
+            )
 
         return '%s-%s' % (comp, version)
     elif isinstance(b, streamclone.streamcloneapplier):
@@ -315,6 +337,7 @@
     else:
         raise error.Abort(_('unknown bundle type: %s') % b)
 
+
 def _computeoutgoing(repo, heads, common):
     """Computes which revs are outgoing given a set of common
     and a set of heads.
@@ -334,6 +357,7 @@
         heads = cl.heads()
     return discovery.outgoing(repo, common, heads)
 
+
 def _checkpublish(pushop):
     repo = pushop.repo
     ui = repo.ui
@@ -350,18 +374,21 @@
         published = repo.revs('::%ln - public()', pushop.revs)
     if published:
         if behavior == 'warn':
-            ui.warn(_('%i changesets about to be published\n')
-                    % len(published))
+            ui.warn(_('%i changesets about to be published\n') % len(published))
         elif behavior == 'confirm':
-            if ui.promptchoice(_('push and publish %i changesets (yn)?'
-                                 '$$ &Yes $$ &No') % len(published)):
+            if ui.promptchoice(
+                _('push and publish %i changesets (yn)?' '$$ &Yes $$ &No')
+                % len(published)
+            ):
                 raise error.Abort(_('user quit'))
         elif behavior == 'abort':
             msg = _('push would publish %i changesets') % len(published)
-            hint = _("use --publish or adjust 'experimental.auto-publish'"
-                     " config")
+            hint = _(
+                "use --publish or adjust 'experimental.auto-publish'" " config"
+            )
             raise error.Abort(msg, hint=hint)
 
+
 def _forcebundle1(op):
     """return true if a pull/push must use bundle1
 
@@ -377,6 +404,7 @@
     forcebundle1 = 'bundle2' not in exchange and 'bundle1' in exchange
     return forcebundle1 or not op.remote.capable('bundle2')
 
+
 class pushoperation(object):
     """A object that represent a single push operation
 
@@ -386,8 +414,17 @@
     discarded afterward.
     """
 
-    def __init__(self, repo, remote, force=False, revs=None, newbranch=False,
-                 bookmarks=(), publish=False, pushvars=None):
+    def __init__(
+        self,
+        repo,
+        remote,
+        force=False,
+        revs=None,
+        newbranch=False,
+        bookmarks=(),
+        publish=False,
+        pushvars=None,
+    ):
         # repo we push from
         self.repo = repo
         self.ui = repo.ui
@@ -483,9 +520,11 @@
         cheads = [node for node in self.revs if nm[node] in common]
         # and
         # * commonheads parents on missing
-        revset = unfi.set('%ln and parents(roots(%ln))',
-                         self.outgoing.commonheads,
-                         self.outgoing.missing)
+        revset = unfi.set(
+            '%ln and parents(roots(%ln))',
+            self.outgoing.commonheads,
+            self.outgoing.missing,
+        )
         cheads.extend(c.node() for c in revset)
         return cheads
 
@@ -497,18 +536,34 @@
         else:
             return self.fallbackheads
 
+
 # mapping of message used when pushing bookmark
-bookmsgmap = {'update': (_("updating bookmark %s\n"),
-                         _('updating bookmark %s failed!\n')),
-              'export': (_("exporting bookmark %s\n"),
-                         _('exporting bookmark %s failed!\n')),
-              'delete': (_("deleting remote bookmark %s\n"),
-                         _('deleting remote bookmark %s failed!\n')),
-              }
-
-
-def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(),
-         publish=False, opargs=None):
+bookmsgmap = {
+    'update': (
+        _("updating bookmark %s\n"),
+        _('updating bookmark %s failed!\n'),
+    ),
+    'export': (
+        _("exporting bookmark %s\n"),
+        _('exporting bookmark %s failed!\n'),
+    ),
+    'delete': (
+        _("deleting remote bookmark %s\n"),
+        _('deleting remote bookmark %s failed!\n'),
+    ),
+}
+
+
+def push(
+    repo,
+    remote,
+    force=False,
+    revs=None,
+    newbranch=False,
+    bookmarks=(),
+    publish=False,
+    opargs=None,
+):
     '''Push outgoing changesets (limited by revs) from a local
     repository to remote. Return an integer:
       - None means nothing to push
@@ -519,23 +574,38 @@
     '''
     if opargs is None:
         opargs = {}
-    pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks,
-                           publish, **pycompat.strkwargs(opargs))
+    pushop = pushoperation(
+        repo,
+        remote,
+        force,
+        revs,
+        newbranch,
+        bookmarks,
+        publish,
+        **pycompat.strkwargs(opargs)
+    )
     if pushop.remote.local():
-        missing = (set(pushop.repo.requirements)
-                   - pushop.remote.local().supported)
+        missing = (
+            set(pushop.repo.requirements) - pushop.remote.local().supported
+        )
         if missing:
-            msg = _("required features are not"
-                    " supported in the destination:"
-                    " %s") % (', '.join(sorted(missing)))
+            msg = _(
+                "required features are not"
+                " supported in the destination:"
+                " %s"
+            ) % (', '.join(sorted(missing)))
             raise error.Abort(msg)
 
     if not pushop.remote.canpush():
         raise error.Abort(_("destination does not support push"))
 
     if not pushop.remote.capable('unbundle'):
-        raise error.Abort(_('cannot push: destination does not support the '
-                            'unbundle wire protocol command'))
+        raise error.Abort(
+            _(
+                'cannot push: destination does not support the '
+                'unbundle wire protocol command'
+            )
+        )
 
     # get lock as we might write phase data
     wlock = lock = None
@@ -543,20 +613,23 @@
         # bundle2 push may receive a reply bundle touching bookmarks
         # requiring the wlock. Take it now to ensure proper ordering.
         maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback')
-        if ((not _forcebundle1(pushop)) and
-            maypushback and
-            not bookmod.bookmarksinstore(repo)):
+        if (
+            (not _forcebundle1(pushop))
+            and maypushback
+            and not bookmod.bookmarksinstore(repo)
+        ):
             wlock = pushop.repo.wlock()
         lock = pushop.repo.lock()
-        pushop.trmanager = transactionmanager(pushop.repo,
-                                              'push-response',
-                                              pushop.remote.url())
+        pushop.trmanager = transactionmanager(
+            pushop.repo, 'push-response', pushop.remote.url()
+        )
     except error.LockUnavailable as err:
         # source repo cannot be locked.
         # We do not abort the push, but just disable the local phase
         # synchronisation.
-        msg = ('cannot lock source repository: %s\n'
-               % stringutil.forcebytestr(err))
+        msg = 'cannot lock source repository: %s\n' % stringutil.forcebytestr(
+            err
+        )
         pushop.ui.debug(msg)
 
     with wlock or util.nullcontextmanager():
@@ -577,6 +650,7 @@
 
     return pushop
 
+
 # list of steps to perform discovery before push
 pushdiscoveryorder = []
 
@@ -585,6 +659,7 @@
 # This exists to help extensions wrap steps if necessary
 pushdiscoverymapping = {}
 
+
 def pushdiscovery(stepname):
     """decorator for function performing discovery before push
 
@@ -594,36 +669,50 @@
 
     You can only use this decorator for a new step; if you want to wrap a step
     from an extension, change the pushdiscoverymapping dictionary directly."""
+
     def dec(func):
         assert stepname not in pushdiscoverymapping
         pushdiscoverymapping[stepname] = func
         pushdiscoveryorder.append(stepname)
         return func
+
     return dec
 
+
 def _pushdiscovery(pushop):
     """Run all discovery steps"""
     for stepname in pushdiscoveryorder:
         step = pushdiscoverymapping[stepname]
         step(pushop)
 
+
 @pushdiscovery('changeset')
 def _pushdiscoverychangeset(pushop):
     """discover the changeset that need to be pushed"""
     fci = discovery.findcommonincoming
     if pushop.revs:
-        commoninc = fci(pushop.repo, pushop.remote, force=pushop.force,
-                        ancestorsof=pushop.revs)
+        commoninc = fci(
+            pushop.repo,
+            pushop.remote,
+            force=pushop.force,
+            ancestorsof=pushop.revs,
+        )
     else:
         commoninc = fci(pushop.repo, pushop.remote, force=pushop.force)
     common, inc, remoteheads = commoninc
     fco = discovery.findcommonoutgoing
-    outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs,
-                   commoninc=commoninc, force=pushop.force)
+    outgoing = fco(
+        pushop.repo,
+        pushop.remote,
+        onlyheads=pushop.revs,
+        commoninc=commoninc,
+        force=pushop.force,
+    )
     pushop.outgoing = outgoing
     pushop.remoteheads = remoteheads
     pushop.incoming = inc
 
+
 @pushdiscovery('phase')
 def _pushdiscoveryphase(pushop):
     """discover the phase that needs to be pushed
@@ -633,10 +722,12 @@
     unfi = pushop.repo.unfiltered()
     remotephases = listkeys(pushop.remote, 'phases')
 
-    if (pushop.ui.configbool('ui', '_usedassubrepo')
-        and remotephases    # server supports phases
-        and not pushop.outgoing.missing # no changesets to be pushed
-        and remotephases.get('publishing', False)):
+    if (
+        pushop.ui.configbool('ui', '_usedassubrepo')
+        and remotephases  # server supports phases
+        and not pushop.outgoing.missing  # no changesets to be pushed
+        and remotephases.get('publishing', False)
+    ):
         # When:
         # - this is a subrepo push
         # - and the remote supports phases
@@ -650,9 +741,9 @@
         pushop.fallbackoutdatedphases = []
         return
 
-    pushop.remotephases = phases.remotephasessummary(pushop.repo,
-                                                     pushop.fallbackheads,
-                                                     remotephases)
+    pushop.remotephases = phases.remotephasessummary(
+        pushop.repo, pushop.fallbackheads, remotephases
+    )
     droots = pushop.remotephases.draftroots
 
     extracond = ''
@@ -664,8 +755,11 @@
     # XXX root we may want to ensure it is but it is costly
     fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
     if not pushop.remotephases.publishing and pushop.publish:
-        future = list(unfi.set('%ln and (not public() or %ln::)',
-                               pushop.futureheads, droots))
+        future = list(
+            unfi.set(
+                '%ln and (not public() or %ln::)', pushop.futureheads, droots
+            )
+        )
     elif not outgoing.missing:
         future = fallback
     else:
@@ -673,13 +767,15 @@
         #
         # should not be necessary for a publishing server, but because of an
         # issue fixed in xxxxx we have to do it anyway.
-        fdroots = list(unfi.set('roots(%ln  + %ln::)',
-                       outgoing.missing, droots))
+        fdroots = list(
+            unfi.set('roots(%ln  + %ln::)', outgoing.missing, droots)
+        )
         fdroots = [f.node() for f in fdroots]
         future = list(unfi.set(revset, fdroots, pushop.futureheads))
     pushop.outdatedphases = future
     pushop.fallbackoutdatedphases = fallback
 
+
 @pushdiscovery('obsmarker')
 def _pushdiscoveryobsmarkers(pushop):
     if not obsolete.isenabled(pushop.repo, obsolete.exchangeopt):
@@ -697,6 +793,7 @@
     nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads))
     pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes)
 
+
 @pushdiscovery('bookmarks')
 def _pushdiscoverybookmarks(pushop):
     ui = pushop.ui
@@ -710,12 +807,14 @@
 
     remotebookmark = bookmod.unhexlifybookmarks(listkeys(remote, 'bookmarks'))
 
-    explicit = {repo._bookmarks.expandname(bookmark)
-                for bookmark in pushop.bookmarks}
+    explicit = {
+        repo._bookmarks.expandname(bookmark) for bookmark in pushop.bookmarks
+    }
 
     comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
     return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
 
+
 def _processcompared(pushop, pushed, explicit, remotebms, comp):
     """take decision on bookmarks to push to the remote repo
 
@@ -754,12 +853,18 @@
     if explicit:
         explicit = sorted(explicit)
         # we should probably list all of them
-        pushop.ui.warn(_('bookmark %s does not exist on the local '
-                         'or remote repository!\n') % explicit[0])
+        pushop.ui.warn(
+            _(
+                'bookmark %s does not exist on the local '
+                'or remote repository!\n'
+            )
+            % explicit[0]
+        )
         pushop.bkresult = 2
 
     pushop.outbookmarks.sort()
 
+
 def _pushcheckoutgoing(pushop):
     outgoing = pushop.outgoing
     unfi = pushop.repo.unfiltered()
@@ -776,9 +881,11 @@
             mso = _("push includes obsolete changeset: %s!")
             mspd = _("push includes phase-divergent changeset: %s!")
             mscd = _("push includes content-divergent changeset: %s!")
-            mst = {"orphan": _("push includes orphan changeset: %s!"),
-                   "phase-divergent": mspd,
-                   "content-divergent": mscd}
+            mst = {
+                "orphan": _("push includes orphan changeset: %s!"),
+                "phase-divergent": mspd,
+                "content-divergent": mscd,
+            }
             # If we are to push if there is at least one
             # obsolete or unstable changeset in missing, at
             # least one of the missinghead will be obsolete or
@@ -795,6 +902,7 @@
         discovery.checkheads(pushop)
     return True
 
+
 # List of names of steps to perform for an outgoing bundle2, order matters.
 b2partsgenorder = []
 
@@ -803,6 +911,7 @@
 # This exists to help extensions wrap steps if necessary
 b2partsgenmapping = {}
 
+
 def b2partsgenerator(stepname, idx=None):
     """decorator for function generating bundle2 part
 
@@ -812,6 +921,7 @@
 
     You can only use this decorator for new steps; if you want to wrap a step
     from an extension, change the b2partsgenmapping dictionary directly."""
+
     def dec(func):
         assert stepname not in b2partsgenmapping
         b2partsgenmapping[stepname] = func
@@ -820,8 +930,10 @@
         else:
             b2partsgenorder.insert(idx, stepname)
         return func
+
     return dec
 
+
 def _pushb2ctxcheckheads(pushop, bundler):
     """Generate race condition checking parts
 
@@ -846,12 +958,16 @@
                 data = iter(sorted(affected))
                 bundler.newpart('check:updated-heads', data=data)
 
+
 def _pushing(pushop):
     """return True if we are pushing anything"""
-    return bool(pushop.outgoing.missing
-                or pushop.outdatedphases
-                or pushop.outobsmarkers
-                or pushop.outbookmarks)
+    return bool(
+        pushop.outgoing.missing
+        or pushop.outdatedphases
+        or pushop.outobsmarkers
+        or pushop.outbookmarks
+    )
+
 
 @b2partsgenerator('check-bookmarks')
 def _pushb2checkbookmarks(pushop, bundler):
@@ -868,6 +984,7 @@
     checkdata = bookmod.binaryencode(data)
     bundler.newpart('check:bookmarks', data=checkdata)
 
+
 @b2partsgenerator('check-phases')
 def _pushb2checkphases(pushop, bundler):
     """insert phase move checking"""
@@ -886,6 +1003,7 @@
             checkdata = phases.binaryencode(checks)
             bundler.newpart('check:phases', data=checkdata)
 
+
 @b2partsgenerator('changeset')
 def _pushb2ctx(pushop, bundler):
     """handle changegroup push through bundle2
@@ -906,26 +1024,32 @@
     version = '01'
     cgversions = b2caps.get('changegroup')
     if cgversions:  # 3.1 and 3.2 ship with an empty value
-        cgversions = [v for v in cgversions
-                      if v in changegroup.supportedoutgoingversions(
-                          pushop.repo)]
+        cgversions = [
+            v
+            for v in cgversions
+            if v in changegroup.supportedoutgoingversions(pushop.repo)
+        ]
         if not cgversions:
             raise error.Abort(_('no common changegroup version'))
         version = max(cgversions)
-    cgstream = changegroup.makestream(pushop.repo, pushop.outgoing, version,
-                                      'push')
+    cgstream = changegroup.makestream(
+        pushop.repo, pushop.outgoing, version, 'push'
+    )
     cgpart = bundler.newpart('changegroup', data=cgstream)
     if cgversions:
         cgpart.addparam('version', version)
     if 'treemanifest' in pushop.repo.requirements:
         cgpart.addparam('treemanifest', '1')
+
     def handlereply(op):
         """extract addchangegroup returns from server reply"""
         cgreplies = op.records.getreplies(cgpart.id)
         assert len(cgreplies['changegroup']) == 1
         pushop.cgresult = cgreplies['changegroup'][0]['return']
+
     return handlereply
 
+
 @b2partsgenerator('phase')
 def _pushb2phases(pushop, bundler):
     """handle phase push through bundle2"""
@@ -943,6 +1067,7 @@
     elif haspushkey:
         return _pushb2phasespushkey(pushop, bundler)
 
+
 def _pushb2phaseheads(pushop, bundler):
     """push phase information through a bundle2 - binary part"""
     pushop.stepsdone.add('phases')
@@ -952,6 +1077,7 @@
         phasedata = phases.binaryencode(updates)
         bundler.newpart('phase-heads', data=phasedata)
 
+
 def _pushb2phasespushkey(pushop, bundler):
     """push phase information through a bundle2 - pushkey part"""
     pushop.stepsdone.add('phases')
@@ -985,8 +1111,10 @@
                 msg = _('updating %s to public failed!\n') % node
             if msg is not None:
                 pushop.ui.warn(msg)
+
     return handlereply
 
+
 @b2partsgenerator('obsmarkers')
 def _pushb2obsmarkers(pushop, bundler):
     if 'obsmarkers' in pushop.stepsdone:
@@ -999,6 +1127,7 @@
         markers = sorted(pushop.outobsmarkers)
         bundle2.buildobsmarkerspart(bundler, markers)
 
+
 @b2partsgenerator('bookmarks')
 def _pushb2bookmarks(pushop, bundler):
     """handle bookmark push through bundle2"""
@@ -1014,6 +1143,7 @@
     elif 'pushkey' in b2caps:
         return _pushb2bookmarkspushkey(pushop, bundler)
 
+
 def _bmaction(old, new):
     """small utility for bookmark pushing"""
     if not old:
@@ -1022,11 +1152,15 @@
         return 'delete'
     return 'update'
 
+
 def _abortonsecretctx(pushop, node, b):
     """abort if a given bookmark points to a secret changeset"""
     if node and pushop.repo[node].phase() == phases.secret:
-        raise error.Abort(_('cannot push bookmark %s as it points to a secret'
-                            ' changeset') % b)
+        raise error.Abort(
+            _('cannot push bookmark %s as it points to a secret' ' changeset')
+            % b
+        )
+
 
 def _pushb2bookmarkspart(pushop, bundler):
     pushop.stepsdone.add('bookmarks')
@@ -1050,6 +1184,7 @@
 
     return handlereply
 
+
 def _pushb2bookmarkspushkey(pushop, bundler):
     pushop.stepsdone.add('bookmarks')
     part2book = []
@@ -1094,8 +1229,10 @@
                     ui.warn(bookmsgmap[action][1] % book)
                     if pushop.bkresult is not None:
                         pushop.bkresult = 1
+
     return handlereply
 
+
 @b2partsgenerator('pushvars', idx=0)
 def _getbundlesendvars(pushop, bundler):
     '''send shellvars via bundle2'''
@@ -1104,8 +1241,10 @@
         shellvars = {}
         for raw in pushvars:
             if '=' not in raw:
-                msg = ("unable to parse variable '%s', should follow "
-                        "'KEY=VALUE' or 'KEY=' format")
+                msg = (
+                    "unable to parse variable '%s', should follow "
+                    "'KEY=VALUE' or 'KEY=' format"
+                )
                 raise error.Abort(msg % raw)
             k, v = raw.split('=', 1)
             shellvars[k] = v
@@ -1115,19 +1254,21 @@
         for key, value in shellvars.iteritems():
             part.addparam(key, value, mandatory=False)
 
+
 def _pushbundle2(pushop):
     """push data to the remote using bundle2
 
     The only currently supported type of data is changegroup but this will
     evolve in the future."""
     bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
-    pushback = (pushop.trmanager
-                and pushop.ui.configbool('experimental', 'bundle2.pushback'))
+    pushback = pushop.trmanager and pushop.ui.configbool(
+        'experimental', 'bundle2.pushback'
+    )
 
     # create reply capability
-    capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo,
-                                                      allowpushback=pushback,
-                                                      role='client'))
+    capsblob = bundle2.encodecaps(
+        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role='client')
+    )
     bundler.newpart('replycaps', data=capsblob)
     replyhandlers = []
     for partgenname in b2partsgenorder:
@@ -1142,11 +1283,14 @@
     try:
         try:
             with pushop.remote.commandexecutor() as e:
-                reply = e.callcommand('unbundle', {
-                    'bundle': stream,
-                    'heads': ['force'],
-                    'url': pushop.remote.url(),
-                }).result()
+                reply = e.callcommand(
+                    'unbundle',
+                    {
+                        'bundle': stream,
+                        'heads': ['force'],
+                        'url': pushop.remote.url(),
+                    },
+                ).result()
         except error.BundleValueError as exc:
             raise error.Abort(_('missing support for %s') % exc)
         try:
@@ -1169,6 +1313,7 @@
     for rephand in replyhandlers:
         rephand(op)
 
+
 def _pushchangeset(pushop):
     """Make the actual push of changeset bundle to remote repo"""
     if 'changesets' in pushop.stepsdone:
@@ -1185,15 +1330,23 @@
     # TODO: get bundlecaps from remote
     bundlecaps = None
     # create a changegroup from local
-    if pushop.revs is None and not (outgoing.excluded
-                            or pushop.repo.changelog.filteredrevs):
+    if pushop.revs is None and not (
+        outgoing.excluded or pushop.repo.changelog.filteredrevs
+    ):
         # push everything,
         # use the fast path, no race possible on push
-        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01', 'push',
-                fastpath=True, bundlecaps=bundlecaps)
+        cg = changegroup.makechangegroup(
+            pushop.repo,
+            outgoing,
+            '01',
+            'push',
+            fastpath=True,
+            bundlecaps=bundlecaps,
+        )
     else:
-        cg = changegroup.makechangegroup(pushop.repo, outgoing, '01',
-                                        'push', bundlecaps=bundlecaps)
+        cg = changegroup.makechangegroup(
+            pushop.repo, outgoing, '01', 'push', bundlecaps=bundlecaps
+        )
 
     # apply changegroup to remote
     # local repo finds heads on server, finds out what
@@ -1206,18 +1359,20 @@
         remoteheads = pushop.remoteheads
     # ssh: return remote's addchangegroup()
     # http: return remote's addchangegroup() or 0 for error
-    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads,
-                                        pushop.repo.url())
+    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
+
 
 def _pushsyncphase(pushop):
     """synchronise phase information locally and remotely"""
     cheads = pushop.commonheads
     # even when we don't push, exchanging phase data is useful
     remotephases = listkeys(pushop.remote, 'phases')
-    if (pushop.ui.configbool('ui', '_usedassubrepo')
-        and remotephases    # server supports phases
-        and pushop.cgresult is None # nothing was pushed
-        and remotephases.get('publishing', False)):
+    if (
+        pushop.ui.configbool('ui', '_usedassubrepo')
+        and remotephases  # server supports phases
+        and pushop.cgresult is None  # nothing was pushed
+        and remotephases.get('publishing', False)
+    ):
         # When:
         # - this is a subrepo push
         # - and the remote supports phases
@@ -1228,17 +1383,16 @@
         # courtesy to publish changesets possibly locally draft
         # on the remote.
         remotephases = {'publishing': 'True'}
-    if not remotephases: # old server or public only reply from non-publishing
+    if not remotephases:  # old server or public only reply from non-publishing
         _localphasemove(pushop, cheads)
         # don't push any phase data as there is nothing to push
     else:
-        ana = phases.analyzeremotephases(pushop.repo, cheads,
-                                         remotephases)
+        ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
         pheads, droots = ana
         ### Apply remote phase on local
         if remotephases.get('publishing', False):
             _localphasemove(pushop, cheads)
-        else: # publish = False
+        else:  # publish = False
             _localphasemove(pushop, pheads)
             _localphasemove(pushop, cheads, phases.draft)
         ### Apply local phase on remote
@@ -1258,24 +1412,28 @@
         # fallback to independent pushkey command
         for newremotehead in outdated:
             with pushop.remote.commandexecutor() as e:
-                r = e.callcommand('pushkey', {
-                    'namespace': 'phases',
-                    'key': newremotehead.hex(),
-                    'old': '%d' % phases.draft,
-                    'new': '%d' % phases.public
-                }).result()
+                r = e.callcommand(
+                    'pushkey',
+                    {
+                        'namespace': 'phases',
+                        'key': newremotehead.hex(),
+                        'old': '%d' % phases.draft,
+                        'new': '%d' % phases.public,
+                    },
+                ).result()
 
             if not r:
-                pushop.ui.warn(_('updating %s to public failed!\n')
-                               % newremotehead)
+                pushop.ui.warn(
+                    _('updating %s to public failed!\n') % newremotehead
+                )
+
 
 def _localphasemove(pushop, nodes, phase=phases.public):
     """move <nodes> to <phase> in the local source repo"""
     if pushop.trmanager:
-        phases.advanceboundary(pushop.repo,
-                               pushop.trmanager.transaction(),
-                               phase,
-                               nodes)
+        phases.advanceboundary(
+            pushop.repo, pushop.trmanager.transaction(), phase, nodes
+        )
     else:
         # repo is not locked, do not change any phases!
         # Informs the user that phases should have been moved when
@@ -1283,8 +1441,14 @@
         actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
         phasestr = phases.phasenames[phase]
         if actualmoves:
-            pushop.ui.status(_('cannot lock source repo, skipping '
-                               'local %s phase update\n') % phasestr)
+            pushop.ui.status(
+                _(
+                    'cannot lock source repo, skipping '
+                    'local %s phase update\n'
+                )
+                % phasestr
+            )
+
 
 def _pushobsolete(pushop):
     """utility function to push obsolete markers to a remote"""
@@ -1305,6 +1469,7 @@
             msg = _('failed to push some obsolete markers!\n')
             repo.ui.warn(msg)
 
+
 def _pushbookmark(pushop):
     """Update bookmark position on remote"""
     if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
@@ -1321,12 +1486,15 @@
             action = 'delete'
 
         with remote.commandexecutor() as e:
-            r = e.callcommand('pushkey', {
-                'namespace': 'bookmarks',
-                'key': b,
-                'old': hex(old),
-                'new': hex(new),
-            }).result()
+            r = e.callcommand(
+                'pushkey',
+                {
+                    'namespace': 'bookmarks',
+                    'key': b,
+                    'old': hex(old),
+                    'new': hex(new),
+                },
+            ).result()
 
         if r:
             ui.status(bookmsgmap[action][0] % b)
@@ -1336,6 +1504,7 @@
             if pushop.bkresult is not None:
                 pushop.bkresult = 1
 
+
 class pulloperation(object):
     """A object that represent a single pull operation
 
@@ -1345,9 +1514,19 @@
     afterward.
     """
 
-    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
-                 remotebookmarks=None, streamclonerequested=None,
-                 includepats=None, excludepats=None, depth=None):
+    def __init__(
+        self,
+        repo,
+        remote,
+        heads=None,
+        force=False,
+        bookmarks=(),
+        remotebookmarks=None,
+        streamclonerequested=None,
+        includepats=None,
+        excludepats=None,
+        depth=None,
+    ):
         # repo we pull into
         self.repo = repo
         # repo we pull from
@@ -1355,8 +1534,9 @@
         # revision we try to pull (None is "all")
         self.heads = heads
         # bookmark pulled explicitly
-        self.explicitbookmarks = [repo._bookmarks.expandname(bookmark)
-                                  for bookmark in bookmarks]
+        self.explicitbookmarks = [
+            repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
+        ]
         # do we force pull?
         self.force = force
         # whether a streaming clone was requested
@@ -1414,11 +1594,13 @@
         # deprecated; talk to trmanager directly
         return self.trmanager.transaction()
 
+
 class transactionmanager(util.transactional):
     """An object to manage the life cycle of a transaction
 
     It creates the transaction on demand and calls the appropriate hooks when
     closing the transaction."""
+
     def __init__(self, repo, source, url):
         self.repo = repo
         self.source = source
@@ -1444,10 +1626,12 @@
         if self._tr is not None:
             self._tr.release()
 
+
 def listkeys(remote, namespace):
     with remote.commandexecutor() as e:
         return e.callcommand('listkeys', {'namespace': namespace}).result()
 
+
 def _fullpullbundle2(repo, pullop):
     # The server may send a partial reply, i.e. when inlining
     # pre-computed bundles. In that case, update the common
@@ -1460,14 +1644,17 @@
     # markers can hide heads.
     unfi = repo.unfiltered()
     unficl = unfi.changelog
+
     def headsofdiff(h1, h2):
         """Returns heads(h1 % h2)"""
         res = unfi.set('heads(%ln %% %ln)', h1, h2)
         return set(ctx.node() for ctx in res)
+
     def headsofunion(h1, h2):
         """Returns heads((h1 + h2) - null)"""
         res = unfi.set('heads((%ln + %ln - null))', h1, h2)
         return set(ctx.node() for ctx in res)
+
     while True:
         old_heads = unficl.heads()
         clstart = len(unficl)
@@ -1486,9 +1673,19 @@
         pullop.common = headsofunion(new_heads, pullop.common)
         pullop.rheads = set(pullop.rheads) - pullop.common
 
-def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
-         streamclonerequested=None, includepats=None, excludepats=None,
-         depth=None):
+
+def pull(
+    repo,
+    remote,
+    heads=None,
+    force=False,
+    bookmarks=(),
+    opargs=None,
+    streamclonerequested=None,
+    includepats=None,
+    excludepats=None,
+    depth=None,
+):
     """Fetch repository data from a remote.
 
     This is the main function used to retrieve data from a remote repository.
@@ -1529,19 +1726,28 @@
     narrowspec.validatepatterns(includepats)
     narrowspec.validatepatterns(excludepats)
 
-    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
-                           streamclonerequested=streamclonerequested,
-                           includepats=includepats, excludepats=excludepats,
-                           depth=depth,
-                           **pycompat.strkwargs(opargs))
+    pullop = pulloperation(
+        repo,
+        remote,
+        heads,
+        force,
+        bookmarks=bookmarks,
+        streamclonerequested=streamclonerequested,
+        includepats=includepats,
+        excludepats=excludepats,
+        depth=depth,
+        **pycompat.strkwargs(opargs)
+    )
 
     peerlocal = pullop.remote.local()
     if peerlocal:
         missing = set(peerlocal.requirements) - pullop.repo.supported
         if missing:
-            msg = _("required features are not"
-                    " supported in the destination:"
-                    " %s") % (', '.join(sorted(missing)))
+            msg = _(
+                "required features are not"
+                " supported in the destination:"
+                " %s"
+            ) % (', '.join(sorted(missing)))
             raise error.Abort(msg)
 
     pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
@@ -1571,6 +1777,7 @@
 
     return pullop
 
+
 # list of steps to perform discovery before pull
 pulldiscoveryorder = []
 
@@ -1579,6 +1786,7 @@
 # This exists to help extensions wrap steps if necessary
 pulldiscoverymapping = {}
 
+
 def pulldiscovery(stepname):
     """decorator for function performing discovery before pull
 
@@ -1588,19 +1796,23 @@
 
     You can only use this decorator for a new step; if you want to wrap a step
     from an extension, change the pulldiscoverymapping dictionary directly."""
+
     def dec(func):
         assert stepname not in pulldiscoverymapping
         pulldiscoverymapping[stepname] = func
         pulldiscoveryorder.append(stepname)
         return func
+
     return dec
 
+
 def _pulldiscovery(pullop):
     """Run all discovery steps"""
     for stepname in pulldiscoveryorder:
         step = pulldiscoverymapping[stepname]
         step(pullop)
 
+
 @pulldiscovery('b1:bookmarks')
 def _pullbookmarkbundle1(pullop):
     """fetch bookmark data in bundle1 case
@@ -1623,10 +1835,9 @@
 
     Currently this handles changeset discovery only; it will be changed to
     handle all discovery at some point."""
-    tmp = discovery.findcommonincoming(pullop.repo,
-                                       pullop.remote,
-                                       heads=pullop.heads,
-                                       force=pullop.force)
+    tmp = discovery.findcommonincoming(
+        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
+    )
     common, fetch, rheads = tmp
     nm = pullop.repo.unfiltered().changelog.nodemap
     if fetch and rheads:
@@ -1650,6 +1861,7 @@
     pullop.fetch = fetch
     pullop.rheads = rheads
 
+
 def _pullbundle2(pullop):
     """pull data using bundle2
 
@@ -1688,7 +1900,7 @@
 
         legacyphase = 'phases' in ui.configlist('devel', 'legacy.exchange')
         hasbinaryphase = 'heads' in pullop.remotebundle2caps.get('phases', ())
-        if (not legacyphase and hasbinaryphase):
+        if not legacyphase and hasbinaryphase:
             kwargs['phases'] = True
             pullop.stepsdone.add('phases')
 
@@ -1703,9 +1915,12 @@
     if pullop.remotebookmarks is not None:
         pullop.stepsdone.add('request-bookmarks')
 
-    if ('request-bookmarks' not in pullop.stepsdone
+    if (
+        'request-bookmarks' not in pullop.stepsdone
         and pullop.remotebookmarks is None
-        and not legacybookmark and hasbinarybook):
+        and not legacybookmark
+        and hasbinarybook
+    ):
         kwargs['bookmarks'] = True
         bookmarksrequested = True
 
@@ -1721,8 +1936,11 @@
     # presence of this flag indicates the client supports clone bundles. This
     # will enable the server to treat clients that support clone bundles
     # differently from those that don't.
-    if (pullop.remote.capable('clonebundles')
-        and pullop.heads is None and list(pullop.common) == [nullid]):
+    if (
+        pullop.remote.capable('clonebundles')
+        and pullop.heads is None
+        and list(pullop.common) == [nullid]
+    ):
         kwargs['cbattempted'] = pullop.clonebundleattempted
 
     if streaming:
@@ -1746,8 +1964,9 @@
         bundle = e.callcommand('getbundle', args).result()
 
         try:
-            op = bundle2.bundleoperation(pullop.repo, pullop.gettransaction,
-                                         source='pull')
+            op = bundle2.bundleoperation(
+                pullop.repo, pullop.gettransaction, source='pull'
+            )
             op.modes['bookmarks'] = 'records'
             bundle2.processbundle(pullop.repo, bundle, op=op)
         except bundle2.AbortFromPart as exc:
@@ -1779,9 +1998,11 @@
     if pullop.remotebookmarks is not None:
         _pullbookmarks(pullop)
 
+
 def _pullbundle2extraprepare(pullop, kwargs):
     """hook function so that extensions can extend the getbundle call"""
 
+
 def _pullchangeset(pullop):
     """pull changeset from unbundle into the local repo"""
     # We delay the open of the transaction as late as possible so we
@@ -1803,31 +2024,40 @@
 
     if pullop.remote.capable('getbundle'):
         # TODO: get bundlecaps from remote
-        cg = pullop.remote.getbundle('pull', common=pullop.common,
-                                     heads=pullop.heads or pullop.rheads)
+        cg = pullop.remote.getbundle(
+            'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
+        )
     elif pullop.heads is None:
         with pullop.remote.commandexecutor() as e:
-            cg = e.callcommand('changegroup', {
-                'nodes': pullop.fetch,
-                'source': 'pull',
-            }).result()
+            cg = e.callcommand(
+                'changegroup', {'nodes': pullop.fetch, 'source': 'pull',}
+            ).result()
 
     elif not pullop.remote.capable('changegroupsubset'):
-        raise error.Abort(_("partial pull cannot be done because "
-                           "other repository doesn't support "
-                           "changegroupsubset."))
+        raise error.Abort(
+            _(
+                "partial pull cannot be done because "
+                "other repository doesn't support "
+                "changegroupsubset."
+            )
+        )
     else:
         with pullop.remote.commandexecutor() as e:
-            cg = e.callcommand('changegroupsubset', {
-                'bases': pullop.fetch,
-                'heads': pullop.heads,
-                'source': 'pull',
-            }).result()
-
-    bundleop = bundle2.applybundle(pullop.repo, cg, tr, 'pull',
-                                   pullop.remote.url())
+            cg = e.callcommand(
+                'changegroupsubset',
+                {
+                    'bases': pullop.fetch,
+                    'heads': pullop.heads,
+                    'source': 'pull',
+                },
+            ).result()
+
+    bundleop = bundle2.applybundle(
+        pullop.repo, cg, tr, 'pull', pullop.remote.url()
+    )
     pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
 
+
 def _pullphase(pullop):
     # Get remote phases data from remote
     if 'phases' in pullop.stepsdone:
@@ -1835,6 +2065,7 @@
     remotephases = listkeys(pullop.remote, 'phases')
     _pullapplyphases(pullop, remotephases)
 
+
 def _pullapplyphases(pullop, remotephases):
     """apply phase movement from observed remote state"""
     if 'phases' in pullop.stepsdone:
@@ -1843,9 +2074,9 @@
     publishing = bool(remotephases.get('publishing', False))
     if remotephases and not publishing:
         # remote is new and non-publishing
-        pheads, _dr = phases.analyzeremotephases(pullop.repo,
-                                                 pullop.pulledsubset,
-                                                 remotephases)
+        pheads, _dr = phases.analyzeremotephases(
+            pullop.repo, pullop.pulledsubset, remotephases
+        )
         dheads = pullop.pulledsubset
     else:
         # Remote is old or publishing all common changesets
@@ -1870,6 +2101,7 @@
         tr = pullop.gettransaction()
         phases.advanceboundary(pullop.repo, tr, draft, dheads)
 
+
 def _pullbookmarks(pullop):
     """process the remote bookmark information to update the local one"""
     if 'bookmarks' in pullop.stepsdone:
@@ -1877,10 +2109,15 @@
     pullop.stepsdone.add('bookmarks')
     repo = pullop.repo
     remotebookmarks = pullop.remotebookmarks
-    bookmod.updatefromremote(repo.ui, repo, remotebookmarks,
-                             pullop.remote.url(),
-                             pullop.gettransaction,
-                             explicit=pullop.explicitbookmarks)
+    bookmod.updatefromremote(
+        repo.ui,
+        repo,
+        remotebookmarks,
+        pullop.remote.url(),
+        pullop.gettransaction,
+        explicit=pullop.explicitbookmarks,
+    )
+
 
 def _pullobsolete(pullop):
     """utility function to pull obsolete markers from a remote
@@ -1910,6 +2147,7 @@
             pullop.repo.invalidatevolatilesets()
     return tr
 
+
 def applynarrowacl(repo, kwargs):
     """Apply narrow fetch access control.
 
@@ -1920,30 +2158,42 @@
     # TODO this assumes existence of HTTP and is a layering violation.
     username = ui.shortuser(ui.environ.get('REMOTE_USER') or ui.username())
     user_includes = ui.configlist(
-        _NARROWACL_SECTION, username + '.includes',
-        ui.configlist(_NARROWACL_SECTION, 'default.includes'))
+        _NARROWACL_SECTION,
+        username + '.includes',
+        ui.configlist(_NARROWACL_SECTION, 'default.includes'),
+    )
     user_excludes = ui.configlist(
-        _NARROWACL_SECTION, username + '.excludes',
-        ui.configlist(_NARROWACL_SECTION, 'default.excludes'))
+        _NARROWACL_SECTION,
+        username + '.excludes',
+        ui.configlist(_NARROWACL_SECTION, 'default.excludes'),
+    )
     if not user_includes:
-        raise error.Abort(_("{} configuration for user {} is empty")
-                          .format(_NARROWACL_SECTION, username))
+        raise error.Abort(
+            _("{} configuration for user {} is empty").format(
+                _NARROWACL_SECTION, username
+            )
+        )
 
     user_includes = [
-        'path:.' if p == '*' else 'path:' + p for p in user_includes]
+        'path:.' if p == '*' else 'path:' + p for p in user_includes
+    ]
     user_excludes = [
-        'path:.' if p == '*' else 'path:' + p for p in user_excludes]
+        'path:.' if p == '*' else 'path:' + p for p in user_excludes
+    ]
 
     req_includes = set(kwargs.get(r'includepats', []))
     req_excludes = set(kwargs.get(r'excludepats', []))
 
     req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
-        req_includes, req_excludes, user_includes, user_excludes)
+        req_includes, req_excludes, user_includes, user_excludes
+    )
 
     if invalid_includes:
         raise error.Abort(
-            _("The following includes are not accessible for {}: {}")
-            .format(username, invalid_includes))
+            _("The following includes are not accessible for {}: {}").format(
+                username, invalid_includes
+            )
+        )
 
     new_args = {}
     new_args.update(kwargs)
@@ -1955,6 +2205,7 @@
 
     return new_args
 
+
 def _computeellipsis(repo, common, heads, known, match, depth=None):
     """Compute the shape of a narrowed DAG.
 
@@ -2013,15 +2264,18 @@
     def splithead(head):
         r1, r2, r3 = sorted(ellipsisroots[head])
         for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
-            mid = repo.revs('sort(merge() & %d::%d & %d::%d, -rev)',
-                            nr1, head, nr2, head)
+            mid = repo.revs(
+                'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
+            )
             for j in mid:
                 if j == nr2:
                     return nr2, (nr1, nr2)
                 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
                     return j, (nr1, nr2)
-        raise error.Abort(_('Failed to split up ellipsis node! head: %d, '
-                            'roots: %d %d %d') % (head, r1, r2, r3))
+        raise error.Abort(
+            _('Failed to split up ellipsis node! head: %d, ' 'roots: %d %d %d')
+            % (head, r1, r2, r3)
+        )
 
     missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
     visit = reversed(missing)
@@ -2081,6 +2335,7 @@
             addroot(head, c)
     return visitnodes, relevant_nodes, ellipsisroots
 
+
 def caps20to10(repo, role):
     """return a set with appropriate options to use bundle20 during getbundle"""
     caps = {'HG20'}
@@ -2088,6 +2343,7 @@
     caps.add('bundle2=' + urlreq.quote(capsblob))
     return caps
 
+
 # List of names of steps to perform for a bundle2 for getbundle, order matters.
 getbundle2partsorder = []
 
@@ -2096,6 +2352,7 @@
 # This exists to help extensions wrap steps if necessary
 getbundle2partsmapping = {}
 
+
 def getbundle2partsgenerator(stepname, idx=None):
     """decorator for function generating bundle2 part for getbundle
 
@@ -2105,6 +2362,7 @@
 
     You can only use this decorator for new steps; if you want to wrap a step
     from an extension, change the getbundle2partsmapping dictionary directly."""
+
     def dec(func):
         assert stepname not in getbundle2partsmapping
         getbundle2partsmapping[stepname] = func
@@ -2113,15 +2371,19 @@
         else:
             getbundle2partsorder.insert(idx, stepname)
         return func
+
     return dec
 
+
 def bundle2requested(bundlecaps):
     if bundlecaps is not None:
         return any(cap.startswith('HG2') for cap in bundlecaps)
     return False
 
-def getbundlechunks(repo, source, heads=None, common=None, bundlecaps=None,
-                    **kwargs):
+
+def getbundlechunks(
+    repo, source, heads=None, common=None, bundlecaps=None, **kwargs
+):
     """Return chunks constituting a bundle's raw data.
 
     Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
@@ -2139,19 +2401,25 @@
             raise ValueError(_('request for bundle10 must include changegroup'))
 
         if kwargs:
-            raise ValueError(_('unsupported getbundle arguments: %s')
-                             % ', '.join(sorted(kwargs.keys())))
+            raise ValueError(
+                _('unsupported getbundle arguments: %s')
+                % ', '.join(sorted(kwargs.keys()))
+            )
         outgoing = _computeoutgoing(repo, heads, common)
         info['bundleversion'] = 1
-        return info, changegroup.makestream(repo, outgoing, '01', source,
-                                            bundlecaps=bundlecaps)
+        return (
+            info,
+            changegroup.makestream(
+                repo, outgoing, '01', source, bundlecaps=bundlecaps
+            ),
+        )
 
     # bundle20 case
     info['bundleversion'] = 2
     b2caps = {}
     for bcaps in bundlecaps:
         if bcaps.startswith('bundle2='):
-            blob = urlreq.unquote(bcaps[len('bundle2='):])
+            blob = urlreq.unquote(bcaps[len('bundle2=') :])
             b2caps.update(bundle2.decodecaps(blob))
     bundler = bundle2.bundle20(repo.ui, b2caps)
 
@@ -2160,20 +2428,36 @@
 
     for name in getbundle2partsorder:
         func = getbundle2partsmapping[name]
-        func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps,
-             **pycompat.strkwargs(kwargs))
+        func(
+            bundler,
+            repo,
+            source,
+            bundlecaps=bundlecaps,
+            b2caps=b2caps,
+            **pycompat.strkwargs(kwargs)
+        )
 
     info['prefercompressed'] = bundler.prefercompressed
 
     return info, bundler.getchunks()
 
+
 @getbundle2partsgenerator('stream2')
 def _getbundlestream2(bundler, repo, *args, **kwargs):
     return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
 
+
 @getbundle2partsgenerator('changegroup')
-def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None,
-                              b2caps=None, heads=None, common=None, **kwargs):
+def _getbundlechangegrouppart(
+    bundler,
+    repo,
+    source,
+    bundlecaps=None,
+    b2caps=None,
+    heads=None,
+    common=None,
+    **kwargs
+):
     """add a changegroup part to the requested bundle"""
     if not kwargs.get(r'cg', True):
         return
@@ -2181,8 +2465,11 @@
     version = '01'
     cgversions = b2caps.get('changegroup')
     if cgversions:  # 3.1 and 3.2 ship with an empty value
-        cgversions = [v for v in cgversions
-                      if v in changegroup.supportedoutgoingversions(repo)]
+        cgversions = [
+            v
+            for v in cgversions
+            if v in changegroup.supportedoutgoingversions(repo)
+        ]
         if not cgversions:
             raise error.Abort(_('no common changegroup version'))
         version = max(cgversions)
@@ -2198,42 +2485,51 @@
     else:
         matcher = None
 
-    cgstream = changegroup.makestream(repo, outgoing, version, source,
-                                      bundlecaps=bundlecaps, matcher=matcher)
+    cgstream = changegroup.makestream(
+        repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
+    )
 
     part = bundler.newpart('changegroup', data=cgstream)
     if cgversions:
         part.addparam('version', version)
 
-    part.addparam('nbchanges', '%d' % len(outgoing.missing),
-                  mandatory=False)
+    part.addparam('nbchanges', '%d' % len(outgoing.missing), mandatory=False)
 
     if 'treemanifest' in repo.requirements:
         part.addparam('treemanifest', '1')
 
-    if (kwargs.get(r'narrow', False) and kwargs.get(r'narrow_acl', False)
-        and (include or exclude)):
+    if (
+        kwargs.get(r'narrow', False)
+        and kwargs.get(r'narrow_acl', False)
+        and (include or exclude)
+    ):
         # this is mandatory because otherwise ACL clients won't work
         narrowspecpart = bundler.newpart('Narrow:responsespec')
-        narrowspecpart.data = '%s\0%s' % ('\n'.join(include),
-                                           '\n'.join(exclude))
+        narrowspecpart.data = '%s\0%s' % (
+            '\n'.join(include),
+            '\n'.join(exclude),
+        )
+
 
 @getbundle2partsgenerator('bookmarks')
-def _getbundlebookmarkpart(bundler, repo, source, bundlecaps=None,
-                              b2caps=None, **kwargs):
+def _getbundlebookmarkpart(
+    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
+):
     """add a bookmark part to the requested bundle"""
     if not kwargs.get(r'bookmarks', False):
         return
     if 'bookmarks' not in b2caps:
         raise error.Abort(_('no common bookmarks exchange method'))
-    books  = bookmod.listbinbookmarks(repo)
+    books = bookmod.listbinbookmarks(repo)
     data = bookmod.binaryencode(books)
     if data:
         bundler.newpart('bookmarks', data=data)
 
+
 @getbundle2partsgenerator('listkeys')
-def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None,
-                            b2caps=None, **kwargs):
+def _getbundlelistkeysparts(
+    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
+):
     """add parts containing listkeys namespaces to the requested bundle"""
     listkeys = kwargs.get(r'listkeys', ())
     for namespace in listkeys:
@@ -2242,9 +2538,11 @@
         keys = repo.listkeys(namespace).items()
         part.data = pushkey.encodekeys(keys)
 
+
 @getbundle2partsgenerator('obsmarkers')
-def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None,
-                            b2caps=None, heads=None, **kwargs):
+def _getbundleobsmarkerpart(
+    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
+):
     """add an obsolescence markers part to the requested bundle"""
     if kwargs.get(r'obsmarkers', False):
         if heads is None:
@@ -2254,9 +2552,11 @@
         markers = sorted(markers)
         bundle2.buildobsmarkerspart(bundler, markers)
 
+
 @getbundle2partsgenerator('phases')
-def _getbundlephasespart(bundler, repo, source, bundlecaps=None,
-                            b2caps=None, heads=None, **kwargs):
+def _getbundlephasespart(
+    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
+):
     """add phase heads part to the requested bundle"""
     if kwargs.get(r'phases', False):
         if not 'heads' in b2caps.get('phases'):
@@ -2301,10 +2601,18 @@
         phasedata = phases.binaryencode(phasemapping)
         bundler.newpart('phase-heads', data=phasedata)
 
+
 @getbundle2partsgenerator('hgtagsfnodes')
-def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None,
-                         b2caps=None, heads=None, common=None,
-                         **kwargs):
+def _getbundletagsfnodes(
+    bundler,
+    repo,
+    source,
+    bundlecaps=None,
+    b2caps=None,
+    heads=None,
+    common=None,
+    **kwargs
+):
     """Transfer the .hgtags filenodes mapping.
 
     Only values for heads in this bundle will be transferred.
@@ -2321,10 +2629,18 @@
     outgoing = _computeoutgoing(repo, heads, common)
     bundle2.addparttagsfnodescache(repo, bundler, outgoing)
 
+
 @getbundle2partsgenerator('cache:rev-branch-cache')
-def _getbundlerevbranchcache(bundler, repo, source, bundlecaps=None,
-                             b2caps=None, heads=None, common=None,
-                             **kwargs):
+def _getbundlerevbranchcache(
+    bundler,
+    repo,
+    source,
+    bundlecaps=None,
+    b2caps=None,
+    heads=None,
+    common=None,
+    **kwargs
+):
     """Transfer the rev-branch-cache mapping
 
     The payload is a series of data related to each branch
@@ -2339,15 +2655,18 @@
     # - changesets are being exchanged,
     # - the client supports it.
     # - narrow bundle isn't in play (not currently compatible).
-    if (not kwargs.get(r'cg', True)
+    if (
+        not kwargs.get(r'cg', True)
         or 'rev-branch-cache' not in b2caps
         or kwargs.get(r'narrow', False)
-        or repo.ui.has_section(_NARROWACL_SECTION)):
+        or repo.ui.has_section(_NARROWACL_SECTION)
+    ):
         return
 
     outgoing = _computeoutgoing(repo, heads, common)
     bundle2.addpartrevbranchcache(repo, bundler, outgoing)
 
+
 def check_heads(repo, their_heads, context):
     """check if the heads of a repo have been modified
 
@@ -2355,12 +2674,17 @@
     """
     heads = repo.heads()
     heads_hash = hashlib.sha1(''.join(sorted(heads))).digest()
-    if not (their_heads == ['force'] or their_heads == heads or
-            their_heads == ['hashed', heads_hash]):
+    if not (
+        their_heads == ['force']
+        or their_heads == heads
+        or their_heads == ['hashed', heads_hash]
+    ):
         # someone else committed/pushed/unbundled while we
         # were transferring data
-        raise error.PushRaced('repository changed while %s - '
-                              'please try again' % context)
+        raise error.PushRaced(
+            'repository changed while %s - ' 'please try again' % context
+        )
+
 
 def unbundle(repo, cg, heads, source, url):
     """Apply a bundle to a repo.
@@ -2393,6 +2717,7 @@
         else:
             r = None
             try:
+
                 def gettransaction():
                     if not lockandtr[2]:
                         if not bookmod.bookmarksinstore(repo):
@@ -2409,27 +2734,35 @@
                 if not repo.ui.configbool('experimental', 'bundle2lazylocking'):
                     gettransaction()
 
-                op = bundle2.bundleoperation(repo, gettransaction,
-                                             captureoutput=captureoutput,
-                                             source='push')
+                op = bundle2.bundleoperation(
+                    repo,
+                    gettransaction,
+                    captureoutput=captureoutput,
+                    source='push',
+                )
                 try:
                     op = bundle2.processbundle(repo, cg, op=op)
                 finally:
                     r = op.reply
                     if captureoutput and r is not None:
                         repo.ui.pushbuffer(error=True, subproc=True)
+
                         def recordout(output):
                             r.newpart('output', data=output, mandatory=False)
+
                 if lockandtr[2] is not None:
                     lockandtr[2].close()
             except BaseException as exc:
                 exc.duringunbundle2 = True
                 if captureoutput and r is not None:
                     parts = exc._bundle2salvagedoutput = r.salvageoutput()
+
                     def recordout(output):
-                        part = bundle2.bundlepart('output', data=output,
-                                                  mandatory=False)
+                        part = bundle2.bundlepart(
+                            'output', data=output, mandatory=False
+                        )
                         parts.append(part)
+
                 raise
     finally:
         lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
@@ -2437,6 +2770,7 @@
             recordout(repo.ui.popbuffer())
     return r
 
+
 def _maybeapplyclonebundle(pullop):
     """Apply a clone bundle from a remote, if possible."""
 
@@ -2465,12 +2799,17 @@
 
     entries = parseclonebundlesmanifest(repo, res)
     if not entries:
-        repo.ui.note(_('no clone bundles available on remote; '
-                       'falling back to regular clone\n'))
+        repo.ui.note(
+            _(
+                'no clone bundles available on remote; '
+                'falling back to regular clone\n'
+            )
+        )
         return
 
     entries = filterclonebundleentries(
-        repo, entries, streamclonerequested=pullop.streamclonerequested)
+        repo, entries, streamclonerequested=pullop.streamclonerequested
+    )
 
     if not entries:
         # There is a thundering herd concern here. However, if a server
@@ -2478,10 +2817,15 @@
         # they deserve what's coming. Furthermore, from a client's
         # perspective, no automatic fallback would mean not being able to
         # clone!
-        repo.ui.warn(_('no compatible clone bundles available on server; '
-                       'falling back to regular clone\n'))
-        repo.ui.warn(_('(you may want to report this to the server '
-                       'operator)\n'))
+        repo.ui.warn(
+            _(
+                'no compatible clone bundles available on server; '
+                'falling back to regular clone\n'
+            )
+        )
+        repo.ui.warn(
+            _('(you may want to report this to the server ' 'operator)\n')
+        )
         return
 
     entries = sortclonebundleentries(repo.ui, entries)
@@ -2498,11 +2842,16 @@
     elif repo.ui.configbool('ui', 'clonebundlefallback'):
         repo.ui.warn(_('falling back to normal clone\n'))
     else:
-        raise error.Abort(_('error applying bundle'),
-                          hint=_('if this error persists, consider contacting '
-                                 'the server operator or disable clone '
-                                 'bundles via '
-                                 '"--config ui.clonebundles=false"'))
+        raise error.Abort(
+            _('error applying bundle'),
+            hint=_(
+                'if this error persists, consider contacting '
+                'the server operator or disable clone '
+                'bundles via '
+                '"--config ui.clonebundles=false"'
+            ),
+        )
+
 
 def parseclonebundlesmanifest(repo, s):
     """Parses the raw text of a clone bundles manifest.
@@ -2539,19 +2888,23 @@
 
     return m
 
+
 def isstreamclonespec(bundlespec):
     # Stream clone v1
-    if (bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1'):
+    if bundlespec.wirecompression == 'UN' and bundlespec.wireversion == 's1':
         return True
 
     # Stream clone v2
-    if (bundlespec.wirecompression == 'UN' and
-        bundlespec.wireversion == '02' and
-        bundlespec.contentopts.get('streamv2')):
+    if (
+        bundlespec.wirecompression == 'UN'
+        and bundlespec.wireversion == '02'
+        and bundlespec.contentopts.get('streamv2')
+    ):
         return True
 
     return False
 
+
 def filterclonebundleentries(repo, entries, streamclonerequested=False):
     """Remove incompatible clone bundle manifest entries.
 
@@ -2572,34 +2925,41 @@
                 # If a stream clone was requested, filter out non-streamclone
                 # entries.
                 if streamclonerequested and not isstreamclonespec(bundlespec):
-                    repo.ui.debug('filtering %s because not a stream clone\n' %
-                                  entry['URL'])
+                    repo.ui.debug(
+                        'filtering %s because not a stream clone\n'
+                        % entry['URL']
+                    )
                     continue
 
             except error.InvalidBundleSpecification as e:
                 repo.ui.debug(stringutil.forcebytestr(e) + '\n')
                 continue
             except error.UnsupportedBundleSpecification as e:
-                repo.ui.debug('filtering %s because unsupported bundle '
-                              'spec: %s\n' % (
-                                  entry['URL'], stringutil.forcebytestr(e)))
+                repo.ui.debug(
+                    'filtering %s because unsupported bundle '
+                    'spec: %s\n' % (entry['URL'], stringutil.forcebytestr(e))
+                )
                 continue
         # If we don't have a spec and requested a stream clone, we don't know
         # what the entry is so don't attempt to apply it.
         elif streamclonerequested:
-            repo.ui.debug('filtering %s because cannot determine if a stream '
-                          'clone bundle\n' % entry['URL'])
+            repo.ui.debug(
+                'filtering %s because cannot determine if a stream '
+                'clone bundle\n' % entry['URL']
+            )
             continue
 
         if 'REQUIRESNI' in entry and not sslutil.hassni:
-            repo.ui.debug('filtering %s because SNI not supported\n' %
-                          entry['URL'])
+            repo.ui.debug(
+                'filtering %s because SNI not supported\n' % entry['URL']
+            )
             continue
 
         newentries.append(entry)
 
     return newentries
 
+
 class clonebundleentry(object):
     """Represents an item in a clone bundles manifest.
 
@@ -2664,6 +3024,7 @@
     def __ne__(self, other):
         return self._cmp(other) != 0
 
+
 def sortclonebundleentries(ui, entries):
     prefers = ui.configlist('ui', 'clonebundleprefers')
     if not prefers:
@@ -2674,6 +3035,7 @@
     items = sorted(clonebundleentry(v, prefers) for v in entries)
     return [i.value for i in items]
 
+
 def trypullbundlefromurl(ui, repo, url):
     """Attempt to apply a bundle from a URL."""
     with repo.lock(), repo.transaction('bundleurl') as tr:
@@ -2687,10 +3049,14 @@
                 bundle2.applybundle(repo, cg, tr, 'clonebundles', url)
             return True
         except urlerr.httperror as e:
-            ui.warn(_('HTTP error fetching bundle: %s\n') %
-                    stringutil.forcebytestr(e))
+            ui.warn(
+                _('HTTP error fetching bundle: %s\n')
+                % stringutil.forcebytestr(e)
+            )
         except urlerr.urlerror as e:
-            ui.warn(_('error fetching bundle: %s\n') %
-                    stringutil.forcebytestr(e.reason))
+            ui.warn(
+                _('error fetching bundle: %s\n')
+                % stringutil.forcebytestr(e.reason)
+            )
 
         return False
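
The most common shape change in the exchange.py hunks above: a parenthesized condition that overflows the line gets the opening paren on its own line, one operand per line, and the closing paren dedented back to the statement. Condensed from the rev-branch-cache hunk (the names are from that hunk, nothing new):

    # before: operands hang off the opening paren, '):' rides the last one
    if (not kwargs.get(r'cg', True)
        or 'rev-branch-cache' not in b2caps
        or kwargs.get(r'narrow', False)
        or repo.ui.has_section(_NARROWACL_SECTION)):
        return

    # after black: each operand on its own line between dedicated parens
    if (
        not kwargs.get(r'cg', True)
        or 'rev-branch-cache' not in b2caps
        or kwargs.get(r'narrow', False)
        or repo.ui.has_section(_NARROWACL_SECTION)
    ):
        return
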
--- a/mercurial/exchangev2.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/exchangev2.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,9 +24,8 @@
     pycompat,
     setdiscovery,
 )
-from .interfaces import (
-    repository,
-)
+from .interfaces import repository
+
 
 def pull(pullop):
     """Pull using wire protocol version 2."""
@@ -48,11 +47,13 @@
 
     # We don't use the repo's narrow matcher here because the patterns passed
     # to exchange.pull() could be different.
-    narrowmatcher = narrowspec.match(repo.root,
-                                     # Empty maps to nevermatcher. So always
-                                     # set includes if missing.
-                                     pullop.includepats or {'path:.'},
-                                     pullop.excludepats)
+    narrowmatcher = narrowspec.match(
+        repo.root,
+        # Empty maps to nevermatcher. So always
+        # set includes if missing.
+        pullop.includepats or {'path:.'},
+        pullop.excludepats,
+    )
 
     if pullop.includepats or pullop.excludepats:
         pathfilter = {}
@@ -65,7 +66,8 @@
 
     # Figure out what needs to be fetched.
     common, fetch, remoteheads = _pullchangesetdiscovery(
-        repo, remote, pullop.heads, abortwhenunrelated=pullop.force)
+        repo, remote, pullop.heads, abortwhenunrelated=pullop.force
+    )
 
     # And fetch the data.
     pullheads = pullop.heads or remoteheads
@@ -84,13 +86,22 @@
         if phase == b'secret' or not csetres['nodesbyphase'][phase]:
             continue
 
-        phases.advanceboundary(repo, tr, phases.phasenames.index(phase),
-                               csetres['nodesbyphase'][phase])
+        phases.advanceboundary(
+            repo,
+            tr,
+            phases.phasenames.index(phase),
+            csetres['nodesbyphase'][phase],
+        )
 
     # Write bookmark updates.
-    bookmarks.updatefromremote(repo.ui, repo, csetres['bookmarks'],
-                               remote.url(), pullop.gettransaction,
-                               explicit=pullop.explicitbookmarks)
+    bookmarks.updatefromremote(
+        repo.ui,
+        repo,
+        csetres['bookmarks'],
+        remote.url(),
+        pullop.gettransaction,
+        explicit=pullop.explicitbookmarks,
+    )
 
     manres = _fetchmanifests(repo, tr, remote, csetres['manifestnodes'])
 
@@ -100,8 +111,9 @@
         relevantcsetnodes = set()
         clnode = repo.changelog.node
 
-        for rev in repo.revs(b'ancestors(%ln, %s)',
-                             pullheads, pullop.depth - 1):
+        for rev in repo.revs(
+            b'ancestors(%ln, %s)', pullheads, pullop.depth - 1
+        ):
             relevantcsetnodes.add(clnode(rev))
 
         csetrelevantfilter = lambda n: n in relevantcsetnodes
@@ -137,8 +149,17 @@
     # Find all file nodes referenced by added manifests and fetch those
     # revisions.
     fnodes = _derivefilesfrommanifests(repo, narrowmatcher, mnodesforfiles)
-    _fetchfilesfromcsets(repo, tr, remote, pathfilter, fnodes, csetsforfiles,
-                         manifestlinkrevs, shallow=bool(pullop.depth))
+    _fetchfilesfromcsets(
+        repo,
+        tr,
+        remote,
+        pathfilter,
+        fnodes,
+        csetsforfiles,
+        manifestlinkrevs,
+        shallow=bool(pullop.depth),
+    )
+
 
 def _checkuserawstorefiledata(pullop):
     """Check whether we should use rawstorefiledata command to retrieve data."""
@@ -165,17 +186,19 @@
 
     return True
 
+
 def _fetchrawstorefiles(repo, remote):
     with remote.commandexecutor() as e:
-        objs = e.callcommand(b'rawstorefiledata', {
-            b'files': [b'changelog', b'manifestlog'],
-        }).result()
+        objs = e.callcommand(
+            b'rawstorefiledata', {b'files': [b'changelog', b'manifestlog'],}
+        ).result()
 
         # First object is a summary of files data that follows.
         overall = next(objs)
 
-        progress = repo.ui.makeprogress(_('clone'), total=overall[b'totalsize'],
-                                        unit=_('bytes'))
+        progress = repo.ui.makeprogress(
+            _('clone'), total=overall[b'totalsize'], unit=_('bytes')
+        )
         with progress:
             progress.update(0)
 
@@ -188,14 +211,17 @@
 
                 for k in (b'location', b'path', b'size'):
                     if k not in filemeta:
-                        raise error.Abort(_(b'remote file data missing key: %s')
-                                          % k)
+                        raise error.Abort(
+                            _(b'remote file data missing key: %s') % k
+                        )
 
                 if filemeta[b'location'] == b'store':
                     vfs = repo.svfs
                 else:
-                    raise error.Abort(_(b'invalid location for raw file data: '
-                                        b'%s') % filemeta[b'location'])
+                    raise error.Abort(
+                        _(b'invalid location for raw file data: ' b'%s')
+                        % filemeta[b'location']
+                    )
 
                 bytesremaining = filemeta[b'size']
 
@@ -209,10 +235,13 @@
                         bytesremaining -= len(chunk)
 
                         if bytesremaining < 0:
-                            raise error.Abort(_(
-                                b'received invalid number of bytes for file '
-                                b'data; expected %d, got extra') %
-                                              filemeta[b'size'])
+                            raise error.Abort(
+                                _(
+                                    b'received invalid number of bytes for file '
+                                    b'data; expected %d, got extra'
+                                )
+                                % filemeta[b'size']
+                            )
 
                         progress.increment(step=len(chunk))
                         fh.write(chunk)
@@ -221,15 +250,25 @@
                             if chunk.islast:
                                 break
                         except AttributeError:
-                            raise error.Abort(_(
-                                b'did not receive indefinite length bytestring '
-                                b'for file data'))
+                            raise error.Abort(
+                                _(
+                                    b'did not receive indefinite length bytestring '
+                                    b'for file data'
+                                )
+                            )
 
                 if bytesremaining:
-                    raise error.Abort(_(b'received invalid number of bytes for'
-                                        b'file data; expected %d got %d') %
-                                      (filemeta[b'size'],
-                                       filemeta[b'size'] - bytesremaining))
+                    raise error.Abort(
+                        _(
+                            b'received invalid number of bytes for '
+                            b'file data; expected %d got %d'
+                        )
+                        % (
+                            filemeta[b'size'],
+                            filemeta[b'size'] - bytesremaining,
+                        )
+                    )
+
 
 def _pullchangesetdiscovery(repo, remote, heads, abortwhenunrelated=True):
     """Determine which changesets need to be pulled."""
@@ -242,7 +281,8 @@
     # TODO wire protocol version 2 is capable of more efficient discovery
     # than setdiscovery. Consider implementing something better.
     common, fetch, remoteheads = setdiscovery.findcommonheads(
-        repo.ui, repo, remote, abortwhenunrelated=abortwhenunrelated)
+        repo.ui, repo, remote, abortwhenunrelated=abortwhenunrelated
+    )
 
     common = set(common)
     remoteheads = set(remoteheads)
@@ -262,6 +302,7 @@
 
     return common, fetch, remoteheads
 
+
 def _fetchchangesets(repo, tr, remote, common, fetch, remoteheads):
     # TODO consider adding a step here where we obtain the DAG shape first
     # (or ask the server to slice changesets into chunks for us) so that
@@ -269,22 +310,27 @@
     # resuming interrupted clones, higher server-side cache hit rates due
     # to smaller segments, etc.
     with remote.commandexecutor() as e:
-        objs = e.callcommand(b'changesetdata', {
-            b'revisions': [{
-                b'type': b'changesetdagrange',
-                b'roots': sorted(common),
-                b'heads': sorted(remoteheads),
-            }],
-            b'fields': {b'bookmarks', b'parents', b'phase', b'revision'},
-        }).result()
+        objs = e.callcommand(
+            b'changesetdata',
+            {
+                b'revisions': [
+                    {
+                        b'type': b'changesetdagrange',
+                        b'roots': sorted(common),
+                        b'heads': sorted(remoteheads),
+                    }
+                ],
+                b'fields': {b'bookmarks', b'parents', b'phase', b'revision'},
+            },
+        ).result()
 
         # The context manager waits on all response data when exiting. So
         # we need to remain in the context manager in order to stream data.
         return _processchangesetdata(repo, tr, objs)
 
+
 def _processchangesetdata(repo, tr, objs):
-    repo.hook('prechangegroup', throw=True,
-              **pycompat.strkwargs(tr.hookargs))
+    repo.hook('prechangegroup', throw=True, **pycompat.strkwargs(tr.hookargs))
 
     urepo = repo.unfiltered()
     cl = urepo.changelog
@@ -295,9 +341,9 @@
     # follows.
     meta = next(objs)
 
-    progress = repo.ui.makeprogress(_('changesets'),
-                                    unit=_('chunks'),
-                                    total=meta.get(b'totalitems'))
+    progress = repo.ui.makeprogress(
+        _('changesets'), unit=_('chunks'), total=meta.get(b'totalitems')
+    )
 
     manifestnodes = {}
 
@@ -360,8 +406,9 @@
                 0,
             )
 
-    added = cl.addgroup(iterrevisions(), linkrev, weakref.proxy(tr),
-                        addrevisioncb=onchangeset)
+    added = cl.addgroup(
+        iterrevisions(), linkrev, weakref.proxy(tr), addrevisioncb=onchangeset
+    )
 
     progress.complete()
 
@@ -372,6 +419,7 @@
         'manifestnodes': manifestnodes,
     }
 
+
 def _fetchmanifests(repo, tr, remote, manifestnodes):
     rootmanifest = repo.manifestlog.getstorage(b'')
 
@@ -429,13 +477,14 @@
                 basenode,
                 delta,
                 # Flags not yet supported.
-                0
+                0,
             )
 
             progress.increment()
 
-    progress = repo.ui.makeprogress(_('manifests'), unit=_('chunks'),
-                                    total=len(fetchnodes))
+    progress = repo.ui.makeprogress(
+        _('manifests'), unit=_('chunks'), total=len(fetchnodes)
+    )
 
     commandmeta = remote.apidescriptor[b'commands'][b'manifestdata']
     batchsize = commandmeta.get(b'recommendedbatchsize', 10000)
@@ -452,25 +501,31 @@
     added = []
 
     for i in pycompat.xrange(0, len(fetchnodes), batchsize):
-        batch = [node for node in fetchnodes[i:i + batchsize]]
+        batch = [node for node in fetchnodes[i : i + batchsize]]
         if not batch:
             continue
 
         with remote.commandexecutor() as e:
-            objs = e.callcommand(b'manifestdata', {
-                b'tree': b'',
-                b'nodes': batch,
-                b'fields': {b'parents', b'revision'},
-                b'haveparents': True,
-            }).result()
+            objs = e.callcommand(
+                b'manifestdata',
+                {
+                    b'tree': b'',
+                    b'nodes': batch,
+                    b'fields': {b'parents', b'revision'},
+                    b'haveparents': True,
+                },
+            ).result()
 
             # Chomp off header object.
             next(objs)
 
-            added.extend(rootmanifest.addgroup(
-                iterrevisions(objs, progress),
-                linkrevs.__getitem__,
-                weakref.proxy(tr)))
+            added.extend(
+                rootmanifest.addgroup(
+                    iterrevisions(objs, progress),
+                    linkrevs.__getitem__,
+                    weakref.proxy(tr),
+                )
+            )
 
     progress.complete()
 
@@ -479,6 +534,7 @@
         'linkrevs': linkrevs,
     }
 
+
 def _derivefilesfrommanifests(repo, matcher, manifestnodes):
     """Determine what file nodes are relevant given a set of manifest nodes.
 
@@ -489,7 +545,8 @@
     fnodes = collections.defaultdict(dict)
 
     progress = repo.ui.makeprogress(
-        _('scanning manifests'), total=len(manifestnodes))
+        _('scanning manifests'), total=len(manifestnodes)
+    )
 
     with progress:
         for manifestnode in manifestnodes:
@@ -511,8 +568,10 @@
 
     return fnodes
 
+
 def _fetchfiles(repo, tr, remote, fnodes, linkrevs):
     """Fetch file data from explicit file revisions."""
+
     def iterrevisions(objs, progress):
         for filerevision in objs:
             node = filerevision[b'node']
@@ -546,15 +605,17 @@
             progress.increment()
 
     progress = repo.ui.makeprogress(
-        _('files'), unit=_('chunks'),
-         total=sum(len(v) for v in fnodes.itervalues()))
+        _('files'),
+        unit=_('chunks'),
+        total=sum(len(v) for v in fnodes.itervalues()),
+    )
 
     # TODO make batch size configurable
     batchsize = 10000
     fnodeslist = [x for x in sorted(fnodes.items())]
 
     for i in pycompat.xrange(0, len(fnodeslist), batchsize):
-        batch = [x for x in fnodeslist[i:i + batchsize]]
+        batch = [x for x in fnodeslist[i : i + batchsize]]
         if not batch:
             continue
 
@@ -563,16 +624,25 @@
             locallinkrevs = {}
 
             for path, nodes in batch:
-                fs.append((path, e.callcommand(b'filedata', {
-                    b'path': path,
-                    b'nodes': sorted(nodes),
-                    b'fields': {b'parents', b'revision'},
-                    b'haveparents': True,
-                })))
+                fs.append(
+                    (
+                        path,
+                        e.callcommand(
+                            b'filedata',
+                            {
+                                b'path': path,
+                                b'nodes': sorted(nodes),
+                                b'fields': {b'parents', b'revision'},
+                                b'haveparents': True,
+                            },
+                        ),
+                    )
+                )
 
                 locallinkrevs[path] = {
                     node: linkrevs[manifestnode]
-                    for node, manifestnode in nodes.iteritems()}
+                    for node, manifestnode in nodes.iteritems()
+                }
 
             for path, f in fs:
                 objs = f.result()
@@ -584,10 +654,13 @@
                 store.addgroup(
                     iterrevisions(objs, progress),
                     locallinkrevs[path].__getitem__,
-                    weakref.proxy(tr))
+                    weakref.proxy(tr),
+                )
+
 
-def _fetchfilesfromcsets(repo, tr, remote, pathfilter, fnodes, csets,
-                         manlinkrevs, shallow=False):
+def _fetchfilesfromcsets(
+    repo, tr, remote, pathfilter, fnodes, csets, manlinkrevs, shallow=False
+):
     """Fetch file data from explicit changeset revisions."""
 
     def iterrevisions(objs, remaining, progress):
@@ -631,8 +704,10 @@
             remaining -= 1
 
     progress = repo.ui.makeprogress(
-        _('files'), unit=_('chunks'),
-        total=sum(len(v) for v in fnodes.itervalues()))
+        _('files'),
+        unit=_('chunks'),
+        total=sum(len(v) for v in fnodes.itervalues()),
+    )
 
     commandmeta = remote.apidescriptor[b'commands'][b'filesdata']
     batchsize = commandmeta.get(b'recommendedbatchsize', 50000)
@@ -653,16 +728,15 @@
         fields.add(b'linknode')
 
     for i in pycompat.xrange(0, len(csets), batchsize):
-        batch = [x for x in csets[i:i + batchsize]]
+        batch = [x for x in csets[i : i + batchsize]]
         if not batch:
             continue
 
         with remote.commandexecutor() as e:
             args = {
-                b'revisions': [{
-                    b'type': b'changesetexplicit',
-                    b'nodes': batch,
-                }],
+                b'revisions': [
+                    {b'type': b'changesetexplicit', b'nodes': batch,}
+                ],
                 b'fields': fields,
                 b'haveparents': haveparents,
             }
@@ -684,7 +758,8 @@
 
                 linkrevs = {
                     fnode: manlinkrevs[mnode]
-                    for fnode, mnode in fnodes[path].iteritems()}
+                    for fnode, mnode in fnodes[path].iteritems()
+                }
 
                 def getlinkrev(node):
                     if node in linkrevs:
@@ -692,8 +767,9 @@
                     else:
                         return clrev(node)
 
-                store.addgroup(iterrevisions(objs, header[b'totalitems'],
-                                             progress),
-                               getlinkrev,
-                               weakref.proxy(tr),
-                               maybemissingparents=shallow)
+                store.addgroup(
+                    iterrevisions(objs, header[b'totalitems'], progress),
+                    getlinkrev,
+                    weakref.proxy(tr),
+                    maybemissingparents=shallow,
+                )
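
A subtler rule shows up in the batching loops of exchangev2.py: PEP 8 treats the colon of a slice as a binary operator, so once either bound is an expression rather than a plain name or literal, black spaces both sides of the colon. The first two lines below are the before/after pair from the loops above; the last line is a hypothetical simple-bound case that black leaves tight:

    batch = [x for x in fnodeslist[i:i + batchsize]]    # before
    batch = [x for x in fnodeslist[i : i + batchsize]]  # after
    head = fnodeslist[:batchsize]  # plain-name bounds keep no spaces
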
--- a/mercurial/extensions.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/extensions.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,9 +27,7 @@
     util,
 )
 
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 _extensions = {}
 _disabledextensions = {}
@@ -47,13 +45,16 @@
     'shelve',
 }
 
+
 def extensions(ui=None):
     if ui:
+
         def enabled(name):
             for format in ['%s', 'hgext.%s']:
                 conf = ui.config('extensions', format % name)
                 if conf is not None and not conf.startswith('!'):
                     return True
+
     else:
         enabled = lambda name: True
     for name in _order:
@@ -61,6 +62,7 @@
         if module and enabled(name):
             yield name, module
 
+
 def find(name):
     '''return module with given extension name'''
     mod = None
@@ -75,6 +77,7 @@
         raise KeyError(name)
     return mod
 
+
 def loadpath(path, module_name):
     module_name = module_name.replace('.', '_')
     path = util.normpath(util.expandpath(path))
@@ -90,9 +93,10 @@
             return imp.load_source(module_name, path)
         except IOError as exc:
             if not exc.filename:
-                exc.filename = path # python does not fill this
+                exc.filename = path  # python does not fill this
             raise
 
+
 def _importh(name):
     """import and return the <name> module"""
     mod = __import__(pycompat.sysstr(name))
@@ -101,6 +105,7 @@
         mod = getattr(mod, comp)
     return mod
 
+
 def _importext(name, path=None, reportfunc=None):
     if path:
         # the module will be loaded in sys.modules
@@ -121,14 +126,21 @@
                 mod = _importh(name)
     return mod
 
+
 def _reportimporterror(ui, err, failed, next):
     # note: this ui.log happens before --debug is processed,
     #       Use --config ui.debug=1 to see them.
-    ui.log(b'extension', b'    - could not import %s (%s): trying %s\n',
-           failed, stringutil.forcebytestr(err), next)
+    ui.log(
+        b'extension',
+        b'    - could not import %s (%s): trying %s\n',
+        failed,
+        stringutil.forcebytestr(err),
+        next,
+    )
     if ui.debugflag and ui.configbool('devel', 'debug.extensions'):
         ui.traceback()
 
+
 def _rejectunicode(name, xs):
     if isinstance(xs, (list, set, tuple)):
         for x in xs:
@@ -138,12 +150,16 @@
             _rejectunicode(name, k)
             _rejectunicode(b'%s.%s' % (name, stringutil.forcebytestr(k)), v)
     elif isinstance(xs, type(u'')):
-        raise error.ProgrammingError(b"unicode %r found in %s" % (xs, name),
-                                     hint="use b'' to make it byte string")
+        raise error.ProgrammingError(
+            b"unicode %r found in %s" % (xs, name),
+            hint="use b'' to make it byte string",
+        )
+
 
 # attributes set by registrar.command
 _cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
 
+
 def _validatecmdtable(ui, cmdtable):
     """Check if extension commands have required attributes"""
     for c, e in cmdtable.iteritems():
@@ -153,19 +169,28 @@
             continue
         raise error.ProgrammingError(
             'missing attributes: %s' % ', '.join(missing),
-            hint="use @command decorator to register '%s'" % c)
+            hint="use @command decorator to register '%s'" % c,
+        )
+
 
 def _validatetables(ui, mod):
     """Sanity check for loadable tables provided by extension module"""
     for t in ['cmdtable', 'colortable', 'configtable']:
         _rejectunicode(t, getattr(mod, t, {}))
-    for t in ['filesetpredicate', 'internalmerge', 'revsetpredicate',
-              'templatefilter', 'templatefunc', 'templatekeyword']:
+    for t in [
+        'filesetpredicate',
+        'internalmerge',
+        'revsetpredicate',
+        'templatefilter',
+        'templatefunc',
+        'templatekeyword',
+    ]:
         o = getattr(mod, t, None)
         if o:
             _rejectunicode(t, o._table)
     _validatecmdtable(ui, getattr(mod, 'cmdtable', {}))
 
+
 def load(ui, name, path, loadingtime=None):
     if name.startswith('hgext.') or name.startswith('hgext/'):
         shortname = name[6:]
@@ -189,8 +214,10 @@
     # of Mercurial.
     minver = getattr(mod, 'minimumhgversion', None)
     if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
-        msg = _('(third party extension %s requires version %s or newer '
-                'of Mercurial (current: %s); disabling)\n')
+        msg = _(
+            '(third party extension %s requires version %s or newer '
+            'of Mercurial (current: %s); disabling)\n'
+        )
         ui.warn(msg % (shortname, minver, util.version()))
         return
     ui.log(b'extension', b'    - validating extension tables: %s\n', shortname)
@@ -198,14 +225,16 @@
 
     _extensions[shortname] = mod
     _order.append(shortname)
-    ui.log(b'extension', b'    - invoking registered callbacks: %s\n',
-           shortname)
+    ui.log(
+        b'extension', b'    - invoking registered callbacks: %s\n', shortname
+    )
     with util.timedcm('callbacks extension %s', shortname) as stats:
         for fn in _aftercallbacks.get(shortname, []):
             fn(loaded=True)
     ui.log(b'extension', b'    > callbacks completed in %s\n', stats)
     return mod
 
+
 def _runuisetup(name, ui):
     uisetup = getattr(_extensions[name], 'uisetup', None)
     if uisetup:
@@ -218,6 +247,7 @@
             return False
     return True
 
+
 def _runextsetup(name, ui):
     extsetup = getattr(_extensions[name], 'extsetup', None)
     if extsetup:
@@ -230,22 +260,29 @@
             return False
     return True
 
+
 def loadall(ui, whitelist=None):
     loadingtime = collections.defaultdict(int)
     result = ui.configitems("extensions")
     if whitelist is not None:
         result = [(k, v) for (k, v) in result if k in whitelist]
     newindex = len(_order)
-    ui.log(b'extension', b'loading %sextensions\n',
-           'additional ' if newindex else '')
+    ui.log(
+        b'extension',
+        b'loading %sextensions\n',
+        'additional ' if newindex else '',
+    )
     ui.log(b'extension', b'- processing %d entries\n', len(result))
     with util.timedcm('load all extensions') as stats:
         for (name, path) in result:
             if path:
                 if path[0:1] == '!':
                     if name not in _disabledextensions:
-                        ui.log(b'extension',
-                               b'  - skipping disabled extension: %s\n', name)
+                        ui.log(
+                            b'extension',
+                            b'  - skipping disabled extension: %s\n',
+                            name,
+                        )
                     _disabledextensions[name] = path[1:]
                     continue
             try:
@@ -253,17 +290,25 @@
             except Exception as inst:
                 msg = stringutil.forcebytestr(inst)
                 if path:
-                    ui.warn(_("*** failed to import extension %s from %s: %s\n")
-                            % (name, path, msg))
+                    ui.warn(
+                        _("*** failed to import extension %s from %s: %s\n")
+                        % (name, path, msg)
+                    )
                 else:
-                    ui.warn(_("*** failed to import extension %s: %s\n")
-                            % (name, msg))
+                    ui.warn(
+                        _("*** failed to import extension %s: %s\n")
+                        % (name, msg)
+                    )
                 if isinstance(inst, error.Hint) and inst.hint:
                     ui.warn(_("*** (%s)\n") % inst.hint)
                 ui.traceback()
 
-    ui.log(b'extension', b'> loaded %d extensions, total time %s\n',
-           len(_order) - newindex, stats)
+    ui.log(
+        b'extension',
+        b'> loaded %d extensions, total time %s\n',
+        len(_order) - newindex,
+        stats,
+    )
     # list of (objname, loadermod, loadername) tuple:
     # - objname is the name of an object in extension module,
     #   from which extra information is loaded
@@ -286,8 +331,11 @@
             ui.log(b'extension', b'  - running uisetup for %s\n', name)
             with util.timedcm('uisetup %s', name) as stats:
                 if not _runuisetup(name, ui):
-                    ui.log(b'extension',
-                           b'    - the %s extension uisetup failed\n', name)
+                    ui.log(
+                        b'extension',
+                        b'    - the %s extension uisetup failed\n',
+                        name,
+                    )
                     broken.add(name)
             ui.log(b'extension', b'  > uisetup for %s took %s\n', name, stats)
             loadingtime[name] += stats.elapsed
@@ -301,8 +349,11 @@
             ui.log(b'extension', b'  - running extsetup for %s\n', name)
             with util.timedcm('extsetup %s', name) as stats:
                 if not _runextsetup(name, ui):
-                    ui.log(b'extension',
-                           b'    - the %s extension extsetup failed\n', name)
+                    ui.log(
+                        b'extension',
+                        b'    - the %s extension extsetup failed\n',
+                        name,
+                    )
                     broken.add(name)
             ui.log(b'extension', b'  > extsetup for %s took %s\n', name, stats)
             loadingtime[name] += stats.elapsed
@@ -320,9 +371,11 @@
                 continue
 
             for fn in _aftercallbacks[shortname]:
-                ui.log(b'extension',
-                       b'  - extension %s not loaded, notify callbacks\n',
-                       shortname)
+                ui.log(
+                    b'extension',
+                    b'  - extension %s not loaded, notify callbacks\n',
+                    shortname,
+                )
                 fn(loaded=False)
     ui.log(b'extension', b'> remaining aftercallbacks completed in %s\n', stats)
 
@@ -361,27 +414,36 @@
     ]
     with util.timedcm('load registration objects') as stats:
         _loadextra(ui, newindex, extraloaders)
-    ui.log(b'extension', b'> extension registration object loading took %s\n',
-           stats)
+    ui.log(
+        b'extension',
+        b'> extension registration object loading took %s\n',
+        stats,
+    )
 
     # Report per extension loading time (except reposetup)
     for name in sorted(loadingtime):
-        ui.log(b'extension', b'> extension %s take a total of %s to load\n',
-               name, util.timecount(loadingtime[name]))
+        ui.log(
+            b'extension',
+            b'> extension %s takes a total of %s to load\n',
+            name,
+            util.timecount(loadingtime[name]),
+        )
 
     ui.log(b'extension', b'extension loading complete\n')
 
+
 def _loadextra(ui, newindex, extraloaders):
     for name in _order[newindex:]:
         module = _extensions[name]
         if not module:
-            continue # loading this module failed
+            continue  # loading this module failed
 
         for objname, loadermod, loadername in extraloaders:
             extraobj = getattr(module, objname, None)
             if extraobj is not None:
                 getattr(loadermod, loadername)(ui, name, extraobj)
 
+
 def afterloaded(extension, callback):
     '''Run the specified function after a named extension is loaded.
 
@@ -397,11 +459,12 @@
 
     if extension in _extensions:
         # Report loaded as False if the extension is disabled
-        loaded = (_extensions[extension] is not None)
+        loaded = _extensions[extension] is not None
         callback(loaded=loaded)
     else:
         _aftercallbacks.setdefault(extension, []).append(callback)
 
+
 def populateui(ui):
     """Run extension hooks on the given ui to populate additional members,
     extend the class dynamically, etc.
@@ -418,8 +481,11 @@
             hook(ui)
         except Exception as inst:
             ui.traceback(force=True)
-            ui.warn(_('*** failed to populate ui by extension %s: %s\n')
-                    % (name, stringutil.forcebytestr(inst)))
+            ui.warn(
+                _('*** failed to populate ui by extension %s: %s\n')
+                % (name, stringutil.forcebytestr(inst))
+            )
+
 
 def bind(func, *args):
     '''Partial function application
@@ -429,10 +495,13 @@
 
           f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
     assert callable(func)
+
     def closure(*a, **kw):
         return func(*(args + a), **kw)
+
     return closure
 
+
 def _updatewrapper(wrap, origfn, unboundwrapper):
     '''Copy and add some useful attributes to wrapper'''
     try:
@@ -445,6 +514,7 @@
     wrap._origfunc = origfn
     wrap._unboundwrapper = unboundwrapper
 
+
 def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
     '''Wrap the command named `command' in table
 
@@ -482,8 +552,9 @@
             break
 
     origfn = entry[0]
-    wrap = functools.partial(util.checksignature(wrapper),
-                             util.checksignature(origfn))
+    wrap = functools.partial(
+        util.checksignature(wrapper), util.checksignature(origfn)
+    )
     _updatewrapper(wrap, origfn, wrapper)
     if docstring is not None:
         wrap.__doc__ += docstring
@@ -495,6 +566,7 @@
     table[key] = tuple(newentry)
     return entry
 
+
 def wrapfilecache(cls, propname, wrapper):
     """Wraps a filecache property.
 
@@ -506,14 +578,18 @@
         if propname in currcls.__dict__:
             origfn = currcls.__dict__[propname].func
             assert callable(origfn)
+
             def wrap(*args, **kwargs):
                 return wrapper(origfn, *args, **kwargs)
+
             currcls.__dict__[propname].func = wrap
             break
 
     if currcls is object:
-        raise AttributeError(r"type '%s' has no property '%s'" % (
-            cls, propname))
+        raise AttributeError(
+            r"type '%s' has no property '%s'" % (cls, propname)
+        )
+
 
 class wrappedfunction(object):
     '''context manager for temporarily wrapping a function'''
@@ -530,6 +606,7 @@
     def __exit__(self, exctype, excvalue, traceback):
         unwrapfunction(self._container, self._funcname, self._wrapper)
 
+
 def wrapfunction(container, funcname, wrapper):
     '''Wrap the function named funcname in container
 
@@ -579,6 +656,7 @@
     setattr(container, funcname, wrap)
     return origfn
 
+
 def unwrapfunction(container, funcname, wrapper=None):
     '''undo wrapfunction
 
@@ -599,6 +677,7 @@
         wrapfunction(container, funcname, w)
     return wrapper
 
+
 def getwrapperchain(container, funcname):
     '''get a chain of wrappers of a function
 
@@ -615,12 +694,15 @@
         fn = getattr(fn, '_origfunc', None)
     return result
 
+
 def _disabledpaths():
     '''find paths of disabled extensions. returns a dict of {name: path}'''
     import hgext
+
     extpath = os.path.dirname(
-        os.path.abspath(pycompat.fsencode(hgext.__file__)))
-    try: # might not be a filesystem path
+        os.path.abspath(pycompat.fsencode(hgext.__file__))
+    )
+    try:  # might not be a filesystem path
         files = os.listdir(extpath)
     except OSError:
         return {}
@@ -645,6 +727,7 @@
             exts[name] = path
     return exts
 
+
 def _moduledoc(file):
     '''return the top-level python documentation for the given file
 
@@ -669,7 +752,7 @@
                     result.append(line)
                 break
             elif not line:
-                return None # unmatched delimiter
+                return None  # unmatched delimiter
             result.append(line)
             line = file.readline()
     else:
@@ -677,6 +760,7 @@
 
     return ''.join(result)
 
+
 def _disabledhelp(path):
     '''retrieve help synopsis of a disabled extension (without importing)'''
     try:
@@ -685,18 +769,22 @@
     except IOError:
         return
 
-    if doc: # extracting localized synopsis
+    if doc:  # extracting localized synopsis
         return gettext(doc)
     else:
         return _('(no help text available)')
 
+
 def disabled():
     '''find disabled extensions from hgext. returns a dict of {name: desc}'''
     try:
         from hgext import __index__
-        return dict((name, gettext(desc))
-                    for name, desc in __index__.docs.iteritems()
-                    if name not in _order)
+
+        return dict(
+            (name, gettext(desc))
+            for name, desc in __index__.docs.iteritems()
+            if name not in _order
+        )
     except (ImportError, AttributeError):
         pass
 
@@ -712,10 +800,12 @@
 
     return exts
 
+
 def disabledext(name):
     '''find a specific disabled extension from hgext. returns desc'''
     try:
         from hgext import __index__
+
         if name in _order:  # enabled
             return
         else:
@@ -727,6 +817,7 @@
     if name in paths:
         return _disabledhelp(paths[name])
 
+
 def _walkcommand(node):
     """Scan @command() decorators in the tree starting at node"""
     todo = collections.deque([node])
@@ -744,6 +835,7 @@
                 continue
             yield d
 
+
 def _disabledcmdtable(path):
     """Construct a dummy command table without loading the extension module
 
@@ -765,6 +857,7 @@
         cmdtable[name] = (None, [], b'')
     return cmdtable
 
+
 def _finddisabledcmd(ui, cmd, name, path, strict):
     try:
         cmdtable = _disabledcmdtable(path)
@@ -783,6 +876,7 @@
     doc = _disabledhelp(path)
     return (cmd, name, doc)
 
+
 def disabledcmd(ui, cmd, strict=False):
     '''find cmd from disabled extensions without importing.
     returns (cmdname, extname, doc)'''
@@ -807,25 +901,27 @@
 
     raise error.UnknownCommand(cmd)
 
+
 def enabled(shortname=True):
     '''return a dict of {name: desc} of extensions'''
     exts = {}
     for ename, ext in extensions():
-        doc = (gettext(ext.__doc__) or _('(no help text available)'))
+        doc = gettext(ext.__doc__) or _('(no help text available)')
         if shortname:
             ename = ename.split('.')[-1]
         exts[ename] = doc.splitlines()[0].strip()
 
     return exts
 
+
 def notloaded():
     '''return short names of extensions that failed to load'''
     return [name for name, mod in _extensions.iteritems() if mod is None]
 
+
 def moduleversion(module):
     '''return version information from given module as a string'''
-    if (util.safehasattr(module, 'getversion')
-          and callable(module.getversion)):
+    if util.safehasattr(module, 'getversion') and callable(module.getversion):
         version = module.getversion()
     elif util.safehasattr(module, '__version__'):
         version = module.__version__
@@ -835,6 +931,7 @@
         version = '.'.join(pycompat.bytestr(o) for o in version)
     return version
 
+
 def ismoduleinternal(module):
     exttestedwith = getattr(module, 'testedwith', None)
     return exttestedwith == "ships-with-hg-core"
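
Much of the extensions.py churn above is vertical whitespace rather than wrapping: black enforces PEP 8's two blank lines around top-level definitions, a single blank line separating a nested def from neighboring statements, and two spaces before an inline comment. A self-contained sketch with hypothetical functions (not code from this patch):

    def outer():
        x = 1  # inline comments get two spaces before the hash

        def inner():
            return x

        return inner


    def sibling():
        return outer()()
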
--- a/mercurial/exthelper.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/exthelper.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,6 +21,7 @@
 
 from hgdemandimport import tracing
 
+
 class exthelper(object):
     """Helper for modular extension setup
 
@@ -281,9 +282,11 @@
             if extension is None:
                 self._commandwrappers.append((command, wrapper, opts))
             else:
-                self._extcommandwrappers.append((extension, command, wrapper,
-                                                 opts))
+                self._extcommandwrappers.append(
+                    (extension, command, wrapper, opts)
+                )
             return wrapper
+
         return dec
 
     def wrapfunction(self, container, funcname):
@@ -300,7 +303,9 @@
                 ui.note('His head smashed in and his heart cut out')
                 return orig(*args, **kwargs)
         """
+
         def dec(wrapper):
             self._functionwrappers.append((container, funcname, wrapper))
             return wrapper
+
         return dec
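
One thing black never does is merge adjacent string literals: it only rewraps the expression around them, which is why rewrapped calls throughout this patch keep their message fragments side by side, occasionally landing back on a single line (as in the '(you may want to report this to the server ' 'operator)\n' hint earlier). The clone-bundle warning from exchange.py above, condensed to show the usual outcome (indentation as in the original function body):

        # before
        repo.ui.warn(_('no compatible clone bundles available on server; '
                       'falling back to regular clone\n'))

        # after: the call is exploded, but the two literals stay separate
        repo.ui.warn(
            _(
                'no compatible clone bundles available on server; '
                'falling back to regular clone\n'
            )
        )
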
--- a/mercurial/fancyopts.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/fancyopts.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,6 +27,7 @@
     'version',
 }
 
+
 def _earlyoptarg(arg, shortlist, namelist):
     """Check if the given arg is a valid unabbreviated option
 
@@ -89,6 +90,7 @@
             return flag, bool(val), val, shortlist.startswith(':', i + 1)
     return '', False, '', False
 
+
 def earlygetopt(args, shortlist, namelist, gnu=False, keepsep=False):
     """Parse options like getopt, but ignores unknown options and abbreviated
     forms
@@ -202,6 +204,7 @@
     parsedargs.extend(args[pos:])
     return parsedopts, parsedargs
 
+
 class customopt(object):
     """Manage defaults and mutations for any type of opt."""
 
@@ -226,6 +229,7 @@
 
         On failure, abort can be called with a string error message."""
 
+
 class _simpleopt(customopt):
     def _isboolopt(self):
         return isinstance(self._defaultvalue, (bool, type(None)))
@@ -233,6 +237,7 @@
     def newstate(self, oldstate, newparam, abort):
         return newparam
 
+
 class _callableopt(customopt):
     def __init__(self, callablefn):
         self.callablefn = callablefn
@@ -241,6 +246,7 @@
     def newstate(self, oldstate, newparam, abort):
         return self.callablefn(newparam)
 
+
 class _listopt(customopt):
     def getdefaultvalue(self):
         return self._defaultvalue[:]
@@ -249,6 +255,7 @@
         oldstate.append(newparam)
         return oldstate
 
+
 class _intopt(customopt):
     def newstate(self, oldstate, newparam, abort):
         try:
@@ -256,6 +263,7 @@
         except ValueError:
             abort(_('expected int'))
 
+
 def _defaultopt(default):
     """Returns a default opt implementation, given a default value."""
 
@@ -270,6 +278,7 @@
     else:
         return _simpleopt(default)
 
+
 def fancyopts(args, options, state, gnu=False, early=False, optaliases=None):
     """
     read args, parse options, and store options in state
@@ -369,9 +378,13 @@
         if obj._isboolopt():
             state[name] = boolval
         else:
+
             def abort(s):
-                raise error.Abort(_('invalid value %r for option %s, %s')
-                                  % (pycompat.maybebytestr(val), opt, s))
+                raise error.Abort(
+                    _('invalid value %r for option %s, %s')
+                    % (pycompat.maybebytestr(val), opt, s)
+                )
+
             state[name] = defmap[name].newstate(state[name], val, abort)
 
     # return unparsed args
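
The filelog.py diff just below shows black's two-step treatment of long signatures: it first tries every parameter on a single indented continuation line (as in _getbundleobsmarkerpart near the top of this section), and only when that still overflows does each parameter get its own line, with a trailing comma after the last and the closing '):' dedented. A sketch shortened from the addrevision hunk (flags=0 stands in for the real default value):

    # before: wrapped by hand
    def addrevision(self, revisiondata, transaction, linkrev, p1, p2,
                    node=None, flags=0, cachedelta=None):
        ...

    # after: one parameter per line, trailing comma, dedented '):'
    def addrevision(
        self,
        revisiondata,
        transaction,
        linkrev,
        p1,
        p2,
        node=None,
        flags=0,
        cachedelta=None,
    ):
        ...
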
--- a/mercurial/filelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/filelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,16 +20,15 @@
     repository,
     util as interfaceutil,
 )
-from .utils import (
-    storageutil,
-)
+from .utils import storageutil
+
 
 @interfaceutil.implementer(repository.ifilestorage)
 class filelog(object):
     def __init__(self, opener, path):
-        self._revlog = revlog.revlog(opener,
-                                     '/'.join(('data', path + '.i')),
-                                     censorable=True)
+        self._revlog = revlog.revlog(
+            opener, '/'.join(('data', path + '.i')), censorable=True
+        )
         # Full name of the user visible file, relative to the repository root.
         # Used by LFS.
         self._revlog.filename = path
@@ -66,8 +65,9 @@
         return self._revlog.node(rev)
 
     def lookup(self, node):
-        return storageutil.fileidlookup(self._revlog, node,
-                                        self._revlog.indexfile)
+        return storageutil.fileidlookup(
+            self._revlog, node, self._revlog.indexfile
+        )
 
     def linkrev(self, rev):
         return self._revlog.linkrev(rev)
@@ -95,29 +95,63 @@
     def rawdata(self, node, _df=None):
         return self._revlog.rawdata(node, _df=_df)
 
-    def emitrevisions(self, nodes, nodesorder=None,
-                      revisiondata=False, assumehaveparentrevisions=False,
-                      deltamode=repository.CG_DELTAMODE_STD):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=repository.CG_DELTAMODE_STD,
+    ):
         return self._revlog.emitrevisions(
-            nodes, nodesorder=nodesorder, revisiondata=revisiondata,
+            nodes,
+            nodesorder=nodesorder,
+            revisiondata=revisiondata,
             assumehaveparentrevisions=assumehaveparentrevisions,
-            deltamode=deltamode)
+            deltamode=deltamode,
+        )
 
-    def addrevision(self, revisiondata, transaction, linkrev, p1, p2,
-                    node=None, flags=revlog.REVIDX_DEFAULT_FLAGS,
-                    cachedelta=None):
-        return self._revlog.addrevision(revisiondata, transaction, linkrev,
-                                    p1, p2, node=node, flags=flags,
-                                    cachedelta=cachedelta)
+    def addrevision(
+        self,
+        revisiondata,
+        transaction,
+        linkrev,
+        p1,
+        p2,
+        node=None,
+        flags=revlog.REVIDX_DEFAULT_FLAGS,
+        cachedelta=None,
+    ):
+        return self._revlog.addrevision(
+            revisiondata,
+            transaction,
+            linkrev,
+            p1,
+            p2,
+            node=node,
+            flags=flags,
+            cachedelta=cachedelta,
+        )
 
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
-                 maybemissingparents=False):
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        maybemissingparents=False,
+    ):
         if maybemissingparents:
-            raise error.Abort(_('revlog storage does not support missing '
-                                'parents write mode'))
+            raise error.Abort(
+                _(
+                    'revlog storage does not support missing '
+                    'parents write mode'
+                )
+            )
 
-        return self._revlog.addgroup(deltas, linkmapper, transaction,
-                                     addrevisioncb=addrevisioncb)
+        return self._revlog.addgroup(
+            deltas, linkmapper, transaction, addrevisioncb=addrevisioncb
+        )
 
     def getstrippoint(self, minlink):
         return self._revlog.getstrippoint(minlink)
@@ -165,13 +199,21 @@
     def verifyintegrity(self, state):
         return self._revlog.verifyintegrity(state)
 
-    def storageinfo(self, exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        self,
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         return self._revlog.storageinfo(
-            exclusivefiles=exclusivefiles, sharedfiles=sharedfiles,
-            revisionscount=revisionscount, trackedsize=trackedsize,
-            storedsize=storedsize)
+            exclusivefiles=exclusivefiles,
+            sharedfiles=sharedfiles,
+            revisionscount=revisionscount,
+            trackedsize=trackedsize,
+            storedsize=storedsize,
+        )
 
     # TODO these aren't part of the interface and aren't internal methods.
     # Callers should be fixed to not use them.
@@ -192,6 +234,7 @@
 
         return self._revlog.clone(tr, destrevlog._revlog, **kwargs)
 
+
 class narrowfilelog(filelog):
     """Filelog variation to be used with narrow stores."""
 
--- a/mercurial/filemerge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/filemerge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -40,15 +40,19 @@
     stringutil,
 )
 
+
 def _toolstr(ui, tool, part, *args):
     return ui.config("merge-tools", tool + "." + part, *args)
 
-def _toolbool(ui, tool, part,*args):
+
+def _toolbool(ui, tool, part, *args):
     return ui.configbool("merge-tools", tool + "." + part, *args)
 
+
 def _toollist(ui, tool, part):
     return ui.configlist("merge-tools", tool + "." + part)
 
+
 internals = {}
 # Merge tools to document.
 internalsdoc = {}
@@ -57,8 +61,8 @@
 
 # internal tool merge types
 nomerge = internaltool.nomerge
-mergeonly = internaltool.mergeonly # just the full merge, no premerge
-fullmerge = internaltool.fullmerge # both premerge and merge
+mergeonly = internaltool.mergeonly  # just the full merge, no premerge
+fullmerge = internaltool.fullmerge  # both premerge and merge
 
 # IMPORTANT: keep the last line of this prompt very short ("What do you want to
 # do?") because of issue6158, ideally to <40 English characters (to allow other
@@ -68,13 +72,16 @@
     "file '%(fd)s' was deleted in other%(o)s but was modified in local%(l)s.\n"
     "You can use (c)hanged version, (d)elete, or leave (u)nresolved.\n"
     "What do you want to do?"
-    "$$ &Changed $$ &Delete $$ &Unresolved")
+    "$$ &Changed $$ &Delete $$ &Unresolved"
+)
 
 _otherchangedlocaldeletedmsg = _(
     "file '%(fd)s' was deleted in local%(l)s but was modified in other%(o)s.\n"
     "You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.\n"
     "What do you want to do?"
-    "$$ &Changed $$ &Deleted $$ &Unresolved")
+    "$$ &Changed $$ &Deleted $$ &Unresolved"
+)
+
 
 class absentfilectx(object):
     """Represents a file that's ostensibly in a context but is actually not
@@ -82,6 +89,7 @@
 
     This is here because it's very specific to the filemerge code for now --
     other code is likely going to break with the values this returns."""
+
     def __init__(self, ctx, f):
         self._ctx = ctx
         self._f = f
@@ -99,14 +107,17 @@
         return nullid
 
     _customcmp = True
+
     def cmp(self, fctx):
         """compare with other file context
 
         returns True if different from fctx.
         """
-        return not (fctx.isabsent() and
-                    fctx.ctx() == self.ctx() and
-                    fctx.path() == self.path())
+        return not (
+            fctx.isabsent()
+            and fctx.ctx() == self.ctx()
+            and fctx.path() == self.path()
+        )
 
     def flags(self):
         return ''
@@ -120,6 +131,7 @@
     def isabsent(self):
         return True
 
+
 def _findtool(ui, tool):
     if tool in internals:
         return tool
@@ -128,11 +140,13 @@
         return cmd
     return findexternaltool(ui, tool)
 
+
 def _quotetoolpath(cmd):
     if cmd.startswith('python:'):
         return cmd
     return procutil.shellquote(cmd)
 
+
 def findexternaltool(ui, tool):
     for kn in ("regkey", "regkeyalt"):
         k = _toolstr(ui, tool, kn)
@@ -146,6 +160,7 @@
     exe = _toolstr(ui, tool, "executable", tool)
     return procutil.findexe(util.expandpath(exe))
 
+
 def _picktool(repo, ui, path, binary, symlink, changedelete):
     strictcheck = ui.configbool('merge', 'strict-capability-check')
 
@@ -162,9 +177,9 @@
         if pat:
             tmsg = _("%s (for pattern %s)") % (tool, pat)
         if not _findtool(ui, tool):
-            if pat: # explicitly requested tool deserves a warning
+            if pat:  # explicitly requested tool deserves a warning
                 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
-            else: # configured but non-existing tools are more silent
+            else:  # configured but non-existing tools are more silent
                 ui.note(_("couldn't find merge tool %s\n") % tmsg)
         elif symlink and not hascapability(tool, "symlink", strictcheck):
             ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
@@ -211,11 +226,15 @@
         mf = match.match(repo.root, '', [pat])
         if mf(path) and check(tool, pat, symlink, binarycap, changedelete):
             if binary and not hascapability(tool, "binary", strict=True):
-                ui.warn(_("warning: check merge-patterns configurations,"
-                          " if %r for binary file %r is unintentional\n"
-                          "(see 'hg help merge-tools'"
-                          " for binary files capability)\n")
-                        % (pycompat.bytestr(tool), pycompat.bytestr(path)))
+                ui.warn(
+                    _(
+                        "warning: check merge-patterns configurations,"
+                        " if %r for binary file %r is unintentional\n"
+                        "(see 'hg help merge-tools'"
+                        " for binary files capability)\n"
+                    )
+                    % (pycompat.bytestr(tool), pycompat.bytestr(path))
+                )
             toolpath = _findtool(ui, tool)
             return (tool, _quotetoolpath(toolpath))
 
@@ -229,8 +248,9 @@
         if _toolbool(ui, t, "disabled"):
             disabled.add(t)
     names = tools.keys()
-    tools = sorted([(-p, tool) for tool, p in tools.items()
-                    if tool not in disabled])
+    tools = sorted(
+        [(-p, tool) for tool, p in tools.items() if tool not in disabled]
+    )
     uimerge = ui.config("ui", "merge")
     if uimerge:
         # external tools defined in uimerge won't be able to handle
@@ -238,8 +258,8 @@
         if check(uimerge, path, symlink, binary, changedelete):
             if uimerge not in names and not changedelete:
                 return (uimerge, uimerge)
-            tools.insert(0, (None, uimerge)) # highest priority
-    tools.append((None, "hgmerge")) # the old default, if found
+            tools.insert(0, (None, uimerge))  # highest priority
+    tools.append((None, "hgmerge"))  # the old default, if found
     for p, t in tools:
         if check(t, None, symlink, binary, changedelete):
             toolpath = _findtool(ui, t)
@@ -253,21 +273,23 @@
         return ":prompt", None
     return ":merge", None
 
+
 def _eoltype(data):
     "Guess the EOL type of a file"
-    if '\0' in data: # binary
+    if '\0' in data:  # binary
         return None
-    if '\r\n' in data: # Windows
+    if '\r\n' in data:  # Windows
         return '\r\n'
-    if '\r' in data: # Old Mac
+    if '\r' in data:  # Old Mac
         return '\r'
-    if '\n' in data: # UNIX
+    if '\n' in data:  # UNIX
         return '\n'
-    return None # unknown
+    return None  # unknown
+
 
 def _matcheol(file, back):
     "Convert EOL markers in a file to match origfile"
-    tostyle = _eoltype(back.data()) # No repo.wread filters?
+    tostyle = _eoltype(back.data())  # No repo.wread filters?
     if tostyle:
         data = util.readfile(file)
         style = _eoltype(data)
@@ -276,6 +298,7 @@
             if newdata != data:
                 util.writefile(file, newdata)
 
+
 @internaltool('prompt', nomerge)
 def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Asks the user which of the local `p1()` or the other `p2()` version to
@@ -287,51 +310,53 @@
     # Avoid prompting during an in-memory merge since it doesn't support merge
     # conflicts.
     if fcd.changectx().isinmemory():
-        raise error.InMemoryMergeConflictsError('in-memory merge does not '
-                                                'support file conflicts')
+        raise error.InMemoryMergeConflictsError(
+            'in-memory merge does not ' 'support file conflicts'
+        )
 
     prompts = partextras(labels)
     prompts['fd'] = uipathfn(fd)
     try:
         if fco.isabsent():
-            index = ui.promptchoice(
-                _localchangedotherdeletedmsg % prompts, 2)
+            index = ui.promptchoice(_localchangedotherdeletedmsg % prompts, 2)
             choice = ['local', 'other', 'unresolved'][index]
         elif fcd.isabsent():
-            index = ui.promptchoice(
-                _otherchangedlocaldeletedmsg % prompts, 2)
+            index = ui.promptchoice(_otherchangedlocaldeletedmsg % prompts, 2)
             choice = ['other', 'local', 'unresolved'][index]
         else:
             # IMPORTANT: keep the last line of this prompt ("What do you want to
             # do?") very short, see comment next to _localchangedotherdeletedmsg
             # at the top of the file for details.
             index = ui.promptchoice(
-                _("file '%(fd)s' needs to be resolved.\n"
-                  "You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave "
-                  "(u)nresolved.\n"
-                  "What do you want to do?"
-                  "$$ &Local $$ &Other $$ &Unresolved") % prompts, 2)
+                _(
+                    "file '%(fd)s' needs to be resolved.\n"
+                    "You can keep (l)ocal%(l)s, take (o)ther%(o)s, or leave "
+                    "(u)nresolved.\n"
+                    "What do you want to do?"
+                    "$$ &Local $$ &Other $$ &Unresolved"
+                )
+                % prompts,
+                2,
+            )
             choice = ['local', 'other', 'unresolved'][index]
 
         if choice == 'other':
-            return _iother(repo, mynode, orig, fcd, fco, fca, toolconf,
-                           labels)
+            return _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
         elif choice == 'local':
-            return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf,
-                           labels)
+            return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
         elif choice == 'unresolved':
-            return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
-                          labels)
+            return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
     except error.ResponseExpected:
         ui.write("\n")
-        return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf,
-                      labels)
+        return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels)
+
 
 @internaltool('local', nomerge)
 def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Uses the local `p1()` version of files as the merged version."""
     return 0, fcd.isabsent()
 
+
 @internaltool('other', nomerge)
 def _iother(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """Uses the other `p2()` version of files as the merged version."""
@@ -344,6 +369,7 @@
         deleted = False
     return 0, deleted
 
+
 @internaltool('fail', nomerge)
 def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf, labels=None):
     """
@@ -355,6 +381,7 @@
         _underlyingfctxifabsent(fcd).write(fco.data(), fco.flags())
     return 1, False
 
+
 def _underlyingfctxifabsent(filectx):
     """Sometimes when resolving, our fcd is actually an absentfilectx, but
     we want to write to it (to do the resolve). This helper returns the
@@ -365,6 +392,7 @@
     else:
         return filectx
 
+
 def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None):
     tool, toolpath, binary, symlink, scriptfn = toolconf
     if symlink or fcd.isabsent() or fco.isabsent():
@@ -382,9 +410,10 @@
         premerge = _toolstr(ui, tool, "premerge", "").lower()
         if premerge not in validkeep:
             _valid = ', '.join(["'" + v + "'" for v in validkeep])
-            raise error.ConfigError(_("%s.premerge not valid "
-                                      "('%s' is neither boolean nor %s)") %
-                                    (tool, premerge, _valid))
+            raise error.ConfigError(
+                _("%s.premerge not valid " "('%s' is neither boolean nor %s)")
+                % (tool, premerge, _valid)
+            )
 
     if premerge:
         if premerge == 'keep-merge3':
@@ -399,21 +428,30 @@
         if premerge not in validkeep:
             # restore from backup and try again
             _restorebackup(fcd, back)
-    return 1 # continue merging
+    return 1  # continue merging
+
 
 def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf):
     tool, toolpath, binary, symlink, scriptfn = toolconf
     uipathfn = scmutil.getuipathfn(repo)
     if symlink:
-        repo.ui.warn(_('warning: internal %s cannot merge symlinks '
-                       'for %s\n') % (tool, uipathfn(fcd.path())))
+        repo.ui.warn(
+            _('warning: internal %s cannot merge symlinks ' 'for %s\n')
+            % (tool, uipathfn(fcd.path()))
+        )
         return False
     if fcd.isabsent() or fco.isabsent():
-        repo.ui.warn(_('warning: internal %s cannot merge change/delete '
-                       'conflict for %s\n') % (tool, uipathfn(fcd.path())))
+        repo.ui.warn(
+            _(
+                'warning: internal %s cannot merge change/delete '
+                'conflict for %s\n'
+            )
+            % (tool, uipathfn(fcd.path()))
+        )
         return False
     return True
 
+
 def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode):
     """
     Uses the internal non-interactive simple merge algorithm for merging
@@ -425,35 +463,55 @@
     r = simplemerge.simplemerge(ui, fcd, fca, fco, label=labels, mode=mode)
     return True, r, False
 
-@internaltool('union', fullmerge,
-              _("warning: conflicts while merging %s! "
-                "(edit, then use 'hg resolve --mark')\n"),
-              precheck=_mergecheck)
+
+@internaltool(
+    'union',
+    fullmerge,
+    _(
+        "warning: conflicts while merging %s! "
+        "(edit, then use 'hg resolve --mark')\n"
+    ),
+    precheck=_mergecheck,
+)
 def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal non-interactive simple merge algorithm for merging
     files. It will use both left and right sides for conflict regions.
     No markers are inserted."""
-    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
-                  files, labels, 'union')
+    return _merge(
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'union'
+    )
+
 
-@internaltool('merge', fullmerge,
-              _("warning: conflicts while merging %s! "
-                "(edit, then use 'hg resolve --mark')\n"),
-              precheck=_mergecheck)
+@internaltool(
+    'merge',
+    fullmerge,
+    _(
+        "warning: conflicts while merging %s! "
+        "(edit, then use 'hg resolve --mark')\n"
+    ),
+    precheck=_mergecheck,
+)
 def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal non-interactive simple merge algorithm for merging
     files. It will fail if there are any conflicts and leave markers in
     the partially merged file. Markers will have two sections, one for each side
     of merge."""
-    return _merge(repo, mynode, orig, fcd, fco, fca, toolconf,
-                  files, labels, 'merge')
+    return _merge(
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'merge'
+    )
+
 
-@internaltool('merge3', fullmerge,
-              _("warning: conflicts while merging %s! "
-                "(edit, then use 'hg resolve --mark')\n"),
-              precheck=_mergecheck)
+@internaltool(
+    'merge3',
+    fullmerge,
+    _(
+        "warning: conflicts while merging %s! "
+        "(edit, then use 'hg resolve --mark')\n"
+    ),
+    precheck=_mergecheck,
+)
 def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal non-interactive simple merge algorithm for merging
@@ -466,16 +524,29 @@
         labels.append('base')
     return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
 
-def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files,
-                labels=None, localorother=None):
+
+def _imergeauto(
+    repo,
+    mynode,
+    orig,
+    fcd,
+    fco,
+    fca,
+    toolconf,
+    files,
+    labels=None,
+    localorother=None,
+):
     """
     Generic driver for _imergelocal and _imergeother
     """
     assert localorother is not None
-    r = simplemerge.simplemerge(repo.ui, fcd, fca, fco, label=labels,
-                                localorother=localorother)
+    r = simplemerge.simplemerge(
+        repo.ui, fcd, fca, fco, label=labels, localorother=localorother
+    )
     return True, r
 
+
 @internaltool('merge-local', mergeonly, precheck=_mergecheck)
 def _imergelocal(*args, **kwargs):
     """
@@ -484,6 +555,7 @@
     success, status = _imergeauto(localorother='local', *args, **kwargs)
     return success, status, False
 
+
 @internaltool('merge-other', mergeonly, precheck=_mergecheck)
 def _imergeother(*args, **kwargs):
     """
@@ -492,10 +564,16 @@
     success, status = _imergeauto(localorother='other', *args, **kwargs)
     return success, status, False
 
-@internaltool('tagmerge', mergeonly,
-              _("automatic tag merging of %s failed! "
-                "(use 'hg resolve --tool :merge' or another merge "
-                "tool of your choice)\n"))
+
+@internaltool(
+    'tagmerge',
+    mergeonly,
+    _(
+        "automatic tag merging of %s failed! "
+        "(use 'hg resolve --tool :merge' or another merge "
+        "tool of your choice)\n"
+    ),
+)
 def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Uses the internal tag merge algorithm (experimental).
@@ -503,6 +581,7 @@
     success, status = tagmerge.merge(repo, fcd, fco, fca)
     return success, status, False
 
+
 @internaltool('dump', fullmerge, binary=True, symlink=True)
 def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
@@ -520,23 +599,27 @@
     fd = fcd.path()
 
     from . import context
+
     if isinstance(fcd, context.overlayworkingfilectx):
-        raise error.InMemoryMergeConflictsError('in-memory merge does not '
-                                                'support the :dump tool.')
+        raise error.InMemoryMergeConflictsError(
+            'in-memory merge does not ' 'support the :dump tool.'
+        )
 
     util.writefile(a + ".local", fcd.decodeddata())
     repo.wwrite(fd + ".other", fco.data(), fco.flags())
     repo.wwrite(fd + ".base", fca.data(), fca.flags())
     return False, 1, False
 
+
 @internaltool('forcedump', mergeonly, binary=True, symlink=True)
-def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
-                labels=None):
+def _forcedump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     """
     Creates three versions of the files, the same as :dump, but omits premerge.
     """
-    return _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files,
-                labels=labels)
+    return _idump(
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=labels
+    )
+
 
 def _xmergeimm(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     # In-memory merge simply raises an exception on all external merge tools,
@@ -547,8 +630,10 @@
     # file -- we can't leave a merge state. (Copy to somewhere in the .hg/
     # directory and tell the user how to get it is my best idea, but it's
     # clunky.)
-    raise error.InMemoryMergeConflictsError('in-memory merge does not support '
-                                            'external merge tools')
+    raise error.InMemoryMergeConflictsError(
+        'in-memory merge does not support ' 'external merge tools'
+    )
+
 
 def _describemerge(ui, repo, mynode, fcl, fcb, fco, env, toolpath, args):
     tmpl = ui.config('ui', 'pre-merge-tool-output-template')
@@ -556,27 +641,41 @@
         return
 
     mappingdict = templateutil.mappingdict
-    props = {'ctx': fcl.changectx(),
-             'node': hex(mynode),
-             'path': fcl.path(),
-             'local': mappingdict({'ctx': fcl.changectx(),
-                                   'fctx': fcl,
-                                   'node': hex(mynode),
-                                   'name': _('local'),
-                                   'islink': 'l' in fcl.flags(),
-                                   'label': env['HG_MY_LABEL']}),
-             'base': mappingdict({'ctx': fcb.changectx(),
-                                  'fctx': fcb,
-                                  'name': _('base'),
-                                  'islink': 'l' in fcb.flags(),
-                                  'label': env['HG_BASE_LABEL']}),
-             'other': mappingdict({'ctx': fco.changectx(),
-                                   'fctx': fco,
-                                   'name': _('other'),
-                                   'islink': 'l' in fco.flags(),
-                                   'label': env['HG_OTHER_LABEL']}),
-             'toolpath': toolpath,
-             'toolargs': args}
+    props = {
+        'ctx': fcl.changectx(),
+        'node': hex(mynode),
+        'path': fcl.path(),
+        'local': mappingdict(
+            {
+                'ctx': fcl.changectx(),
+                'fctx': fcl,
+                'node': hex(mynode),
+                'name': _('local'),
+                'islink': 'l' in fcl.flags(),
+                'label': env['HG_MY_LABEL'],
+            }
+        ),
+        'base': mappingdict(
+            {
+                'ctx': fcb.changectx(),
+                'fctx': fcb,
+                'name': _('base'),
+                'islink': 'l' in fcb.flags(),
+                'label': env['HG_BASE_LABEL'],
+            }
+        ),
+        'other': mappingdict(
+            {
+                'ctx': fco.changectx(),
+                'fctx': fco,
+                'name': _('other'),
+                'islink': 'l' in fco.flags(),
+                'label': env['HG_OTHER_LABEL'],
+            }
+        ),
+        'toolpath': toolpath,
+        'toolargs': args,
+    }
 
     # TODO: make all of this something that can be specified on a per-tool basis
     tmpl = templater.unquotestring(tmpl)
@@ -584,23 +683,28 @@
     # Not using cmdutil.rendertemplate here since it causes errors importing
     # things for us to import cmdutil.
     tres = formatter.templateresources(ui, repo)
-    t = formatter.maketemplater(ui, tmpl, defaults=templatekw.keywords,
-                                resources=tres)
+    t = formatter.maketemplater(
+        ui, tmpl, defaults=templatekw.keywords, resources=tres
+    )
     ui.status(t.renderdefault(props))
 
+
 def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
     tool, toolpath, binary, symlink, scriptfn = toolconf
     uipathfn = scmutil.getuipathfn(repo)
     if fcd.isabsent() or fco.isabsent():
-        repo.ui.warn(_('warning: %s cannot merge change/delete conflict '
-                       'for %s\n') % (tool, uipathfn(fcd.path())))
+        repo.ui.warn(
+            _('warning: %s cannot merge change/delete conflict ' 'for %s\n')
+            % (tool, uipathfn(fcd.path()))
+        )
         return False, 1, None
     unused, unused, unused, back = files
     localpath = _workingpath(repo, fcd)
     args = _toolstr(repo.ui, tool, "args")
 
-    with _maketempfiles(repo, fco, fca, repo.wvfs.join(back.path()),
-                        "$output" in args) as temppaths:
+    with _maketempfiles(
+        repo, fco, fca, repo.wvfs.join(back.path()), "$output" in args
+    ) as temppaths:
         basepath, otherpath, localoutputpath = temppaths
         outpath = ""
         mylabel, otherlabel = labels[:2]
@@ -608,63 +712,83 @@
             baselabel = labels[2]
         else:
             baselabel = 'base'
-        env = {'HG_FILE': fcd.path(),
-               'HG_MY_NODE': short(mynode),
-               'HG_OTHER_NODE': short(fco.changectx().node()),
-               'HG_BASE_NODE': short(fca.changectx().node()),
-               'HG_MY_ISLINK': 'l' in fcd.flags(),
-               'HG_OTHER_ISLINK': 'l' in fco.flags(),
-               'HG_BASE_ISLINK': 'l' in fca.flags(),
-               'HG_MY_LABEL': mylabel,
-               'HG_OTHER_LABEL': otherlabel,
-               'HG_BASE_LABEL': baselabel,
-               }
+        env = {
+            'HG_FILE': fcd.path(),
+            'HG_MY_NODE': short(mynode),
+            'HG_OTHER_NODE': short(fco.changectx().node()),
+            'HG_BASE_NODE': short(fca.changectx().node()),
+            'HG_MY_ISLINK': 'l' in fcd.flags(),
+            'HG_OTHER_ISLINK': 'l' in fco.flags(),
+            'HG_BASE_ISLINK': 'l' in fca.flags(),
+            'HG_MY_LABEL': mylabel,
+            'HG_OTHER_LABEL': otherlabel,
+            'HG_BASE_LABEL': baselabel,
+        }
         ui = repo.ui
 
         if "$output" in args:
             # read input from backup, write to original
             outpath = localpath
             localpath = localoutputpath
-        replace = {'local': localpath, 'base': basepath, 'other': otherpath,
-                   'output': outpath, 'labellocal': mylabel,
-                   'labelother': otherlabel, 'labelbase': baselabel}
+        replace = {
+            'local': localpath,
+            'base': basepath,
+            'other': otherpath,
+            'output': outpath,
+            'labellocal': mylabel,
+            'labelother': otherlabel,
+            'labelbase': baselabel,
+        }
         args = util.interpolate(
-            br'\$', replace, args,
-            lambda s: procutil.shellquote(util.localpath(s)))
+            br'\$',
+            replace,
+            args,
+            lambda s: procutil.shellquote(util.localpath(s)),
+        )
         if _toolbool(ui, tool, "gui"):
-            repo.ui.status(_('running merge tool %s for file %s\n') %
-                           (tool, uipathfn(fcd.path())))
+            repo.ui.status(
+                _('running merge tool %s for file %s\n')
+                % (tool, uipathfn(fcd.path()))
+            )
         if scriptfn is None:
             cmd = toolpath + ' ' + args
             repo.ui.debug('launching merge tool: %s\n' % cmd)
             _describemerge(ui, repo, mynode, fcd, fca, fco, env, toolpath, args)
-            r = ui.system(cmd, cwd=repo.root, environ=env,
-                          blockedtag='mergetool')
+            r = ui.system(
+                cmd, cwd=repo.root, environ=env, blockedtag='mergetool'
+            )
         else:
-            repo.ui.debug('launching python merge script: %s:%s\n' %
-                          (toolpath, scriptfn))
+            repo.ui.debug(
+                'launching python merge script: %s:%s\n' % (toolpath, scriptfn)
+            )
             r = 0
             try:
                 # avoid cycle cmdutil->merge->filemerge->extensions->cmdutil
                 from . import extensions
+
                 mod = extensions.loadpath(toolpath, 'hgmerge.%s' % tool)
             except Exception:
-                raise error.Abort(_("loading python merge script failed: %s") %
-                                  toolpath)
+                raise error.Abort(
+                    _("loading python merge script failed: %s") % toolpath
+                )
             mergefn = getattr(mod, scriptfn, None)
             if mergefn is None:
-                raise error.Abort(_("%s does not have function: %s") %
-                                  (toolpath, scriptfn))
+                raise error.Abort(
+                    _("%s does not have function: %s") % (toolpath, scriptfn)
+                )
             argslist = procutil.shellsplit(args)
             # avoid cycle cmdutil->merge->filemerge->hook->extensions->cmdutil
             from . import hook
-            ret, raised = hook.pythonhook(ui, repo, "merge", toolpath,
-                                          mergefn, {'args': argslist}, True)
+
+            ret, raised = hook.pythonhook(
+                ui, repo, "merge", toolpath, mergefn, {'args': argslist}, True
+            )
             if raised:
                 r = 1
         repo.ui.debug('merge tool returned: %d\n' % r)
         return True, r, False
 
+
 def _formatconflictmarker(ctx, template, label, pad):
     """Applies the given template to the ctx, prefixed by the label.
 
@@ -681,13 +805,15 @@
     mark = '%s %s' % (label, templateresult)
 
     if mark:
-        mark = mark.splitlines()[0] # split for safety
+        mark = mark.splitlines()[0]  # split for safety
 
     # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ')
     return stringutil.ellipsis(mark, 80 - 8)
 
+
 _defaultconflictlabels = ['local', 'other']
 
+
 def _formatlabels(repo, fcd, fco, fca, labels, tool=None):
     """Formats the given labels using the conflict marker template.
 
@@ -703,17 +829,21 @@
         template = _toolstr(ui, tool, 'mergemarkertemplate', template)
     template = templater.unquotestring(template)
     tres = formatter.templateresources(ui, repo)
-    tmpl = formatter.maketemplater(ui, template, defaults=templatekw.keywords,
-                                   resources=tres)
+    tmpl = formatter.maketemplater(
+        ui, template, defaults=templatekw.keywords, resources=tres
+    )
 
     pad = max(len(l) for l in labels)
 
-    newlabels = [_formatconflictmarker(cd, tmpl, labels[0], pad),
-                 _formatconflictmarker(co, tmpl, labels[1], pad)]
+    newlabels = [
+        _formatconflictmarker(cd, tmpl, labels[0], pad),
+        _formatconflictmarker(co, tmpl, labels[1], pad),
+    ]
     if len(labels) > 2:
         newlabels.append(_formatconflictmarker(ca, tmpl, labels[2], pad))
     return newlabels
 
+
 def partextras(labels):
     """Return a dictionary of extra labels for use in prompts to the user
 
@@ -730,11 +860,13 @@
         "o": " [%s]" % labels[1],
     }
 
+
 def _restorebackup(fcd, back):
     # TODO: Add a workingfilectx.write(otherfilectx) path so we can use
     # util.copy here instead.
     fcd.write(back.data(), fcd.flags())
 
+
 def _makebackup(repo, ui, wctx, fcd, premerge):
     """Makes and returns a filectx-like object for ``fcd``'s backup file.
 
@@ -751,14 +883,16 @@
     # TODO: Break this import cycle somehow. (filectx -> ctx -> fileset ->
     # merge -> filemerge). (I suspect the fileset import is the weakest link)
     from . import context
+
     back = scmutil.backuppath(ui, repo, fcd.path())
-    inworkingdir = (back.startswith(repo.wvfs.base) and not
-        back.startswith(repo.vfs.base))
+    inworkingdir = back.startswith(repo.wvfs.base) and not back.startswith(
+        repo.vfs.base
+    )
     if isinstance(fcd, context.overlayworkingfilectx) and inworkingdir:
         # If the backup file is to be in the working directory, and we're
         # merging in-memory, we must redirect the backup to the memory context
         # so we don't disturb the working directory.
-        relpath = back[len(repo.wvfs.base) + 1:]
+        relpath = back[len(repo.wvfs.base) + 1 :]
         if premerge:
             wctx[relpath].write(fcd.data(), fcd.flags())
         return wctx[relpath]
@@ -777,6 +911,7 @@
         # the backup context regardless of where it lives.
         return context.arbitraryfilectx(back, repo=repo)
 
+
 @contextlib.contextmanager
 def _maketempfiles(repo, fco, fca, localpath, uselocalpath):
     """Writes out `fco` and `fca` as temporary files, and (if uselocalpath)
@@ -834,6 +969,7 @@
             if d and uselocalpath:
                 util.unlink(d)
 
+
 def _filemerge(premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
     """perform a 3-way merge in the working directory
 
@@ -847,7 +983,7 @@
     Returns whether the merge is complete, the return value of the merge, and
     a boolean indicating whether the file was deleted from disk."""
 
-    if not fco.cmp(fcd): # files identical?
+    if not fco.cmp(fcd):  # files identical?
         return True, None, False
 
     ui = repo.ui
@@ -861,7 +997,7 @@
     scriptfn = None
     if tool in internals and tool.startswith('internal:'):
         # normalize to new-style names (':merge' etc)
-        tool = tool[len('internal'):]
+        tool = tool[len('internal') :]
     if toolpath and toolpath.startswith('python:'):
         invalidsyntax = False
         if toolpath.count(':') >= 2:
@@ -876,9 +1012,16 @@
         if invalidsyntax:
             raise error.Abort(_("invalid 'python:' syntax: %s") % toolpath)
         toolpath = script
-    ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
-             % (tool, fduipath, pycompat.bytestr(binary),
-                pycompat.bytestr(symlink), pycompat.bytestr(changedelete)))
+    ui.debug(
+        "picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n"
+        % (
+            tool,
+            fduipath,
+            pycompat.bytestr(binary),
+            pycompat.bytestr(symlink),
+            pycompat.bytestr(changedelete),
+        )
+    )
 
     if tool in internals:
         func = internals[tool]
@@ -904,20 +1047,21 @@
 
     if premerge:
         if orig != fco.path():
-            ui.status(_("merging %s and %s to %s\n") %
-                      (uipathfn(orig), uipathfn(fco.path()), fduipath))
+            ui.status(
+                _("merging %s and %s to %s\n")
+                % (uipathfn(orig), uipathfn(fco.path()), fduipath)
+            )
         else:
             ui.status(_("merging %s\n") % fduipath)
 
     ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
 
-    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca,
-                                 toolconf):
+    if precheck and not precheck(repo, mynode, orig, fcd, fco, fca, toolconf):
         if onfailure:
             if wctx.isinmemory():
-                raise error.InMemoryMergeConflictsError('in-memory merge does '
-                                                        'not support merge '
-                                                        'conflicts')
+                raise error.InMemoryMergeConflictsError(
+                    'in-memory merge does ' 'not support merge ' 'conflicts'
+                )
             ui.warn(onfailure % fduipath)
         return True, 1, False
 
@@ -935,8 +1079,9 @@
             labels = _defaultconflictlabels
         formattedlabels = labels
         if markerstyle != 'basic':
-            formattedlabels = _formatlabels(repo, fcd, fco, fca, labels,
-                                            tool=tool)
+            formattedlabels = _formatlabels(
+                repo, fcd, fco, fca, labels, tool=tool
+            )
 
         if premerge and mergetype == fullmerge:
             # conflict markers generated by premerge will use 'detailed'
@@ -951,16 +1096,27 @@
                 # ui.mergemarkertemplate)
                 labeltool = tool
             if internalmarkerstyle != 'basic' or markerstyle != 'basic':
-                premergelabels = _formatlabels(repo, fcd, fco, fca,
-                                               premergelabels, tool=labeltool)
+                premergelabels = _formatlabels(
+                    repo, fcd, fco, fca, premergelabels, tool=labeltool
+                )
 
-            r = _premerge(repo, fcd, fco, fca, toolconf, files,
-                          labels=premergelabels)
+            r = _premerge(
+                repo, fcd, fco, fca, toolconf, files, labels=premergelabels
+            )
             # complete if premerge successful (r is 0)
             return not r, r, False
 
-        needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca,
-                                     toolconf, files, labels=formattedlabels)
+        needcheck, r, deleted = func(
+            repo,
+            mynode,
+            orig,
+            fcd,
+            fco,
+            fca,
+            toolconf,
+            files,
+            labels=formattedlabels,
+        )
 
         if needcheck:
             r = _check(repo, r, ui, tool, fcd, files)
@@ -968,9 +1124,9 @@
         if r:
             if onfailure:
                 if wctx.isinmemory():
-                    raise error.InMemoryMergeConflictsError('in-memory merge '
-                                                            'does not support '
-                                                            'merge conflicts')
+                    raise error.InMemoryMergeConflictsError(
+                        'in-memory merge ' 'does not support ' 'merge conflicts'
+                    )
                 ui.warn(onfailure % fduipath)
             _onfilemergefailure(ui)
 
@@ -979,10 +1135,12 @@
         if not r and back is not None:
             back.remove()
 
+
 def _haltmerge():
     msg = _('merge halted after failed merge (see hg resolve)')
     raise error.InterventionRequired(msg)
 
+
 def _onfilemergefailure(ui):
     action = ui.config('merge', 'on-failure')
     if action == 'prompt':
@@ -993,34 +1151,53 @@
         _haltmerge()
     # default action is 'continue', in which case we neither prompt nor halt
 
+
 def hasconflictmarkers(data):
-    return bool(re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", data,
-                          re.MULTILINE))
+    return bool(
+        re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", data, re.MULTILINE)
+    )
+
 
 def _check(repo, r, ui, tool, fcd, files):
     fd = fcd.path()
     uipathfn = scmutil.getuipathfn(repo)
     unused, unused, unused, back = files
 
-    if not r and (_toolbool(ui, tool, "checkconflicts") or
-                  'conflicts' in _toollist(ui, tool, "check")):
+    if not r and (
+        _toolbool(ui, tool, "checkconflicts")
+        or 'conflicts' in _toollist(ui, tool, "check")
+    ):
         if hasconflictmarkers(fcd.data()):
             r = 1
 
     checked = False
     if 'prompt' in _toollist(ui, tool, "check"):
         checked = True
-        if ui.promptchoice(_("was merge of '%s' successful (yn)?"
-                             "$$ &Yes $$ &No") % uipathfn(fd), 1):
+        if ui.promptchoice(
+            _("was merge of '%s' successful (yn)?" "$$ &Yes $$ &No")
+            % uipathfn(fd),
+            1,
+        ):
             r = 1
 
-    if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
-                                  'changed' in
-                                  _toollist(ui, tool, "check")):
+    if (
+        not r
+        and not checked
+        and (
+            _toolbool(ui, tool, "checkchanged")
+            or 'changed' in _toollist(ui, tool, "check")
+        )
+    ):
         if back is not None and not fcd.cmp(back):
-            if ui.promptchoice(_(" output file %s appears unchanged\n"
-                                 "was merge successful (yn)?"
-                                 "$$ &Yes $$ &No") % uipathfn(fd), 1):
+            if ui.promptchoice(
+                _(
+                    " output file %s appears unchanged\n"
+                    "was merge successful (yn)?"
+                    "$$ &Yes $$ &No"
+                )
+                % uipathfn(fd),
+                1,
+            ):
                 r = 1
 
     if back is not None and _toolbool(ui, tool, "fixeol"):
@@ -1028,16 +1205,22 @@
 
     return r
 
+
 def _workingpath(repo, ctx):
     return repo.wjoin(ctx.path())
 
+
 def premerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
-    return _filemerge(True, repo, wctx, mynode, orig, fcd, fco, fca,
-                      labels=labels)
+    return _filemerge(
+        True, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
+    )
+
 
 def filemerge(repo, wctx, mynode, orig, fcd, fco, fca, labels=None):
-    return _filemerge(False, repo, wctx, mynode, orig, fcd, fco, fca,
-                      labels=labels)
+    return _filemerge(
+        False, repo, wctx, mynode, orig, fcd, fco, fca, labels=labels
+    )
+
 
 def loadinternalmerge(ui, extname, registrarobj):
     """Load internal merge tool from specified registrarobj
@@ -1051,8 +1234,7 @@
         capabilities = sorted([k for k, v in func.capabilities.items() if v])
         if capabilities:
             capdesc = "    (actual capabilities: %s)" % ', '.join(capabilities)
-            func.__doc__ = (func.__doc__ +
-                            pycompat.sysstr("\n\n%s" % capdesc))
+            func.__doc__ = func.__doc__ + pycompat.sysstr("\n\n%s" % capdesc)
 
     # to put i18n comments into hg.pot for automatically generated texts
 
@@ -1066,6 +1248,7 @@
     # i18n: this text is added automatically
     _("    (actual capabilities: symlink)")
 
+
 # load built-in merge tools explicitly to setup internalsdoc
 loadinternalmerge(None, None, internaltool)
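
The _eoltype()/_matcheol() pair earlier in this file guesses a file's
line-ending style so merge output can be rewritten to match the backup. A
standalone sketch of the guessing logic (operating on str here, where the
real code works on bytes):

    def eoltype(data):
        """Guess the EOL style of data, or None for binary/unknown."""
        if '\0' in data:    # NUL byte: treat as binary
            return None
        if '\r\n' in data:  # Windows
            return '\r\n'
        if '\r' in data:    # old Mac
            return '\r'
        if '\n' in data:    # UNIX
            return '\n'
        return None         # no line endings found

    assert eoltype('a\r\nb\r\n') == '\r\n'
    assert eoltype('a\0b') is None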
 
--- a/mercurial/fileset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/fileset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,9 +21,7 @@
     scmutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 # common weight constants
 _WEIGHT_CHECK_FILENAME = filesetlang.WEIGHT_CHECK_FILENAME
@@ -38,49 +36,67 @@
 getpattern = filesetlang.getpattern
 getargs = filesetlang.getargs
 
+
 def getmatch(mctx, x):
     if not x:
         raise error.ParseError(_("missing argument"))
     return methods[x[0]](mctx, *x[1:])
 
+
 def getmatchwithstatus(mctx, x, hint):
     keys = set(getstring(hint, 'status hint must be a string').split())
     return getmatch(mctx.withstatus(keys), x)
 
+
 def stringmatch(mctx, x):
     return mctx.matcher([x])
 
+
 def kindpatmatch(mctx, x, y):
-    return stringmatch(mctx, _getkindpat(x, y, matchmod.allpatternkinds,
-                                         _("pattern must be a string")))
+    return stringmatch(
+        mctx,
+        _getkindpat(
+            x, y, matchmod.allpatternkinds, _("pattern must be a string")
+        ),
+    )
+
 
 def patternsmatch(mctx, *xs):
     allkinds = matchmod.allpatternkinds
-    patterns = [getpattern(x, allkinds, _("pattern must be a string"))
-                for x in xs]
+    patterns = [
+        getpattern(x, allkinds, _("pattern must be a string")) for x in xs
+    ]
     return mctx.matcher(patterns)
 
+
 def andmatch(mctx, x, y):
     xm = getmatch(mctx, x)
     ym = getmatch(mctx.narrowed(xm), y)
     return matchmod.intersectmatchers(xm, ym)
 
+
 def ormatch(mctx, *xs):
     ms = [getmatch(mctx, x) for x in xs]
     return matchmod.unionmatcher(ms)
 
+
 def notmatch(mctx, x):
     m = getmatch(mctx, x)
     return mctx.predicate(lambda f: not m(f), predrepr=('<not %r>', m))
 
+
 def minusmatch(mctx, x, y):
     xm = getmatch(mctx, x)
     ym = getmatch(mctx.narrowed(xm), y)
     return matchmod.differencematcher(xm, ym)
 
+
 def listmatch(mctx, *xs):
-    raise error.ParseError(_("can't use a list in this context"),
-                           hint=_('see \'hg help "filesets.x or y"\''))
+    raise error.ParseError(
+        _("can't use a list in this context"),
+        hint=_('see \'hg help "filesets.x or y"\''),
+    )
+
 
 def func(mctx, a, b):
     funcname = getsymbol(a)
@@ -92,6 +108,7 @@
     syms = [s for (s, fn) in symbols.items() if keep(fn)]
     raise error.UnknownIdentifier(funcname, syms)
 
+
 # symbols are callable like:
 #  fun(mctx, x)
 # with:
@@ -101,6 +118,7 @@
 
 predicate = registrar.filesetpredicate(symbols)
 
+
 @predicate('modified()', callstatus=True, weight=_WEIGHT_STATUS)
 def modified(mctx, x):
     """File that is modified according to :hg:`status`.
@@ -110,6 +128,7 @@
     s = set(mctx.status().modified)
     return mctx.predicate(s.__contains__, predrepr='modified')
 
+
 @predicate('added()', callstatus=True, weight=_WEIGHT_STATUS)
 def added(mctx, x):
     """File that is added according to :hg:`status`.
@@ -119,6 +138,7 @@
     s = set(mctx.status().added)
     return mctx.predicate(s.__contains__, predrepr='added')
 
+
 @predicate('removed()', callstatus=True, weight=_WEIGHT_STATUS)
 def removed(mctx, x):
     """File that is removed according to :hg:`status`.
@@ -128,6 +148,7 @@
     s = set(mctx.status().removed)
     return mctx.predicate(s.__contains__, predrepr='removed')
 
+
 @predicate('deleted()', callstatus=True, weight=_WEIGHT_STATUS)
 def deleted(mctx, x):
     """Alias for ``missing()``.
@@ -137,6 +158,7 @@
     s = set(mctx.status().deleted)
     return mctx.predicate(s.__contains__, predrepr='deleted')
 
+
 @predicate('missing()', callstatus=True, weight=_WEIGHT_STATUS)
 def missing(mctx, x):
     """File that is missing according to :hg:`status`.
@@ -146,6 +168,7 @@
     s = set(mctx.status().deleted)
     return mctx.predicate(s.__contains__, predrepr='deleted')
 
+
 @predicate('unknown()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
 def unknown(mctx, x):
     """File that is unknown according to :hg:`status`."""
@@ -154,6 +177,7 @@
     s = set(mctx.status().unknown)
     return mctx.predicate(s.__contains__, predrepr='unknown')
 
+
 @predicate('ignored()', callstatus=True, weight=_WEIGHT_STATUS_THOROUGH)
 def ignored(mctx, x):
     """File that is ignored according to :hg:`status`."""
@@ -162,6 +186,7 @@
     s = set(mctx.status().ignored)
     return mctx.predicate(s.__contains__, predrepr='ignored')
 
+
 @predicate('clean()', callstatus=True, weight=_WEIGHT_STATUS)
 def clean(mctx, x):
     """File that is clean according to :hg:`status`.
@@ -171,6 +196,7 @@
     s = set(mctx.status().clean)
     return mctx.predicate(s.__contains__, predrepr='clean')
 
+
 @predicate('tracked()')
 def tracked(mctx, x):
     """File that is under Mercurial control."""
@@ -178,14 +204,17 @@
     getargs(x, 0, 0, _("tracked takes no arguments"))
     return mctx.predicate(mctx.ctx.__contains__, predrepr='tracked')
 
+
 @predicate('binary()', weight=_WEIGHT_READ_CONTENTS)
 def binary(mctx, x):
     """File that appears to be binary (contains NUL bytes).
     """
     # i18n: "binary" is a keyword
     getargs(x, 0, 0, _("binary takes no arguments"))
-    return mctx.fpredicate(lambda fctx: fctx.isbinary(),
-                           predrepr='binary', cache=True)
+    return mctx.fpredicate(
+        lambda fctx: fctx.isbinary(), predrepr='binary', cache=True
+    )
+
 
 @predicate('exec()')
 def exec_(mctx, x):
@@ -196,6 +225,7 @@
     ctx = mctx.ctx
     return mctx.predicate(lambda f: ctx.flags(f) == 'x', predrepr='exec')
 
+
 @predicate('symlink()')
 def symlink(mctx, x):
     """File that is marked as a symlink.
@@ -205,6 +235,7 @@
     ctx = mctx.ctx
     return mctx.predicate(lambda f: ctx.flags(f) == 'l', predrepr='symlink')
 
+
 @predicate('resolved()', weight=_WEIGHT_STATUS)
 def resolved(mctx, x):
     """File that is marked resolved according to :hg:`resolve -l`.
@@ -214,8 +245,10 @@
     if mctx.ctx.rev() is not None:
         return mctx.never()
     ms = merge.mergestate.read(mctx.ctx.repo())
-    return mctx.predicate(lambda f: f in ms and ms[f] == 'r',
-                          predrepr='resolved')
+    return mctx.predicate(
+        lambda f: f in ms and ms[f] == 'r', predrepr='resolved'
+    )
+
 
 @predicate('unresolved()', weight=_WEIGHT_STATUS)
 def unresolved(mctx, x):
@@ -226,8 +259,10 @@
     if mctx.ctx.rev() is not None:
         return mctx.never()
     ms = merge.mergestate.read(mctx.ctx.repo())
-    return mctx.predicate(lambda f: f in ms and ms[f] == 'u',
-                          predrepr='unresolved')
+    return mctx.predicate(
+        lambda f: f in ms and ms[f] == 'u', predrepr='unresolved'
+    )
+
 
 @predicate('hgignore()', weight=_WEIGHT_STATUS)
 def hgignore(mctx, x):
@@ -237,6 +272,7 @@
     getargs(x, 0, 0, _("hgignore takes no arguments"))
     return mctx.ctx.repo().dirstate._ignore
 
+
 @predicate('portable()', weight=_WEIGHT_CHECK_FILENAME)
 def portable(mctx, x):
     """File that has a portable name. (This doesn't include filenames with case
@@ -244,8 +280,10 @@
     """
     # i18n: "portable" is a keyword
     getargs(x, 0, 0, _("portable takes no arguments"))
-    return mctx.predicate(lambda f: util.checkwinfilename(f) is None,
-                          predrepr='portable')
+    return mctx.predicate(
+        lambda f: util.checkwinfilename(f) is None, predrepr='portable'
+    )
+
 
 @predicate('grep(regex)', weight=_WEIGHT_READ_CONTENTS)
 def grep(mctx, x):
@@ -255,10 +293,15 @@
         # i18n: "grep" is a keyword
         r = re.compile(getstring(x, _("grep requires a pattern")))
     except re.error as e:
-        raise error.ParseError(_('invalid match pattern: %s') %
-                               stringutil.forcebytestr(e))
-    return mctx.fpredicate(lambda fctx: r.search(fctx.data()),
-                           predrepr=('grep(%r)', r.pattern), cache=True)
+        raise error.ParseError(
+            _('invalid match pattern: %s') % stringutil.forcebytestr(e)
+        )
+    return mctx.fpredicate(
+        lambda fctx: r.search(fctx.data()),
+        predrepr=('grep(%r)', r.pattern),
+        cache=True,
+    )
+
 
 def _sizetomax(s):
     try:
@@ -266,7 +309,7 @@
         for k, v in util._sizeunits:
             if s.endswith(k):
                 # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
-                n = s[:-len(k)]
+                n = s[: -len(k)]
                 inc = 1.0
                 if "." in n:
                     inc /= 10 ** len(n.split(".")[1])
@@ -276,10 +319,11 @@
     except ValueError:
         raise error.ParseError(_("couldn't parse size: %s") % s)
 
+
 def sizematcher(expr):
     """Return a function(size) -> bool from the ``size()`` expression"""
     expr = expr.strip()
-    if '-' in expr: # do we have a range?
+    if '-' in expr:  # do we have a range?
         a, b = expr.split('-', 1)
         a = util.sizetoint(a)
         b = util.sizetoint(b)
@@ -301,6 +345,7 @@
         b = _sizetomax(expr)
         return lambda x: x >= a and x <= b
 
+
 @predicate('size(expression)', weight=_WEIGHT_STATUS)
 def size(mctx, x):
     """File size matches the given expression. Examples:
@@ -313,8 +358,10 @@
     # i18n: "size" is a keyword
     expr = getstring(x, _("size requires an expression"))
     m = sizematcher(expr)
-    return mctx.fpredicate(lambda fctx: m(fctx.size()),
-                           predrepr=('size(%r)', expr), cache=True)
+    return mctx.fpredicate(
+        lambda fctx: m(fctx.size()), predrepr=('size(%r)', expr), cache=True
+    )
+
 
 @predicate('encoding(name)', weight=_WEIGHT_READ_CONTENTS)
 def encoding(mctx, x):
@@ -338,6 +385,7 @@
 
     return mctx.fpredicate(encp, predrepr=('encoding(%r)', enc), cache=True)
 
+
 @predicate('eol(style)', weight=_WEIGHT_READ_CONTENTS)
 def eol(mctx, x):
     """File contains newlines of the given style (dos, unix, mac). Binary
@@ -359,19 +407,24 @@
         elif enc == 'mac' and re.search('\r(?!\n)', d):
             return True
         return False
+
     return mctx.fpredicate(eolp, predrepr=('eol(%r)', enc), cache=True)
 
+
 @predicate('copied()')
 def copied(mctx, x):
     """File that is recorded as being copied.
     """
     # i18n: "copied" is a keyword
     getargs(x, 0, 0, _("copied takes no arguments"))
+
     def copiedp(fctx):
         p = fctx.parents()
         return p and p[0].path() != fctx.path()
+
     return mctx.fpredicate(copiedp, predrepr='copied', cache=True)
 
+
 @predicate('revs(revs, pattern)', weight=_WEIGHT_STATUS)
 def revs(mctx, x):
     """Evaluate set in the specified revisions. If the revset match multiple
@@ -395,6 +448,7 @@
         return matchers[0]
     return matchmod.unionmatcher(matchers)
 
+
 @predicate('status(base, rev, pattern)', weight=_WEIGHT_STATUS)
 def status(mctx, x):
     """Evaluate predicate using status change between ``base`` and
@@ -418,6 +472,7 @@
     mc = mctx.switch(basectx, ctx)
     return getmatch(mc, x)
 
+
 @predicate('subrepo([pattern])')
 def subrepo(mctx, x):
     """Subrepositories whose paths match the given pattern.
@@ -427,20 +482,27 @@
     ctx = mctx.ctx
     sstate = ctx.substate
     if x:
-        pat = getpattern(x, matchmod.allpatternkinds,
-                         # i18n: "subrepo" is a keyword
-                         _("subrepo requires a pattern or no arguments"))
+        pat = getpattern(
+            x,
+            matchmod.allpatternkinds,
+            # i18n: "subrepo" is a keyword
+            _("subrepo requires a pattern or no arguments"),
+        )
         fast = not matchmod.patkind(pat)
         if fast:
+
             def m(s):
-                return (s == pat)
+                return s == pat
+
         else:
             m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
-        return mctx.predicate(lambda f: f in sstate and m(f),
-                              predrepr=('subrepo(%r)', pat))
+        return mctx.predicate(
+            lambda f: f in sstate and m(f), predrepr=('subrepo(%r)', pat)
+        )
     else:
         return mctx.predicate(sstate.__contains__, predrepr='subrepo')
 
+
 methods = {
     'withstatus': getmatchwithstatus,
     'string': stringmatch,
@@ -455,6 +517,7 @@
     'func': func,
 }
 
+
 class matchctx(object):
     def __init__(self, basectx, ctx, badfn=None):
         self._basectx = basectx
@@ -484,10 +547,13 @@
         return mctx
 
     def _buildstatus(self, keys):
-        self._status = self._basectx.status(self.ctx, self._match,
-                                            listignored='ignored' in keys,
-                                            listclean='clean' in keys,
-                                            listunknown='unknown' in keys)
+        self._status = self._basectx.status(
+            self.ctx,
+            self._match,
+            listignored='ignored' in keys,
+            listclean='clean' in keys,
+            listunknown='unknown' in keys,
+        )
 
     def status(self):
         return self._status
@@ -499,8 +565,9 @@
         """Create a matcher to select files by predfn(filename)"""
         if cache:
             predfn = util.cachefunc(predfn)
-        return matchmod.predicatematcher(predfn, predrepr=predrepr,
-                                         badfn=self._badfn)
+        return matchmod.predicatematcher(
+            predfn, predrepr=predrepr, badfn=self._badfn
+        )
 
     def fpredicate(self, predfn, predrepr=None, cache=False):
         """Create a matcher to select files by predfn(fctx) at the current
@@ -510,6 +577,7 @@
         """
         ctx = self.ctx
         if ctx.rev() is None:
+
             def fctxpredfn(f):
                 try:
                     fctx = ctx[f]
@@ -523,23 +591,31 @@
                     return predfn(fctx)
                 except (IOError, OSError) as e:
                     # open()-ing a directory fails with EACCES on Windows
-                    if e.errno in (errno.ENOENT, errno.EACCES, errno.ENOTDIR,
-                                   errno.EISDIR):
+                    if e.errno in (
+                        errno.ENOENT,
+                        errno.EACCES,
+                        errno.ENOTDIR,
+                        errno.EISDIR,
+                    ):
                         return False
                     raise
+
         else:
+
             def fctxpredfn(f):
                 try:
                     fctx = ctx[f]
                 except error.LookupError:
                     return False
                 return predfn(fctx)
+
         return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)
 
     def never(self):
         """Create a matcher to select nothing"""
         return matchmod.never(badfn=self._badfn)
 
+
 def match(ctx, expr, badfn=None):
     """Create a matcher for a single fileset expression"""
     tree = filesetlang.parse(expr)
@@ -555,5 +631,6 @@
     for name, func in registrarobj._table.iteritems():
         symbols[name] = func
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = symbols.values()
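
sizematcher() above compiles a size() expression into a predicate; for a
range such as '1k-2k' both bounds go through util.sizetoint and the match is
inclusive. A simplified standalone sketch (single-letter suffixes only; the
real helper also accepts comparison forms and uses _sizetomax() to fuzz the
upper bound of a bare size like '4.5k'):

    _units = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}

    def sizetoint(s):
        # simplified stand-in for util.sizetoint
        s = s.strip().lower()
        for suffix, mult in _units.items():
            if s.endswith(suffix):
                return int(float(s[:-1]) * mult)
        return int(s)

    def sizematcher(expr):
        expr = expr.strip()
        if '-' in expr:  # do we have a range?
            a, b = expr.split('-', 1)
            lo, hi = sizetoint(a), sizetoint(b)
            return lambda x: lo <= x <= hi
        n = sizetoint(expr)  # exact size (no upper-bound fuzz in this sketch)
        return lambda x: x == n

    m = sizematcher('1k-2k')
    assert m(1536) and not m(4096)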
--- a/mercurial/filesetlang.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/filesetlang.py	Sun Oct 06 09:45:02 2019 -0400
@@ -46,17 +46,19 @@
 
 globchars = ".*{}[]?/\\_"
 
+
 def tokenize(program):
     pos, l = 0, len(program)
     program = pycompat.bytestr(program)
     while pos < l:
         c = program[pos]
-        if c.isspace(): # skip inter-token whitespace
+        if c.isspace():  # skip inter-token whitespace
             pass
-        elif c in "(),-:|&+!": # handle simple operators
+        elif c in "(),-:|&+!":  # handle simple operators
             yield (c, None, pos)
-        elif (c in '"\'' or c == 'r' and
-              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
+        elif (
+            c in '"\'' or c == 'r' and program[pos : pos + 2] in ("r'", 'r"')
+        ):  # handle quoted strings
             if c == 'r':
                 pos += 1
                 c = program[pos]
@@ -65,9 +67,9 @@
                 decode = parser.unescapestr
             pos += 1
             s = pos
-            while pos < l: # find closing quote
+            while pos < l:  # find closing quote
                 d = program[pos]
-                if d == '\\': # skip over escaped characters
+                if d == '\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
@@ -80,13 +82,13 @@
             # gather up a symbol/keyword
             s = pos
             pos += 1
-            while pos < l: # find end of symbol
+            while pos < l:  # find end of symbol
                 d = program[pos]
                 if not (d.isalnum() or d in globchars or ord(d) > 127):
                     break
                 pos += 1
             sym = program[s:pos]
-            if sym in keywords: # operator keywords
+            if sym in keywords:  # operator keywords
                 yield (sym, None, s)
             else:
                 yield ('symbol', sym, s)
@@ -96,6 +98,7 @@
         pos += 1
     yield ('end', None, pos)
 
+
 def parse(expr):
     p = parser.parser(elements)
     tree, pos = p.parse(tokenize(expr))
@@ -103,16 +106,19 @@
         raise error.ParseError(_("invalid token"), pos)
     return parser.simplifyinfixops(tree, {'list', 'or'})
 
+
 def getsymbol(x):
     if x and x[0] == 'symbol':
         return x[1]
     raise error.ParseError(_('not a symbol'))
 
+
 def getstring(x, err):
     if x and (x[0] == 'string' or x[0] == 'symbol'):
         return x[1]
     raise error.ParseError(err)
 
+
 def getkindpat(x, y, allkinds, err):
     kind = getsymbol(x)
     pat = getstring(y, err)
@@ -120,11 +126,13 @@
         raise error.ParseError(_("invalid pattern kind: %s") % kind)
     return '%s:%s' % (kind, pat)
 
+
 def getpattern(x, allkinds, err):
     if x and x[0] == 'kindpat':
         return getkindpat(x[1], x[2], allkinds, err)
     return getstring(x, err)
 
+
 def getlist(x):
     if not x:
         return []
@@ -132,12 +140,14 @@
         return list(x[1:])
     return [x]
 
+
 def getargs(x, min, max, err):
     l = getlist(x)
     if len(l) < min or len(l) > max:
         raise error.ParseError(err)
     return l
 
+
 def _analyze(x):
     if x is None:
         return x
@@ -171,6 +181,7 @@
         return (op, x[1], ta)
     raise error.ProgrammingError('invalid operator %r' % op)
 
+
 def _insertstatushints(x):
     """Insert hint nodes where status should be calculated (first path)
 
@@ -214,6 +225,7 @@
         return (), (op, x[1], ta)
     raise error.ProgrammingError('invalid operator %r' % op)
 
+
 def _mergestatushints(x, instatus):
     """Remove redundant status hint nodes (second path)
 
@@ -247,6 +259,7 @@
         return (op, x[1], ta)
     raise error.ProgrammingError('invalid operator %r' % op)
 
+
 def analyze(x):
     """Transform raw parsed tree to evaluatable tree which can be fed to
     optimize() or getmatch()
@@ -258,11 +271,13 @@
     _h, t = _insertstatushints(t)
     return _mergestatushints(t, instatus=False)
 
+
 def _optimizeandops(op, ta, tb):
     if tb is not None and tb[0] == 'not':
         return ('minus', ta, tb[1])
     return (op, ta, tb)
 
+
 def _optimizeunion(xs):
     # collect string patterns so they can be compiled into a single regexp
     ws, ts, ss = [], [], []
@@ -278,6 +293,7 @@
         ts.append(('patterns',) + tuple(ss))
     return ws, ts
 
+
 def _optimize(x):
     if x is None:
         return 0, x
@@ -304,9 +320,10 @@
     if op == 'or':
         ws, ts = _optimizeunion(x[1:])
         if len(ts) == 1:
-            return ws[0], ts[0] # 'or' operation is fully optimized out
-        ts = tuple(it[1] for it in sorted(enumerate(ts),
-                                          key=lambda it: ws[it[0]]))
+            return ws[0], ts[0]  # 'or' operation is fully optimized out
+        ts = tuple(
+            it[1] for it in sorted(enumerate(ts), key=lambda it: ws[it[0]])
+        )
         return max(ws), (op,) + ts
     if op == 'list':
         ws, ts = zip(*(_optimize(y) for y in x[1:]))
@@ -318,6 +335,7 @@
         return w + wa, (op, x[1], ta)
     raise error.ProgrammingError('invalid operator %r' % op)
 
+
 def optimize(x):
     """Reorder/rewrite evaluatable tree for optimization
 
@@ -326,5 +344,6 @@
     _w, t = _optimize(x)
     return t
 
+
 def prettyformat(tree):
     return parser.prettyformat(tree, ('string', 'symbol'))
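
Two mechanical rewrites dominate the filesetlang.py hunks above: inline comments gain a second space before the '#', and slice bounds that are compound expressions gain spaces around the colon. A runnable sketch with hypothetical values, not taken from the patch:

# black pads inline comments to two spaces before '#', and spaces a slice
# colon when either bound is more than a simple name or literal.
program = b"r'abc'"
pos = 0

prefix = program[pos : pos + 2]  # compound bound: spaced colon
simple = program[1:3]  # simple bounds keep the tight form
assert prefix == b"r'"
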
--- a/mercurial/formatter.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/formatter.py	Sun Oct 06 09:45:02 2019 -0400
@@ -116,9 +116,7 @@
     hex,
     short,
 )
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 
 from . import (
     error,
@@ -137,6 +135,7 @@
 
 pickle = util.pickle
 
+
 class _nullconverter(object):
     '''convert non-primitive data types to be processed by formatter'''
 
@@ -147,23 +146,27 @@
     def wrapnested(data, tmpl, sep):
         '''wrap nested data by appropriate type'''
         return data
+
     @staticmethod
     def formatdate(date, fmt):
         '''convert date tuple to appropriate format'''
         # timestamp can be float, but the canonical form should be int
         ts, tz = date
         return (int(ts), tz)
+
     @staticmethod
     def formatdict(data, key, value, fmt, sep):
         '''convert dict or key-value pairs to appropriate dict format'''
         # use plain dict instead of util.sortdict so that data can be
         # serialized as a builtin dict in pickle output
         return dict(data)
+
     @staticmethod
     def formatlist(data, name, fmt, sep):
         '''convert iterable to appropriate list format'''
         return list(data)
 
+
 class baseformatter(object):
     def __init__(self, ui, topic, opts, converter):
         self._ui = ui
@@ -173,29 +176,37 @@
         self._item = None
         # function to convert node to string suitable for this output
         self.hexfunc = hex
+
     def __enter__(self):
         return self
+
     def __exit__(self, exctype, excvalue, traceback):
         if exctype is None:
             self.end()
+
     def _showitem(self):
         '''show a formatted item once all data is collected'''
+
     def startitem(self):
         '''begin an item in the format list'''
         if self._item is not None:
             self._showitem()
         self._item = {}
+
     def formatdate(self, date, fmt='%a %b %d %H:%M:%S %Y %1%2'):
         '''convert date tuple to appropriate format'''
         return self._converter.formatdate(date, fmt)
+
     def formatdict(self, data, key='key', value='value', fmt=None, sep=' '):
         '''convert dict or key-value pairs to appropriate dict format'''
         return self._converter.formatdict(data, key, value, fmt, sep)
+
     def formatlist(self, data, name, fmt=None, sep=' '):
         '''convert iterable to appropriate list format'''
         # name is mandatory argument for now, but it could be optional if
         # we have default template keyword, e.g. {item}
         return self._converter.formatlist(data, name, fmt, sep)
+
     def context(self, **ctxs):
         '''insert context objects to be used to render template keywords'''
         ctxs = pycompat.byteskwargs(ctxs)
@@ -207,56 +218,70 @@
             if 'ctx' in ctxs and 'repo' not in ctxs:
                 ctxs['repo'] = ctxs['ctx'].repo()
             self._item.update(ctxs)
+
     def datahint(self):
         '''set of field names to be referenced'''
         return set()
+
     def data(self, **data):
         '''insert data into item that's not shown in default output'''
         data = pycompat.byteskwargs(data)
         self._item.update(data)
+
     def write(self, fields, deftext, *fielddata, **opts):
         '''do default text output while assigning data to item'''
         fieldkeys = fields.split()
         assert len(fieldkeys) == len(fielddata), (fieldkeys, fielddata)
         self._item.update(zip(fieldkeys, fielddata))
+
     def condwrite(self, cond, fields, deftext, *fielddata, **opts):
         '''do conditional write (primarily for plain formatter)'''
         fieldkeys = fields.split()
         assert len(fieldkeys) == len(fielddata)
         self._item.update(zip(fieldkeys, fielddata))
+
     def plain(self, text, **opts):
         '''show raw text for non-templated mode'''
+
     def isplain(self):
         '''check for plain formatter usage'''
         return False
+
     def nested(self, field, tmpl=None, sep=''):
         '''sub formatter to store nested data in the specified field'''
         data = []
         self._item[field] = self._converter.wrapnested(data, tmpl, sep)
         return _nestedformatter(self._ui, self._converter, data)
+
     def end(self):
         '''end output for the formatter'''
         if self._item is not None:
             self._showitem()
 
+
 def nullformatter(ui, topic, opts):
     '''formatter that prints nothing'''
     return baseformatter(ui, topic, opts, converter=_nullconverter)
 
+
 class _nestedformatter(baseformatter):
     '''build sub items and store them in the parent formatter'''
+
     def __init__(self, ui, converter, data):
         baseformatter.__init__(self, ui, topic='', opts={}, converter=converter)
         self._data = data
+
     def _showitem(self):
         self._data.append(self._item)
 
+
 def _iteritems(data):
     '''iterate key-value pairs in stable order'''
     if isinstance(data, dict):
         return sorted(data.iteritems())
     return data
 
+
 class _plainconverter(object):
     '''convert non-primitive data types to text'''
 
@@ -265,10 +290,12 @@
     @staticmethod
     def wrapnested(data, tmpl, sep):
         raise error.ProgrammingError('plainformatter should never be nested')
+
     @staticmethod
     def formatdate(date, fmt):
         '''stringify date tuple in the given format'''
         return dateutil.datestr(date, fmt)
+
     @staticmethod
     def formatdict(data, key, value, fmt, sep):
         '''stringify key-value pairs separated by sep'''
@@ -276,8 +303,10 @@
         if fmt is None:
             fmt = '%s=%s'
             prefmt = pycompat.bytestr
-        return sep.join(fmt % (prefmt(k), prefmt(v))
-                        for k, v in _iteritems(data))
+        return sep.join(
+            fmt % (prefmt(k), prefmt(v)) for k, v in _iteritems(data)
+        )
+
     @staticmethod
     def formatlist(data, name, fmt, sep):
         '''stringify iterable separated by sep'''
@@ -287,8 +316,10 @@
             prefmt = pycompat.bytestr
         return sep.join(fmt % prefmt(e) for e in data)
 
+
 class plainformatter(baseformatter):
     '''the default text output scheme'''
+
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _plainconverter)
         if ui.debugflag:
@@ -299,67 +330,88 @@
             self._write = ui.write
         else:
             self._write = lambda s, **opts: out.write(s)
+
     def startitem(self):
         pass
+
     def data(self, **data):
         pass
+
     def write(self, fields, deftext, *fielddata, **opts):
         self._write(deftext % fielddata, **opts)
+
     def condwrite(self, cond, fields, deftext, *fielddata, **opts):
         '''do conditional write'''
         if cond:
             self._write(deftext % fielddata, **opts)
+
     def plain(self, text, **opts):
         self._write(text, **opts)
+
     def isplain(self):
         return True
+
     def nested(self, field, tmpl=None, sep=''):
         # nested data will be directly written to ui
         return self
+
     def end(self):
         pass
 
+
 class debugformatter(baseformatter):
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
         self._out.write("%s = [\n" % self._topic)
+
     def _showitem(self):
-        self._out.write('    %s,\n'
-                        % stringutil.pprint(self._item, indent=4, level=1))
+        self._out.write(
+            '    %s,\n' % stringutil.pprint(self._item, indent=4, level=1)
+        )
+
     def end(self):
         baseformatter.end(self)
         self._out.write("]\n")
 
+
 class pickleformatter(baseformatter):
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
         self._data = []
+
     def _showitem(self):
         self._data.append(self._item)
+
     def end(self):
         baseformatter.end(self)
         self._out.write(pickle.dumps(self._data))
 
+
 class cborformatter(baseformatter):
     '''serialize items as an indefinite-length CBOR array'''
+
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
         self._out.write(cborutil.BEGIN_INDEFINITE_ARRAY)
+
     def _showitem(self):
         self._out.write(b''.join(cborutil.streamencode(self._item)))
+
     def end(self):
         baseformatter.end(self)
         self._out.write(cborutil.BREAK)
 
+
 class jsonformatter(baseformatter):
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _nullconverter)
         self._out = out
         self._out.write("[")
         self._first = True
+
     def _showitem(self):
         if self._first:
             self._first = False
@@ -376,10 +428,12 @@
             u = templatefilters.json(v, paranoid=False)
             self._out.write('  "%s": %s' % (k, u))
         self._out.write("\n }")
+
     def end(self):
         baseformatter.end(self)
         self._out.write("\n]\n")
 
+
 class _templateconverter(object):
     '''convert non-primitive data types to be processed by templater'''
 
@@ -389,37 +443,51 @@
     def wrapnested(data, tmpl, sep):
         '''wrap nested data by templatable type'''
         return templateutil.mappinglist(data, tmpl=tmpl, sep=sep)
+
     @staticmethod
     def formatdate(date, fmt):
         '''return date tuple'''
         return templateutil.date(date)
+
     @staticmethod
     def formatdict(data, key, value, fmt, sep):
         '''build object that can be evaluated as either plain string or dict'''
         data = util.sortdict(_iteritems(data))
+
         def f():
             yield _plainconverter.formatdict(data, key, value, fmt, sep)
-        return templateutil.hybriddict(data, key=key, value=value, fmt=fmt,
-                                       gen=f)
+
+        return templateutil.hybriddict(
+            data, key=key, value=value, fmt=fmt, gen=f
+        )
+
     @staticmethod
     def formatlist(data, name, fmt, sep):
         '''build object that can be evaluated as either plain string or list'''
         data = list(data)
+
         def f():
             yield _plainconverter.formatlist(data, name, fmt, sep)
+
         return templateutil.hybridlist(data, name=name, fmt=fmt, gen=f)
 
+
 class templateformatter(baseformatter):
     def __init__(self, ui, out, topic, opts):
         baseformatter.__init__(self, ui, topic, opts, _templateconverter)
         self._out = out
         spec = lookuptemplate(ui, topic, opts.get('template', ''))
         self._tref = spec.ref
-        self._t = loadtemplater(ui, spec, defaults=templatekw.keywords,
-                                resources=templateresources(ui),
-                                cache=templatekw.defaulttempl)
-        self._parts = templatepartsmap(spec, self._t,
-                                       ['docheader', 'docfooter', 'separator'])
+        self._t = loadtemplater(
+            ui,
+            spec,
+            defaults=templatekw.keywords,
+            resources=templateresources(ui),
+            cache=templatekw.defaulttempl,
+        )
+        self._parts = templatepartsmap(
+            spec, self._t, ['docheader', 'docfooter', 'separator']
+        )
         self._counter = itertools.count()
         self._renderitem('docheader', {})
 
@@ -448,12 +516,14 @@
         baseformatter.end(self)
         self._renderitem('docfooter', {})
 
+
 @attr.s(frozen=True)
 class templatespec(object):
     ref = attr.ib()
     tmpl = attr.ib()
     mapfile = attr.ib()
 
+
 def lookuptemplate(ui, topic, tmpl):
     """Find the template matching the given -T/--template spec 'tmpl'
 
@@ -479,8 +549,9 @@
 
     # perhaps a stock style?
     if not os.path.split(tmpl)[0]:
-        mapname = (templater.templatepath('map-cmdline.' + tmpl)
-                   or templater.templatepath(tmpl))
+        mapname = templater.templatepath(
+            'map-cmdline.' + tmpl
+        ) or templater.templatepath(tmpl)
         if mapname and os.path.isfile(mapname):
             return templatespec(topic, None, mapname)
 
@@ -504,6 +575,7 @@
     # constant string?
     return templatespec('', tmpl, None)
 
+
 def templatepartsmap(spec, t, partnames):
     """Create a mapping of {part: ref}"""
     partsmap = {spec.ref: spec.ref}  # initial ref must exist in t
@@ -516,32 +588,40 @@
                 partsmap[part] = ref
     return partsmap
 
+
 def loadtemplater(ui, spec, defaults=None, resources=None, cache=None):
     """Create a templater from either a literal template or loading from
     a map file"""
     assert not (spec.tmpl and spec.mapfile)
     if spec.mapfile:
         frommapfile = templater.templater.frommapfile
-        return frommapfile(spec.mapfile, defaults=defaults, resources=resources,
-                           cache=cache)
-    return maketemplater(ui, spec.tmpl, defaults=defaults, resources=resources,
-                         cache=cache)
+        return frommapfile(
+            spec.mapfile, defaults=defaults, resources=resources, cache=cache
+        )
+    return maketemplater(
+        ui, spec.tmpl, defaults=defaults, resources=resources, cache=cache
+    )
+
 
 def maketemplater(ui, tmpl, defaults=None, resources=None, cache=None):
     """Create a templater from a string template 'tmpl'"""
     aliases = ui.configitems('templatealias')
-    t = templater.templater(defaults=defaults, resources=resources,
-                            cache=cache, aliases=aliases)
-    t.cache.update((k, templater.unquotestring(v))
-                   for k, v in ui.configitems('templates'))
+    t = templater.templater(
+        defaults=defaults, resources=resources, cache=cache, aliases=aliases
+    )
+    t.cache.update(
+        (k, templater.unquotestring(v)) for k, v in ui.configitems('templates')
+    )
     if tmpl:
         t.cache[''] = tmpl
     return t
 
+
 # marker to denote a resource to be loaded on demand based on mapping values
 # (e.g. (ctx, path) -> fctx)
 _placeholder = object()
 
+
 class templateresources(templater.resourcemapper):
     """Resource mapper designed for the default templatekw and function"""
 
@@ -553,8 +633,9 @@
         }
 
     def availablekeys(self, mapping):
-        return {k for k in self.knownkeys()
-                if self._getsome(mapping, k) is not None}
+        return {
+            k for k in self.knownkeys() if self._getsome(mapping, k) is not None
+        }
 
     def knownkeys(self):
         return {'cache', 'ctx', 'fctx', 'repo', 'revcache', 'ui'}
@@ -611,7 +692,7 @@
         try:
             return repo[node]
         except error.RepoLookupError:
-            return None # maybe hidden/non-existent node
+            return None  # maybe hidden/non-existent node
 
     def _loadfctx(self, mapping):
         ctx = self._getsome(mapping, 'ctx')
@@ -621,13 +702,14 @@
         try:
             return ctx[path]
         except error.LookupError:
-            return None # maybe removed file?
+            return None  # maybe removed file?
 
     _loadermap = {
         'ctx': _loadctx,
         'fctx': _loadfctx,
     }
 
+
 def formatter(ui, out, topic, opts):
     template = opts.get("template", "")
     if template == "cbor":
@@ -648,6 +730,7 @@
         return jsonformatter(ui, out, topic, opts)
     return plainformatter(ui, out, topic, opts)
 
+
 @contextlib.contextmanager
 def openformatter(ui, filename, topic, opts):
     """Create a formatter that writes outputs to the specified file
@@ -658,10 +741,12 @@
         with formatter(ui, out, topic, opts) as fm:
             yield fm
 
+
 @contextlib.contextmanager
 def _neverending(fm):
     yield fm
 
+
 def maybereopen(fm, filename):
     """Create a formatter backed by file if filename specified, else return
     the given formatter
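
When a call no longer fits within the line-length limit, black first tries the whole argument list on a single indented line (as in templatepartsmap above) and otherwise gives each argument its own line with a trailing comma (as in loadtemplater). A sketch of the one-argument-per-line shape on a hypothetical function, not Mercurial's API:

def configure(defaults=None, resources=None, cache=None, aliases=None):
    return (defaults, resources, cache, aliases)


# the exploded form black emits when the call cannot fit on one line
result = configure(
    defaults={'a': 1},
    resources=None,
    cache={},
    aliases=[('x', 'y')],
)
assert result[0] == {'a': 1}
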
--- a/mercurial/graphmod.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/graphmod.py	Sun Oct 06 09:45:02 2019 -0400
@@ -37,6 +37,7 @@
 # (so making N negative) and all but the first N characters use that style.
 EDGES = {PARENT: '|', GRANDPARENT: ':', MISSINGPARENT: None}
 
+
 def dagwalker(repo, revs):
     """cset DAG generator yielding (id, CHANGESET, ctx, [parentinfo]) tuples
 
@@ -57,8 +58,11 @@
         # augment the lists with markers, to inform graph drawing code about
         # what kind of edge to draw between nodes.
         pset = set(p.rev() for p in ctx.parents() if p.rev() in revs)
-        mpars = [p.rev() for p in ctx.parents()
-                 if p.rev() != nullrev and p.rev() not in pset]
+        mpars = [
+            p.rev()
+            for p in ctx.parents()
+            if p.rev() != nullrev and p.rev() not in pset
+        ]
         parents = [(PARENT, p) for p in sorted(pset)]
 
         for mpar in mpars:
@@ -68,8 +72,9 @@
                 # through all revs (issue4782)
                 if not isinstance(revs, smartset.baseset):
                     revs = smartset.baseset(revs)
-                gp = gpcache[mpar] = sorted(set(dagop.reachableroots(
-                    repo, revs, [mpar])))
+                gp = gpcache[mpar] = sorted(
+                    set(dagop.reachableroots(repo, revs, [mpar]))
+                )
             if not gp:
                 parents.append((MISSINGPARENT, mpar))
                 pset.add(mpar)
@@ -79,6 +84,7 @@
 
         yield (ctx.rev(), CHANGESET, ctx, parents)
 
+
 def nodes(repo, nodes):
     """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples
 
@@ -88,10 +94,12 @@
     include = set(nodes)
     for node in nodes:
         ctx = repo[node]
-        parents = set((PARENT, p.rev()) for p in ctx.parents()
-                      if p.node() in include)
+        parents = set(
+            (PARENT, p.rev()) for p in ctx.parents() if p.node() in include
+        )
         yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
 
+
 def colored(dag, repo):
     """annotates a DAG with colored edge information
 
@@ -121,7 +129,8 @@
 
     if config:
         getconf = util.lrucachefunc(
-            lambda rev: config.get(repo[rev].branch(), {}))
+            lambda rev: config.get(repo[rev].branch(), {})
+        )
     else:
         getconf = lambda rev: {}
 
@@ -129,7 +138,7 @@
 
         # Compute seen and next
         if cur not in seen:
-            seen.append(cur) # new head
+            seen.append(cur)  # new head
             colors[cur] = newcolor
             newcolor += 1
 
@@ -139,7 +148,7 @@
 
         # Add parents to next
         addparents = [p for pt, p in parents if p not in next]
-        next[col:col + 1] = addparents
+        next[col : col + 1] = addparents
 
         # Set colors for the parents
         for i, p in enumerate(addparents):
@@ -154,22 +163,33 @@
         for ecol, eid in enumerate(seen):
             if eid in next:
                 bconf = getconf(eid)
-                edges.append((
-                    ecol, next.index(eid), colors[eid],
-                    bconf.get('width', -1),
-                    bconf.get('color', '')))
+                edges.append(
+                    (
+                        ecol,
+                        next.index(eid),
+                        colors[eid],
+                        bconf.get('width', -1),
+                        bconf.get('color', ''),
+                    )
+                )
             elif eid == cur:
                 for ptype, p in parents:
                     bconf = getconf(p)
-                    edges.append((
-                        ecol, next.index(p), color,
-                        bconf.get('width', -1),
-                        bconf.get('color', '')))
+                    edges.append(
+                        (
+                            ecol,
+                            next.index(p),
+                            color,
+                            bconf.get('width', -1),
+                            bconf.get('color', ''),
+                        )
+                    )
 
         # Yield and move on
         yield (cur, type, data, (col, color), edges)
         seen = next
 
+
 def asciiedges(type, char, state, rev, parents):
     """adds edge info to changelog DAG walk suitable for ascii()"""
     seen = state['seen']
@@ -192,7 +212,7 @@
     ncols = len(seen)
     width = 1 + ncols * 2
     nextseen = seen[:]
-    nextseen[nodeidx:nodeidx + 1] = newparents
+    nextseen[nodeidx : nodeidx + 1] = newparents
     edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
 
     seen[:] = nextseen
@@ -223,25 +243,27 @@
     state['edges'].pop(rev, None)
     yield (type, char, width, (nodeidx, edges, ncols, nmorecols))
 
+
 def _fixlongrightedges(edges):
     for (i, (start, end)) in enumerate(edges):
         if end > start:
             edges[i] = (start, end + 1)
 
-def _getnodelineedgestail(
-        echars, idx, pidx, ncols, coldiff, pdiff, fix_tail):
+
+def _getnodelineedgestail(echars, idx, pidx, ncols, coldiff, pdiff, fix_tail):
     if fix_tail and coldiff == pdiff and coldiff != 0:
         # Still going in the same non-vertical direction.
         if coldiff == -1:
             start = max(idx + 1, pidx)
-            tail = echars[idx * 2:(start - 1) * 2]
+            tail = echars[idx * 2 : (start - 1) * 2]
             tail.extend(["/", " "] * (ncols - start))
             return tail
         else:
             return ["\\", " "] * (ncols - idx - 1)
     else:
-        remainder = (ncols - idx - 1)
-        return echars[-(remainder * 2):] if remainder > 0 else []
+        remainder = ncols - idx - 1
+        return echars[-(remainder * 2) :] if remainder > 0 else []
+
 
 def _drawedges(echars, edges, nodeline, interline):
     for (start, end) in edges:
@@ -261,9 +283,10 @@
                 if nodeline[i] != "+":
                     nodeline[i] = "-"
 
+
 def _getpaddingline(echars, idx, ncols, edges):
     # all edges up to the current node
-    line = echars[:idx * 2]
+    line = echars[: idx * 2]
     # an edge for the current node, if there is one
     if (idx, idx - 1) in edges or (idx, idx) in edges:
         # (idx, idx - 1)      (idx, idx)
@@ -272,15 +295,16 @@
         # | | X |           | X | |
         # | |/ /            | |/ /
         # | | |             | | |
-        line.extend(echars[idx * 2:(idx + 1) * 2])
+        line.extend(echars[idx * 2 : (idx + 1) * 2])
     else:
         line.extend([' ', ' '])
     # all edges to the right of the current node
     remainder = ncols - idx - 1
     if remainder > 0:
-        line.extend(echars[-(remainder * 2):])
+        line.extend(echars[-(remainder * 2) :])
     return line
 
+
 def _drawendinglines(lines, extra, edgemap, seen, state):
     """Draw ending lines for missing parent edges
 
@@ -332,6 +356,7 @@
         del edgemap[parent]
         seen.remove(parent)
 
+
 def asciistate():
     """returns the initial value for the "state" argument to ascii()"""
     return {
@@ -343,6 +368,7 @@
         'graphshorten': False,
     }
 
+
 def outputgraph(ui, graph):
     """outputs an ASCII graph of a DAG
 
@@ -359,6 +385,7 @@
     for (ln, logstr) in graph:
         ui.write((ln + logstr).rstrip() + "\n")
 
+
 def ascii(ui, state, type, char, text, coldata):
     """prints an ASCII graph of the DAG
 
@@ -404,8 +431,9 @@
     #     |  / /         |   | |  # <--- padding line
     #     o | |          |  / /
     #                    o | |
-    add_padding_line = (len(text) > 2 and coldiff == -1 and
-                        [x for (x, y) in edges if x + 1 < y])
+    add_padding_line = (
+        len(text) > 2 and coldiff == -1 and [x for (x, y) in edges if x + 1 < y]
+    )
 
     # fix_nodeline_tail says whether to rewrite
     #
@@ -417,17 +445,24 @@
     fix_nodeline_tail = len(text) <= 2 and not add_padding_line
 
     # nodeline is the line containing the node character (typically o)
-    nodeline = echars[:idx * 2]
+    nodeline = echars[: idx * 2]
     nodeline.extend([char, " "])
 
     nodeline.extend(
         _getnodelineedgestail(
-            echars, idx, state['lastindex'], ncols, coldiff,
-            state['lastcoldiff'], fix_nodeline_tail))
+            echars,
+            idx,
+            state['lastindex'],
+            ncols,
+            coldiff,
+            state['lastcoldiff'],
+            fix_nodeline_tail,
+        )
+    )
 
     # shift_interline is the line containing the non-vertical
     # edges between this entry and the next
-    shift_interline = echars[:idx * 2]
+    shift_interline = echars[: idx * 2]
     for i in pycompat.xrange(2 + coldiff):
         shift_interline.append(' ')
     count = ncols - idx - 1
@@ -435,7 +470,7 @@
         for i in pycompat.xrange(count):
             shift_interline.extend(['/', ' '])
     elif coldiff == 0:
-        shift_interline.extend(echars[(idx + 1) * 2:ncols * 2])
+        shift_interline.extend(echars[(idx + 1) * 2 : ncols * 2])
     else:
         for i in pycompat.xrange(count):
             shift_interline.extend(['\\', ' '])
@@ -459,7 +494,7 @@
 
     # make sure that there are as many graph lines as there are
     # log strings
-    extra_interline = echars[:(ncols + coldiff) * 2]
+    extra_interline = echars[: (ncols + coldiff) * 2]
     if len(lines) < len(text):
         while len(lines) < len(text):
             lines.append(extra_interline[:])
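
The edges.append rewrites in the graphmod.py hunks above show how black handles a call wrapping a collection literal that cannot fit on one line: both the call parentheses and the literal are split, one element per line, ending in a trailing comma. A runnable sketch with hypothetical values:

edges = []
ecol, idx, color, width, name = 0, 1, 2, -1, ''

# both the call and the inner tuple are exploded, one element per line
edges.append(
    (
        ecol,
        idx,
        color,
        width,
        name,
    )
)
assert edges == [(0, 1, 2, -1, '')]
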
--- a/mercurial/hbisect.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hbisect.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,9 +17,8 @@
     hex,
     short,
 )
-from . import (
-    error,
-)
+from . import error
+
 
 def bisect(repo, state):
     """find the next node (if any) for testing during a bisect search.
@@ -48,16 +47,21 @@
 
     good = False
     badrev, ancestors = buildancestors(state['bad'], state['good'])
-    if not ancestors: # looking for bad to good transition?
+    if not ancestors:  # looking for bad to good transition?
         good = True
         badrev, ancestors = buildancestors(state['good'], state['bad'])
     bad = changelog.node(badrev)
-    if not ancestors: # now we're confused
-        if (len(state['bad']) == 1 and len(state['good']) == 1 and
-            state['bad'] != state['good']):
+    if not ancestors:  # now we're confused
+        if (
+            len(state['bad']) == 1
+            and len(state['good']) == 1
+            and state['bad'] != state['good']
+        ):
             raise error.Abort(_("starting revisions are not directly related"))
-        raise error.Abort(_("inconsistent state, %d:%s is good and bad")
-                         % (badrev, short(bad)))
+        raise error.Abort(
+            _("inconsistent state, %d:%s is good and bad")
+            % (badrev, short(bad))
+        )
 
     # build children dict
     children = {}
@@ -97,16 +101,16 @@
         a = ancestors[rev] or [rev]
         ancestors[rev] = None
 
-        x = len(a) # number of ancestors
-        y = tot - x # number of non-ancestors
-        value = min(x, y) # how good is this test?
+        x = len(a)  # number of ancestors
+        y = tot - x  # number of non-ancestors
+        value = min(x, y)  # how good is this test?
         if value > best_len and rev not in skip:
             best_len = value
             best_rev = rev
-            if value == perfect: # found a perfect candidate? quit early
+            if value == perfect:  # found a perfect candidate? quit early
                 break
 
-        if y < perfect and rev not in skip: # all downhill from here?
+        if y < perfect and rev not in skip:  # all downhill from here?
             # poison children
             poison.update(children.get(rev, []))
             continue
@@ -122,6 +126,7 @@
 
     return ([best_node], tot, good)
 
+
 def extendrange(repo, state, nodes, good):
     # bisect is incomplete when it ends on a merge node and
     # one of the parent was not checked.
@@ -136,6 +141,7 @@
             return parents[0].ancestor(parents[1])
     return None
 
+
 def load_state(repo):
     state = {'current': [], 'good': [], 'bad': [], 'skip': []}
     for l in repo.vfs.tryreadlines("bisect.state"):
@@ -155,11 +161,13 @@
                 f.write("%s %s\n" % (kind, hex(node)))
         f.close()
 
+
 def resetstate(repo):
     """remove any bisect state from the repository"""
     if repo.vfs.exists("bisect.state"):
         repo.vfs.unlink("bisect.state")
 
+
 def checkstate(state):
     """check we have both 'good' and 'bad' to define a range
 
@@ -171,6 +179,7 @@
     else:
         raise error.Abort(_('cannot bisect (no known bad revisions)'))
 
+
 def get(repo, status):
     """
     Return a list of revision(s) that match the given status:
@@ -201,15 +210,15 @@
         # The sets of topologically good or bad csets
         if len(_t) == 0:
             # Goods are topologically after bads
-            goods = 'bisect(good)::'    # Pruned good csets
-            bads  = '::bisect(bad)'     # Pruned bad csets
+            goods = 'bisect(good)::'  # Pruned good csets
+            bads = '::bisect(bad)'  # Pruned bad csets
         else:
             # Goods are topologically before bads
-            goods = '::bisect(good)'    # Pruned good csets
-            bads  = 'bisect(bad)::'     # Pruned bad csets
+            goods = '::bisect(good)'  # Pruned good csets
+            bads = 'bisect(bad)::'  # Pruned bad csets
 
         # 'pruned' is all csets whose fate is already known: good, bad, skip
-        skips = 'bisect(skip)'                 # Pruned skipped csets
+        skips = 'bisect(skip)'  # Pruned skipped csets
         pruned = '( (%s) | (%s) | (%s) )' % (goods, bads, skips)
 
         # 'untested' is all cset that are- in 'range', but not in 'pruned'
@@ -238,6 +247,7 @@
         else:
             raise error.ParseError(_('invalid bisect state'))
 
+
 def label(repo, node):
     rev = repo.changelog.rev(node)
 
@@ -268,6 +278,7 @@
 
     return None
 
+
 def printresult(ui, repo, state, displayer, nodes, good):
     repo = repo.unfiltered()
     if len(nodes) == 1:
@@ -279,18 +290,30 @@
         displayer.show(repo[nodes[0]])
         extendnode = extendrange(repo, state, nodes, good)
         if extendnode is not None:
-            ui.write(_('Not all ancestors of this changeset have been'
-                       ' checked.\nUse bisect --extend to continue the '
-                       'bisection from\nthe common ancestor, %s.\n')
-                     % extendnode)
+            ui.write(
+                _(
+                    'Not all ancestors of this changeset have been'
+                    ' checked.\nUse bisect --extend to continue the '
+                    'bisection from\nthe common ancestor, %s.\n'
+                )
+                % extendnode
+            )
     else:
         # multiple possible revisions
         if good:
-            ui.write(_("Due to skipped revisions, the first "
-                    "good revision could be any of:\n"))
+            ui.write(
+                _(
+                    "Due to skipped revisions, the first "
+                    "good revision could be any of:\n"
+                )
+            )
         else:
-            ui.write(_("Due to skipped revisions, the first "
-                    "bad revision could be any of:\n"))
+            ui.write(
+                _(
+                    "Due to skipped revisions, the first "
+                    "bad revision could be any of:\n"
+                )
+            )
         for n in nodes:
             displayer.show(repo[n])
     displayer.close()
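
The hbisect.py hunks above also show black's treatment of long boolean conditions: the expression is wrapped in parentheses and broken before each operator, so every continuation line leads with and/or. A sketch of the resulting shape (black would keep a condition this short on one line; the layout is expanded here purely for illustration, with hypothetical values):

bad, good = ['b'], ['g']

if (
    len(bad) == 1
    and len(good) == 1
    and bad != good
):
    print('endpoints are directly comparable')
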
--- a/mercurial/help.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/help.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,12 +34,8 @@
     ui as uimod,
     util,
 )
-from .hgweb import (
-    webcommands,
-)
-from .utils import (
-    compression,
-)
+from .hgweb import webcommands
+from .utils import compression
 
 _exclkeywords = {
     "(ADVANCED)",
@@ -64,7 +60,7 @@
     registrar.command.CATEGORY_CHANGE_MANAGEMENT,
     registrar.command.CATEGORY_CHANGE_ORGANIZATION,
     registrar.command.CATEGORY_FILE_CONTENTS,
-    registrar.command.CATEGORY_CHANGE_NAVIGATION ,
+    registrar.command.CATEGORY_CHANGE_NAVIGATION,
     registrar.command.CATEGORY_WORKING_DIRECTORY,
     registrar.command.CATEGORY_IMPORT_EXPORT,
     registrar.command.CATEGORY_MAINTENANCE,
@@ -77,14 +73,12 @@
 # Extensions with custom categories should add their names here.
 CATEGORY_NAMES = {
     registrar.command.CATEGORY_REPO_CREATION: 'Repository creation',
-    registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT:
-        'Remote repository management',
+    registrar.command.CATEGORY_REMOTE_REPO_MANAGEMENT: 'Remote repository management',
     registrar.command.CATEGORY_COMMITTING: 'Change creation',
     registrar.command.CATEGORY_CHANGE_NAVIGATION: 'Change navigation',
     registrar.command.CATEGORY_CHANGE_MANAGEMENT: 'Change manipulation',
     registrar.command.CATEGORY_CHANGE_ORGANIZATION: 'Change organization',
-    registrar.command.CATEGORY_WORKING_DIRECTORY:
-        'Working directory management',
+    registrar.command.CATEGORY_WORKING_DIRECTORY: 'Working directory management',
     registrar.command.CATEGORY_FILE_CONTENTS: 'File content management',
     registrar.command.CATEGORY_IMPORT_EXPORT: 'Change import/export',
     registrar.command.CATEGORY_MAINTENANCE: 'Repository maintenance',
@@ -124,6 +118,7 @@
     TOPIC_CATEGORY_NONE: 'Uncategorized topics',
 }
 
+
 def listexts(header, exts, indent=1, showdeprecated=False):
     '''return a text listing of the given extensions'''
     rst = []
@@ -136,15 +131,25 @@
         rst.insert(0, '\n%s\n\n' % header)
     return rst
 
+
 def extshelp(ui):
     rst = loaddoc('extensions')(ui).splitlines(True)
-    rst.extend(listexts(
-        _('enabled extensions:'), extensions.enabled(), showdeprecated=True))
-    rst.extend(listexts(_('disabled extensions:'), extensions.disabled(),
-                        showdeprecated=ui.verbose))
+    rst.extend(
+        listexts(
+            _('enabled extensions:'), extensions.enabled(), showdeprecated=True
+        )
+    )
+    rst.extend(
+        listexts(
+            _('disabled extensions:'),
+            extensions.disabled(),
+            showdeprecated=ui.verbose,
+        )
+    )
     doc = ''.join(rst)
     return doc
 
+
 def optrst(header, options, verbose):
     data = []
     multioccur = False
@@ -153,7 +158,7 @@
             shortopt, longopt, default, desc, optlabel = option
         else:
             shortopt, longopt, default, desc = option
-            optlabel = _("VALUE") # default label
+            optlabel = _("VALUE")  # default label
 
         if not verbose and any(w in desc for w in _exclkeywords):
             continue
@@ -186,18 +191,20 @@
         data.append((so, lo, desc))
 
     if multioccur:
-        header += (_(" ([+] can be repeated)"))
+        header += _(" ([+] can be repeated)")
 
     rst = ['\n%s:\n\n' % header]
     rst.extend(minirst.maketable(data, 1))
 
     return ''.join(rst)
 
+
 def indicateomitted(rst, omitted, notomitted=None):
     rst.append('\n\n.. container:: omitted\n\n    %s\n\n' % omitted)
     if notomitted:
         rst.append('\n\n.. container:: notomitted\n\n    %s\n\n' % notomitted)
 
+
 def filtercmd(ui, cmd, func, kw, doc):
     if not ui.debugflag and cmd.startswith("debug") and kw != "debug":
         # Debug command, and user is not looking for those.
@@ -220,9 +227,11 @@
         return True
     return False
 
+
 def filtertopic(ui, topic):
     return ui.configbool('help', 'hidden-topic.%s' % topic, False)
 
+
 def topicmatch(ui, commands, kw):
     """Return help topics matching kw.
 
@@ -230,19 +239,24 @@
     one of topics, commands, extensions, or extensioncommands.
     """
     kw = encoding.lower(kw)
+
     def lowercontains(container):
         return kw in encoding.lower(container)  # translated in helptable
-    results = {'topics': [],
-               'commands': [],
-               'extensions': [],
-               'extensioncommands': [],
-               }
+
+    results = {
+        'topics': [],
+        'commands': [],
+        'extensions': [],
+        'extensioncommands': [],
+    }
     for topic in helptable:
         names, header, doc = topic[0:3]
         # Old extensions may use a str as doc.
-        if (sum(map(lowercontains, names))
+        if (
+            sum(map(lowercontains, names))
             or lowercontains(header)
-            or (callable(doc) and lowercontains(doc(ui)))):
+            or (callable(doc) and lowercontains(doc(ui)))
+        ):
             name = names[0]
             if not filtertopic(ui, name):
                 results['topics'].append((names[0], header))
@@ -263,8 +277,8 @@
                 continue
             results['commands'].append((cmdname, summary))
     for name, docs in itertools.chain(
-        extensions.enabled(False).iteritems(),
-        extensions.disabled().iteritems()):
+        extensions.enabled(False).iteritems(), extensions.disabled().iteritems()
+    ):
         if not docs:
             continue
         name = name.rpartition('.')[-1]
@@ -290,6 +304,7 @@
                 results['extensioncommands'].append((cmdname, cmddoc))
     return results
 
+
 def loaddoc(topic, subdir=None):
     """Return a delayed loader for help/topic.txt."""
 
@@ -305,87 +320,201 @@
 
     return loader
 
-internalstable = sorted([
-    (['bundle2'], _('Bundle2'),
-     loaddoc('bundle2', subdir='internals')),
-    (['bundles'], _('Bundles'),
-     loaddoc('bundles', subdir='internals')),
-    (['cbor'], _('CBOR'),
-     loaddoc('cbor', subdir='internals')),
-    (['censor'], _('Censor'),
-     loaddoc('censor', subdir='internals')),
-    (['changegroups'], _('Changegroups'),
-     loaddoc('changegroups', subdir='internals')),
-    (['config'], _('Config Registrar'),
-     loaddoc('config', subdir='internals')),
-    (['extensions', 'extension'], _('Extension API'),
-     loaddoc('extensions', subdir='internals')),
-    (['mergestate'], _('Mergestate'),
-     loaddoc('mergestate', subdir='internals')),
-    (['requirements'], _('Repository Requirements'),
-     loaddoc('requirements', subdir='internals')),
-    (['revlogs'], _('Revision Logs'),
-     loaddoc('revlogs', subdir='internals')),
-    (['wireprotocol'], _('Wire Protocol'),
-     loaddoc('wireprotocol', subdir='internals')),
-    (['wireprotocolrpc'], _('Wire Protocol RPC'),
-     loaddoc('wireprotocolrpc', subdir='internals')),
-    (['wireprotocolv2'], _('Wire Protocol Version 2'),
-     loaddoc('wireprotocolv2', subdir='internals')),
-])
+
+internalstable = sorted(
+    [
+        (['bundle2'], _('Bundle2'), loaddoc('bundle2', subdir='internals')),
+        (['bundles'], _('Bundles'), loaddoc('bundles', subdir='internals')),
+        (['cbor'], _('CBOR'), loaddoc('cbor', subdir='internals')),
+        (['censor'], _('Censor'), loaddoc('censor', subdir='internals')),
+        (
+            ['changegroups'],
+            _('Changegroups'),
+            loaddoc('changegroups', subdir='internals'),
+        ),
+        (
+            ['config'],
+            _('Config Registrar'),
+            loaddoc('config', subdir='internals'),
+        ),
+        (
+            ['extensions', 'extension'],
+            _('Extension API'),
+            loaddoc('extensions', subdir='internals'),
+        ),
+        (
+            ['mergestate'],
+            _('Mergestate'),
+            loaddoc('mergestate', subdir='internals'),
+        ),
+        (
+            ['requirements'],
+            _('Repository Requirements'),
+            loaddoc('requirements', subdir='internals'),
+        ),
+        (
+            ['revlogs'],
+            _('Revision Logs'),
+            loaddoc('revlogs', subdir='internals'),
+        ),
+        (
+            ['wireprotocol'],
+            _('Wire Protocol'),
+            loaddoc('wireprotocol', subdir='internals'),
+        ),
+        (
+            ['wireprotocolrpc'],
+            _('Wire Protocol RPC'),
+            loaddoc('wireprotocolrpc', subdir='internals'),
+        ),
+        (
+            ['wireprotocolv2'],
+            _('Wire Protocol Version 2'),
+            loaddoc('wireprotocolv2', subdir='internals'),
+        ),
+    ]
+)
+
 
 def internalshelp(ui):
     """Generate the index for the "internals" topic."""
-    lines = ['To access a subtopic, use "hg help internals.{subtopic-name}"\n',
-             '\n']
+    lines = [
+        'To access a subtopic, use "hg help internals.{subtopic-name}"\n',
+        '\n',
+    ]
     for names, header, doc in internalstable:
         lines.append(' :%s: %s\n' % (names[0], header))
 
     return ''.join(lines)
 
-helptable = sorted([
-    (['bundlespec'], _("Bundle File Formats"), loaddoc('bundlespec'),
-     TOPIC_CATEGORY_CONCEPTS),
-    (['color'], _("Colorizing Outputs"), loaddoc('color'),
-     TOPIC_CATEGORY_OUTPUT),
-    (["config", "hgrc"], _("Configuration Files"), loaddoc('config'),
-     TOPIC_CATEGORY_CONFIG),
-    (['deprecated'], _("Deprecated Features"), loaddoc('deprecated'),
-     TOPIC_CATEGORY_MISC),
-    (["dates"], _("Date Formats"), loaddoc('dates'), TOPIC_CATEGORY_OUTPUT),
-    (["flags"], _("Command-line flags"), loaddoc('flags'),
-     TOPIC_CATEGORY_CONFIG),
-    (["patterns"], _("File Name Patterns"), loaddoc('patterns'),
-     TOPIC_CATEGORY_IDS),
-    (['environment', 'env'], _('Environment Variables'),
-     loaddoc('environment'), TOPIC_CATEGORY_CONFIG),
-    (['revisions', 'revs', 'revsets', 'revset', 'multirevs', 'mrevs'],
-      _('Specifying Revisions'), loaddoc('revisions'), TOPIC_CATEGORY_IDS),
-    (['filesets', 'fileset'], _("Specifying File Sets"), loaddoc('filesets'),
-     TOPIC_CATEGORY_IDS),
-    (['diffs'], _('Diff Formats'), loaddoc('diffs'), TOPIC_CATEGORY_OUTPUT),
-    (['merge-tools', 'mergetools', 'mergetool'], _('Merge Tools'),
-     loaddoc('merge-tools'), TOPIC_CATEGORY_CONFIG),
-    (['templating', 'templates', 'template', 'style'], _('Template Usage'),
-     loaddoc('templates'), TOPIC_CATEGORY_OUTPUT),
-    (['urls'], _('URL Paths'), loaddoc('urls'), TOPIC_CATEGORY_IDS),
-    (["extensions"], _("Using Additional Features"), extshelp,
-     TOPIC_CATEGORY_CONFIG),
-    (["subrepos", "subrepo"], _("Subrepositories"), loaddoc('subrepos'),
-     TOPIC_CATEGORY_CONCEPTS),
-    (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb'),
-     TOPIC_CATEGORY_CONFIG),
-    (["glossary"], _("Glossary"), loaddoc('glossary'), TOPIC_CATEGORY_CONCEPTS),
-    (["hgignore", "ignore"], _("Syntax for Mercurial Ignore Files"),
-     loaddoc('hgignore'), TOPIC_CATEGORY_IDS),
-    (["phases"], _("Working with Phases"), loaddoc('phases'),
-     TOPIC_CATEGORY_CONCEPTS),
-    (['scripting'], _('Using Mercurial from scripts and automation'),
-     loaddoc('scripting'), TOPIC_CATEGORY_MISC),
-    (['internals'], _("Technical implementation topics"), internalshelp,
-     TOPIC_CATEGORY_MISC),
-    (['pager'], _("Pager Support"), loaddoc('pager'), TOPIC_CATEGORY_CONFIG),
-])
+
+helptable = sorted(
+    [
+        (
+            ['bundlespec'],
+            _("Bundle File Formats"),
+            loaddoc('bundlespec'),
+            TOPIC_CATEGORY_CONCEPTS,
+        ),
+        (
+            ['color'],
+            _("Colorizing Outputs"),
+            loaddoc('color'),
+            TOPIC_CATEGORY_OUTPUT,
+        ),
+        (
+            ["config", "hgrc"],
+            _("Configuration Files"),
+            loaddoc('config'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ['deprecated'],
+            _("Deprecated Features"),
+            loaddoc('deprecated'),
+            TOPIC_CATEGORY_MISC,
+        ),
+        (["dates"], _("Date Formats"), loaddoc('dates'), TOPIC_CATEGORY_OUTPUT),
+        (
+            ["flags"],
+            _("Command-line flags"),
+            loaddoc('flags'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ["patterns"],
+            _("File Name Patterns"),
+            loaddoc('patterns'),
+            TOPIC_CATEGORY_IDS,
+        ),
+        (
+            ['environment', 'env'],
+            _('Environment Variables'),
+            loaddoc('environment'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ['revisions', 'revs', 'revsets', 'revset', 'multirevs', 'mrevs'],
+            _('Specifying Revisions'),
+            loaddoc('revisions'),
+            TOPIC_CATEGORY_IDS,
+        ),
+        (
+            ['filesets', 'fileset'],
+            _("Specifying File Sets"),
+            loaddoc('filesets'),
+            TOPIC_CATEGORY_IDS,
+        ),
+        (['diffs'], _('Diff Formats'), loaddoc('diffs'), TOPIC_CATEGORY_OUTPUT),
+        (
+            ['merge-tools', 'mergetools', 'mergetool'],
+            _('Merge Tools'),
+            loaddoc('merge-tools'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ['templating', 'templates', 'template', 'style'],
+            _('Template Usage'),
+            loaddoc('templates'),
+            TOPIC_CATEGORY_OUTPUT,
+        ),
+        (['urls'], _('URL Paths'), loaddoc('urls'), TOPIC_CATEGORY_IDS),
+        (
+            ["extensions"],
+            _("Using Additional Features"),
+            extshelp,
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ["subrepos", "subrepo"],
+            _("Subrepositories"),
+            loaddoc('subrepos'),
+            TOPIC_CATEGORY_CONCEPTS,
+        ),
+        (
+            ["hgweb"],
+            _("Configuring hgweb"),
+            loaddoc('hgweb'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+        (
+            ["glossary"],
+            _("Glossary"),
+            loaddoc('glossary'),
+            TOPIC_CATEGORY_CONCEPTS,
+        ),
+        (
+            ["hgignore", "ignore"],
+            _("Syntax for Mercurial Ignore Files"),
+            loaddoc('hgignore'),
+            TOPIC_CATEGORY_IDS,
+        ),
+        (
+            ["phases"],
+            _("Working with Phases"),
+            loaddoc('phases'),
+            TOPIC_CATEGORY_CONCEPTS,
+        ),
+        (
+            ['scripting'],
+            _('Using Mercurial from scripts and automation'),
+            loaddoc('scripting'),
+            TOPIC_CATEGORY_MISC,
+        ),
+        (
+            ['internals'],
+            _("Technical implementation topics"),
+            internalshelp,
+            TOPIC_CATEGORY_MISC,
+        ),
+        (
+            ['pager'],
+            _("Pager Support"),
+            loaddoc('pager'),
+            TOPIC_CATEGORY_CONFIG,
+        ),
+    ]
+)
 
 # Maps topics with sub-topics to a list of their sub-topics.
 subtopics = {
@@ -396,9 +525,11 @@
 # returning the updated version
 helphooks = {}
 
+
 def addtopichook(topic, rewriter):
     helphooks.setdefault(topic, []).append(rewriter)
 
+
 def makeitemsdoc(ui, topic, doc, marker, items, dedent=False):
     """Extract docstring from the items key to function mapping, build a
     single documentation block and use it to overwrite the marker in doc.
@@ -406,8 +537,7 @@
     entries = []
     for name in sorted(items):
         text = (pycompat.getdoc(items[name]) or '').rstrip()
-        if (not text
-            or not ui.verbose and any(w in text for w in _exclkeywords)):
+        if not text or not ui.verbose and any(w in text for w in _exclkeywords):
             continue
         text = gettext(text)
         if dedent:
@@ -427,35 +557,54 @@
     entries = '\n\n'.join(entries)
     return doc.replace(marker, entries)
 
+
 def addtopicsymbols(topic, marker, symbols, dedent=False):
     def add(ui, topic, doc):
         return makeitemsdoc(ui, topic, doc, marker, symbols, dedent=dedent)
+
     addtopichook(topic, add)
 
-addtopicsymbols('bundlespec', '.. bundlecompressionmarker',
-                compression.bundlecompressiontopics())
+
+addtopicsymbols(
+    'bundlespec',
+    '.. bundlecompressionmarker',
+    compression.bundlecompressiontopics(),
+)
 addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
-addtopicsymbols('merge-tools', '.. internaltoolsmarker',
-                filemerge.internalsdoc)
+addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internalsdoc)
 addtopicsymbols('revisions', '.. predicatesmarker', revset.symbols)
 addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords)
 addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
 addtopicsymbols('templates', '.. functionsmarker', templatefuncs.funcs)
-addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands,
-                dedent=True)
+addtopicsymbols(
+    'hgweb', '.. webcommandsmarker', webcommands.commands, dedent=True
+)
+
 
 def inserttweakrc(ui, topic, doc):
     marker = '.. tweakdefaultsmarker'
     repl = uimod.tweakrc
+
     def sub(m):
         lines = [m.group(1) + s for s in repl.splitlines()]
         return '\n'.join(lines)
+
     return re.sub(br'( *)%s' % re.escape(marker), sub, doc)
 
+
 addtopichook('config', inserttweakrc)
 
-def help_(ui, commands, name, unknowncmd=False, full=True, subtopic=None,
-          fullname=None, **opts):
+
+def help_(
+    ui,
+    commands,
+    name,
+    unknowncmd=False,
+    full=True,
+    subtopic=None,
+    fullname=None,
+    **opts
+):
     '''
     Generate the help for 'name' as unformatted restructured text. If
     'name' is None, describe the commands available.
@@ -465,8 +614,9 @@
 
     def helpcmd(name, subtopic=None):
         try:
-            aliases, entry = cmdutil.findcmd(name, commands.table,
-                                             strict=unknowncmd)
+            aliases, entry = cmdutil.findcmd(
+                name, commands.table, strict=unknowncmd
+            )
         except error.AmbiguousCommand as inst:
             # py3 fix: except vars can't be used outside the scope of the
             # except block, nor can be used inside a lambda. python issue4617
@@ -507,11 +657,17 @@
         if util.safehasattr(entry[0], 'definition'):  # aliased command
             source = entry[0].source
             if entry[0].definition.startswith('!'):  # shell alias
-                doc = (_('shell alias for: %s\n\n%s\n\ndefined by: %s\n') %
-                       (entry[0].definition[1:], doc, source))
+                doc = _('shell alias for: %s\n\n%s\n\ndefined by: %s\n') % (
+                    entry[0].definition[1:],
+                    doc,
+                    source,
+                )
             else:
-                doc = (_('alias for: hg %s\n\n%s\n\ndefined by: %s\n') %
-                       (entry[0].definition, doc, source))
+                doc = _('alias for: hg %s\n\n%s\n\ndefined by: %s\n') % (
+                    entry[0].definition,
+                    doc,
+                    source,
+                )
         doc = doc.splitlines(True)
         if ui.quiet or not full:
             rst.append(doc[0])
@@ -525,8 +681,9 @@
             mod = extensions.find(name)
             doc = gettext(pycompat.getdoc(mod)) or ''
             if '\n' in doc.strip():
-                msg = _("(use 'hg help -e %s' to show help for "
-                        "the %s extension)") % (name, name)
+                msg = _(
+                    "(use 'hg help -e %s' to show help for " "the %s extension)"
+                ) % (name, name)
                 rst.append('\n%s\n' % msg)
         except KeyError:
             pass
@@ -536,16 +693,20 @@
             rst.append(optrst(_("options"), entry[1], ui.verbose))
 
         if ui.verbose:
-            rst.append(optrst(_("global options"),
-                              commands.globalopts, ui.verbose))
+            rst.append(
+                optrst(_("global options"), commands.globalopts, ui.verbose)
+            )
 
         if not ui.verbose:
             if not full:
-                rst.append(_("\n(use 'hg %s -h' to show more help)\n")
-                           % name)
+                rst.append(_("\n(use 'hg %s -h' to show more help)\n") % name)
             elif not ui.quiet:
-                rst.append(_('\n(some details hidden, use --verbose '
-                               'to show complete help)'))
+                rst.append(
+                    _(
+                        '\n(some details hidden, use --verbose '
+                        'to show complete help)'
+                    )
+                )
 
         return rst
 
@@ -572,7 +733,8 @@
             h[f] = doc.splitlines()[0].rstrip()
 
             cat = getattr(func, 'helpcategory', None) or (
-                registrar.command.CATEGORY_NONE)
+                registrar.command.CATEGORY_NONE
+            )
             cats.setdefault(cat, []).append(f)
 
         rst = []
@@ -605,8 +767,10 @@
             # Check that all categories have an order.
             missing_order = set(cats.keys()) - set(CATEGORY_ORDER)
             if missing_order:
-                ui.develwarn('help categories missing from CATEGORY_ORDER: %s' %
-                             missing_order)
+                ui.develwarn(
+                    'help categories missing from CATEGORY_ORDER: %s'
+                    % missing_order
+                )
 
             # List per category.
             for cat in CATEGORY_ORDER:
@@ -619,10 +783,13 @@
                     appendcmds(catfns)
 
         ex = opts.get
-        anyopts = (ex(r'keyword') or not (ex(r'command') or ex(r'extension')))
+        anyopts = ex(r'keyword') or not (ex(r'command') or ex(r'extension'))
         if not name and anyopts:
-            exts = listexts(_('enabled extensions:'), extensions.enabled(),
-                            showdeprecated=ui.verbose)
+            exts = listexts(
+                _('enabled extensions:'),
+                extensions.enabled(),
+                showdeprecated=ui.verbose,
+            )
             if exts:
                 rst.append('\n')
                 rst.extend(exts)
@@ -640,14 +807,16 @@
                 topicname = names[0]
                 if not filtertopic(ui, topicname):
                     topiccats.setdefault(category, []).append(
-                        (topicname, header))
+                        (topicname, header)
+                    )
 
             # Check that all categories have an order.
             missing_order = set(topiccats.keys()) - set(TOPIC_CATEGORY_ORDER)
             if missing_order:
                 ui.develwarn(
-                    'help categories missing from TOPIC_CATEGORY_ORDER: %s' %
-                    missing_order)
+                    'help categories missing from TOPIC_CATEGORY_ORDER: %s'
+                    % missing_order
+                )
 
             # Output topics per category.
             for cat in TOPIC_CATEGORY_ORDER:
@@ -663,25 +832,43 @@
         if ui.quiet:
             pass
         elif ui.verbose:
-            rst.append('\n%s\n' % optrst(_("global options"),
-                                         commands.globalopts, ui.verbose))
+            rst.append(
+                '\n%s\n'
+                % optrst(_("global options"), commands.globalopts, ui.verbose)
+            )
             if name == 'shortlist':
-                rst.append(_("\n(use 'hg help' for the full list "
-                             "of commands)\n"))
+                rst.append(
+                    _("\n(use 'hg help' for the full list " "of commands)\n")
+                )
         else:
             if name == 'shortlist':
-                rst.append(_("\n(use 'hg help' for the full list of commands "
-                             "or 'hg -v' for details)\n"))
+                rst.append(
+                    _(
+                        "\n(use 'hg help' for the full list of commands "
+                        "or 'hg -v' for details)\n"
+                    )
+                )
             elif name and not full:
-                rst.append(_("\n(use 'hg help %s' to show the full help "
-                             "text)\n") % name)
+                rst.append(
+                    _("\n(use 'hg help %s' to show the full help " "text)\n")
+                    % name
+                )
             elif name and syns and name in syns.keys():
-                rst.append(_("\n(use 'hg help -v -e %s' to show built-in "
-                             "aliases and global options)\n") % name)
+                rst.append(
+                    _(
+                        "\n(use 'hg help -v -e %s' to show built-in "
+                        "aliases and global options)\n"
+                    )
+                    % name
+                )
             else:
-                rst.append(_("\n(use 'hg help -v%s' to show built-in aliases "
-                             "and global options)\n")
-                           % (name and " " + name or ""))
+                rst.append(
+                    _(
+                        "\n(use 'hg help -v%s' to show built-in aliases "
+                        "and global options)\n"
+                    )
+                    % (name and " " + name or "")
+                )
         return rst
 
     def helptopic(name, subtopic=None):
@@ -711,14 +898,17 @@
             rst += ["    %s\n" % l for l in doc(ui).splitlines()]
 
         if not ui.verbose:
-            omitted = _('(some details hidden, use --verbose'
-                         ' to show complete help)')
+            omitted = _(
+                '(some details hidden, use --verbose' ' to show complete help)'
+            )
             indicateomitted(rst, omitted)
 
         try:
             cmdutil.findcmd(name, commands.table)
-            rst.append(_("\nuse 'hg help -c %s' to see help for "
-                       "the %s command\n") % (name, name))
+            rst.append(
+                _("\nuse 'hg help -c %s' to see help for " "the %s command\n")
+                % (name, name)
+            )
         except error.UnknownCommand:
             pass
         return rst
@@ -743,8 +933,9 @@
             rst.append('\n')
 
         if not ui.verbose:
-            omitted = _('(some details hidden, use --verbose'
-                         ' to show complete help)')
+            omitted = _(
+                '(some details hidden, use --verbose' ' to show complete help)'
+            )
             indicateomitted(rst, omitted)
 
         if mod:
@@ -755,24 +946,35 @@
             modcmds = {c.partition('|')[0] for c in ct}
             rst.extend(helplist(modcmds.__contains__))
         else:
-            rst.append(_("(use 'hg help extensions' for information on enabling"
-                       " extensions)\n"))
+            rst.append(
+                _(
+                    "(use 'hg help extensions' for information on enabling"
+                    " extensions)\n"
+                )
+            )
         return rst
 
     def helpextcmd(name, subtopic=None):
-        cmd, ext, doc = extensions.disabledcmd(ui, name,
-                                               ui.configbool('ui', 'strict'))
+        cmd, ext, doc = extensions.disabledcmd(
+            ui, name, ui.configbool('ui', 'strict')
+        )
         doc = doc.splitlines()[0]
 
-        rst = listexts(_("'%s' is provided by the following "
-                              "extension:") % cmd, {ext: doc}, indent=4,
-                       showdeprecated=True)
+        rst = listexts(
+            _("'%s' is provided by the following " "extension:") % cmd,
+            {ext: doc},
+            indent=4,
+            showdeprecated=True,
+        )
         rst.append('\n')
-        rst.append(_("(use 'hg help extensions' for information on enabling "
-                   "extensions)\n"))
+        rst.append(
+            _(
+                "(use 'hg help extensions' for information on enabling "
+                "extensions)\n"
+            )
+        )
         return rst
 
-
     rst = []
     kw = opts.get('keyword')
     if kw or name is None and any(opts[o] for o in opts):
@@ -783,10 +985,12 @@
         if opts.get('command'):
             helpareas += [('commands', _('Commands'))]
         if not helpareas:
-            helpareas = [('topics', _('Topics')),
-                         ('commands', _('Commands')),
-                         ('extensions', _('Extensions')),
-                         ('extensioncommands', _('Extension Commands'))]
+            helpareas = [
+                ('topics', _('Topics')),
+                ('commands', _('Commands')),
+                ('extensions', _('Extensions')),
+                ('extensioncommands', _('Extension Commands')),
+            ]
         for t, title in helpareas:
             if matches[t]:
                 rst.append('%s:\n\n' % title)
@@ -835,8 +1039,10 @@
 
     return ''.join(rst)
 
-def formattedhelp(ui, commands, fullname, keep=None, unknowncmd=False,
-                  full=True, **opts):
+
+def formattedhelp(
+    ui, commands, fullname, keep=None, unknowncmd=False, full=True, **opts
+):
     """get help for a given topic (as a dotted name) as rendered rst
 
     Either returns the rendered help text or raises an exception.
@@ -844,7 +1050,7 @@
     if keep is None:
         keep = []
     else:
-        keep = list(keep) # make a copy so we can mutate this later
+        keep = list(keep)  # make a copy so we can mutate this later
 
     # <fullname> := <name>[.<subtopic][.<section>]
     name = subtopic = section = None
@@ -860,8 +1066,16 @@
     termwidth = ui.termwidth() - 2
     if textwidth <= 0 or termwidth < textwidth:
         textwidth = termwidth
-    text = help_(ui, commands, name, fullname=fullname,
-                 subtopic=subtopic, unknowncmd=unknowncmd, full=full, **opts)
+    text = help_(
+        ui,
+        commands,
+        name,
+        fullname=fullname,
+        subtopic=subtopic,
+        unknowncmd=unknowncmd,
+        full=full,
+        **opts
+    )
 
     blocks, pruned = minirst.parse(text, keep=keep)
     if 'verbose' in pruned:
--- a/mercurial/hg.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hg.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,7 @@
 import stat
 
 from .i18n import _
-from .node import (
-    nullid,
-)
+from .node import nullid
 
 from . import (
     bookmarks,
@@ -50,15 +48,14 @@
     vfs as vfsmod,
 )
 
-from .interfaces import (
-    repository as repositorymod,
-)
+from .interfaces import repository as repositorymod
 
 release = lock.release
 
 # shared features
 sharedbookmarks = 'bookmarks'
 
+
 def _local(path):
     path = util.expandpath(util.urllocalpath(path))
 
@@ -66,13 +63,15 @@
         isfile = os.path.isfile(path)
     # Python 2 raises TypeError, Python 3 ValueError.
     except (TypeError, ValueError) as e:
-        raise error.Abort(_('invalid path %s: %s') % (
-            path, pycompat.bytestr(e)))
+        raise error.Abort(
+            _('invalid path %s: %s') % (path, pycompat.bytestr(e))
+        )
 
     return isfile and bundlerepo or localrepo
 
+
 def addbranchrevs(lrepo, other, branches, revs):
-    peer = other.peer() # a courtesy to callers using a localrepo for other
+    peer = other.peer()  # a courtesy to callers using a localrepo for other
     hashbranch, branches = branches
     if not hashbranch and not branches:
         x = revs or None
@@ -114,6 +113,7 @@
             revs.append(hashbranch)
     return revs, revs[0]
 
+
 def parseurl(path, branches=None):
     '''parse url#branch, returning (url, (branch, branches))'''
 
@@ -124,6 +124,7 @@
         u.fragment = None
     return bytes(u), (branch, branches or [])
 
+
 schemes = {
     'bundle': bundlerepo,
     'union': unionrepo,
@@ -134,6 +135,7 @@
     'static-http': statichttprepo,
 }
 
+
 def _peerlookup(path):
     u = util.url(path)
     scheme = u.scheme or 'file'
@@ -147,6 +149,7 @@
             raise
         return thing
 
+
 def islocal(repo):
     '''return true if repo (or path pointing to repo) is local'''
     if isinstance(repo, bytes):
@@ -156,6 +159,7 @@
             return False
     return repo.local()
 
+
 def openpath(ui, path, sendaccept=True):
     '''open path with open if local, url.open if remote'''
     pathurl = util.url(path, parsequery=False, parsefragment=False)
@@ -164,14 +168,18 @@
     else:
         return url.open(ui, path, sendaccept=sendaccept)
 
+
 # a list of (ui, repo) functions called for wire peer initialization
 wirepeersetupfuncs = []
 
-def _peerorrepo(ui, path, create=False, presetupfuncs=None,
-                intents=None, createopts=None):
+
+def _peerorrepo(
+    ui, path, create=False, presetupfuncs=None, intents=None, createopts=None
+):
     """return a repository object for the specified path"""
-    obj = _peerlookup(path).instance(ui, path, create, intents=intents,
-                                     createopts=createopts)
+    obj = _peerlookup(path).instance(
+        ui, path, create, intents=intents, createopts=createopts
+    )
     ui = getattr(obj, "ui", ui)
     for f in presetupfuncs or []:
         f(ui, obj)
@@ -183,30 +191,43 @@
             if hook:
                 with util.timedcm('reposetup %r', name) as stats:
                     hook(ui, obj)
-                ui.log(b'extension', b'  > reposetup for %s took %s\n',
-                       name, stats)
+                ui.log(
+                    b'extension', b'  > reposetup for %s took %s\n', name, stats
+                )
     ui.log(b'extension', b'> all reposetup took %s\n', allreposetupstats)
     if not obj.local():
         for f in wirepeersetupfuncs:
             f(ui, obj)
     return obj
 
-def repository(ui, path='', create=False, presetupfuncs=None, intents=None,
-               createopts=None):
+
+def repository(
+    ui, path='', create=False, presetupfuncs=None, intents=None, createopts=None
+):
     """return a repository object for the specified path"""
-    peer = _peerorrepo(ui, path, create, presetupfuncs=presetupfuncs,
-                       intents=intents, createopts=createopts)
+    peer = _peerorrepo(
+        ui,
+        path,
+        create,
+        presetupfuncs=presetupfuncs,
+        intents=intents,
+        createopts=createopts,
+    )
     repo = peer.local()
     if not repo:
-        raise error.Abort(_("repository '%s' is not local") %
-                         (path or peer.url()))
+        raise error.Abort(
+            _("repository '%s' is not local") % (path or peer.url())
+        )
     return repo.filtered('visible')
 
+
 def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
     '''return a repository peer for the specified path'''
     rui = remoteui(uiorrepo, opts)
-    return _peerorrepo(rui, path, create, intents=intents,
-                       createopts=createopts).peer()
+    return _peerorrepo(
+        rui, path, create, intents=intents, createopts=createopts
+    ).peer()
+
 
 def defaultdest(source):
     '''return default destination of clone if none is given
@@ -229,6 +250,7 @@
         return ''
     return os.path.basename(os.path.normpath(path))
 
+
 def sharedreposource(repo):
     """Returns repository object for source repository of a shared repo.
 
@@ -247,8 +269,16 @@
     repo.srcrepo = srcrepo
     return srcrepo
 
-def share(ui, source, dest=None, update=True, bookmarks=True, defaultpath=None,
-          relative=False):
+
+def share(
+    ui,
+    source,
+    dest=None,
+    update=True,
+    bookmarks=True,
+    defaultpath=None,
+    relative=False,
+):
     '''create a shared repository'''
 
     if not islocal(source):
@@ -272,17 +302,23 @@
     if bookmarks:
         shareditems.add(sharedbookmarks)
 
-    r = repository(ui, dest, create=True, createopts={
-        'sharedrepo': srcrepo,
-        'sharedrelative': relative,
-        'shareditems': shareditems,
-    })
+    r = repository(
+        ui,
+        dest,
+        create=True,
+        createopts={
+            'sharedrepo': srcrepo,
+            'sharedrelative': relative,
+            'shareditems': shareditems,
+        },
+    )
 
     postshare(srcrepo, r, defaultpath=defaultpath)
     r = repository(ui, dest)
     _postshareupdate(r, update, checkout=checkout)
     return r
 
+
 def unshare(ui, repo):
     """convert a shared repository to a normal one
 
@@ -325,6 +361,7 @@
 
     return newrepo
 
+
 def postshare(sourcerepo, destrepo, defaultpath=None):
     """Called after a new shared repo is created.
 
@@ -336,13 +373,13 @@
     """
     default = defaultpath or sourcerepo.ui.config('paths', 'default')
     if default:
-        template = ('[paths]\n'
-                    'default = %s\n')
+        template = '[paths]\n' 'default = %s\n'
         destrepo.vfs.write('hgrc', util.tonativeeol(template % default))
     if repositorymod.NARROW_REQUIREMENT in sourcerepo.requirements:
         with destrepo.wlock():
             narrowspec.copytoworkingcopy(destrepo)
 
+
 def _postshareupdate(repo, update, checkout=None):
     """Maybe perform a working directory update after a shared repo is created.
 
@@ -364,6 +401,7 @@
             continue
     _update(repo, uprev)
 
+
 def copystore(ui, srcrepo, destpath):
     '''copy files from store of srcrepo in destpath
 
@@ -390,20 +428,32 @@
                         lockfile = os.path.join(dstbase, "lock")
                         # lock to avoid premature writing to the target
                         destlock = lock.lock(dstvfs, lockfile)
-                    hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
-                                                 hardlink, progress)
+                    hardlink, n = util.copyfiles(
+                        srcvfs.join(f), dstvfs.join(f), hardlink, progress
+                    )
                     num += n
             if hardlink:
                 ui.debug("linked %d files\n" % num)
             else:
                 ui.debug("copied %d files\n" % num)
         return destlock
-    except: # re-raises
+    except:  # re-raises
         release(destlock)
         raise
 
-def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False,
-                   rev=None, update=True, stream=False):
+
+def clonewithshare(
+    ui,
+    peeropts,
+    sharepath,
+    source,
+    srcpeer,
+    dest,
+    pull=False,
+    rev=None,
+    update=True,
+    stream=False,
+):
     """Perform a clone using a shared repo.
 
     The store for the repository will be located at <sharepath>/.hg. The
@@ -414,17 +464,19 @@
     revs = None
     if rev:
         if not srcpeer.capable('lookup'):
-            raise error.Abort(_("src repository does not support "
-                               "revision lookup and so doesn't "
-                               "support clone by revision"))
+            raise error.Abort(
+                _(
+                    "src repository does not support "
+                    "revision lookup and so doesn't "
+                    "support clone by revision"
+                )
+            )
 
         # TODO this is batchable.
         remoterevs = []
         for r in rev:
             with srcpeer.commandexecutor() as e:
-                remoterevs.append(e.callcommand('lookup', {
-                    'key': r,
-                }).result())
+                remoterevs.append(e.callcommand('lookup', {'key': r,}).result())
         revs = remoterevs
 
     # Obtain a lock before checking for or cloning the pooled repo otherwise
@@ -442,15 +494,24 @@
 
     with lock.lock(poolvfs, '%s.lock' % basename):
         if os.path.exists(sharepath):
-            ui.status(_('(sharing from existing pooled repository %s)\n') %
-                      basename)
+            ui.status(
+                _('(sharing from existing pooled repository %s)\n') % basename
+            )
         else:
             ui.status(_('(sharing from new pooled repository %s)\n') % basename)
             # Always use pull mode because hardlinks in share mode don't work
             # well. Never update because working copies aren't necessary in
             # share mode.
-            clone(ui, peeropts, source, dest=sharepath, pull=True,
-                  revs=rev, update=False, stream=stream)
+            clone(
+                ui,
+                peeropts,
+                source,
+                dest=sharepath,
+                pull=True,
+                revs=rev,
+                update=False,
+                stream=stream,
+            )
 
     # Resolve the value to put in [paths] section for the source.
     if islocal(source):
@@ -459,8 +520,14 @@
         defaultpath = source
 
     sharerepo = repository(ui, path=sharepath)
-    destrepo = share(ui, sharerepo, dest=dest, update=False, bookmarks=False,
-                     defaultpath=defaultpath)
+    destrepo = share(
+        ui,
+        sharerepo,
+        dest=dest,
+        update=False,
+        bookmarks=False,
+        defaultpath=defaultpath,
+    )
 
     # We need to perform a pull against the dest repo to fetch bookmarks
     # and other non-store data that isn't shared by default. In the case of
@@ -473,6 +540,7 @@
 
     return srcpeer, peer(ui, peeropts, dest)
 
+
 # Recomputing branch cache might be slow on big repos,
 # so just copy it
 def _copycache(srcrepo, dstcachedir, fname):
@@ -484,9 +552,22 @@
             os.mkdir(dstcachedir)
         util.copyfile(srcbranchcache, dstbranchcache)
 
-def clone(ui, peeropts, source, dest=None, pull=False, revs=None,
-          update=True, stream=False, branch=None, shareopts=None,
-          storeincludepats=None, storeexcludepats=None, depth=None):
+
+def clone(
+    ui,
+    peeropts,
+    source,
+    dest=None,
+    pull=False,
+    revs=None,
+    update=True,
+    stream=False,
+    branch=None,
+    shareopts=None,
+    storeincludepats=None,
+    storeexcludepats=None,
+    depth=None,
+):
     """Make a copy of an existing repository.
 
     Create a copy of an existing repository in a new directory.  The
@@ -542,7 +623,7 @@
         source, branches = parseurl(origsource, branch)
         srcpeer = peer(ui, peeropts, source)
     else:
-        srcpeer = source.peer() # in case we were called with a localrepo
+        srcpeer = source.peer()  # in case we were called with a localrepo
         branches = (None, branch or [])
         origsource = source = srcpeer.url()
     revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)
@@ -599,11 +680,19 @@
         createopts['lfs'] = True
 
         if extensions.disabledext('lfs'):
-            ui.status(_('(remote is using large file support (lfs), but it is '
-                        'explicitly disabled in the local configuration)\n'))
+            ui.status(
+                _(
+                    '(remote is using large file support (lfs), but it is '
+                    'explicitly disabled in the local configuration)\n'
+                )
+            )
         else:
-            ui.status(_('(remote is using large file support (lfs); lfs will '
-                        'be enabled for this repository)\n'))
+            ui.status(
+                _(
+                    '(remote is using large file support (lfs); lfs will '
+                    'be enabled for this repository)\n'
+                )
+            )
 
     shareopts = shareopts or {}
     sharepool = shareopts.get('pool')
@@ -617,24 +706,32 @@
             # not available. If we fail to resolve, sharing is not enabled.
             try:
                 with srcpeer.commandexecutor() as e:
-                    rootnode = e.callcommand('lookup', {
-                        'key': '0',
-                    }).result()
+                    rootnode = e.callcommand('lookup', {'key': '0',}).result()
 
                 if rootnode != node.nullid:
                     sharepath = os.path.join(sharepool, node.hex(rootnode))
                 else:
-                    ui.status(_('(not using pooled storage: '
-                                'remote appears to be empty)\n'))
+                    ui.status(
+                        _(
+                            '(not using pooled storage: '
+                            'remote appears to be empty)\n'
+                        )
+                    )
             except error.RepoLookupError:
-                ui.status(_('(not using pooled storage: '
-                            'unable to resolve identity of remote)\n'))
+                ui.status(
+                    _(
+                        '(not using pooled storage: '
+                        'unable to resolve identity of remote)\n'
+                    )
+                )
         elif sharenamemode == 'remote':
             sharepath = os.path.join(
-                sharepool, node.hex(hashlib.sha1(source).digest()))
+                sharepool, node.hex(hashlib.sha1(source).digest())
+            )
         else:
-            raise error.Abort(_('unknown share naming mode: %s') %
-                              sharenamemode)
+            raise error.Abort(
+                _('unknown share naming mode: %s') % sharenamemode
+            )
 
         # TODO this is a somewhat arbitrary restriction.
         if narrow:
@@ -642,9 +739,18 @@
             sharepath = None
 
         if sharepath:
-            return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
-                                  dest, pull=pull, rev=revs, update=update,
-                                  stream=stream)
+            return clonewithshare(
+                ui,
+                peeropts,
+                sharepath,
+                source,
+                srcpeer,
+                dest,
+                pull=pull,
+                rev=revs,
+                update=update,
+                stream=stream,
+            )
 
     srclock = destlock = cleandir = None
     srcrepo = srcpeer.local()
@@ -657,8 +763,12 @@
             cleandir = dest
 
         copy = False
-        if (srcrepo and srcrepo.cancopy() and islocal(dest)
-            and not phases.hassecret(srcrepo)):
+        if (
+            srcrepo
+            and srcrepo.cancopy()
+            and islocal(dest)
+            and not phases.hassecret(srcrepo)
+        ):
             copy = not pull and not revs
 
         # TODO this is a somewhat arbitrary restriction.
@@ -689,8 +799,9 @@
             except OSError as inst:
                 if inst.errno == errno.EEXIST:
                     cleandir = None
-                    raise error.Abort(_("destination '%s' already exists")
-                                     % dest)
+                    raise error.Abort(
+                        _("destination '%s' already exists") % dest
+                    )
                 raise
 
             destlock = copystore(ui, srcrepo, destpath)
@@ -707,33 +818,42 @@
             # we need to re-init the repo after manually copying the data
             # into it
             destpeer = peer(srcrepo, peeropts, dest)
-            srcrepo.hook('outgoing', source='clone',
-                          node=node.hex(node.nullid))
+            srcrepo.hook('outgoing', source='clone', node=node.hex(node.nullid))
         else:
             try:
                 # only pass ui when no srcrepo
-                destpeer = peer(srcrepo or ui, peeropts, dest, create=True,
-                                createopts=createopts)
+                destpeer = peer(
+                    srcrepo or ui,
+                    peeropts,
+                    dest,
+                    create=True,
+                    createopts=createopts,
+                )
             except OSError as inst:
                 if inst.errno == errno.EEXIST:
                     cleandir = None
-                    raise error.Abort(_("destination '%s' already exists")
-                                     % dest)
+                    raise error.Abort(
+                        _("destination '%s' already exists") % dest
+                    )
                 raise
 
             if revs:
                 if not srcpeer.capable('lookup'):
-                    raise error.Abort(_("src repository does not support "
-                                       "revision lookup and so doesn't "
-                                       "support clone by revision"))
+                    raise error.Abort(
+                        _(
+                            "src repository does not support "
+                            "revision lookup and so doesn't "
+                            "support clone by revision"
+                        )
+                    )
 
                 # TODO this is batchable.
                 remoterevs = []
                 for rev in revs:
                     with srcpeer.commandexecutor() as e:
-                        remoterevs.append(e.callcommand('lookup', {
-                            'key': rev,
-                        }).result())
+                        remoterevs.append(
+                            e.callcommand('lookup', {'key': rev,}).result()
+                        )
                 revs = remoterevs
 
                 checkout = revs[0]
@@ -757,23 +877,36 @@
                 # internal config: ui.quietbookmarkmove
                 overrides = {('ui', 'quietbookmarkmove'): True}
                 with local.ui.configoverride(overrides, 'clone'):
-                    exchange.pull(local, srcpeer, revs,
-                                  streamclonerequested=stream,
-                                  includepats=storeincludepats,
-                                  excludepats=storeexcludepats,
-                                  depth=depth)
+                    exchange.pull(
+                        local,
+                        srcpeer,
+                        revs,
+                        streamclonerequested=stream,
+                        includepats=storeincludepats,
+                        excludepats=storeexcludepats,
+                        depth=depth,
+                    )
             elif srcrepo:
                 # TODO lift restriction once exchange.push() accepts narrow
                 # push.
                 if narrow:
-                    raise error.Abort(_('narrow clone not available for '
-                                        'remote destinations'))
+                    raise error.Abort(
+                        _(
+                            'narrow clone not available for '
+                            'remote destinations'
+                        )
+                    )
 
-                exchange.push(srcrepo, destpeer, revs=revs,
-                              bookmarks=srcrepo._bookmarks.keys())
+                exchange.push(
+                    srcrepo,
+                    destpeer,
+                    revs=revs,
+                    bookmarks=srcrepo._bookmarks.keys(),
+                )
             else:
-                raise error.Abort(_("clone from remote to remote not supported")
-                                 )
+                raise error.Abort(
+                    _("clone from remote to remote not supported")
+                )
 
         cleandir = None
 
@@ -792,9 +925,9 @@
             if update:
                 if update is not True:
                     with srcpeer.commandexecutor() as e:
-                        checkout = e.callcommand('lookup', {
-                            'key': update,
-                        }).result()
+                        checkout = e.callcommand(
+                            'lookup', {'key': update,}
+                        ).result()
 
                 uprev = None
                 status = None
@@ -821,8 +954,9 @@
                         if bn == 'default':
                             status = _("updating to bookmark @\n")
                         else:
-                            status = (_("updating to bookmark @ on branch %s\n")
-                                      % bn)
+                            status = (
+                                _("updating to bookmark @ on branch %s\n") % bn
+                            )
                     except KeyError:
                         try:
                             uprev = destrepo.branchtip('default')
@@ -843,13 +977,23 @@
             srcpeer.close()
     return srcpeer, destpeer
 
+
 def _showstats(repo, stats, quietempty=False):
     if quietempty and stats.isempty():
         return
-    repo.ui.status(_("%d files updated, %d files merged, "
-                     "%d files removed, %d files unresolved\n") % (
-                   stats.updatedcount, stats.mergedcount,
-                   stats.removedcount, stats.unresolvedcount))
+    repo.ui.status(
+        _(
+            "%d files updated, %d files merged, "
+            "%d files removed, %d files unresolved\n"
+        )
+        % (
+            stats.updatedcount,
+            stats.mergedcount,
+            stats.removedcount,
+            stats.unresolvedcount,
+        )
+    )
+
 
 def updaterepo(repo, node, overwrite, updatecheck=None):
     """Update the working directory to node.
@@ -857,9 +1001,15 @@
     When overwrite is set, changes are clobbered, merged else
 
     returns stats (see pydoc mercurial.merge.applyupdates)"""
-    return mergemod.update(repo, node, branchmerge=False, force=overwrite,
-                           labels=['working copy', 'destination'],
-                           updatecheck=updatecheck)
+    return mergemod.update(
+        repo,
+        node,
+        branchmerge=False,
+        force=overwrite,
+        labels=['working copy', 'destination'],
+        updatecheck=updatecheck,
+    )
+
 
 def update(repo, node, quietempty=False, updatecheck=None):
     """update the working directory to node"""
@@ -869,9 +1019,11 @@
         repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
     return stats.unresolvedcount > 0
 
+
 # naming conflict in clone()
 _update = update
 
+
 def clean(repo, node, show_stats=True, quietempty=False):
     """forcibly switch the working directory to node, clobbering changes"""
     stats = updaterepo(repo, node, True)
@@ -880,15 +1032,18 @@
         _showstats(repo, stats, quietempty)
     return stats.unresolvedcount > 0
 
+
 # naming conflict in updatetotally()
 _clean = clean
 
-_VALID_UPDATECHECKS = {mergemod.UPDATECHECK_ABORT,
-                       mergemod.UPDATECHECK_NONE,
-                       mergemod.UPDATECHECK_LINEAR,
-                       mergemod.UPDATECHECK_NO_CONFLICT,
+_VALID_UPDATECHECKS = {
+    mergemod.UPDATECHECK_ABORT,
+    mergemod.UPDATECHECK_NONE,
+    mergemod.UPDATECHECK_LINEAR,
+    mergemod.UPDATECHECK_NO_CONFLICT,
 }
 
+
 def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
     """Update the working directory with extra care for non-file components
 
@@ -921,8 +1076,10 @@
             # If not configured, or invalid value configured
             updatecheck = mergemod.UPDATECHECK_LINEAR
     if updatecheck not in _VALID_UPDATECHECKS:
-        raise ValueError(r'Invalid updatecheck value %r (can accept %r)' % (
-            updatecheck, _VALID_UPDATECHECKS))
+        raise ValueError(
+            r'Invalid updatecheck value %r (can accept %r)'
+            % (updatecheck, _VALID_UPDATECHECKS)
+        )
     with repo.wlock():
         movemarkfrom = None
         warndest = False
@@ -941,7 +1098,7 @@
 
         if not ret and movemarkfrom:
             if movemarkfrom == repo['.'].node():
-                pass # no-op update
+                pass  # no-op update
             elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                 b = ui.label(repo._activebookmark, 'bookmarks.active')
                 ui.status(_("updating bookmark %s\n") % b)
@@ -966,23 +1123,42 @@
 
     return ret
 
-def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None,
-          abort=False):
+
+def merge(
+    repo,
+    node,
+    force=None,
+    remind=True,
+    mergeforce=False,
+    labels=None,
+    abort=False,
+):
     """Branch merge with node, resolving changes. Return true if any
     unresolved conflicts."""
     if abort:
         return abortmerge(repo.ui, repo)
 
-    stats = mergemod.update(repo, node, branchmerge=True, force=force,
-                            mergeforce=mergeforce, labels=labels)
+    stats = mergemod.update(
+        repo,
+        node,
+        branchmerge=True,
+        force=force,
+        mergeforce=mergeforce,
+        labels=labels,
+    )
     _showstats(repo, stats)
     if stats.unresolvedcount:
-        repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
-                         "or 'hg merge --abort' to abandon\n"))
+        repo.ui.status(
+            _(
+                "use 'hg resolve' to retry unresolved file merges "
+                "or 'hg merge --abort' to abandon\n"
+            )
+        )
     elif remind:
         repo.ui.status(_("(branch merge, don't forget to commit)\n"))
     return stats.unresolvedcount > 0
 
+
 def abortmerge(ui, repo):
     ms = mergemod.mergestate.read(repo)
     if ms.active():
@@ -992,14 +1168,17 @@
         # there were no conficts, mergestate was not stored
         node = repo['.'].hex()
 
-    repo.ui.status(_("aborting the merge, updating back to"
-                     " %s\n") % node[:12])
+    repo.ui.status(
+        _("aborting the merge, updating back to" " %s\n") % node[:12]
+    )
     stats = mergemod.update(repo, node, branchmerge=False, force=True)
     _showstats(repo, stats)
     return stats.unresolvedcount > 0
 
-def _incoming(displaychlist, subreporecurse, ui, repo, source,
-        opts, buffered=False):
+
+def _incoming(
+    displaychlist, subreporecurse, ui, repo, source, opts, buffered=False
+):
     """
     Helper for incoming / gincoming.
     displaychlist gets called with
@@ -1013,21 +1192,24 @@
 
     if revs:
         revs = [other.lookup(rev) for rev in revs]
-    other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
-                                revs, opts["bundle"], opts["force"])
+    other, chlist, cleanupfn = bundlerepo.getremotechanges(
+        ui, repo, other, revs, opts["bundle"], opts["force"]
+    )
     try:
         if not chlist:
             ui.status(_("no changes found\n"))
             return subreporecurse()
         ui.pager('incoming')
-        displayer = logcmdutil.changesetdisplayer(ui, other, opts,
-                                                  buffered=buffered)
+        displayer = logcmdutil.changesetdisplayer(
+            ui, other, opts, buffered=buffered
+        )
         displaychlist(other, chlist, displayer)
         displayer.close()
     finally:
         cleanupfn()
     subreporecurse()
-    return 0 # exit code is zero since we found incoming changes
+    return 0  # exit code is zero since we found incoming changes
+
 
 def incoming(ui, repo, source, opts):
     def subreporecurse():
@@ -1052,13 +1234,17 @@
                 continue
             count += 1
             displayer.show(other[n])
+
     return _incoming(display, subreporecurse, ui, repo, source, opts)
 
+
 def _outgoing(ui, repo, dest, opts):
     path = ui.paths.getpath(dest, default=('default-push', 'default'))
     if not path:
-        raise error.Abort(_('default repository not configured!'),
-                hint=_("see 'hg help config.paths'"))
+        raise error.Abort(
+            _('default repository not configured!'),
+            hint=_("see 'hg help config.paths'"),
+        )
     dest = path.pushloc or path.loc
     branches = path.branch, opts.get('branch') or []
 
@@ -1068,13 +1254,15 @@
         revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
 
     other = peer(repo, opts, dest)
-    outgoing = discovery.findcommonoutgoing(repo, other, revs,
-                                            force=opts.get('force'))
+    outgoing = discovery.findcommonoutgoing(
+        repo, other, revs, force=opts.get('force')
+    )
     o = outgoing.missing
     if not o:
         scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
     return o, other
 
+
 def outgoing(ui, repo, dest, opts):
     def recurse():
         ret = 1
@@ -1107,7 +1295,8 @@
     displayer.close()
     cmdutil.outgoinghooks(ui, repo, other, opts, o)
     recurse()
-    return 0 # exit code is zero since we found outgoing changes
+    return 0  # exit code is zero since we found outgoing changes
+
 
 def verify(repo, level=None):
     """verify the consistency of a repository"""
@@ -1118,8 +1307,9 @@
     # concern.
 
     # pathto() is needed for -R case
-    revs = repo.revs("filelog(%s)",
-                     util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
+    revs = repo.revs(
+        "filelog(%s)", util.pathto(repo.root, repo.getcwd(), '.hgsubstate')
+    )
 
     if revs:
         repo.ui.status(_('checking subrepo links\n'))
@@ -1128,23 +1318,27 @@
             try:
                 for subpath in ctx.substate:
                     try:
-                        ret = (ctx.sub(subpath, allowcreate=False).verify()
-                               or ret)
+                        ret = (
+                            ctx.sub(subpath, allowcreate=False).verify() or ret
+                        )
                     except error.RepoError as e:
-                        repo.ui.warn(('%d: %s\n') % (rev, e))
+                        repo.ui.warn('%d: %s\n' % (rev, e))
             except Exception:
-                repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
-                             node.short(ctx.node()))
+                repo.ui.warn(
+                    _('.hgsubstate is corrupt in revision %s\n')
+                    % node.short(ctx.node())
+                )
 
     return ret
 
+
 def remoteui(src, opts):
     'build a remote ui from ui or repo and opts'
-    if util.safehasattr(src, 'baseui'): # looks like a repository
-        dst = src.baseui.copy() # drop repo-specific config
-        src = src.ui # copy target options from repo
-    else: # assume it's a global ui object
-        dst = src.copy() # keep all global options
+    if util.safehasattr(src, 'baseui'):  # looks like a repository
+        dst = src.baseui.copy()  # drop repo-specific config
+        src = src.ui  # copy target options from repo
+    else:  # assume it's a global ui object
+        dst = src.copy()  # keep all global options
 
     # copy ssh-specific options
     for o in 'ssh', 'remotecmd':
@@ -1167,14 +1361,17 @@
 
     return dst
 
+
 # Files of interest
 # Used to check if the repository has changed looking at mtime and size of
 # these files.
-foi = [('spath', '00changelog.i'),
-       ('spath', 'phaseroots'), # ! phase can change content at the same size
-       ('spath', 'obsstore'),
-       ('path', 'bookmarks'), # ! bookmark can change content at the same size
-      ]
+foi = [
+    ('spath', '00changelog.i'),
+    ('spath', 'phaseroots'),  # ! phase can change content at the same size
+    ('spath', 'obsstore'),
+    ('path', 'bookmarks'),  # ! bookmark can change content at the same size
+]
+
 
 class cachedlocalrepo(object):
     """Holds a localrepository that can be cached and reused."""
--- a/mercurial/hgweb/__init__.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/__init__.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,9 +17,7 @@
     pycompat,
 )
 
-from ..utils import (
-    procutil,
-)
+from ..utils import procutil
 
 from . import (
     hgweb_mod,
@@ -27,6 +25,7 @@
     server,
 )
 
+
 def hgweb(config, name=None, baseui=None):
     '''create an hgweb wsgi object
 
@@ -40,16 +39,22 @@
 
     if isinstance(config, pycompat.unicode):
         raise error.ProgrammingError(
-            'Mercurial only supports encoded strings: %r' % config)
-    if ((isinstance(config, bytes) and not os.path.isdir(config)) or
-        isinstance(config, dict) or isinstance(config, list)):
+            'Mercurial only supports encoded strings: %r' % config
+        )
+    if (
+        (isinstance(config, bytes) and not os.path.isdir(config))
+        or isinstance(config, dict)
+        or isinstance(config, list)
+    ):
         # create a multi-dir interface
         return hgwebdir_mod.hgwebdir(config, baseui=baseui)
     return hgweb_mod.hgweb(config, name=name, baseui=baseui)
 
+
 def hgwebdir(config, baseui=None):
     return hgwebdir_mod.hgwebdir(config, baseui=baseui)
 
+
 class httpservice(object):
     def __init__(self, ui, app, opts):
         self.ui = ui
@@ -60,9 +65,11 @@
         procutil.setsignalhandler()
         self.httpd = server.create_server(self.ui, self.app)
 
-        if (self.opts['port'] and
-            not self.ui.verbose and
-            not self.opts['print_url']):
+        if (
+            self.opts['port']
+            and not self.ui.verbose
+            and not self.opts['print_url']
+        ):
             return
 
         if self.httpd.prefix:
@@ -77,7 +84,7 @@
         bindaddr = self.httpd.addr
         if bindaddr == r'0.0.0.0':
             bindaddr = r'*'
-        elif r':' in bindaddr: # IPv6
+        elif r':' in bindaddr:  # IPv6
             bindaddr = r'[%s]' % bindaddr
 
         fqaddr = self.httpd.fqaddr
@@ -85,7 +92,10 @@
             fqaddr = r'[%s]' % fqaddr
 
         url = 'http://%s%s/%s' % (
-            pycompat.sysbytes(fqaddr), pycompat.sysbytes(port), prefix)
+            pycompat.sysbytes(fqaddr),
+            pycompat.sysbytes(port),
+            prefix,
+        )
         if self.opts['print_url']:
             self.ui.write('%s\n' % url)
         else:
@@ -93,18 +103,22 @@
                 write = self.ui.status
             else:
                 write = self.ui.write
-            write(_('listening at %s (bound to %s:%d)\n') %
-                  (url, pycompat.sysbytes(bindaddr), self.httpd.port))
+            write(
+                _('listening at %s (bound to %s:%d)\n')
+                % (url, pycompat.sysbytes(bindaddr), self.httpd.port)
+            )
         self.ui.flush()  # avoid buffering of status message
 
     def run(self):
         self.httpd.serve_forever()
 
+
 def createapp(baseui, repo, webconf):
     if webconf:
         return hgwebdir_mod.hgwebdir(webconf, baseui=baseui)
     else:
         if not repo:
-            raise error.RepoError(_("there is no Mercurial repository"
-                                    " here (.hg not found)"))
+            raise error.RepoError(
+                _("there is no Mercurial repository" " here (.hg not found)")
+            )
         return hgweb_mod.hgweb(repo, baseui=baseui)
--- a/mercurial/hgweb/common.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/common.py	Sun Oct 06 09:45:02 2019 -0400
@@ -44,6 +44,7 @@
     """
     return userlist == ['*'] or username in userlist
 
+
 def checkauthz(hgweb, req, op):
     '''Check permission for operation based on request data (including
     authentication info). Return if op allowed, else raise an ErrorResponse
@@ -61,7 +62,7 @@
 
     if op == 'pull' and not hgweb.allowpull:
         raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized')
-    elif op == 'pull' or op is None: # op is None for interface requests
+    elif op == 'pull' or op is None:  # op is None for interface requests
         return
 
     # Allow LFS uploading via PUT requests
@@ -87,6 +88,7 @@
     if not (allow and ismember(hgweb.repo.ui, user, allow)):
         raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized')
 
+
 # Hooks for hgweb permission checks; extensions can add hooks here.
 # Each hook is invoked like this: hook(hgweb, request, operation),
 # where operation is either read, pull, push or upload. Hooks should either
@@ -108,6 +110,7 @@
         self.headers = headers
         self.message = message
 
+
 class continuereader(object):
     """File object wrapper to handle HTTP 100-continue.
 
@@ -116,6 +119,7 @@
     response is sent. This should trigger the client into actually sending
     the request body.
     """
+
     def __init__(self, f, write):
         self.f = f
         self._write = write
@@ -132,14 +136,18 @@
             return getattr(self.f, attr)
         raise AttributeError
 
+
 def _statusmessage(code):
     responses = httpserver.basehttprequesthandler.responses
     return pycompat.bytesurl(
-        responses.get(code, (r'Error', r'Unknown error'))[0])
+        responses.get(code, (r'Error', r'Unknown error'))[0]
+    )
+
 
 def statusmessage(code, message=None):
     return '%d %s' % (code, message or _statusmessage(code))
 
+
 def get_stat(spath, fn):
     """stat fn if it exists, spath otherwise"""
     cl_path = os.path.join(spath, fn)
@@ -148,20 +156,26 @@
     else:
         return os.stat(spath)
 
+
 def get_mtime(spath):
     return get_stat(spath, "00changelog.i")[stat.ST_MTIME]
 
+
 def ispathsafe(path):
     """Determine if a path is safe to use for filesystem access."""
     parts = path.split('/')
     for part in parts:
-        if (part in ('', pycompat.oscurdir, pycompat.ospardir) or
-            pycompat.ossep in part or
-            pycompat.osaltsep is not None and pycompat.osaltsep in part):
+        if (
+            part in ('', pycompat.oscurdir, pycompat.ospardir)
+            or pycompat.ossep in part
+            or pycompat.osaltsep is not None
+            and pycompat.osaltsep in part
+        ):
             return False
 
     return True
 
+
 def staticfile(directory, fname, res):
     """return a file inside directory with guessed Content-Type header
 
@@ -184,7 +198,8 @@
     try:
         os.stat(path)
         ct = pycompat.sysbytes(
-            mimetypes.guess_type(pycompat.fsdecode(path))[0] or r"text/plain")
+            mimetypes.guess_type(pycompat.fsdecode(path))[0] or r"text/plain"
+        )
         with open(path, 'rb') as fh:
             data = fh.read()
 
@@ -197,8 +212,10 @@
         if err.errno == errno.ENOENT:
             raise ErrorResponse(HTTP_NOT_FOUND)
         else:
-            raise ErrorResponse(HTTP_SERVER_ERROR,
-                                encoding.strtolocal(err.strerror))
+            raise ErrorResponse(
+                HTTP_SERVER_ERROR, encoding.strtolocal(err.strerror)
+            )
+
 
 def paritygen(stripecount, offset=0):
     """count parity of horizontal stripes for easier reading"""
@@ -216,15 +233,20 @@
             parity = 1 - parity
             count = 0
 
+
 def get_contact(config):
     """Return repo contact information or empty string.
 
     web.contact is the primary source, but if that is not set, try
     ui.username or $EMAIL as a fallback to display something useful.
     """
-    return (config("web", "contact") or
-            config("ui", "username") or
-            encoding.environ.get("EMAIL") or "")
+    return (
+        config("web", "contact")
+        or config("ui", "username")
+        or encoding.environ.get("EMAIL")
+        or ""
+    )
+
 
 def cspvalues(ui):
     """Obtain the Content-Security-Policy header and nonce value.
--- a/mercurial/hgweb/hgweb_mod.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/hgweb_mod.py	Sun Oct 06 09:45:02 2019 -0400
@@ -45,6 +45,7 @@
     wsgicgi,
 )
 
+
 def getstyle(req, configfn, templatepath):
     styles = (
         req.qsparams.get('style', None),
@@ -53,6 +54,7 @@
     )
     return styles, templater.stylemap(styles, templatepath)
 
+
 def makebreadcrumb(url, prefix=''):
     '''Return a 'URL breadcrumb' list
 
@@ -78,6 +80,7 @@
         urlel = os.path.dirname(urlel)
     return templateutil.mappinglist(reversed(breadcrumb))
 
+
 class requestcontext(object):
     """Holds state/context for an individual request.
 
@@ -85,6 +88,7 @@
     is prone to race conditions. Instances of this class exist to hold
     mutable and race-free state for requests.
     """
+
     def __init__(self, app, repo, req, res):
         self.repo = repo
         self.reponame = app.reponame
@@ -113,20 +117,22 @@
 
     # Trust the settings from the .hg/hgrc files by default.
     def config(self, section, name, default=uimod._unset, untrusted=True):
-        return self.repo.ui.config(section, name, default,
-                                   untrusted=untrusted)
+        return self.repo.ui.config(section, name, default, untrusted=untrusted)
 
     def configbool(self, section, name, default=uimod._unset, untrusted=True):
-        return self.repo.ui.configbool(section, name, default,
-                                       untrusted=untrusted)
+        return self.repo.ui.configbool(
+            section, name, default, untrusted=untrusted
+        )
 
     def configint(self, section, name, default=uimod._unset, untrusted=True):
-        return self.repo.ui.configint(section, name, default,
-                                      untrusted=untrusted)
+        return self.repo.ui.configint(
+            section, name, default, untrusted=untrusted
+        )
 
     def configlist(self, section, name, default=uimod._unset, untrusted=True):
-        return self.repo.ui.configlist(section, name, default,
-                                       untrusted=untrusted)
+        return self.repo.ui.configlist(
+            section, name, default, untrusted=untrusted
+        )
 
     def archivelist(self, nodeid):
         return webutil.archivelist(self.repo.ui, nodeid)
@@ -136,29 +142,33 @@
         # this is needed to create absolute urls
         logourl = self.config('web', 'logourl')
         logoimg = self.config('web', 'logoimg')
-        staticurl = (self.config('web', 'staticurl')
-                     or req.apppath.rstrip('/') + '/static/')
+        staticurl = (
+            self.config('web', 'staticurl')
+            or req.apppath.rstrip('/') + '/static/'
+        )
         if not staticurl.endswith('/'):
             staticurl += '/'
 
         # figure out which style to use
 
         vars = {}
-        styles, (style, mapfile) = getstyle(req, self.config,
-                                            self.templatepath)
+        styles, (style, mapfile) = getstyle(req, self.config, self.templatepath)
         if style == styles[0]:
             vars['style'] = style
 
         sessionvars = webutil.sessionvars(vars, '?')
 
         if not self.reponame:
-            self.reponame = (self.config('web', 'name', '')
-                             or req.reponame
-                             or req.apppath
-                             or self.repo.root)
+            self.reponame = (
+                self.config('web', 'name', '')
+                or req.reponame
+                or req.apppath
+                or self.repo.root
+            )
 
         filters = {}
         templatefilter = registrar.templatefilter(filters)
+
         @templatefilter('websub', intype=bytes)
         def websubfilter(text):
             return templatefilters.websub(text, self.websubtable)
@@ -179,15 +189,15 @@
             'nonce': self.nonce,
         }
         templatekeyword = registrar.templatekeyword(defaults)
+
         @templatekeyword('motd', requires=())
         def motd(context, mapping):
             yield self.config('web', 'motd')
 
         tres = formatter.templateresources(self.repo.ui, self.repo)
-        tmpl = templater.templater.frommapfile(mapfile,
-                                               filters=filters,
-                                               defaults=defaults,
-                                               resources=tres)
+        tmpl = templater.templater.frommapfile(
+            mapfile, filters=filters, defaults=defaults, resources=tres
+        )
         return tmpl
 
     def sendtemplate(self, name, **kwargs):
@@ -196,6 +206,7 @@
         self.res.setbodygen(self.tmpl.generate(name, kwargs))
         return self.res.sendresponse()
 
+
 class hgweb(object):
     """HTTP server for individual repositories.
 
@@ -207,6 +218,7 @@
     Some servers are multi-threaded. On these servers, there may
     be multiple active threads inside __call__.
     """
+
     def __init__(self, repo, name=None, baseui=None):
         if isinstance(repo, bytes):
             if baseui:
@@ -282,10 +294,13 @@
         Modern servers should be using WSGI and should avoid this
         method, if possible.
         """
-        if not encoding.environ.get('GATEWAY_INTERFACE',
-                                    '').startswith("CGI/1."):
-            raise RuntimeError("This function is only intended to be "
-                               "called while running as a CGI script.")
+        if not encoding.environ.get('GATEWAY_INTERFACE', '').startswith(
+            "CGI/1."
+        ):
+            raise RuntimeError(
+                "This function is only intended to be "
+                "called while running as a CGI script."
+            )
         wsgicgi.launch(self)
 
     def __call__(self, env, respond):
@@ -328,12 +343,14 @@
         # expose the URLs if the feature is enabled.
         apienabled = rctx.repo.ui.configbool('experimental', 'web.apiserver')
         if apienabled and req.dispatchparts and req.dispatchparts[0] == b'api':
-            wireprotoserver.handlewsgiapirequest(rctx, req, res,
-                                                 self.check_perm)
+            wireprotoserver.handlewsgiapirequest(
+                rctx, req, res, self.check_perm
+            )
             return res.sendresponse()
 
         handled = wireprotoserver.handlewsgirequest(
-            rctx, req, res, self.check_perm)
+            rctx, req, res, self.check_perm
+        )
         if handled:
             return res.sendresponse()
 
@@ -354,7 +371,7 @@
             style = cmd.rfind('-')
             if style != -1:
                 req.qsparams['style'] = cmd[:style]
-                cmd = cmd[style + 1:]
+                cmd = cmd[style + 1 :]
 
             # avoid accepting e.g. style parameter as command
             if util.safehasattr(webcommands, cmd):
@@ -381,7 +398,7 @@
                 for type_, spec in webutil.archivespecs.iteritems():
                     ext = spec[2]
                     if fn.endswith(ext):
-                        req.qsparams['node'] = fn[:-len(ext)]
+                        req.qsparams['node'] = fn[: -len(ext)]
                         req.qsparams['type'] = type_
         else:
             cmd = req.qsparams.get('cmd', '')
@@ -390,8 +407,9 @@
 
         try:
             rctx.tmpl = rctx.templater(req)
-            ctype = rctx.tmpl.render('mimetype',
-                                     {'encoding': encoding.encoding})
+            ctype = rctx.tmpl.render(
+                'mimetype', {'encoding': encoding.encoding}
+            )
 
             # check read permissions non-static content
             if cmd != 'static':
@@ -431,8 +449,9 @@
 
         except (error.LookupError, error.RepoLookupError) as err:
             msg = pycompat.bytestr(err)
-            if (util.safehasattr(err, 'name') and
-                not isinstance(err,  error.ManifestLookupError)):
+            if util.safehasattr(err, 'name') and not isinstance(
+                err, error.ManifestLookupError
+            ):
                 msg = 'revision not found: %s' % err.name
 
             res.status = '404 Not Found'
@@ -457,6 +476,7 @@
         for permhook in permhooks:
             permhook(rctx, req, op)
 
+
 def getwebview(repo):
     """The 'web.view' config controls changeset filter to hgweb. Possible
     values are ``served``, ``visible`` and ``all``. Default is ``served``.
--- a/mercurial/hgweb/hgwebdir_mod.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/hgwebdir_mod.py	Sun Oct 06 09:45:02 2019 -0400
@@ -50,9 +50,11 @@
 )
 from ..utils import dateutil
 
+
 def cleannames(items):
     return [(util.pconvert(name).strip('/'), path) for name, path in items]
 
+
 def findrepos(paths):
     repos = []
     for prefix, root in cleannames(paths):
@@ -71,6 +73,7 @@
         repos.extend(urlrepos(prefix, roothead, paths))
     return repos
 
+
 def urlrepos(prefix, roothead, paths):
     """yield url paths and filesystem paths from a list of repo paths
 
@@ -82,8 +85,10 @@
     """
     for path in paths:
         path = os.path.normpath(path)
-        yield (prefix + '/' +
-               util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
+        yield (
+            prefix + '/' + util.pconvert(path[len(roothead) :]).lstrip('/')
+        ).strip('/'), path
+
 
 def readallowed(ui, req):
     """Check allow_read and deny_read config options of a repo's ui object
@@ -107,6 +112,7 @@
 
     return False
 
+
 def rawindexentries(ui, repos, req, subdir=''):
     descend = ui.configbool('web', 'descend')
     collapse = ui.configbool('web', 'collapse')
@@ -116,7 +122,7 @@
 
         if not name.startswith(subdir):
             continue
-        name = name[len(subdir):]
+        name = name[len(subdir) :]
         directory = False
 
         if '/' in name:
@@ -140,7 +146,7 @@
                 discarded = '/'.join(nameparts[1:])
 
                 # remove name parts plus accompanying slash
-                path = path[:-len(discarded) - 1]
+                path = path[: -len(discarded) - 1]
 
                 try:
                     hg.repository(ui, path)
@@ -165,19 +171,20 @@
 
             # add '/' to the name to make it obvious that
             # the entry is a directory, not a regular repository
-            row = {'contact': "",
-                   'contact_sort': "",
-                   'name': name + '/',
-                   'name_sort': name,
-                   'url': url,
-                   'description': "",
-                   'description_sort': "",
-                   'lastchange': d,
-                   'lastchange_sort': d[1] - d[0],
-                   'archives': templateutil.mappinglist([]),
-                   'isdirectory': True,
-                   'labels': templateutil.hybridlist([], name='label'),
-                   }
+            row = {
+                'contact': "",
+                'contact_sort': "",
+                'name': name + '/',
+                'name_sort': name,
+                'url': url,
+                'description': "",
+                'description_sort': "",
+                'lastchange': d,
+                'lastchange_sort': d[1] - d[0],
+                'archives': templateutil.mappinglist([]),
+                'isdirectory': True,
+                'labels': templateutil.hybridlist([], name='label'),
+            }
 
             seendirs.add(name)
             yield row
@@ -218,42 +225,47 @@
         seenrepos.add(name)
         name = get("web", "name", name)
         labels = u.configlist('web', 'labels', untrusted=True)
-        row = {'contact': contact or "unknown",
-               'contact_sort': contact.upper() or "unknown",
-               'name': name,
-               'name_sort': name,
-               'url': url,
-               'description': description or "unknown",
-               'description_sort': description.upper() or "unknown",
-               'lastchange': d,
-               'lastchange_sort': d[1] - d[0],
-               'archives': webutil.archivelist(u, "tip", url),
-               'isdirectory': None,
-               'labels': templateutil.hybridlist(labels, name='label'),
-               }
+        row = {
+            'contact': contact or "unknown",
+            'contact_sort': contact.upper() or "unknown",
+            'name': name,
+            'name_sort': name,
+            'url': url,
+            'description': description or "unknown",
+            'description_sort': description.upper() or "unknown",
+            'lastchange': d,
+            'lastchange_sort': d[1] - d[0],
+            'archives': webutil.archivelist(u, "tip", url),
+            'isdirectory': None,
+            'labels': templateutil.hybridlist(labels, name='label'),
+        }
 
         yield row
 
-def _indexentriesgen(context, ui, repos, req, stripecount, sortcolumn,
-                     descending, subdir):
+
+def _indexentriesgen(
+    context, ui, repos, req, stripecount, sortcolumn, descending, subdir
+):
     rows = rawindexentries(ui, repos, req, subdir=subdir)
 
     sortdefault = None, False
 
     if sortcolumn and sortdefault != (sortcolumn, descending):
         sortkey = '%s_sort' % sortcolumn
-        rows = sorted(rows, key=lambda x: x[sortkey],
-                      reverse=descending)
+        rows = sorted(rows, key=lambda x: x[sortkey], reverse=descending)
 
     for row, parity in zip(rows, paritygen(stripecount)):
         row['parity'] = parity
         yield row
 
-def indexentries(ui, repos, req, stripecount, sortcolumn='',
-                 descending=False, subdir=''):
+
+def indexentries(
+    ui, repos, req, stripecount, sortcolumn='', descending=False, subdir=''
+):
     args = (ui, repos, req, stripecount, sortcolumn, descending, subdir)
     return templateutil.mappinggenerator(_indexentriesgen, args=args)
 
+
 class hgwebdir(object):
     """HTTP server for multiple repositories.
 
@@ -262,6 +274,7 @@
 
     Instances are typically used as WSGI applications.
     """
+
     def __init__(self, conf, baseui=None):
         self.conf = conf
         self.baseui = baseui
@@ -282,8 +295,10 @@
             refreshinterval = item.default
 
         # refreshinterval <= 0 means to always refresh.
-        if (refreshinterval > 0 and
-            self.lastrefresh + refreshinterval > time.time()):
+        if (
+            refreshinterval > 0
+            and self.lastrefresh + refreshinterval > time.time()
+        ):
             return
 
         if self.baseui:
@@ -318,7 +333,7 @@
                 repo = os.path.normpath(path)
                 name = util.pconvert(repo)
                 if name.startswith(prefix):
-                    name = name[len(prefix):]
+                    name = name[len(prefix) :]
                 repos.append((name.lstrip('/'), repo))
 
         self.repos = repos
@@ -338,10 +353,13 @@
         self.lastrefresh = time.time()
 
     def run(self):
-        if not encoding.environ.get('GATEWAY_INTERFACE',
-                                    '').startswith("CGI/1."):
-            raise RuntimeError("This function is only intended to be "
-                               "called while running as a CGI script.")
+        if not encoding.environ.get('GATEWAY_INTERFACE', '').startswith(
+            "CGI/1."
+        ):
+            raise RuntimeError(
+                "This function is only intended to be "
+                "called while running as a CGI script."
+            )
         wsgicgi.launch(self)
 
     def __call__(self, env, respond):
@@ -409,7 +427,7 @@
             # nested indexes and hgwebs
 
             if virtual.endswith('/index') and virtual not in repos:
-                subdir = virtual[:-len('index')]
+                subdir = virtual[: -len('index')]
                 if any(r.startswith(subdir) for r in repos):
                     return self.makeindex(req, res, tmpl, subdir)
 
@@ -426,14 +444,17 @@
                     # repository path component.
                     uenv = req.rawenv
                     if pycompat.ispy3:
-                        uenv = {k.decode('latin1'): v for k, v in
-                                uenv.iteritems()}
+                        uenv = {
+                            k.decode('latin1'): v for k, v in uenv.iteritems()
+                        }
                     req = requestmod.parserequestfromenv(
-                        uenv, reponame=virtualrepo,
+                        uenv,
+                        reponame=virtualrepo,
                         altbaseurl=self.ui.config('web', 'baseurl'),
                         # Reuse wrapped body file object otherwise state
                         # tracking can get confused.
-                        bodyfh=req.bodyfh)
+                        bodyfh=req.bodyfh,
+                    )
                     try:
                         # ensure caller gets private copy of ui
                         repo = hg.repository(self.ui.copy(), real)
@@ -473,16 +494,29 @@
             if sortcolumn not in sortable:
                 sortcolumn = ""
 
-        sort = [("sort_%s" % column,
-                 "%s%s" % ((not descending and column == sortcolumn)
-                            and "-" or "", column))
-                for column in sortable]
+        sort = [
+            (
+                "sort_%s" % column,
+                "%s%s"
+                % (
+                    (not descending and column == sortcolumn) and "-" or "",
+                    column,
+                ),
+            )
+            for column in sortable
+        ]
 
         self.refresh()
 
-        entries = indexentries(self.ui, self.repos, req,
-                               self.stripecount, sortcolumn=sortcolumn,
-                               descending=descending, subdir=subdir)
+        entries = indexentries(
+            self.ui,
+            self.repos,
+            req,
+            self.stripecount,
+            sortcolumn=sortcolumn,
+            descending=descending,
+            subdir=subdir,
+        )
 
         mapping = {
             'entries': entries,
@@ -496,21 +530,22 @@
         return res.sendresponse()
 
     def templater(self, req, nonce):
-
         def config(section, name, default=uimod._unset, untrusted=True):
             return self.ui.config(section, name, default, untrusted)
 
         vars = {}
-        styles, (style, mapfile) = hgweb_mod.getstyle(req, config,
-                                                      self.templatepath)
+        styles, (style, mapfile) = hgweb_mod.getstyle(
+            req, config, self.templatepath
+        )
         if style == styles[0]:
             vars['style'] = style
 
         sessionvars = webutil.sessionvars(vars, '?')
         logourl = config('web', 'logourl')
         logoimg = config('web', 'logoimg')
-        staticurl = (config('web', 'staticurl')
-                     or req.apppath.rstrip('/') + '/static/')
+        staticurl = (
+            config('web', 'staticurl') or req.apppath.rstrip('/') + '/static/'
+        )
         if not staticurl.endswith('/'):
             staticurl += '/'
 
@@ -525,6 +560,7 @@
             "nonce": nonce,
         }
         templatekeyword = registrar.templatekeyword(defaults)
+
         @templatekeyword('motd', requires=())
         def motd(context, mapping):
             if self.motd is not None:
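
The row-dict hunks above show black's collection style: a literal that no
longer fits on one line is exploded to one element per line and given a
trailing comma, which also keeps it exploded on later runs. A sketch with
made-up values:

    row = {
        'name': 'example-repo',
        'url': 'https://example.com/example-repo',
        'description': 'an illustrative entry, not taken from the diff',
        'isdirectory': False,
    }
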
--- a/mercurial/hgweb/request.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/request.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,17 +8,16 @@
 
 from __future__ import absolute_import
 
-#import wsgiref.validate
+# import wsgiref.validate
 
-from ..thirdparty import (
-    attr,
-)
+from ..thirdparty import attr
 from .. import (
     error,
     pycompat,
     util,
 )
 
+
 class multidict(object):
     """A dict like object that can store multiple values for a key.
 
@@ -26,6 +25,7 @@
 
     This is inspired by WebOb's class of the same name.
     """
+
     def __init__(self):
         self._items = {}
 
@@ -76,6 +76,7 @@
     def asdictoflists(self):
         return {k: list(v) for k, v in self._items.iteritems()}
 
+
 @attr.s(frozen=True)
 class parsedrequest(object):
     """Represents a parsed WSGI request.
@@ -124,6 +125,7 @@
     # WSGI environment dict, unmodified.
     rawenv = attr.ib()
 
+
 def parserequestfromenv(env, reponame=None, altbaseurl=None, bodyfh=None):
     """Parse URL components from environment variables.
 
@@ -153,7 +155,7 @@
     # We first validate that the incoming object conforms with the WSGI spec.
     # We only want to be dealing with spec-conforming WSGI implementations.
     # TODO enable this once we fix internal violations.
-    #wsgiref.validate.check_environ(env)
+    # wsgiref.validate.check_environ(env)
 
     # PEP-0333 states that environment keys and values are native strings
     # (bytes on Python 2 and str on Python 3). The code points for the Unicode
@@ -161,8 +163,10 @@
     # in Mercurial, so mass convert string keys and values to bytes.
     if pycompat.ispy3:
         env = {k.encode('latin-1'): v for k, v in env.iteritems()}
-        env = {k: v.encode('latin-1') if isinstance(v, str) else v
-               for k, v in env.iteritems()}
+        env = {
+            k: v.encode('latin-1') if isinstance(v, str) else v
+            for k, v in env.iteritems()
+        }
 
     # Some hosting solutions are emulating hgwebdir, and dispatching directly
     # to an hgweb instance using this environment variable.  This was always
@@ -255,16 +259,19 @@
             raise error.ProgrammingError('reponame requires PATH_INFO')
 
         if not env['PATH_INFO'].startswith(repoprefix):
-            raise error.ProgrammingError('PATH_INFO does not begin with repo '
-                                         'name: %s (%s)' % (env['PATH_INFO'],
-                                                            reponame))
+            raise error.ProgrammingError(
+                'PATH_INFO does not begin with repo '
+                'name: %s (%s)' % (env['PATH_INFO'], reponame)
+            )
 
-        dispatchpath = env['PATH_INFO'][len(repoprefix):]
+        dispatchpath = env['PATH_INFO'][len(repoprefix) :]
 
         if dispatchpath and not dispatchpath.startswith('/'):
-            raise error.ProgrammingError('reponame prefix of PATH_INFO does '
-                                         'not end at path delimiter: %s (%s)' %
-                                         (env['PATH_INFO'], reponame))
+            raise error.ProgrammingError(
+                'reponame prefix of PATH_INFO does '
+                'not end at path delimiter: %s (%s)'
+                % (env['PATH_INFO'], reponame)
+            )
 
         apppath = apppath.rstrip('/') + repoprefix
         dispatchparts = dispatchpath.strip('/').split('/')
@@ -295,9 +302,10 @@
     headers = []
     for k, v in env.iteritems():
         if k.startswith('HTTP_'):
-            headers.append((k[len('HTTP_'):].replace('_', '-'), v))
+            headers.append((k[len('HTTP_') :].replace('_', '-'), v))
 
-    from . import wsgiheaders # avoid cycle
+    from . import wsgiheaders  # avoid cycle
+
     headers = wsgiheaders.Headers(headers)
 
     # This is kind of a lie because the HTTP header wasn't explicitly
@@ -313,24 +321,30 @@
     if bodyfh is None:
         bodyfh = env['wsgi.input']
         if 'Content-Length' in headers:
-            bodyfh = util.cappedreader(bodyfh,
-                                       int(headers['Content-Length'] or '0'))
+            bodyfh = util.cappedreader(
+                bodyfh, int(headers['Content-Length'] or '0')
+            )
 
-    return parsedrequest(method=env['REQUEST_METHOD'],
-                         url=fullurl, baseurl=baseurl,
-                         advertisedurl=advertisedfullurl,
-                         advertisedbaseurl=advertisedbaseurl,
-                         urlscheme=env['wsgi.url_scheme'],
-                         remoteuser=env.get('REMOTE_USER'),
-                         remotehost=env.get('REMOTE_HOST'),
-                         apppath=apppath,
-                         dispatchparts=dispatchparts, dispatchpath=dispatchpath,
-                         reponame=reponame,
-                         querystring=querystring,
-                         qsparams=qsparams,
-                         headers=headers,
-                         bodyfh=bodyfh,
-                         rawenv=env)
+    return parsedrequest(
+        method=env['REQUEST_METHOD'],
+        url=fullurl,
+        baseurl=baseurl,
+        advertisedurl=advertisedfullurl,
+        advertisedbaseurl=advertisedbaseurl,
+        urlscheme=env['wsgi.url_scheme'],
+        remoteuser=env.get('REMOTE_USER'),
+        remotehost=env.get('REMOTE_HOST'),
+        apppath=apppath,
+        dispatchparts=dispatchparts,
+        dispatchpath=dispatchpath,
+        reponame=reponame,
+        querystring=querystring,
+        qsparams=qsparams,
+        headers=headers,
+        bodyfh=bodyfh,
+        rawenv=env,
+    )
+
 
 class offsettrackingwriter(object):
     """A file object like object that is append only and tracks write count.
@@ -345,6 +359,7 @@
     a WSGI ``start_response()`` function. Since ``write()`` is a callable and
     not a file object, it doesn't implement other file object methods.
     """
+
     def __init__(self, writefn):
         self._write = writefn
         self._offset = 0
@@ -363,6 +378,7 @@
     def tell(self):
         return self._offset
 
+
 class wsgiresponse(object):
     """Represents a response to a WSGI request.
 
@@ -389,7 +405,8 @@
         self._startresponse = startresponse
 
         self.status = None
-        from . import wsgiheaders # avoid cycle
+        from . import wsgiheaders  # avoid cycle
+
         self.headers = wsgiheaders.Headers([])
 
         self._bodybytes = None
@@ -399,8 +416,11 @@
         self._bodywritefn = None
 
     def _verifybody(self):
-        if (self._bodybytes is not None or self._bodygen is not None
-            or self._bodywillwrite):
+        if (
+            self._bodybytes is not None
+            or self._bodygen is not None
+            or self._bodywillwrite
+        ):
             raise error.ProgrammingError('cannot define body multiple times')
 
     def setbodybytes(self, b):
@@ -450,8 +470,11 @@
         if not self.status:
             raise error.ProgrammingError('status line not defined')
 
-        if (self._bodybytes is None and self._bodygen is None
-            and not self._bodywillwrite):
+        if (
+            self._bodybytes is None
+            and self._bodygen is None
+            and not self._bodywillwrite
+        ):
             raise error.ProgrammingError('response body not defined')
 
         # RFC 7232 Section 4.1 states that a 304 MUST generate one of
@@ -469,20 +492,30 @@
 
             # Strictly speaking, this is too strict. But until it causes
             # problems, let's be strict.
-            badheaders = {k for k in self.headers.keys()
-                          if k.lower() not in ('date', 'etag', 'expires',
-                                               'cache-control',
-                                               'content-location',
-                                               'content-security-policy',
-                                               'vary')}
+            badheaders = {
+                k
+                for k in self.headers.keys()
+                if k.lower()
+                not in (
+                    'date',
+                    'etag',
+                    'expires',
+                    'cache-control',
+                    'content-location',
+                    'content-security-policy',
+                    'vary',
+                )
+            }
             if badheaders:
                 raise error.ProgrammingError(
-                    'illegal header on 304 response: %s' %
-                    ', '.join(sorted(badheaders)))
+                    'illegal header on 304 response: %s'
+                    % ', '.join(sorted(badheaders))
+                )
 
             if self._bodygen is not None or self._bodywillwrite:
-                raise error.ProgrammingError("must use setbodybytes('') with "
-                                             "304 responses")
+                raise error.ProgrammingError(
+                    "must use setbodybytes('') with " "304 responses"
+                )
 
         # Various HTTP clients (notably httplib) won't read the HTTP response
         # until the HTTP request has been sent in full. If servers (us) send a
@@ -531,10 +564,11 @@
                 if not chunk:
                     break
 
-        strheaders = [(pycompat.strurl(k), pycompat.strurl(v)) for
-                      k, v in self.headers.items()]
-        write = self._startresponse(pycompat.sysstr(self.status),
-                                    strheaders)
+        strheaders = [
+            (pycompat.strurl(k), pycompat.strurl(v))
+            for k, v in self.headers.items()
+        ]
+        write = self._startresponse(pycompat.sysstr(self.status), strheaders)
 
         if self._bodybytes:
             yield self._bodybytes
@@ -566,17 +600,22 @@
             raise error.ProgrammingError('must call setbodywillwrite() first')
 
         if not self._started:
-            raise error.ProgrammingError('must call sendresponse() first; did '
-                                         'you remember to consume it since it '
-                                         'is a generator?')
+            raise error.ProgrammingError(
+                'must call sendresponse() first; did '
+                'you remember to consume it since it '
+                'is a generator?'
+            )
 
         assert self._bodywritefn
         return offsettrackingwriter(self._bodywritefn)
 
+
 def wsgiapplication(app_maker):
     '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
     can and should now be used as a WSGI application.'''
     application = app_maker()
+
     def run_wsgi(env, respond):
         return application(env, respond)
+
     return run_wsgi
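
Most of the bare '+' lines in this file add the vertical whitespace black
enforces from PEP 8: two blank lines around top-level definitions and one
blank line around nested ones. In sketch form:

    def first():
        return 1


    def second():
        def inner():
            return 2

        return inner()
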
--- a/mercurial/hgweb/server.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/server.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,9 +30,8 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
-from . import (
-    common,
-)
+from . import common
+
 
 def _splitURI(uri):
     """Return path and query that has been split from uri
@@ -46,17 +45,22 @@
         path, query = uri, r''
     return urlreq.unquote(path), query
 
+
 class _error_logger(object):
     def __init__(self, handler):
         self.handler = handler
+
     def flush(self):
         pass
+
     def write(self, str):
         self.writelines(str.split('\n'))
+
     def writelines(self, seq):
         for msg in seq:
             self.handler.log_error(r"HG error:  %s", encoding.strfromlocal(msg))
 
+
 class _httprequesthandler(httpservermod.basehttprequesthandler):
 
     url_scheme = 'http'
@@ -70,10 +74,17 @@
         httpservermod.basehttprequesthandler.__init__(self, *args, **kargs)
 
     def _log_any(self, fp, format, *args):
-        fp.write(pycompat.sysbytes(
-            r"%s - - [%s] %s" % (self.client_address[0],
-                                 self.log_date_time_string(),
-                                 format % args)) + '\n')
+        fp.write(
+            pycompat.sysbytes(
+                r"%s - - [%s] %s"
+                % (
+                    self.client_address[0],
+                    self.log_date_time_string(),
+                    format % args,
+                )
+            )
+            + '\n'
+        )
         fp.flush()
 
     def log_error(self, format, *args):
@@ -85,11 +96,16 @@
     def log_request(self, code=r'-', size=r'-'):
         xheaders = []
         if util.safehasattr(self, 'headers'):
-            xheaders = [h for h in self.headers.items()
-                        if h[0].startswith(r'x-')]
-        self.log_message(r'"%s" %s %s%s',
-                         self.requestline, str(code), str(size),
-                         r''.join([r' %s:%s' % h for h in sorted(xheaders)]))
+            xheaders = [
+                h for h in self.headers.items() if h[0].startswith(r'x-')
+            ]
+        self.log_message(
+            r'"%s" %s %s%s',
+            self.requestline,
+            str(code),
+            str(size),
+            r''.join([r' %s:%s' % h for h in sorted(xheaders)]),
+        )
 
     def do_write(self):
         try:
@@ -104,15 +120,22 @@
         except Exception as e:
             # I/O below could raise another exception. So log the original
             # exception first to ensure it is recorded.
-            if not (isinstance(e, (OSError, socket.error))
-                    and e.errno == errno.ECONNRESET):
+            if not (
+                isinstance(e, (OSError, socket.error))
+                and e.errno == errno.ECONNRESET
+            ):
                 tb = r"".join(traceback.format_exception(*sys.exc_info()))
                 # We need a native-string newline to poke in the log
                 # message, because we won't get a newline when using an
                 # r-string. This is the easy way out.
                 newline = chr(10)
-                self.log_error(r"Exception happened during processing "
-                               r"request '%s':%s%s", self.path, newline, tb)
+                self.log_error(
+                    r"Exception happened during processing "
+                    r"request '%s':%s%s",
+                    self.path,
+                    newline,
+                    tb,
+                )
 
             self._start_response(r"500 Internal Server Error", [])
             self._write(b"Internal Server Error")
@@ -129,10 +152,10 @@
         path, query = _splitURI(self.path)
 
         # Ensure the slicing of path below is valid
-        if (path != self.server.prefix
-            and not path.startswith(self.server.prefix + b'/')):
-            self._start_response(pycompat.strurl(common.statusmessage(404)),
-                                 [])
+        if path != self.server.prefix and not path.startswith(
+            self.server.prefix + b'/'
+        ):
+            self._start_response(pycompat.strurl(common.statusmessage(404)), [])
             if self.command == 'POST':
                 # Paranoia: tell the client we're going to close the
                 # socket so they don't try and reuse a socket that
@@ -151,7 +174,7 @@
         env[r'SERVER_PORT'] = str(self.server.server_port)
         env[r'REQUEST_URI'] = self.path
         env[r'SCRIPT_NAME'] = pycompat.sysstr(self.server.prefix)
-        env[r'PATH_INFO'] = pycompat.sysstr(path[len(self.server.prefix):])
+        env[r'PATH_INFO'] = pycompat.sysstr(path[len(self.server.prefix) :])
         env[r'REMOTE_HOST'] = self.client_address[0]
         env[r'REMOTE_ADDR'] = self.client_address[0]
         env[r'QUERY_STRING'] = query or r''
@@ -170,8 +193,11 @@
             length = self.headers.getheader(r'content-length')
         if length:
             env[r'CONTENT_LENGTH'] = length
-        for header in [h for h in self.headers.keys()
-                      if h.lower() not in (r'content-type', r'content-length')]:
+        for header in [
+            h
+            for h in self.headers.keys()
+            if h.lower() not in (r'content-type', r'content-length')
+        ]:
             hkey = r'HTTP_' + header.replace(r'-', r'_').upper()
             hval = self.headers.get(header)
             hval = hval.replace(r'\n', r'').strip()
@@ -185,11 +211,13 @@
 
         env[r'wsgi.input'] = self.rfile
         env[r'wsgi.errors'] = _error_logger(self)
-        env[r'wsgi.multithread'] = isinstance(self.server,
-                                             socketserver.ThreadingMixIn)
+        env[r'wsgi.multithread'] = isinstance(
+            self.server, socketserver.ThreadingMixIn
+        )
         if util.safehasattr(socketserver, 'ForkingMixIn'):
-            env[r'wsgi.multiprocess'] = isinstance(self.server,
-                                                   socketserver.ForkingMixIn)
+            env[r'wsgi.multiprocess'] = isinstance(
+                self.server, socketserver.ForkingMixIn
+            )
         else:
             env[r'wsgi.multiprocess'] = False
 
@@ -209,8 +237,9 @@
 
     def send_headers(self):
         if not self.saved_status:
-            raise AssertionError("Sending headers before "
-                                 "start_response() called")
+            raise AssertionError(
+                "Sending headers before " "start_response() called"
+            )
         saved_status = self.saved_status.split(None, 1)
         saved_status[0] = int(saved_status[0])
         self.send_response(*saved_status)
@@ -220,10 +249,11 @@
             self.send_header(*h)
             if h[0].lower() == r'content-length':
                 self.length = int(h[1])
-        if (self.length is None and
-            saved_status[0] != common.HTTP_NOT_MODIFIED):
-            self._chunked = (not self.close_connection and
-                             self.request_version == r'HTTP/1.1')
+        if self.length is None and saved_status[0] != common.HTTP_NOT_MODIFIED:
+            self._chunked = (
+                not self.close_connection
+                and self.request_version == r'HTTP/1.1'
+            )
             if self._chunked:
                 self.send_header(r'Transfer-Encoding', r'chunked')
             else:
@@ -237,8 +267,9 @@
         code = int(code)
         self.saved_status = http_status
         bad_headers = (r'connection', r'transfer-encoding')
-        self.saved_headers = [h for h in headers
-                              if h[0].lower() not in bad_headers]
+        self.saved_headers = [
+            h for h in headers if h[0].lower() not in bad_headers
+        ]
         return self._write
 
     def _write(self, data):
@@ -248,8 +279,10 @@
             self.send_headers()
         if self.length is not None:
             if len(data) > self.length:
-                raise AssertionError("Content-length header sent, but more "
-                                     "bytes than specified are being written.")
+                raise AssertionError(
+                    "Content-length header sent, but more "
+                    "bytes than specified are being written."
+                )
             self.length = self.length - len(data)
         elif self._chunked and data:
             data = '%x\r\n%s\r\n' % (len(data), data)
@@ -266,6 +299,7 @@
             return encoding.strfromlocal(self.server.serverheader)
         return httpservermod.basehttprequesthandler.version_string(self)
 
+
 class _httprequesthandlerssl(_httprequesthandler):
     """HTTPS handler based on Python's ssl module"""
 
@@ -275,6 +309,7 @@
     def preparehttpserver(httpserver, ui):
         try:
             from .. import sslutil
+
             sslutil.modernssl
         except ImportError:
             raise error.Abort(_("SSL support is unavailable"))
@@ -286,33 +321,40 @@
         cafile = ui.config('devel', 'servercafile')
         reqcert = ui.configbool('devel', 'serverrequirecert')
 
-        httpserver.socket = sslutil.wrapserversocket(httpserver.socket,
-                                                     ui,
-                                                     certfile=certfile,
-                                                     cafile=cafile,
-                                                     requireclientcert=reqcert)
+        httpserver.socket = sslutil.wrapserversocket(
+            httpserver.socket,
+            ui,
+            certfile=certfile,
+            cafile=cafile,
+            requireclientcert=reqcert,
+        )
 
     def setup(self):
         self.connection = self.request
         self.rfile = self.request.makefile(r"rb", self.rbufsize)
         self.wfile = self.request.makefile(r"wb", self.wbufsize)
 
+
 try:
     import threading
-    threading.activeCount() # silence pyflakes and bypass demandimport
+
+    threading.activeCount()  # silence pyflakes and bypass demandimport
     _mixin = socketserver.ThreadingMixIn
 except ImportError:
     if util.safehasattr(os, "fork"):
         _mixin = socketserver.ForkingMixIn
     else:
+
         class _mixin(object):
             pass
 
+
 def openlog(opt, default):
     if opt and opt != '-':
         return open(opt, 'ab')
     return default
 
+
 class MercurialHTTPServer(_mixin, httpservermod.httpserver, object):
 
     # SO_REUSEADDR has broken semantics on windows
@@ -341,13 +383,16 @@
 
         self.serverheader = ui.config('web', 'server-header')
 
+
 class IPv6HTTPServer(MercurialHTTPServer):
     address_family = getattr(socket, 'AF_INET6', None)
+
     def __init__(self, *args, **kwargs):
         if self.address_family is None:
             raise error.RepoError(_('IPv6 is not available on this system'))
         super(IPv6HTTPServer, self).__init__(*args, **kwargs)
 
+
 def create_server(ui, app):
 
     if ui.config('web', 'certificate'):
@@ -363,6 +408,7 @@
     # ugly hack due to python issue5853 (for threaded use)
     try:
         import mimetypes
+
         mimetypes.init()
     except UnicodeDecodeError:
         # Python 2.x's mimetypes module attempts to decode strings
@@ -370,14 +416,14 @@
         # as ascii (clown fail), because the default Python Unicode
         # codec is hardcoded as ascii.
 
-        sys.argv # unwrap demand-loader so that reload() works
+        sys.argv  # unwrap demand-loader so that reload() works
         # resurrect sys.setdefaultencoding()
         try:
             importlib.reload(sys)
         except AttributeError:
             reload(sys)
         oldenc = sys.getdefaultencoding()
-        sys.setdefaultencoding("latin1") # or any full 8-bit encoding
+        sys.setdefaultencoding("latin1")  # or any full 8-bit encoding
         mimetypes.init()
         sys.setdefaultencoding(oldenc)
 
@@ -386,5 +432,7 @@
     try:
         return cls(ui, app, (address, port), handler)
     except socket.error as inst:
-        raise error.Abort(_("cannot start server at '%s:%d': %s")
-                          % (address, port, encoding.strtolocal(inst.args[1])))
+        raise error.Abort(
+            _("cannot start server at '%s:%d': %s")
+            % (address, port, encoding.strtolocal(inst.args[1]))
+        )
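
black does not merge implicitly concatenated string literals, so pairs
that previously spanned two source lines can land side by side on one
line, as in "Sending headers before " "start_response() called" above.
The semantics are unchanged because Python joins adjacent literals at
compile time:

    msg = 'Sending headers before ' 'start_response() called'
    assert msg == 'Sending headers before start_response() called'
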
--- a/mercurial/hgweb/webcommands.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/webcommands.py	Sun Oct 06 09:45:02 2019 -0400
@@ -39,17 +39,14 @@
     templateutil,
 )
 
-from ..utils import (
-    stringutil,
-)
+from ..utils import stringutil
 
-from . import (
-    webutil,
-)
+from . import webutil
 
 __all__ = []
 commands = {}
 
+
 class webcommand(object):
     """Decorator used to register a web command handler.
 
@@ -81,6 +78,7 @@
         commands[self.name] = func
         return func
 
+
 @webcommand('log')
 def log(web):
     """
@@ -103,6 +101,7 @@
     else:
         return changelog(web)
 
+
 @webcommand('rawfile')
 def rawfile(web):
     guessmime = web.configbool('web', 'guessmime')
@@ -136,12 +135,14 @@
         mt += '; charset="%s"' % encoding.encoding
 
     web.res.headers['Content-Type'] = mt
-    filename = (path.rpartition('/')[-1]
-                .replace('\\', '\\\\').replace('"', '\\"'))
+    filename = (
+        path.rpartition('/')[-1].replace('\\', '\\\\').replace('"', '\\"')
+    )
     web.res.headers['Content-Disposition'] = 'inline; filename="%s"' % filename
     web.res.setbodybytes(text)
     return web.res.sendresponse()
 
+
 def _filerevision(web, fctx):
     f = fctx.path()
     text = fctx.data()
@@ -151,15 +152,18 @@
     if stringutil.binary(text):
         mt = pycompat.sysbytes(
             mimetypes.guess_type(pycompat.fsdecode(f))[0]
-            or r'application/octet-stream')
+            or r'application/octet-stream'
+        )
         text = '(binary:%s)' % mt
 
     def lines(context):
         for lineno, t in enumerate(text.splitlines(True)):
-            yield {"line": t,
-                   "lineid": "l%d" % (lineno + 1),
-                   "linenumber": "% 6d" % (lineno + 1),
-                   "parity": next(parity)}
+            yield {
+                "line": t,
+                "lineid": "l%d" % (lineno + 1),
+                "linenumber": "% 6d" % (lineno + 1),
+                "parity": next(parity),
+            }
 
     return web.sendtemplate(
         'filerevision',
@@ -170,7 +174,9 @@
         rename=webutil.renamelink(fctx),
         permissions=fctx.manifest().flags(f),
         ishead=int(ishead),
-        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
+    )
+
 
 @webcommand('file')
 def file(web):
@@ -206,6 +212,7 @@
         except ErrorResponse:
             raise inst
 
+
 def _search(web):
     MODE_REVISION = 'rev'
     MODE_KEYWORD = 'keyword'
@@ -232,9 +239,11 @@
         for ctx in revgen():
             miss = 0
             for q in qw:
-                if not (q in lower(ctx.user()) or
-                        q in lower(ctx.description()) or
-                        q in lower(" ".join(ctx.files()))):
+                if not (
+                    q in lower(ctx.user())
+                    or q in lower(ctx.description())
+                    or q in lower(" ".join(ctx.files()))
+                ):
                     miss = 1
                     break
             if miss:
@@ -273,8 +282,10 @@
             # no revset syntax used
             return MODE_KEYWORD, query
 
-        if any((token, (value or '')[:3]) == ('string', 're:')
-               for token, value, pos in revsetlang.tokenize(revdef)):
+        if any(
+            (token, (value or '')[:3]) == ('string', 're:')
+            for token, value, pos in revsetlang.tokenize(revdef)
+        ):
             return MODE_KEYWORD, query
 
         funcsused = revsetlang.funcsused(tree)
@@ -282,16 +293,21 @@
             return MODE_KEYWORD, query
 
         try:
-            mfunc = revset.match(web.repo.ui, revdef,
-                                 lookup=revset.lookupfn(web.repo))
+            mfunc = revset.match(
+                web.repo.ui, revdef, lookup=revset.lookupfn(web.repo)
+            )
             revs = mfunc(web.repo)
             return MODE_REVSET, revs
             # ParseError: wrongly placed tokens, wrong arguments, etc
             # RepoLookupError: no such revision, e.g. in 'revision:'
             # Abort: bookmark/tag not exists
             # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo
-        except (error.ParseError, error.RepoLookupError, error.Abort,
-                LookupError):
+        except (
+            error.ParseError,
+            error.RepoLookupError,
+            error.Abort,
+            LookupError,
+        ):
             return MODE_KEYWORD, query
 
     def changelist(context):
@@ -304,11 +320,13 @@
             files = webutil.listfilediffs(ctx.files(), n, web.maxfiles)
 
             lm = webutil.commonentry(web.repo, ctx)
-            lm.update({
-                'parity': next(parity),
-                'changelogtag': showtags,
-                'files': files,
-            })
+            lm.update(
+                {
+                    'parity': next(parity),
+                    'changelogtag': showtags,
+                    'files': files,
+                }
+            )
             yield lm
 
             if count >= revcount:
@@ -361,7 +379,9 @@
         lessvars=lessvars,
         modedesc=searchfunc[1],
         showforcekw=showforcekw,
-        showunforcekw=showunforcekw)
+        showunforcekw=showunforcekw,
+    )
+
 
 @webcommand('changelog')
 def changelog(web, shortlog=False):
@@ -453,7 +473,9 @@
         revcount=revcount,
         morevars=morevars,
         lessvars=lessvars,
-        query=query)
+        query=query,
+    )
+
 
 @webcommand('shortlog')
 def shortlog(web):
@@ -469,6 +491,7 @@
     """
     return changelog(web, shortlog=True)
 
+
 @webcommand('changeset')
 def changeset(web):
     """
@@ -487,12 +510,12 @@
     """
     ctx = webutil.changectx(web.repo, web.req)
 
-    return web.sendtemplate(
-        'changeset',
-        **webutil.changesetentry(web, ctx))
+    return web.sendtemplate('changeset', **webutil.changesetentry(web, ctx))
+
 
 rev = webcommand('rev')(changeset)
 
+
 def decodepath(path):
     """Hook for mapping a path in the repository to a path in the
     working copy.
@@ -501,6 +524,7 @@
     the virtual file system presented by the manifest command below."""
     return path
 
+
 @webcommand('manifest')
 def manifest(web):
     """
@@ -549,14 +573,14 @@
         if len(elements) == 1:
             files[remain] = full
         else:
-            h = dirs # need to retain ref to dirs (root)
+            h = dirs  # need to retain ref to dirs (root)
             for elem in elements[0:-1]:
                 if elem not in h:
                     h[elem] = {}
                 h = h[elem]
                 if len(h) > 1:
                     break
-            h[None] = None # denotes files present
+            h[None] = None  # denotes files present
 
     if mf and not files and not dirs:
         raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)
@@ -566,12 +590,14 @@
             full = files[f]
 
             fctx = ctx.filectx(full)
-            yield {"file": full,
-                   "parity": next(parity),
-                   "basename": f,
-                   "date": fctx.date(),
-                   "size": fctx.size(),
-                   "permissions": mf.flags(full)}
+            yield {
+                "file": full,
+                "parity": next(parity),
+                "basename": f,
+                "date": fctx.date(),
+                "size": fctx.size(),
+                "permissions": mf.flags(full),
+            }
 
     def dirlist(context):
         for d in sorted(dirs):
@@ -585,10 +611,12 @@
                 h = v
 
             path = "%s%s" % (abspath, d)
-            yield {"parity": next(parity),
-                   "path": path,
-                   "emptydirs": "/".join(emptydirs),
-                   "basename": d}
+            yield {
+                "parity": next(parity),
+                "path": path,
+                "emptydirs": "/".join(emptydirs),
+                "basename": d,
+            }
 
     return web.sendtemplate(
         'manifest',
@@ -599,7 +627,9 @@
         fentries=templateutil.mappinggenerator(filelist),
         dentries=templateutil.mappinggenerator(dirlist),
         archives=web.archivelist(hex(node)),
-        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
+    )
+
 
 @webcommand('tags')
 def tags(web):
@@ -623,18 +653,21 @@
         if latestonly:
             t = t[:1]
         for k, n in t:
-            yield {"parity": next(parity),
-                   "tag": k,
-                   "date": web.repo[n].date(),
-                   "node": hex(n)}
+            yield {
+                "parity": next(parity),
+                "tag": k,
+                "date": web.repo[n].date(),
+                "node": hex(n),
+            }
 
     return web.sendtemplate(
         'tags',
         node=hex(web.repo.changelog.tip()),
         entries=templateutil.mappinggenerator(entries, args=(False, False)),
-        entriesnotip=templateutil.mappinggenerator(entries,
-                                                   args=(True, False)),
-        latestentry=templateutil.mappinggenerator(entries, args=(True, True)))
+        entriesnotip=templateutil.mappinggenerator(entries, args=(True, False)),
+        latestentry=templateutil.mappinggenerator(entries, args=(True, True)),
+    )
+
 
 @webcommand('bookmarks')
 def bookmarks(web):
@@ -658,10 +691,12 @@
         if latestonly:
             t = i[:1]
         for k, n in t:
-            yield {"parity": next(parity),
-                   "bookmark": k,
-                   "date": web.repo[n].date(),
-                   "node": hex(n)}
+            yield {
+                "parity": next(parity),
+                "bookmark": k,
+                "date": web.repo[n].date(),
+                "node": hex(n),
+            }
 
     if i:
         latestrev = i[0][1]
@@ -674,7 +709,9 @@
         node=hex(web.repo.changelog.tip()),
         lastchange=templateutil.mappinglist([{'date': lastdate}]),
         entries=templateutil.mappinggenerator(entries, args=(False,)),
-        latestentry=templateutil.mappinggenerator(entries, args=(True,)))
+        latestentry=templateutil.mappinggenerator(entries, args=(True,)),
+    )
+
 
 @webcommand('branches')
 def branches(web):
@@ -697,7 +734,9 @@
         'branches',
         node=hex(web.repo.changelog.tip()),
         entries=entries,
-        latestentry=latestentry)
+        latestentry=latestentry,
+    )
+
 
 @webcommand('summary')
 def summary(web):
@@ -718,11 +757,11 @@
         parity = paritygen(web.stripecount)
         count = 0
         for k, n in i:
-            if k == "tip": # skip tip
+            if k == "tip":  # skip tip
                 continue
 
             count += 1
-            if count > 10: # limit to 10 tags
+            if count > 10:  # limit to 10 tags
                 break
 
             yield {
@@ -738,14 +777,16 @@
         sortkey = lambda b: (web.repo[b[1]].rev(), b[0])
         marks = sorted(marks, key=sortkey, reverse=True)
         for k, n in marks[:10]:  # limit to 10 bookmarks
-            yield {'parity': next(parity),
-                   'bookmark': k,
-                   'date': web.repo[n].date(),
-                   'node': hex(n)}
+            yield {
+                'parity': next(parity),
+                'bookmark': k,
+                'date': web.repo[n].date(),
+                'node': hex(n),
+            }
 
     def changelist(context):
         parity = paritygen(web.stripecount, offset=start - end)
-        l = [] # build a list in forward order for efficiency
+        l = []  # build a list in forward order for efficiency
         revs = []
         if start < end:
             revs = web.repo.changelog.revs(start, end - 1)
@@ -776,12 +817,15 @@
         tags=templateutil.mappinggenerator(tagentries, name='tagentry'),
         bookmarks=templateutil.mappinggenerator(bookmarks),
         branches=webutil.branchentries(web.repo, web.stripecount, 10),
-        shortlog=templateutil.mappinggenerator(changelist,
-                                               name='shortlogentry'),
+        shortlog=templateutil.mappinggenerator(
+            changelist, name='shortlogentry'
+        ),
         node=tip.hex(),
         symrev='tip',
         archives=web.archivelist('tip'),
-        labels=templateutil.hybridlist(labels, name='label'))
+        labels=templateutil.hybridlist(labels, name='label'),
+    )
+
 
 @webcommand('filediff')
 def filediff(web):
@@ -828,10 +872,13 @@
         symrev=webutil.symrevorshortnode(web.req, ctx),
         rename=rename,
         diff=diffs,
-        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
+    )
+
 
 diff = webcommand('diff')(filediff)
 
+
 @webcommand('comparison')
 def comparison(web):
     """
@@ -864,7 +911,8 @@
         if f.isbinary():
             mt = pycompat.sysbytes(
                 mimetypes.guess_type(pycompat.fsdecode(f.path()))[0]
-                or r'application/octet-stream')
+                or r'application/octet-stream'
+            )
             return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
         return f.data().splitlines()
 
@@ -905,7 +953,9 @@
         rightrev=rightrev,
         rightnode=hex(rightnode),
         comparison=comparison,
-        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, ctx))
+    )
+
 
 @webcommand('annotate')
 def annotate(web):
@@ -934,6 +984,7 @@
     # TODO there are still redundant operations within basefilectx.parents()
     # and from the fctx.annotate() call itself that could be cached.
     parentscache = {}
+
     def parents(context, f):
         rev = f.rev()
         if rev not in parentscache:
@@ -952,9 +1003,15 @@
         if fctx.isbinary():
             mt = pycompat.sysbytes(
                 mimetypes.guess_type(pycompat.fsdecode(fctx.path()))[0]
-                or r'application/octet-stream')
-            lines = [dagop.annotateline(fctx=fctx.filectx(fctx.filerev()),
-                                        lineno=1, text='(binary:%s)' % mt)]
+                or r'application/octet-stream'
+            )
+            lines = [
+                dagop.annotateline(
+                    fctx=fctx.filectx(fctx.filerev()),
+                    lineno=1,
+                    text='(binary:%s)' % mt,
+                )
+            ]
         else:
             lines = webutil.annotate(web.req, fctx, web.repo.ui)
 
@@ -969,22 +1026,24 @@
             else:
                 blockhead = None
             previousrev = rev
-            yield {"parity": next(parity),
-                   "node": f.hex(),
-                   "rev": rev,
-                   "author": f.user(),
-                   "parents": templateutil.mappinggenerator(parents, args=(f,)),
-                   "desc": f.description(),
-                   "extra": f.extra(),
-                   "file": f.path(),
-                   "blockhead": blockhead,
-                   "blockparity": blockparity,
-                   "targetline": aline.lineno,
-                   "line": aline.text,
-                   "lineno": lineno + 1,
-                   "lineid": "l%d" % (lineno + 1),
-                   "linenumber": "% 6d" % (lineno + 1),
-                   "revdate": f.date()}
+            yield {
+                "parity": next(parity),
+                "node": f.hex(),
+                "rev": rev,
+                "author": f.user(),
+                "parents": templateutil.mappinggenerator(parents, args=(f,)),
+                "desc": f.description(),
+                "extra": f.extra(),
+                "file": f.path(),
+                "blockhead": blockhead,
+                "blockparity": blockparity,
+                "targetline": aline.lineno,
+                "line": aline.text,
+                "lineno": lineno + 1,
+                "lineid": "l%d" % (lineno + 1),
+                "linenumber": "% 6d" % (lineno + 1),
+                "revdate": f.date(),
+            }
 
     diffopts = webutil.difffeatureopts(web.req, web.repo.ui, 'annotate')
     diffopts = {k: getattr(diffopts, k) for k in diffopts.defaults}
@@ -999,7 +1058,9 @@
         permissions=fctx.manifest().flags(f),
         ishead=int(ishead),
         diffopts=templateutil.hybriddict(diffopts),
-        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
+    )
+
 
 @webcommand('filelog')
 def filelog(web):
@@ -1023,11 +1084,11 @@
         f = webutil.cleanpath(web.repo, web.req.qsparams['file'])
         fl = web.repo.file(f)
         numrevs = len(fl)
-        if not numrevs: # file doesn't exist at all
+        if not numrevs:  # file doesn't exist at all
             raise
         rev = webutil.changectx(web.repo, web.req).rev()
         first = fl.linkrev(0)
-        if rev < first: # current rev is from before file existed
+        if rev < first:  # current rev is from before file existed
             raise
         frev = numrevs - 1
         while fl.linkrev(frev) > rev:
@@ -1058,14 +1119,17 @@
         lessvars['descend'] = morevars['descend'] = web.req.qsparams['descend']
 
     count = fctx.filerev() + 1
-    start = max(0, count - revcount) # first rev on this page
-    end = min(count, start + revcount) # last rev on this page
+    start = max(0, count - revcount)  # first rev on this page
+    end = min(count, start + revcount)  # last rev on this page
     parity = paritygen(web.stripecount, offset=start - end)
 
     repo = web.repo
     filelog = fctx.filelog()
-    revs = [filerev for filerev in filelog.revs(start, end - 1)
-            if filelog.linkrev(filerev) in repo]
+    revs = [
+        filerev
+        for filerev in filelog.revs(start, end - 1)
+        if filelog.linkrev(filerev) in repo
+    ]
     entries = []
 
     diffstyle = web.config('web', 'style')
@@ -1076,9 +1140,15 @@
         ctx = fctx.changectx()
         basectx = ctx.p1()
         path = fctx.path()
-        return webutil.diffs(web, ctx, basectx, [path], diffstyle,
-                             linerange=linerange,
-                             lineidprefix='%s-' % ctx.hex()[:12])
+        return webutil.diffs(
+            web,
+            ctx,
+            basectx,
+            [path],
+            diffstyle,
+            linerange=linerange,
+            lineidprefix='%s-' % ctx.hex()[:12],
+        )
 
     linerange = None
     if lrange is not None:
@@ -1097,14 +1167,16 @@
             # follow renames across filtered (not in range) revisions
             path = c.path()
             lm = webutil.commonentry(repo, c)
-            lm.update({
-                'parity': next(parity),
-                'filerev': c.rev(),
-                'file': path,
-                'diff': diffs,
-                'linerange': webutil.formatlinerange(*lr),
-                'rename': templateutil.mappinglist([]),
-            })
+            lm.update(
+                {
+                    'parity': next(parity),
+                    'filerev': c.rev(),
+                    'file': path,
+                    'diff': diffs,
+                    'linerange': webutil.formatlinerange(*lr),
+                    'rename': templateutil.mappinglist([]),
+                }
+            )
             entries.append(lm)
             if i == revcount:
                 break
@@ -1117,13 +1189,15 @@
             if patch:
                 diffs = diff(iterfctx)
             lm = webutil.commonentry(repo, iterfctx)
-            lm.update({
-                'parity': next(parity),
-                'filerev': i,
-                'file': f,
-                'diff': diffs,
-                'rename': webutil.renamelink(iterfctx),
-            })
+            lm.update(
+                {
+                    'parity': next(parity),
+                    'filerev': i,
+                    'file': f,
+                    'diff': diffs,
+                    'rename': webutil.renamelink(iterfctx),
+                }
+            )
             entries.append(lm)
         entries.reverse()
         revnav = webutil.filerevnav(web.repo, fctx.path())
@@ -1144,7 +1218,9 @@
         revcount=revcount,
         morevars=morevars,
         lessvars=lessvars,
-        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx)))
+        **pycompat.strkwargs(webutil.commonentry(web.repo, fctx))
+    )
+
 
 @webcommand('archive')
 def archive(web):
@@ -1175,8 +1251,7 @@
         msg = 'Unsupported archive type: %s' % stringutil.pprint(type_)
         raise ErrorResponse(HTTP_NOT_FOUND, msg)
 
-    if not ((type_ in allowed or
-             web.configbool("web", "allow" + type_))):
+    if not ((type_ in allowed or web.configbool("web", "allow" + type_))):
         msg = 'Archive type not allowed: %s' % type_
         raise ErrorResponse(HTTP_FORBIDDEN, msg)
 
@@ -1197,30 +1272,42 @@
         if pats:
             files = [f for f in ctx.manifest().keys() if match(f)]
             if not files:
-                raise ErrorResponse(HTTP_NOT_FOUND,
-                    'file(s) not found: %s' % file)
+                raise ErrorResponse(
+                    HTTP_NOT_FOUND, 'file(s) not found: %s' % file
+                )
 
     mimetype, artype, extension, encoding = webutil.archivespecs[type_]
 
     web.res.headers['Content-Type'] = mimetype
     web.res.headers['Content-Disposition'] = 'attachment; filename=%s%s' % (
-        name, extension)
+        name,
+        extension,
+    )
 
     if encoding:
         web.res.headers['Content-Encoding'] = encoding
 
     web.res.setbodywillwrite()
     if list(web.res.sendresponse()):
-        raise error.ProgrammingError('sendresponse() should not emit data '
-                                     'if writing later')
+        raise error.ProgrammingError(
+            'sendresponse() should not emit data ' 'if writing later'
+        )
 
     bodyfh = web.res.getbodyfile()
 
-    archival.archive(web.repo, bodyfh, cnode, artype, prefix=name, match=match,
-                     subrepos=web.configbool("web", "archivesubrepos"))
+    archival.archive(
+        web.repo,
+        bodyfh,
+        cnode,
+        artype,
+        prefix=name,
+        match=match,
+        subrepos=web.configbool("web", "archivesubrepos"),
+    )
 
     return []
 
+
 @webcommand('static')
 def static(web):
     fname = web.req.qsparams['file']
@@ -1236,6 +1323,7 @@
     staticfile(static, fname, web.res)
     return web.res.sendresponse()
 
+
 @webcommand('graph')
 def graph(web):
     """
@@ -1316,8 +1404,11 @@
         # since hgweb graphing code is not itself lazy yet.
         dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
         # As we said one line above... not lazy.
-        tree = list(item for item in graphmod.colored(dag, web.repo)
-                    if item[1] == graphmod.CHANGESET)
+        tree = list(
+            item
+            for item in graphmod.colored(dag, web.repo)
+            if item[1] == graphmod.CHANGESET
+        )
 
     def fulltree():
         pos = web.repo[graphtop].rev()
@@ -1325,34 +1416,47 @@
         if pos != -1:
             revs = web.repo.changelog.revs(pos, lastrev)
             dag = graphmod.dagwalker(web.repo, smartset.baseset(revs))
-            tree = list(item for item in graphmod.colored(dag, web.repo)
-                        if item[1] == graphmod.CHANGESET)
+            tree = list(
+                item
+                for item in graphmod.colored(dag, web.repo)
+                if item[1] == graphmod.CHANGESET
+            )
         return tree
 
     def jsdata(context):
         for (id, type, ctx, vtx, edges) in fulltree():
-            yield {'node': pycompat.bytestr(ctx),
-                   'graphnode': webutil.getgraphnode(web.repo, ctx),
-                   'vertex': vtx,
-                   'edges': edges}
+            yield {
+                'node': pycompat.bytestr(ctx),
+                'graphnode': webutil.getgraphnode(web.repo, ctx),
+                'vertex': vtx,
+                'edges': edges,
+            }
 
     def nodes(context):
         parity = paritygen(web.stripecount)
         for row, (id, type, ctx, vtx, edges) in enumerate(tree):
             entry = webutil.commonentry(web.repo, ctx)
-            edgedata = [{'col': edge[0],
-                         'nextcol': edge[1],
-                         'color': (edge[2] - 1) % 6 + 1,
-                         'width': edge[3],
-                         'bcolor': edge[4]}
-                        for edge in edges]
+            edgedata = [
+                {
+                    'col': edge[0],
+                    'nextcol': edge[1],
+                    'color': (edge[2] - 1) % 6 + 1,
+                    'width': edge[3],
+                    'bcolor': edge[4],
+                }
+                for edge in edges
+            ]
 
-            entry.update({'col': vtx[0],
-                          'color': (vtx[1] - 1) % 6 + 1,
-                          'parity': next(parity),
-                          'edges': templateutil.mappinglist(edgedata),
-                          'row': row,
-                          'nextrow': row + 1})
+            entry.update(
+                {
+                    'col': vtx[0],
+                    'color': (vtx[1] - 1) % 6 + 1,
+                    'parity': next(parity),
+                    'edges': templateutil.mappinglist(edgedata),
+                    'row': row,
+                    'nextrow': row + 1,
+                }
+            )
 
             yield entry
 
@@ -1376,7 +1480,9 @@
         nodes=templateutil.mappinggenerator(nodes),
         node=ctx.hex(),
         archives=web.archivelist('tip'),
-        changenav=changenav)
+        changenav=changenav,
+    )
+
 
 def _getdoc(e):
     doc = e[0].__doc__
@@ -1386,6 +1492,7 @@
         doc = _('(no help text available)')
     return doc
 
+
 @webcommand('help')
 def help(web):
     """
@@ -1405,6 +1512,7 @@
 
     topicname = web.req.qsparams.get('node')
     if not topicname:
+
         def topics(context):
             for h in helpmod.helptable:
                 entries, summary, _doc = h[0:3]
@@ -1438,23 +1546,27 @@
             topics=templateutil.mappinggenerator(topics),
             earlycommands=templateutil.mappinggenerator(earlycommands),
             othercommands=templateutil.mappinggenerator(othercommands),
-            title='Index')
+            title='Index',
+        )
 
     # Render an index of sub-topics.
     if topicname in helpmod.subtopics:
         topics = []
         for entries, summary, _doc in helpmod.subtopics[topicname]:
-            topics.append({
-                'topic': '%s.%s' % (topicname, entries[0]),
-                'basename': entries[0],
-                'summary': summary,
-            })
+            topics.append(
+                {
+                    'topic': '%s.%s' % (topicname, entries[0]),
+                    'basename': entries[0],
+                    'summary': summary,
+                }
+            )
 
         return web.sendtemplate(
             'helptopics',
             topics=templateutil.mappinglist(topics),
             title=topicname,
-            subindex=True)
+            subindex=True,
+        )
 
     u = webutil.wsgiui.load()
     u.verbose = True
@@ -1475,10 +1587,8 @@
     except error.Abort:
         raise ErrorResponse(HTTP_NOT_FOUND)
 
-    return web.sendtemplate(
-        'help',
-        topic=topicname,
-        doc=doc)
+    return web.sendtemplate('help', topic=topicname, doc=doc)
+
 
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = commands.values()
--- a/mercurial/hgweb/webutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/webutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -41,32 +41,32 @@
     util,
 )
 
-from ..utils import (
-    stringutil,
+from ..utils import stringutil
+
+archivespecs = util.sortdict(
+    (
+        ('zip', ('application/zip', 'zip', '.zip', None)),
+        ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
+        ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
+    )
 )
 
-archivespecs = util.sortdict((
-    ('zip', ('application/zip', 'zip', '.zip', None)),
-    ('gz', ('application/x-gzip', 'tgz', '.tar.gz', None)),
-    ('bz2', ('application/x-bzip2', 'tbz2', '.tar.bz2', None)),
-))
 
 def archivelist(ui, nodeid, url=None):
     allowed = ui.configlist('web', 'allow-archive', untrusted=True)
     archives = []
 
     for typ, spec in archivespecs.iteritems():
-        if typ in allowed or ui.configbool('web', 'allow' + typ,
-                                           untrusted=True):
-            archives.append({
-                'type': typ,
-                'extension': spec[2],
-                'node': nodeid,
-                'url': url,
-            })
+        if typ in allowed or ui.configbool(
+            'web', 'allow' + typ, untrusted=True
+        ):
+            archives.append(
+                {'type': typ, 'extension': spec[2], 'node': nodeid, 'url': url}
+            )
 
     return templateutil.mappinglist(archives)
 
+
 def up(p):
     if p[0:1] != "/":
         p = "/" + p
@@ -77,6 +77,7 @@
         return "/"
     return up + "/"
 
+
 def _navseq(step, firststep=None):
     if firststep:
         yield firststep
@@ -92,8 +93,8 @@
         yield 3 * step
         step *= 10
 
+
 class revnav(object):
-
     def __init__(self, repo):
         """Navigation generation object
 
@@ -132,10 +133,14 @@
         """
         if not self:
             # empty repo
-            return templateutil.mappinglist([
-                {'before': templateutil.mappinglist([]),
-                 'after': templateutil.mappinglist([])},
-            ])
+            return templateutil.mappinglist(
+                [
+                    {
+                        'before': templateutil.mappinglist([]),
+                        'after': templateutil.mappinglist([]),
+                    },
+                ]
+            )
 
         targets = []
         for f in _navseq(1, pagelen):
@@ -152,22 +157,28 @@
             if rev not in self._revlog:
                 continue
             if pos < rev < limit:
-                navafter.append({'label': '+%d' % abs(rev - pos),
-                                 'node': self.hex(rev)})
+                navafter.append(
+                    {'label': '+%d' % abs(rev - pos), 'node': self.hex(rev)}
+                )
             if 0 < rev < pos:
-                navbefore.append({'label': '-%d' % abs(rev - pos),
-                                  'node': self.hex(rev)})
+                navbefore.append(
+                    {'label': '-%d' % abs(rev - pos), 'node': self.hex(rev)}
+                )
 
         navafter.append({'label': 'tip', 'node': 'tip'})
 
         # TODO: maybe this can be a scalar object supporting tomap()
-        return templateutil.mappinglist([
-            {'before': templateutil.mappinglist(navbefore),
-             'after': templateutil.mappinglist(navafter)},
-        ])
+        return templateutil.mappinglist(
+            [
+                {
+                    'before': templateutil.mappinglist(navbefore),
+                    'after': templateutil.mappinglist(navafter),
+                },
+            ]
+        )
+
 
 class filerevnav(revnav):
-
     def __init__(self, repo, path):
         """Navigation generation object
 
@@ -182,6 +193,7 @@
     def hex(self, rev):
         return hex(self._changelog.node(self._revlog.linkrev(rev)))
 
+
 # TODO: maybe this can be a wrapper class for changectx/filectx list, which
 # yields {'ctx': ctx}
 def _ctxsgen(context, ctxs):
@@ -198,6 +210,7 @@
             d['file'] = s.path()
         yield d
 
+
 def _siblings(siblings=None, hiderev=None):
     if siblings is None:
         siblings = []
@@ -206,9 +219,11 @@
         siblings = []
     return templateutil.mappinggenerator(_ctxsgen, args=(siblings,))
 
+
 def difffeatureopts(req, ui, section):
-    diffopts = diffutil.difffeatureopts(ui, untrusted=True,
-                                        section=section, whitespace=True)
+    diffopts = diffutil.difffeatureopts(
+        ui, untrusted=True, section=section, whitespace=True
+    )
 
     for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
         v = req.qsparams.get(k)
@@ -218,10 +233,12 @@
 
     return diffopts
 
+
 def annotate(req, fctx, ui):
     diffopts = difffeatureopts(req, ui, 'annotate')
     return fctx.annotate(follow=True, diffopts=diffopts)
 
+
 def parents(ctx, hide=None):
     if isinstance(ctx, context.basefilectx):
         introrev = ctx.introrev()
@@ -229,21 +246,26 @@
             return _siblings([ctx.repo()[introrev]], hide)
     return _siblings(ctx.parents(), hide)
 
+
 def children(ctx, hide=None):
     return _siblings(ctx.children(), hide)
 
+
 def renamelink(fctx):
     r = fctx.renamed()
     if r:
         return templateutil.mappinglist([{'file': r[0], 'node': hex(r[1])}])
     return templateutil.mappinglist([])
 
+
 def nodetagsdict(repo, node):
     return templateutil.hybridlist(repo.nodetags(node), name='name')
 
+
 def nodebookmarksdict(repo, node):
     return templateutil.hybridlist(repo.nodebookmarks(node), name='name')
 
+
 def nodebranchdict(repo, ctx):
     branches = []
     branch = ctx.branch()
@@ -257,6 +279,7 @@
         branches.append(branch)
     return templateutil.hybridlist(branches, name='name')
 
+
 def nodeinbranch(repo, ctx):
     branches = []
     branch = ctx.branch()
@@ -268,6 +291,7 @@
         branches.append(branch)
     return templateutil.hybridlist(branches, name='name')
 
+
 def nodebranchnodefault(ctx):
     branches = []
     branch = ctx.branch()
@@ -275,18 +299,22 @@
         branches.append(branch)
     return templateutil.hybridlist(branches, name='name')
 
+
 def _nodenamesgen(context, f, node, name):
     for t in f(node):
         yield {name: t}
 
+
 def showtag(repo, t1, node=nullid):
     args = (repo.nodetags, node, 'tag')
     return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
 
+
 def showbookmark(repo, t1, node=nullid):
     args = (repo.nodebookmarks, node, 'bookmark')
     return templateutil.mappinggenerator(_nodenamesgen, args=args, name=t1)
 
+
 def branchentries(repo, stripecount, limit=0):
     tips = []
     heads = repo.heads()
@@ -313,26 +341,29 @@
                 'branch': ctx.branch(),
                 'status': status,
                 'node': ctx.hex(),
-                'date': ctx.date()
+                'date': ctx.date(),
             }
 
     return templateutil.mappinggenerator(entries)
 
+
 def cleanpath(repo, path):
     path = path.lstrip('/')
     auditor = pathutil.pathauditor(repo.root, realfs=False)
     return pathutil.canonpath(repo.root, '', path, auditor=auditor)
 
+
 def changectx(repo, req):
     changeid = "tip"
     if 'node' in req.qsparams:
         changeid = req.qsparams['node']
         ipos = changeid.find(':')
         if ipos != -1:
-            changeid = changeid[(ipos + 1):]
+            changeid = changeid[(ipos + 1) :]
 
     return scmutil.revsymbol(repo, changeid)
 
+
 def basechangectx(repo, req):
     if 'node' in req.qsparams:
         changeid = req.qsparams['node']
@@ -343,6 +374,7 @@
 
     return None
 
+
 def filectx(repo, req):
     if 'file' not in req.qsparams:
         raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
@@ -360,40 +392,45 @@
 
     return fctx
 
+
 def linerange(req):
     linerange = req.qsparams.getall('linerange')
     if not linerange:
         return None
     if len(linerange) > 1:
-        raise ErrorResponse(HTTP_BAD_REQUEST,
-                            'redundant linerange parameter')
+        raise ErrorResponse(HTTP_BAD_REQUEST, 'redundant linerange parameter')
     try:
         fromline, toline = map(int, linerange[0].split(':', 1))
     except ValueError:
-        raise ErrorResponse(HTTP_BAD_REQUEST,
-                            'invalid linerange parameter')
+        raise ErrorResponse(HTTP_BAD_REQUEST, 'invalid linerange parameter')
     try:
         return util.processlinerange(fromline, toline)
     except error.ParseError as exc:
         raise ErrorResponse(HTTP_BAD_REQUEST, pycompat.bytestr(exc))
 
+
 def formatlinerange(fromline, toline):
     return '%d:%d' % (fromline + 1, toline)
 
+
 def _succsandmarkersgen(context, mapping):
     repo = context.resource(mapping, 'repo')
     itemmappings = templatekw.showsuccsandmarkers(context, mapping)
     for item in itemmappings.tovalue(context, mapping):
-        item['successors'] = _siblings(repo[successor]
-                                       for successor in item['successors'])
+        item['successors'] = _siblings(
+            repo[successor] for successor in item['successors']
+        )
         yield item
 
+
 def succsandmarkers(context, mapping):
     return templateutil.mappinggenerator(_succsandmarkersgen, args=(mapping,))
 
+
 # teach templater succsandmarkers is switched to (context, mapping) API
 succsandmarkers._requires = {'repo', 'ctx'}
 
+
 def _whyunstablegen(context, mapping):
     repo = context.resource(mapping, 'repo')
     ctx = context.resource(mapping, 'ctx')
@@ -404,11 +441,14 @@
             entry['divergentnodes'] = _siblings(entry['divergentnodes'])
         yield entry
 
+
 def whyunstable(context, mapping):
     return templateutil.mappinggenerator(_whyunstablegen, args=(mapping,))
 
+
 whyunstable._requires = {'repo', 'ctx'}
 
+
 def commonentry(repo, ctx):
     node = scmutil.binnode(ctx)
     return {
@@ -425,8 +465,9 @@
         'phase': ctx.phasestr(),
         'obsolete': ctx.obsolete(),
         'succsandmarkers': succsandmarkers,
-        'instabilities': templateutil.hybridlist(ctx.instabilities(),
-                                                 name='instability'),
+        'instabilities': templateutil.hybridlist(
+            ctx.instabilities(), name='instability'
+        ),
         'whyunstable': whyunstable,
         'branch': nodebranchnodefault(ctx),
         'inbranch': nodeinbranch(repo, ctx),
@@ -437,6 +478,7 @@
         'child': lambda context, mapping: children(ctx),
     }
 
+
 def changelistentry(web, ctx):
     '''Obtain a dictionary to be used for entries in a changelist.
 
@@ -450,15 +492,18 @@
     files = listfilediffs(ctx.files(), n, web.maxfiles)
 
     entry = commonentry(repo, ctx)
-    entry.update({
-        'allparents': lambda context, mapping: parents(ctx),
-        'parent': lambda context, mapping: parents(ctx, rev - 1),
-        'child': lambda context, mapping: children(ctx, rev + 1),
-        'changelogtag': showtags,
-        'files': files,
-    })
+    entry.update(
+        {
+            'allparents': lambda context, mapping: parents(ctx),
+            'parent': lambda context, mapping: parents(ctx, rev - 1),
+            'child': lambda context, mapping: children(ctx, rev + 1),
+            'changelogtag': showtags,
+            'files': files,
+        }
+    )
     return entry
 
+
 def changelistentries(web, revs, maxcount, parityfn):
     """Emit up to N records for an iterable of revisions."""
     repo = web.repo
@@ -475,29 +520,36 @@
 
         yield entry
 
+
 def symrevorshortnode(req, ctx):
     if 'node' in req.qsparams:
         return templatefilters.revescape(req.qsparams['node'])
     else:
         return short(scmutil.binnode(ctx))
 
+
 def _listfilesgen(context, ctx, stripecount):
     parity = paritygen(stripecount)
     for blockno, f in enumerate(ctx.files()):
         template = 'filenodelink' if f in ctx else 'filenolink'
-        yield context.process(template, {
-            'node': ctx.hex(),
-            'file': f,
-            'blockno': blockno + 1,
-            'parity': next(parity),
-        })
+        yield context.process(
+            template,
+            {
+                'node': ctx.hex(),
+                'file': f,
+                'blockno': blockno + 1,
+                'parity': next(parity),
+            },
+        )
+
 
 def changesetentry(web, ctx):
     '''Obtain a dictionary to be used to render the "changeset" template.'''
 
     showtags = showtag(web.repo, 'changesettag', scmutil.binnode(ctx))
-    showbookmarks = showbookmark(web.repo, 'changesetbookmark',
-                                 scmutil.binnode(ctx))
+    showbookmarks = showbookmark(
+        web.repo, 'changesetbookmark', scmutil.binnode(ctx)
+    )
     showbranch = nodebranchnodefault(ctx)
 
     basectx = basechangectx(web.repo, web.req)
@@ -521,12 +573,15 @@
         changesettag=showtags,
         changesetbookmark=showbookmarks,
         changesetbranch=showbranch,
-        files=templateutil.mappedgenerator(_listfilesgen,
-                                           args=(ctx, web.stripecount)),
+        files=templateutil.mappedgenerator(
+            _listfilesgen, args=(ctx, web.stripecount)
+        ),
         diffsummary=lambda context, mapping: diffsummary(diffstatsgen),
         diffstat=diffstats,
         archives=web.archivelist(ctx.hex()),
-        **pycompat.strkwargs(commonentry(web.repo, ctx)))
+        **pycompat.strkwargs(commonentry(web.repo, ctx))
+    )
+
 
 def _listfilediffsgen(context, files, node, max):
     for f in files[:max]:
@@ -534,9 +589,12 @@
     if len(files) > max:
         yield context.process('fileellipses', {})
 
+
 def listfilediffs(files, node, max):
-    return templateutil.mappedgenerator(_listfilediffsgen,
-                                        args=(files, node, max))
+    return templateutil.mappedgenerator(
+        _listfilediffsgen, args=(files, node, max)
+    )
+
 
 def _prettyprintdifflines(context, lines, blockno, lineidprefix):
     for lineno, l in enumerate(lines, 1):
@@ -549,15 +607,28 @@
             ltype = "difflineat"
         else:
             ltype = "diffline"
-        yield context.process(ltype, {
-            'line': l,
-            'lineno': lineno,
-            'lineid': lineidprefix + "l%s" % difflineno,
-            'linenumber': "% 8s" % difflineno,
-        })
+        yield context.process(
+            ltype,
+            {
+                'line': l,
+                'lineno': lineno,
+                'lineid': lineidprefix + "l%s" % difflineno,
+                'linenumber': "% 8s" % difflineno,
+            },
+        )
+
 
-def _diffsgen(context, repo, ctx, basectx, files, style, stripecount,
-              linerange, lineidprefix):
+def _diffsgen(
+    context,
+    repo,
+    ctx,
+    basectx,
+    files,
+    style,
+    stripecount,
+    linerange,
+    lineidprefix,
+):
     if files:
         m = match.exact(files)
     else:
@@ -578,20 +649,30 @@
                     continue
             lines.extend(hunklines)
         if lines:
-            l = templateutil.mappedgenerator(_prettyprintdifflines,
-                                             args=(lines, blockno,
-                                                   lineidprefix))
+            l = templateutil.mappedgenerator(
+                _prettyprintdifflines, args=(lines, blockno, lineidprefix)
+            )
             yield {
                 'parity': next(parity),
                 'blockno': blockno,
                 'lines': l,
             }
 
+
 def diffs(web, ctx, basectx, files, style, linerange=None, lineidprefix=''):
-    args = (web.repo, ctx, basectx, files, style, web.stripecount,
-            linerange, lineidprefix)
+    args = (
+        web.repo,
+        ctx,
+        basectx,
+        files,
+        style,
+        web.stripecount,
+        linerange,
+        lineidprefix,
+    )
     return templateutil.mappinggenerator(_diffsgen, args=args, name='diffblock')
 
+
 def _compline(type, leftlineno, leftline, rightlineno, rightline):
     lineid = leftlineno and ("l%d" % leftlineno) or ''
     lineid += rightlineno and ("r%d" % rightlineno) or ''
@@ -608,6 +689,7 @@
         'rightline': rightline or '',
     }
 
+
 def _getcompblockgen(context, leftlines, rightlines, opcodes):
     for type, llo, lhi, rlo, rhi in opcodes:
         type = pycompat.sysbytes(type)
@@ -615,30 +697,39 @@
         len2 = rhi - rlo
         count = min(len1, len2)
         for i in pycompat.xrange(count):
-            yield _compline(type=type,
-                            leftlineno=llo + i + 1,
-                            leftline=leftlines[llo + i],
-                            rightlineno=rlo + i + 1,
-                            rightline=rightlines[rlo + i])
+            yield _compline(
+                type=type,
+                leftlineno=llo + i + 1,
+                leftline=leftlines[llo + i],
+                rightlineno=rlo + i + 1,
+                rightline=rightlines[rlo + i],
+            )
         if len1 > len2:
             for i in pycompat.xrange(llo + count, lhi):
-                yield _compline(type=type,
-                                leftlineno=i + 1,
-                                leftline=leftlines[i],
-                                rightlineno=None,
-                                rightline=None)
+                yield _compline(
+                    type=type,
+                    leftlineno=i + 1,
+                    leftline=leftlines[i],
+                    rightlineno=None,
+                    rightline=None,
+                )
         elif len2 > len1:
             for i in pycompat.xrange(rlo + count, rhi):
-                yield _compline(type=type,
-                                leftlineno=None,
-                                leftline=None,
-                                rightlineno=i + 1,
-                                rightline=rightlines[i])
+                yield _compline(
+                    type=type,
+                    leftlineno=None,
+                    leftline=None,
+                    rightlineno=i + 1,
+                    rightline=rightlines[i],
+                )
+
 
 def _getcompblock(leftlines, rightlines, opcodes):
     args = (leftlines, rightlines, opcodes)
-    return templateutil.mappinggenerator(_getcompblockgen, args=args,
-                                         name='comparisonline')
+    return templateutil.mappinggenerator(
+        _getcompblockgen, args=args, name='comparisonline'
+    )
+
 
 def _comparegen(context, contextnum, leftlines, rightlines):
     '''Generator function that provides side-by-side comparison data.'''
@@ -651,27 +742,34 @@
             l = _getcompblock(leftlines, rightlines, oc)
             yield {'lines': l}
 
+
 def compare(contextnum, leftlines, rightlines):
     args = (contextnum, leftlines, rightlines)
-    return templateutil.mappinggenerator(_comparegen, args=args,
-                                         name='comparisonblock')
+    return templateutil.mappinggenerator(
+        _comparegen, args=args, name='comparisonblock'
+    )
+
 
 def diffstatgen(ui, ctx, basectx):
     '''Generator function that provides the diffstat data.'''
 
     diffopts = patch.diffopts(ui, {'noprefix': False})
-    stats = patch.diffstatdata(
-        util.iterlines(ctx.diff(basectx, opts=diffopts)))
+    stats = patch.diffstatdata(util.iterlines(ctx.diff(basectx, opts=diffopts)))
     maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
     while True:
         yield stats, maxname, maxtotal, addtotal, removetotal, binary
 
+
 def diffsummary(statgen):
     '''Return a short summary of the diff.'''
 
     stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
     return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % (
-             len(stats), addtotal, removetotal)
+        len(stats),
+        addtotal,
+        removetotal,
+    )
+
 
 def _diffstattmplgen(context, ctx, statgen, parity):
     stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
@@ -687,21 +785,26 @@
         template = 'diffstatlink' if filename in files else 'diffstatnolink'
         total = adds + removes
         fileno += 1
-        yield context.process(template, {
-            'node': ctx.hex(),
-            'file': filename,
-            'fileno': fileno,
-            'total': total,
-            'addpct': pct(adds),
-            'removepct': pct(removes),
-            'parity': next(parity),
-        })
+        yield context.process(
+            template,
+            {
+                'node': ctx.hex(),
+                'file': filename,
+                'fileno': fileno,
+                'total': total,
+                'addpct': pct(adds),
+                'removepct': pct(removes),
+                'parity': next(parity),
+            },
+        )
+
 
 def diffstat(ctx, statgen, parity):
     '''Return a diffstat template for each file in the diff.'''
     args = (ctx, statgen, parity)
     return templateutil.mappedgenerator(_diffstattmplgen, args=args)
 
+
 class sessionvars(templateutil.wrapped):
     def __init__(self, vars, start='?'):
         self._start = start
@@ -737,9 +840,10 @@
     def itermaps(self, context):
         separator = self._start
         for key, value in sorted(self._vars.iteritems()):
-            yield {'name': key,
-                   'value': pycompat.bytestr(value),
-                   'separator': separator,
+            yield {
+                'name': key,
+                'value': pycompat.bytestr(value),
+                'separator': separator,
             }
             separator = '&'
 
@@ -756,11 +860,13 @@
     def tovalue(self, context, mapping):
         return self._vars
 
+
 class wsgiui(uimod.ui):
     # default termwidth breaks under mod_wsgi
     def termwidth(self):
         return 80
 
+
 def getwebsubs(repo):
     websubtable = []
     websubdefs = repo.ui.configitems('websub')
@@ -776,10 +882,13 @@
         # delimiters are required.
         match = re.match(
             br'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
-            % (delim, delim, delim), pattern)
+            % (delim, delim, delim),
+            pattern,
+        )
         if not match:
-            repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
-                              % (key, pattern))
+            repo.ui.warn(
+                _("websub: invalid pattern for %s: %s\n") % (key, pattern)
+            )
             continue
 
         # we need to unescape the delimiter for regexp and format
@@ -798,10 +907,13 @@
             regexp = re.compile(regexp, flags)
             websubtable.append((regexp, format))
         except re.error:
-            repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
-                         % (key, regexp))
+            repo.ui.warn(
+                _("websub: invalid regexp for %s: %s\n") % (key, regexp)
+            )
     return websubtable
 
+
 def getgraphnode(repo, ctx):
-    return (templatekw.getgraphnodecurrent(repo, ctx) +
-            templatekw.getgraphnodesymbol(ctx))
+    return templatekw.getgraphnodecurrent(
+        repo, ctx
+    ) + templatekw.getgraphnodesymbol(ctx)
--- a/mercurial/hgweb/wsgicgi.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/wsgicgi.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,29 +12,24 @@
 
 import os
 
-from .. import (
-    pycompat,
-)
+from .. import pycompat
 
-from ..utils import (
-    procutil,
-)
+from ..utils import procutil
 
-from . import (
-    common,
-)
+from . import common
+
 
 def launch(application):
     procutil.setbinary(procutil.stdin)
     procutil.setbinary(procutil.stdout)
 
-    environ = dict(os.environ.iteritems()) # re-exports
+    environ = dict(os.environ.iteritems())  # re-exports
     environ.setdefault(r'PATH_INFO', '')
     if environ.get(r'SERVER_SOFTWARE', r'').startswith(r'Microsoft-IIS'):
         # IIS includes script_name in PATH_INFO
         scriptname = environ[r'SCRIPT_NAME']
         if environ[r'PATH_INFO'].startswith(scriptname):
-            environ[r'PATH_INFO'] = environ[r'PATH_INFO'][len(scriptname):]
+            environ[r'PATH_INFO'] = environ[r'PATH_INFO'][len(scriptname) :]
 
     stdin = procutil.stdin
     if environ.get(r'HTTP_EXPECT', r'').lower() == r'100-continue':
@@ -65,8 +60,10 @@
             status, response_headers = headers_sent[:] = headers_set
             out.write('Status: %s\r\n' % pycompat.bytesurl(status))
             for hk, hv in response_headers:
-                out.write('%s: %s\r\n' % (pycompat.bytesurl(hk),
-                                          pycompat.bytesurl(hv)))
+                out.write(
+                    '%s: %s\r\n'
+                    % (pycompat.bytesurl(hk), pycompat.bytesurl(hv))
+                )
             out.write('\r\n')
 
         out.write(data)
@@ -79,7 +76,7 @@
                     # Re-raise original exception if headers sent
                     raise exc_info[0](exc_info[1], exc_info[2])
             finally:
-                exc_info = None     # avoid dangling circular ref
+                exc_info = None  # avoid dangling circular ref
         elif headers_set:
             raise AssertionError("Headers already set!")
 
@@ -91,6 +88,6 @@
         for chunk in content:
             write(chunk)
         if not headers_sent:
-            write('')   # send headers now if body was empty
+            write('')  # send headers now if body was empty
     finally:
         getattr(content, 'close', lambda: None)()
--- a/mercurial/hgweb/wsgiheaders.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hgweb/wsgiheaders.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,8 +12,10 @@
 from __future__ import absolute_import, print_function
 
 import re
+
 tspecials = re.compile(br'[ \(\)<>@,;:\\"/\[\]\?=]')
 
+
 def _formatparam(param, value=None, quote=1):
     """Convenience function to format and return a key=value pair.
     This will quote the value if needed or if quote is true.
@@ -45,8 +47,10 @@
         """Convert/check value type."""
         if type(value) is bytes:
             return value
-        raise AssertionError(u"Header names/values must be"
-                             u" of type bytes (got %s)" % repr(value))
+        raise AssertionError(
+            u"Header names/values must be"
+            u" of type bytes (got %s)" % repr(value)
+        )
 
     def __len__(self):
         """Return the total number of headers, including duplicates."""
@@ -56,7 +60,8 @@
         """Set the value of a header."""
         del self[name]
         self._headers.append(
-            (self._convert_string_type(name), self._convert_string_type(val)))
+            (self._convert_string_type(name), self._convert_string_type(val))
+        )
 
     def __delitem__(self, name):
         """Delete all occurrences of a header, if present.
@@ -78,7 +83,6 @@
         """Return true if the message contains the header."""
         return self.get(name) is not None
 
-
     def get_all(self, name):
         """Return a list of all the values for the named field.
         These will be sorted in the order they appeared in the original header
@@ -87,18 +91,16 @@
         If no fields exist with the given name, returns an empty list.
         """
         name = self._convert_string_type(name.lower())
-        return [kv[1] for kv in self._headers if kv[0].lower()==name]
-
+        return [kv[1] for kv in self._headers if kv[0].lower() == name]
 
     def get(self, name, default=None):
         """Get the first header value for 'name', or return 'default'"""
         name = self._convert_string_type(name.lower())
         for k, v in self._headers:
-            if k.lower()==name:
+            if k.lower() == name:
                 return v
         return default
 
-
     def keys(self):
         """Return a list of all the header field names.
         These will be sorted in the order they appeared in the original header
@@ -132,7 +134,7 @@
     def __str__(self):
         """str() returns the formatted headers, complete with end line,
         suitable for direct HTTP transmission."""
-        return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
+        return '\r\n'.join(["%s: %s" % kv for kv in self._headers] + ['', ''])
 
     def __bytes__(self):
         return str(self).encode('iso-8859-1')
@@ -143,8 +145,12 @@
         and value 'value'."""
         result = self.get(name)
         if result is None:
-            self._headers.append((self._convert_string_type(name),
-                self._convert_string_type(value)))
+            self._headers.append(
+                (
+                    self._convert_string_type(name),
+                    self._convert_string_type(value),
+                )
+            )
             return value
         else:
             return result
@@ -173,4 +179,5 @@
                 v = self._convert_string_type(v)
                 parts.append(_formatparam(k.replace('_', '-'), v))
         self._headers.append(
-            (self._convert_string_type(_name), "; ".join(parts)))
+            (self._convert_string_type(_name), "; ".join(parts))
+        )
--- a/mercurial/hook.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/hook.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,6 +24,7 @@
     stringutil,
 )
 
+
 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
     '''call python hook. hook is callable object, looked up as
     name in python module. if callable returns "true", hook
@@ -42,7 +43,8 @@
         if d == -1:
             raise error.HookLoadError(
                 _('%s hook is invalid: "%s" not in a module')
-                % (hname, funcname))
+                % (hname, funcname)
+            )
         modname = funcname[:d]
         oldpaths = sys.path
         if procutil.mainfrozen():
@@ -62,34 +64,46 @@
                 except (ImportError, SyntaxError):
                     e2 = sys.exc_info()
                     if ui.tracebackflag:
-                        ui.warn(_('exception from first failed import '
-                                  'attempt:\n'))
+                        ui.warn(
+                            _(
+                                'exception from first failed import '
+                                'attempt:\n'
+                            )
+                        )
                     ui.traceback(e1)
                     if ui.tracebackflag:
-                        ui.warn(_('exception from second failed import '
-                                  'attempt:\n'))
+                        ui.warn(
+                            _(
+                                'exception from second failed import '
+                                'attempt:\n'
+                            )
+                        )
                     ui.traceback(e2)
 
                     if not ui.tracebackflag:
                         tracebackhint = _(
-                            'run with --traceback for stack trace')
+                            'run with --traceback for stack trace'
+                        )
                     else:
                         tracebackhint = None
                     raise error.HookLoadError(
-                        _('%s hook is invalid: import of "%s" failed') %
-                        (hname, modname), hint=tracebackhint)
+                        _('%s hook is invalid: import of "%s" failed')
+                        % (hname, modname),
+                        hint=tracebackhint,
+                    )
         sys.path = oldpaths
         try:
             for p in funcname.split('.')[1:]:
                 obj = getattr(obj, p)
         except AttributeError:
             raise error.HookLoadError(
-                _('%s hook is invalid: "%s" is not defined')
-                % (hname, funcname))
+                _('%s hook is invalid: "%s" is not defined') % (hname, funcname)
+            )
         if not callable(obj):
             raise error.HookLoadError(
                 _('%s hook is invalid: "%s" is not callable')
-                % (hname, funcname))
+                % (hname, funcname)
+            )
 
     ui.note(_("calling hook %s: %s\n") % (hname, funcname))
     starttime = util.timer()
@@ -98,11 +112,12 @@
         r = obj(ui=ui, repo=repo, hooktype=htype, **pycompat.strkwargs(args))
     except Exception as exc:
         if isinstance(exc, error.Abort):
-            ui.warn(_('error: %s hook failed: %s\n') %
-                         (hname, exc.args[0]))
+            ui.warn(_('error: %s hook failed: %s\n') % (hname, exc.args[0]))
         else:
-            ui.warn(_('error: %s hook raised an exception: '
-                      '%s\n') % (hname, stringutil.forcebytestr(exc)))
+            ui.warn(
+                _('error: %s hook raised an exception: %s\n')
+                % (hname, stringutil.forcebytestr(exc))
+            )
         if throw:
             raise
         if not ui.tracebackflag:
@@ -111,14 +126,20 @@
         return True, True
     finally:
         duration = util.timer() - starttime
-        ui.log('pythonhook', 'pythonhook-%s: %s finished in %0.2f seconds\n',
-               htype, funcname, duration)
+        ui.log(
+            'pythonhook',
+            'pythonhook-%s: %s finished in %0.2f seconds\n',
+            htype,
+            funcname,
+            duration,
+        )
     if r:
         if throw:
             raise error.HookAbort(_('%s hook failed') % hname)
         ui.warn(_('warning: %s hook failed\n') % hname)
     return r, False
 
+
 def _exthook(ui, repo, htype, name, cmd, args, throw):
     starttime = util.timer()
     env = {}
@@ -154,8 +175,13 @@
     r = ui.system(cmd, environ=env, cwd=cwd, blockedtag='exthook-%s' % (name,))
 
     duration = util.timer() - starttime
-    ui.log('exthook', 'exthook-%s: %s finished in %0.2f seconds\n',
-           name, cmd, duration)
+    ui.log(
+        'exthook',
+        'exthook-%s: %s finished in %0.2f seconds\n',
+        name,
+        cmd,
+        duration,
+    )
     if r:
         desc = procutil.explainexit(r)
         if throw:
@@ -163,9 +189,11 @@
         ui.warn(_('warning: %s hook %s\n') % (name, desc))
     return r
 
+
 # represent an untrusted hook command
 _fromuntrusted = object()
 
+
 def _allhooks(ui):
     """return a list of (hook-id, cmd) pairs sorted by priority"""
     hooks = _hookitems(ui)
@@ -181,6 +209,7 @@
     # (end of the security sensitive section)
     return [(k, v) for p, o, k, v in sorted(hooks.values())]
 
+
 def _hookitems(ui, _untrusted=False):
     """return all hooks items ready to be sorted"""
     hooks = {}
@@ -192,11 +221,15 @@
         hooks[name] = (-priority, len(hooks), name, cmd)
     return hooks
 
+
 _redirect = False
+
+
 def redirect(state):
     global _redirect
     _redirect = state
 
+
 def hashook(ui, htype):
     """return True if a hook is configured for 'htype'"""
     if not ui.callhooks:
@@ -206,6 +239,7 @@
             return True
     return False
 
+
 def hook(ui, repo, htype, throw=False, **args):
     if not ui.callhooks:
         return False
@@ -221,6 +255,7 @@
         r = res[hname][0] or r
     return r
 
+
 def runhooks(ui, repo, htype, hooks, throw=False, **args):
     args = pycompat.byteskwargs(args)
     res = {}
@@ -245,13 +280,13 @@
                 if throw:
                     raise error.HookAbort(
                         _('untrusted hook %s not executed') % hname,
-                        hint = _("see 'hg help config.trusted'"))
+                        hint=_("see 'hg help config.trusted'"),
+                    )
                 ui.warn(_('warning: untrusted hook %s not executed\n') % hname)
                 r = 1
                 raised = False
             elif callable(cmd):
-                r, raised = pythonhook(ui, repo, htype, hname, cmd, args,
-                                        throw)
+                r, raised = pythonhook(ui, repo, htype, hname, cmd, args, throw)
             elif cmd.startswith('python:'):
                 if cmd.count(':') >= 2:
                     path, cmd = cmd[7:].rsplit(':', 1)
@@ -266,8 +301,9 @@
                     hookfn = getattr(mod, cmd)
                 else:
                     hookfn = cmd[7:].strip()
-                r, raised = pythonhook(ui, repo, htype, hname, hookfn, args,
-                                        throw)
+                r, raised = pythonhook(
+                    ui, repo, htype, hname, hookfn, args, throw
+                )
             else:
                 r = _exthook(ui, repo, htype, hname, cmd, args, throw)
                 raised = False
--- a/mercurial/httppeer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/httppeer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -43,6 +43,7 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 def encodevalueinheaders(value, header, limit):
     """Encode a string value into multiple HTTP headers.
 
@@ -67,17 +68,21 @@
     n = 0
     for i in pycompat.xrange(0, len(value), valuelen):
         n += 1
-        result.append((fmt % str(n), pycompat.strurl(value[i:i + valuelen])))
+        result.append((fmt % str(n), pycompat.strurl(value[i : i + valuelen])))
 
     return result
 
+
 class _multifile(object):
     def __init__(self, *fileobjs):
         for f in fileobjs:
             if not util.safehasattr(f, 'length'):
                 raise ValueError(
                     '_multifile only supports file objects that '
-                    'have a length but this one does not:', type(f), f)
+                    'have a length but this one does not:',
+                    type(f),
+                    f,
+                )
         self._fileobjs = fileobjs
         self._index = 0
 
@@ -101,17 +106,21 @@
         if whence != os.SEEK_SET:
             raise NotImplementedError(
                 '_multifile does not support anything other'
-                ' than os.SEEK_SET for whence on seek()')
+                ' than os.SEEK_SET for whence on seek()'
+            )
         if offset != 0:
             raise NotImplementedError(
                 '_multifile only supports seeking to start, but that '
-                'could be fixed if you need it')
+                'could be fixed if you need it'
+            )
         for f in self._fileobjs:
             f.seek(0)
         self._index = 0
 
-def makev1commandrequest(ui, requestbuilder, caps, capablefn,
-                         repobaseurl, cmd, args):
+
+def makev1commandrequest(
+    ui, requestbuilder, caps, capablefn, repobaseurl, cmd, args
+):
     """Make an HTTP request to run a command for a version 1 client.
 
     ``caps`` is a set of known server capabilities. The value may be
@@ -162,8 +171,9 @@
         if headersize > 0:
             # The headers can typically carry more data than the URL.
             encargs = urlreq.urlencode(sorted(args.items()))
-            for header, value in encodevalueinheaders(encargs, 'X-HgArg',
-                                                      headersize):
+            for header, value in encodevalueinheaders(
+                encargs, 'X-HgArg', headersize
+            ):
                 headers[header] = value
         # Send arguments via query string (Mercurial <1.9).
         else:
@@ -202,14 +212,16 @@
         # We /could/ compare supported compression formats and prune
         # non-mutually supported or error if nothing is mutually supported.
         # For now, send the full list to the server and have it error.
-        comps = [e.wireprotosupport().name for e in
-                 util.compengines.supportedwireengines(util.CLIENTROLE)]
+        comps = [
+            e.wireprotosupport().name
+            for e in util.compengines.supportedwireengines(util.CLIENTROLE)
+        ]
         protoparams.add('comp=%s' % ','.join(comps))
 
     if protoparams:
-        protoheaders = encodevalueinheaders(' '.join(sorted(protoparams)),
-                                            'X-HgProto',
-                                            headersize or 1024)
+        protoheaders = encodevalueinheaders(
+            ' '.join(sorted(protoparams)), 'X-HgProto', headersize or 1024
+        )
         for header, value in protoheaders:
             headers[header] = value
 
@@ -229,6 +241,7 @@
 
     return req, cu, qs
 
+
 def _reqdata(req):
     """Get request data, if any. If no data, returns None."""
     if pycompat.ispy3:
@@ -237,17 +250,23 @@
         return None
     return req.get_data()
 
+
 def sendrequest(ui, opener, req):
     """Send a prepared HTTP request.
 
     Returns the response object.
     """
     dbg = ui.debug
-    if (ui.debugflag
-        and ui.configbool('devel', 'debug.peer-request')):
+    if ui.debugflag and ui.configbool('devel', 'debug.peer-request'):
         line = 'devel-peer-request: %s\n'
-        dbg(line % '%s %s' % (pycompat.bytesurl(req.get_method()),
-                              pycompat.bytesurl(req.get_full_url())))
+        dbg(
+            line
+            % '%s %s'
+            % (
+                pycompat.bytesurl(req.get_method()),
+                pycompat.bytesurl(req.get_full_url()),
+            )
+        )
         hgargssize = None
 
         for header, value in sorted(req.header_items()):
@@ -261,8 +280,11 @@
                 dbg(line % '  %s %s' % (header, value))
 
         if hgargssize is not None:
-            dbg(line % '  %d bytes of commands arguments in headers'
-                % hgargssize)
+            dbg(
+                line
+                % '  %d bytes of commands arguments in headers'
+                % hgargssize
+            )
         data = _reqdata(req)
         if data is not None:
             length = getattr(data, 'length', None)
@@ -280,33 +302,40 @@
             raise error.Abort(_('authorization failed'))
         raise
     except httplib.HTTPException as inst:
-        ui.debug('http error requesting %s\n' %
-                 util.hidepassword(req.get_full_url()))
+        ui.debug(
+            'http error requesting %s\n' % util.hidepassword(req.get_full_url())
+        )
         ui.traceback()
         raise IOError(None, inst)
     finally:
         if ui.debugflag and ui.configbool('devel', 'debug.peer-request'):
             code = res.code if res else -1
-            dbg(line % '  finished in %.4f seconds (%d)'
-                % (util.timer() - start, code))
+            dbg(
+                line
+                % '  finished in %.4f seconds (%d)'
+                % (util.timer() - start, code)
+            )
 
     # Insert error handlers for common I/O failures.
     urlmod.wrapresponse(res)
 
     return res
 
+
 class RedirectedRepoError(error.RepoError):
     def __init__(self, msg, respurl):
         super(RedirectedRepoError, self).__init__(msg)
         self.respurl = respurl
 
-def parsev1commandresponse(ui, baseurl, requrl, qs, resp, compressible,
-                           allowcbor=False):
+
+def parsev1commandresponse(
+    ui, baseurl, requrl, qs, resp, compressible, allowcbor=False
+):
     # record the url we got redirected to
     redirected = False
     respurl = pycompat.bytesurl(resp.geturl())
     if respurl.endswith(qs):
-        respurl = respurl[:-len(qs)]
+        respurl = respurl[: -len(qs)]
         qsdropped = False
     else:
         qsdropped = True
@@ -329,9 +358,10 @@
     # application/hg-changegroup. We don't support such old servers.
     if not proto.startswith('application/mercurial-'):
         ui.debug("requested URL: '%s'\n" % util.hidepassword(requrl))
-        msg = _("'%s' does not appear to be an hg repository:\n"
-                "---%%<--- (%s)\n%s\n---%%<---\n") % (
-            safeurl, proto or 'no content-type', resp.read(1024))
+        msg = _(
+            "'%s' does not appear to be an hg repository:\n"
+            "---%%<--- (%s)\n%s\n---%%<---\n"
+        ) % (safeurl, proto or 'no content-type', resp.read(1024))
 
         # Some servers may strip the query string from the redirect. We
         # raise a special error type so callers can react to this specially.
@@ -350,13 +380,16 @@
             if allowcbor:
                 return respurl, proto, resp
             else:
-                raise error.RepoError(_('unexpected CBOR response from '
-                                        'server'))
+                raise error.RepoError(
+                    _('unexpected CBOR response from ' 'server')
+                )
 
         version_info = tuple([int(n) for n in subtype.split('.')])
     except ValueError:
-        raise error.RepoError(_("'%s' sent a broken Content-Type "
-                                "header (%s)") % (safeurl, proto))
+        raise error.RepoError(
+            _("'%s' sent a broken Content-Type " "header (%s)")
+            % (safeurl, proto)
+        )
 
     # TODO consider switching to a decompression reader that uses
     # generators.
@@ -373,11 +406,13 @@
 
         resp = engine.decompressorreader(resp)
     else:
-        raise error.RepoError(_("'%s' uses newer protocol %s") %
-                              (safeurl, subtype))
+        raise error.RepoError(
+            _("'%s' uses newer protocol %s") % (safeurl, subtype)
+        )
 
     return respurl, proto, resp
 
+
 class httppeer(wireprotov1peer.wirepeer):
     def __init__(self, ui, path, url, opener, requestbuilder, caps):
         self.ui = ui
@@ -409,14 +444,20 @@
 
     def close(self):
         try:
-            reqs, sent, recv = (self._urlopener.requestscount,
-                                self._urlopener.sentbytescount,
-                                self._urlopener.receivedbytescount)
+            reqs, sent, recv = (
+                self._urlopener.requestscount,
+                self._urlopener.sentbytescount,
+                self._urlopener.receivedbytescount,
+            )
         except AttributeError:
             return
-        self.ui.note(_('(sent %d HTTP requests and %d bytes; '
-                       'received %d bytes in responses)\n') %
-                     (reqs, sent, recv))
+        self.ui.note(
+            _(
+                '(sent %d HTTP requests and %d bytes; '
+                'received %d bytes in responses)\n'
+            )
+            % (reqs, sent, recv)
+        )
 
     # End of ipeerconnection interface.
 
@@ -430,14 +471,21 @@
     def _callstream(self, cmd, _compressible=False, **args):
         args = pycompat.byteskwargs(args)
 
-        req, cu, qs = makev1commandrequest(self.ui, self._requestbuilder,
-                                           self._caps, self.capable,
-                                           self._url, cmd, args)
+        req, cu, qs = makev1commandrequest(
+            self.ui,
+            self._requestbuilder,
+            self._caps,
+            self.capable,
+            self._url,
+            cmd,
+            args,
+        )
 
         resp = sendrequest(self.ui, self._urlopener, req)
 
-        self._url, ct, resp = parsev1commandresponse(self.ui, self._url, cu, qs,
-                                                     resp, _compressible)
+        self._url, ct, resp = parsev1commandresponse(
+            self.ui, self._url, cu, qs, resp, _compressible
+        )
 
         return resp
 
@@ -513,8 +561,10 @@
     def _abort(self, exception):
         raise exception
 
-def sendv2request(ui, opener, requestbuilder, apiurl, permission, requests,
-                  redirect):
+
+def sendv2request(
+    ui, opener, requestbuilder, apiurl, permission, requests, redirect
+):
     wireprotoframing.populatestreamencoders()
 
     uiencoders = ui.configlist(b'experimental', b'httppeer.v2-encoder-order')
@@ -524,22 +574,29 @@
 
         for encoder in uiencoders:
             if encoder not in wireprotoframing.STREAM_ENCODERS:
-                ui.warn(_(b'wire protocol version 2 encoder referenced in '
-                          b'config (%s) is not known; ignoring\n') % encoder)
+                ui.warn(
+                    _(
+                        b'wire protocol version 2 encoder referenced in '
+                        b'config (%s) is not known; ignoring\n'
+                    )
+                    % encoder
+                )
             else:
                 encoders.append(encoder)
 
     else:
         encoders = wireprotoframing.STREAM_ENCODERS_ORDER
 
-    reactor = wireprotoframing.clientreactor(ui,
-                                             hasmultiplesend=False,
-                                             buffersends=True,
-                                             clientcontentencoders=encoders)
+    reactor = wireprotoframing.clientreactor(
+        ui,
+        hasmultiplesend=False,
+        buffersends=True,
+        clientcontentencoders=encoders,
+    )
 
-    handler = wireprotov2peer.clienthandler(ui, reactor,
-                                            opener=opener,
-                                            requestbuilder=requestbuilder)
+    handler = wireprotov2peer.clienthandler(
+        ui, reactor, opener=opener, requestbuilder=requestbuilder
+    )
 
     url = '%s/%s' % (apiurl, permission)
 
@@ -550,10 +607,13 @@
 
     ui.debug('sending %d commands\n' % len(requests))
     for command, args, f in requests:
-        ui.debug('sending command %s: %s\n' % (
-            command, stringutil.pprint(args, indent=2)))
-        assert not list(handler.callcommand(command, args, f,
-                                            redirect=redirect))
+        ui.debug(
+            'sending command %s: %s\n'
+            % (command, stringutil.pprint(args, indent=2))
+        )
+        assert not list(
+            handler.callcommand(command, args, f, redirect=redirect)
+        )
 
     # TODO stream this.
     body = b''.join(map(bytes, handler.flushcommands()))
@@ -580,6 +640,7 @@
 
     return handler, res
 
+
 class queuedcommandfuture(pycompat.futures.Future):
     """Wraps result() on command futures to trigger submission on call."""
 
@@ -593,10 +654,12 @@
         # will resolve to Future.result.
         return self.result(timeout)
 
+
 @interfaceutil.implementer(repository.ipeercommandexecutor)
 class httpv2executor(object):
-    def __init__(self, ui, opener, requestbuilder, apiurl, descriptor,
-                 redirect):
+    def __init__(
+        self, ui, opener, requestbuilder, apiurl, descriptor, redirect
+    ):
         self._ui = ui
         self._opener = opener
         self._requestbuilder = requestbuilder
@@ -619,27 +682,31 @@
 
     def callcommand(self, command, args):
         if self._sent:
-            raise error.ProgrammingError('callcommand() cannot be used after '
-                                         'commands are sent')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used after commands are sent'
+            )
 
         if self._closed:
-            raise error.ProgrammingError('callcommand() cannot be used after '
-                                         'close()')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used after close()'
+            )
 
         # The service advertises which commands are available. So if we attempt
         # to call an unknown command or pass an unknown argument, we can screen
         # for this.
         if command not in self._descriptor['commands']:
             raise error.ProgrammingError(
-                'wire protocol command %s is not available' % command)
+                'wire protocol command %s is not available' % command
+            )
 
         cmdinfo = self._descriptor['commands'][command]
         unknownargs = set(args.keys()) - set(cmdinfo.get('args', {}))
 
         if unknownargs:
             raise error.ProgrammingError(
-                'wire protocol command %s does not accept argument: %s' % (
-                    command, ', '.join(sorted(unknownargs))))
+                'wire protocol command %s does not accept argument: %s'
+                % (command, ', '.join(sorted(unknownargs)))
+            )
 
         self._neededpermissions |= set(cmdinfo['permissions'])
 
@@ -675,9 +742,11 @@
                 f._peerexecutor = None
 
         # Mark the future as running and filter out cancelled futures.
-        calls = [(command, args, f)
-                 for command, args, f in self._calls
-                 if f.set_running_or_notify_cancel()]
+        calls = [
+            (command, args, f)
+            for command, args, f in self._calls
+            if f.set_running_or_notify_cancel()
+        ]
 
         # Clear out references, prevent improper object usage.
         self._calls = None
@@ -691,24 +760,29 @@
             permissions.remove('pull')
 
         if len(permissions) > 1:
-            raise error.RepoError(_('cannot make request requiring multiple '
-                                    'permissions: %s') %
-                                  _(', ').join(sorted(permissions)))
+            raise error.RepoError(
+                _('cannot make request requiring multiple ' 'permissions: %s')
+                % _(', ').join(sorted(permissions))
+            )
 
-        permission = {
-            'push': 'rw',
-            'pull': 'ro',
-        }[permissions.pop()]
+        permission = {'push': 'rw', 'pull': 'ro',}[permissions.pop()]
 
         handler, resp = sendv2request(
-            self._ui, self._opener, self._requestbuilder, self._apiurl,
-            permission, calls, self._redirect)
+            self._ui,
+            self._opener,
+            self._requestbuilder,
+            self._apiurl,
+            permission,
+            calls,
+            self._redirect,
+        )
 
         # TODO we probably want to validate the HTTP code, media type, etc.
 
         self._responseexecutor = pycompat.futures.ThreadPoolExecutor(1)
-        self._responsef = self._responseexecutor.submit(self._handleresponse,
-                                                        handler, resp)
+        self._responsef = self._responseexecutor.submit(
+            self._handleresponse, handler, resp
+        )
 
     def close(self):
         if self._closed:
@@ -734,8 +808,9 @@
             # errored, otherwise a result() could wait indefinitely.
             for f in self._futures:
                 if not f.done():
-                    f.set_exception(error.ResponseError(
-                        _('unfulfilled command response')))
+                    f.set_exception(
+                        error.ResponseError(_('unfulfilled command response'))
+                    )
 
             self._futures = None
 
@@ -745,13 +820,15 @@
         while handler.readdata(resp):
             pass
 
+
 @interfaceutil.implementer(repository.ipeerv2)
 class httpv2peer(object):
 
     limitedarguments = False
 
-    def __init__(self, ui, repourl, apipath, opener, requestbuilder,
-                 apidescriptor):
+    def __init__(
+        self, ui, repourl, apipath, opener, requestbuilder, apidescriptor
+    ):
         self.ui = ui
         self.apidescriptor = apidescriptor
 
@@ -782,11 +859,17 @@
         return False
 
     def close(self):
-        self.ui.note(_('(sent %d HTTP requests and %d bytes; '
-                       'received %d bytes in responses)\n') %
-                     (self._opener.requestscount,
-                      self._opener.sentbytescount,
-                      self._opener.receivedbytescount))
+        self.ui.note(
+            _(
+                '(sent %d HTTP requests and %d bytes; '
+                'received %d bytes in responses)\n'
+            )
+            % (
+                self._opener.requestscount,
+                self._opener.sentbytescount,
+                self._opener.receivedbytescount,
+            )
+        )
 
     # End of ipeerconnection.
 
@@ -802,12 +885,12 @@
             return True
 
         # Other concepts.
-        if name in ('bundle2'):
+        if name in 'bundle2':
             return True
 
         # Alias command-* to presence of command of that name.
         if name.startswith('command-'):
-            return name[len('command-'):] in self.apidescriptor['commands']
+            return name[len('command-') :] in self.apidescriptor['commands']
 
         return False
 
@@ -816,8 +899,12 @@
             return
 
         raise error.CapabilityError(
-            _('cannot %s; client or remote repository does not support the '
-              '\'%s\' capability') % (purpose, name))
+            _(
+                'cannot %s; client or remote repository does not support the '
+                '\'%s\' capability'
+            )
+            % (purpose, name)
+        )
 
     # End of ipeercapabilities.
 
@@ -826,8 +913,15 @@
             return e.callcommand(name, args).result()
 
     def commandexecutor(self):
-        return httpv2executor(self.ui, self._opener, self._requestbuilder,
-                              self._apiurl, self.apidescriptor, self._redirect)
+        return httpv2executor(
+            self.ui,
+            self._opener,
+            self._requestbuilder,
+            self._apiurl,
+            self.apidescriptor,
+            self._redirect,
+        )
+
 
 # Registry of API service names to metadata about peers that handle it.
 #
@@ -841,16 +935,15 @@
 #    Integer priority for the service. If we could choose from multiple
 #    services, we choose the one with the highest priority.
 API_PEERS = {
-    wireprototypes.HTTP_WIREPROTO_V2: {
-        'init': httpv2peer,
-        'priority': 50,
-    },
+    wireprototypes.HTTP_WIREPROTO_V2: {'init': httpv2peer, 'priority': 50,},
 }
 
+
 def performhandshake(ui, url, opener, requestbuilder):
     # The handshake is a request to the capabilities command.
 
     caps = None
+
     def capable(x):
         raise error.ProgrammingError('should not be called')
 
@@ -869,15 +962,18 @@
         }
 
         args['headers'].update(
-            encodevalueinheaders(' '.join(sorted(API_PEERS)),
-                                 'X-HgUpgrade',
-                                 # We don't know the header limit this early.
-                                 # So make it small.
-                                 1024))
+            encodevalueinheaders(
+                ' '.join(sorted(API_PEERS)),
+                'X-HgUpgrade',
+                # We don't know the header limit this early.
+                # So make it small.
+                1024,
+            )
+        )
 
-    req, requrl, qs = makev1commandrequest(ui, requestbuilder, caps,
-                                           capable, url, 'capabilities',
-                                           args)
+    req, requrl, qs = makev1commandrequest(
+        ui, requestbuilder, caps, capable, url, 'capabilities', args
+    )
     resp = sendrequest(ui, opener, req)
 
     # The server may redirect us to the repo root, stripping the
@@ -893,17 +989,17 @@
     # be a longstanding bug in some server implementations. So we allow a
     # redirect that drops the query string to "just work."
     try:
-        respurl, ct, resp = parsev1commandresponse(ui, url, requrl, qs, resp,
-                                                   compressible=False,
-                                                   allowcbor=advertisev2)
+        respurl, ct, resp = parsev1commandresponse(
+            ui, url, requrl, qs, resp, compressible=False, allowcbor=advertisev2
+        )
     except RedirectedRepoError as e:
-        req, requrl, qs = makev1commandrequest(ui, requestbuilder, caps,
-                                               capable, e.respurl,
-                                               'capabilities', args)
+        req, requrl, qs = makev1commandrequest(
+            ui, requestbuilder, caps, capable, e.respurl, 'capabilities', args
+        )
         resp = sendrequest(ui, opener, req)
-        respurl, ct, resp = parsev1commandresponse(ui, url, requrl, qs, resp,
-                                                   compressible=False,
-                                                   allowcbor=advertisev2)
+        respurl, ct, resp = parsev1commandresponse(
+            ui, url, requrl, qs, resp, compressible=False, allowcbor=advertisev2
+        )
 
     try:
         rawdata = resp.read()
@@ -918,26 +1014,28 @@
             try:
                 info = cborutil.decodeall(rawdata)[0]
             except cborutil.CBORDecodeError:
-                raise error.Abort(_('error decoding CBOR from remote server'),
-                                  hint=_('try again and consider contacting '
-                                         'the server operator'))
+                raise error.Abort(
+                    _('error decoding CBOR from remote server'),
+                    hint=_(
+                        'try again and consider contacting '
+                        'the server operator'
+                    ),
+                )
 
         # We got a legacy response. That's fine.
         elif ct in ('application/mercurial-0.1', 'application/mercurial-0.2'):
-            info = {
-                'v1capabilities': set(rawdata.split())
-            }
+            info = {'v1capabilities': set(rawdata.split())}
 
         else:
             raise error.RepoError(
-                _('unexpected response type from server: %s') % ct)
+                _('unexpected response type from server: %s') % ct
+            )
     else:
-        info = {
-            'v1capabilities': set(rawdata.split())
-        }
+        info = {'v1capabilities': set(rawdata.split())}
 
     return respurl, info
 
+
 def makepeer(ui, path, opener=None, requestbuilder=urlreq.request):
     """Construct an appropriate HTTP peer instance.
 
@@ -949,8 +1047,9 @@
     """
     u = util.url(path)
     if u.query or u.fragment:
-        raise error.Abort(_('unsupported URL component: "%s"') %
-                          (u.query or u.fragment))
+        raise error.Abort(
+            _('unsupported URL component: "%s"') % (u.query or u.fragment)
+        )
 
     # urllib cannot handle URLs with embedded user or passwd.
     url, authinfo = u.authinfo()
@@ -971,28 +1070,31 @@
     # peer type.
     apipeerchoices = set(info.get('apis', {}).keys()) & set(API_PEERS.keys())
 
-    preferredchoices = sorted(apipeerchoices,
-                              key=lambda x: API_PEERS[x]['priority'],
-                              reverse=True)
+    preferredchoices = sorted(
+        apipeerchoices, key=lambda x: API_PEERS[x]['priority'], reverse=True
+    )
 
     for service in preferredchoices:
         apipath = '%s/%s' % (info['apibase'].rstrip('/'), service)
 
-        return API_PEERS[service]['init'](ui, respurl, apipath, opener,
-                                          requestbuilder,
-                                          info['apis'][service])
+        return API_PEERS[service]['init'](
+            ui, respurl, apipath, opener, requestbuilder, info['apis'][service]
+        )
 
     # Failed to construct an API peer. Fall back to legacy.
-    return httppeer(ui, path, respurl, opener, requestbuilder,
-                    info['v1capabilities'])
+    return httppeer(
+        ui, path, respurl, opener, requestbuilder, info['v1capabilities']
+    )
+
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
         raise error.Abort(_('cannot create new http repository'))
     try:
         if path.startswith('https:') and not urlmod.has_https:
-            raise error.Abort(_('Python support for SSL and HTTPS '
-                                'is not installed'))
+            raise error.Abort(
+                _('Python support for SSL and HTTPS ' 'is not installed')
+            )
 
         inst = makepeer(ui, path)
 
@@ -1003,4 +1105,4 @@
             ui.note(_('(falling back to static-http)\n'))
             return r
         except error.RepoError:
-            raise httpexception # use the original http RepoError instead
+            raise httpexception  # use the original http RepoError instead
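
makepeer() above intersects the services advertised by the server with the local API_PEERS registry and instantiates the highest-priority match, falling back to the legacy v1 peer when nothing overlaps. A minimal standalone sketch of that selection; the service names, init callables, and priorities below are made up for illustration, not the real descriptors:

# Sketch of the priority-based peer selection in makepeer() above.
# Registry contents here are illustrative only.
API_PEERS = {
    'service-b': {'init': lambda: 'peer-b', 'priority': 50},
    'service-a': {'init': lambda: 'peer-a', 'priority': 10},
}

def selectpeer(advertised):
    # Only consider services both sides understand, best priority first.
    choices = set(advertised) & set(API_PEERS)
    for service in sorted(
        choices, key=lambda x: API_PEERS[x]['priority'], reverse=True
    ):
        return API_PEERS[service]['init']()
    return 'legacy-v1-peer'  # fall back when no API service matches

assert selectpeer(['service-a', 'service-b']) == 'peer-b'
assert selectpeer(['unknown']) == 'legacy-v1-peer'
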
--- a/mercurial/i18n.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/i18n.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,17 +24,20 @@
     module = pycompat.fsencode(__file__)
 
 _languages = None
-if (pycompat.iswindows
+if (
+    pycompat.iswindows
     and 'LANGUAGE' not in encoding.environ
     and 'LC_ALL' not in encoding.environ
     and 'LC_MESSAGES' not in encoding.environ
-    and 'LANG' not in encoding.environ):
+    and 'LANG' not in encoding.environ
+):
     # Try to detect UI language by "User Interface Language Management" API
     # if no locale variables are set. Note that locale.getdefaultlocale()
     # uses GetLocaleInfo(), which may be different from UI language.
     # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx )
     try:
         import ctypes
+
         langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
         _languages = [locale.windows_locale[langid]]
     except (ImportError, AttributeError, KeyError):
@@ -43,6 +46,7 @@
 
 _ugettext = None
 
+
 def setdatapath(datapath):
     datapath = pycompat.fsdecode(datapath)
     localedir = os.path.join(datapath, r'locale')
@@ -53,8 +57,10 @@
     except AttributeError:
         _ugettext = t.gettext
 
+
 _msgcache = {}  # encoding: {message: translation}
 
+
 def gettext(message):
     """Translate message.
 
@@ -94,13 +100,17 @@
             cache[message] = message
     return cache[message]
 
+
 def _plain():
-    if ('HGPLAIN' not in encoding.environ
-        and 'HGPLAINEXCEPT' not in encoding.environ):
+    if (
+        'HGPLAIN' not in encoding.environ
+        and 'HGPLAINEXCEPT' not in encoding.environ
+    ):
         return False
     exceptions = encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
     return 'i18n' not in exceptions
 
+
 if _plain():
     _ = lambda message: message
 else:
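
_plain() above controls whether gettext is bypassed: plain mode is off unless HGPLAIN or HGPLAINEXCEPT is present in the environment, and listing 'i18n' in HGPLAINEXCEPT keeps translation enabled even under HGPLAIN. The same rule as a self-contained sketch, with an ordinary dict standing in for encoding.environ:

# Sketch of the HGPLAIN/HGPLAINEXCEPT gating from _plain() above;
# a plain dict stands in for encoding.environ.
def plain(environ):
    if (
        'HGPLAIN' not in environ
        and 'HGPLAINEXCEPT' not in environ
    ):
        return False
    exceptions = environ.get('HGPLAINEXCEPT', '').strip().split(',')
    return 'i18n' not in exceptions

assert plain({}) is False                    # normal mode: translate
assert plain({'HGPLAIN': '1'}) is True       # plain mode: _ is identity
assert plain({'HGPLAIN': '1', 'HGPLAINEXCEPT': 'i18n'}) is False
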
--- a/mercurial/interfaces/dirstate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/interfaces/dirstate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,16 +2,12 @@
 
 import contextlib
 
-from .. import (
-    node as nodemod,
-)
+from .. import node as nodemod
 
-from . import (
-    util as interfaceutil,
-)
+from . import util as interfaceutil
+
 
 class idirstate(interfaceutil.Interface):
-
     def __init__(opener, ui, root, validate, sparsematchfn):
         '''Create a new dirstate object.
 
--- a/mercurial/interfaces/repository.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/interfaces/repository.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,12 +8,8 @@
 from __future__ import absolute_import
 
 from ..i18n import _
-from .. import (
-    error,
-)
-from . import (
-    util as interfaceutil,
-)
+from .. import error
+from . import util as interfaceutil
 
 # When narrowing is finalized and no longer subject to format changes,
 # we should move this to just "narrow" or similar.
@@ -38,7 +34,9 @@
 REVISION_FLAG_SIDEDATA = 1 << 12
 
 REVISION_FLAGS_KNOWN = (
-    REVISION_FLAG_CENSORED | REVISION_FLAG_ELLIPSIS | REVISION_FLAG_EXTSTORED
+    REVISION_FLAG_CENSORED
+    | REVISION_FLAG_ELLIPSIS
+    | REVISION_FLAG_EXTSTORED
     | REVISION_FLAG_SIDEDATA
 )
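
REVISION_FLAGS_KNOWN above ORs the individual flag bits into a single mask so a revision's flags can be screened for unknown bits in one test. A small sketch of that bitfield; only REVISION_FLAG_SIDEDATA's position appears in this hunk, so the other three values below are assumptions taken from the surrounding file:

# Sketch of the flag-mask composition above. The first three bit
# positions are assumed (not shown in this hunk); SIDEDATA's is.
REVISION_FLAG_CENSORED = 1 << 15   # assumed position
REVISION_FLAG_ELLIPSIS = 1 << 14   # assumed position
REVISION_FLAG_EXTSTORED = 1 << 13  # assumed position
REVISION_FLAG_SIDEDATA = 1 << 12

REVISION_FLAGS_KNOWN = (
    REVISION_FLAG_CENSORED
    | REVISION_FLAG_ELLIPSIS
    | REVISION_FLAG_EXTSTORED
    | REVISION_FLAG_SIDEDATA
)

flags = REVISION_FLAG_CENSORED | REVISION_FLAG_SIDEDATA
assert flags & REVISION_FLAG_CENSORED        # single-bit membership test
assert not flags & ~REVISION_FLAGS_KNOWN     # no unknown bits set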
 
@@ -47,6 +45,7 @@
 CG_DELTAMODE_FULL = b'fulltext'
 CG_DELTAMODE_P1 = b'p1'
 
+
 class ipeerconnection(interfaceutil.Interface):
     """Represents a "connection" to a repository.
 
@@ -56,6 +55,7 @@
     This is not a complete interface definition and should not be used
     outside of this module.
     """
+
     ui = interfaceutil.Attribute("""ui.ui instance""")
 
     def url():
@@ -93,6 +93,7 @@
         associated with the peer should be cleaned up.
         """
 
+
 class ipeercapabilities(interfaceutil.Interface):
     """Peer sub-interface related to capabilities."""
 
@@ -113,6 +114,7 @@
         Raises a ``CapabilityError`` if the capability isn't present.
         """
 
+
 class ipeercommands(interfaceutil.Interface):
     """Client-side interface for communicating over the wire protocol.
 
@@ -202,6 +204,7 @@
         Returns the integer number of heads added to the peer.
         """
 
+
 class ipeerlegacycommands(interfaceutil.Interface):
     """Interface for implementing support for legacy wire protocol commands.
 
@@ -234,6 +237,7 @@
     def changegroupsubset(bases, heads, source):
         pass
 
+
 class ipeercommandexecutor(interfaceutil.Interface):
     """Represents a mechanism to execute remote commands.
 
@@ -291,6 +295,7 @@
         This method may call ``sendcommands()`` if there are buffered commands.
         """
 
+
 class ipeerrequests(interfaceutil.Interface):
     """Interface for executing commands on a peer."""
 
@@ -320,17 +325,21 @@
         being issued.
         """
 
+
 class ipeerbase(ipeerconnection, ipeercapabilities, ipeerrequests):
     """Unified interface for peer repositories.
 
     All peer instances must conform to this interface.
     """
 
+
 class ipeerv2(ipeerconnection, ipeercapabilities, ipeerrequests):
     """Unified peer interface for wire protocol version 2 peers."""
 
     apidescriptor = interfaceutil.Attribute(
-        """Data structure holding description of server API.""")
+        """Data structure holding description of server API."""
+    )
+
 
 @interfaceutil.implementer(ipeerbase)
 class peer(object):
@@ -346,7 +355,7 @@
         name = '%s=' % name
         for cap in caps:
             if cap.startswith(name):
-                return cap[len(name):]
+                return cap[len(name) :]
 
         return False
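
The hunk above is the 'name=value' half of capability lookup: capabilities are a set of tokens, and a token starting with 'name=' yields the text after the equals sign. A sketch of the full lookup; the bare-token branch returning True is an assumption from context, since only the prefix scan is visible here:

# Sketch of capability lookup: bare tokens mean True, "name=value"
# tokens yield the value, anything else is False. The bare-token
# branch is assumed; only the prefix scan appears in the hunk.
def capable(caps, name):
    if name in caps:
        return True
    prefix = name + '='
    for cap in caps:
        if cap.startswith(prefix):
            return cap[len(prefix) :]
    return False

caps = {'branchmap', 'bundle2=HG20'}
assert capable(caps, 'branchmap') is True
assert capable(caps, 'bundle2') == 'HG20'
assert capable(caps, 'unbundle') is False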
 
@@ -355,8 +364,13 @@
             return
 
         raise error.CapabilityError(
-            _('cannot %s; remote repository does not support the '
-              '\'%s\' capability') % (purpose, name))
+            _(
+                'cannot %s; remote repository does not support the '
+                '\'%s\' capability'
+            )
+            % (purpose, name)
+        )
+
 
 class iverifyproblem(interfaceutil.Interface):
     """Represents a problem with the integrity of the repository.
@@ -366,17 +380,20 @@
 
     Instances are essentially messages associated with severity.
     """
+
     warning = interfaceutil.Attribute(
-        """Message indicating a non-fatal problem.""")
-
-    error = interfaceutil.Attribute(
-        """Message indicating a fatal problem.""")
+        """Message indicating a non-fatal problem."""
+    )
+
+    error = interfaceutil.Attribute("""Message indicating a fatal problem.""")
 
     node = interfaceutil.Attribute(
         """Revision encountering the problem.
 
         ``None`` means the problem doesn't apply to a single revision.
-        """)
+        """
+    )
+
 
 class irevisiondelta(interfaceutil.Interface):
     """Represents a delta between one revision and another.
@@ -391,45 +408,53 @@
     Typically used for changegroup generation.
     """
 
-    node = interfaceutil.Attribute(
-        """20 byte node of this revision.""")
+    node = interfaceutil.Attribute("""20 byte node of this revision.""")
 
     p1node = interfaceutil.Attribute(
-        """20 byte node of 1st parent of this revision.""")
+        """20 byte node of 1st parent of this revision."""
+    )
 
     p2node = interfaceutil.Attribute(
-        """20 byte node of 2nd parent of this revision.""")
+        """20 byte node of 2nd parent of this revision."""
+    )
 
     linknode = interfaceutil.Attribute(
-        """20 byte node of the changelog revision this node is linked to.""")
+        """20 byte node of the changelog revision this node is linked to."""
+    )
 
     flags = interfaceutil.Attribute(
         """2 bytes of integer flags that apply to this revision.
 
         This is a bitwise composition of the ``REVISION_FLAG_*`` constants.
-        """)
+        """
+    )
 
     basenode = interfaceutil.Attribute(
         """20 byte node of the revision this data is a delta against.
 
         ``nullid`` indicates that the revision is a full revision and not
         a delta.
-        """)
+        """
+    )
 
     baserevisionsize = interfaceutil.Attribute(
         """Size of base revision this delta is against.
 
         May be ``None`` if ``basenode`` is ``nullid``.
-        """)
+        """
+    )
 
     revision = interfaceutil.Attribute(
-        """Raw fulltext of revision data for this node.""")
+        """Raw fulltext of revision data for this node."""
+    )
 
     delta = interfaceutil.Attribute(
         """Delta between ``basenode`` and ``node``.
 
         Stored in the bdiff delta format.
-        """)
+        """
+    )
+
 
 class ifilerevisionssequence(interfaceutil.Interface):
     """Contains index data for all revisions of a file.
@@ -482,6 +507,7 @@
     def insert(self, i, entry):
         """Add an item to the index at specific revision."""
 
+
 class ifileindex(interfaceutil.Interface):
     """Storage interface for index data of a single file.
 
@@ -494,6 +520,7 @@
     * DAG data (storing and querying the relationship between nodes).
     * Metadata to facilitate storage.
     """
+
     def __len__():
         """Obtain the number of revisions stored for this file."""
 
@@ -577,12 +604,14 @@
         Returns a list of nodes.
         """
 
+
 class ifiledata(interfaceutil.Interface):
     """Storage interface for data storage of a specific file.
 
     This complements ``ifileindex`` and provides an interface for accessing
     data for a tracked file.
     """
+
     def size(rev):
         """Obtain the fulltext size of file data.
 
@@ -628,11 +657,13 @@
         TODO better document the copy metadata and censoring logic.
         """
 
-    def emitrevisions(nodes,
-                      nodesorder=None,
-                      revisiondata=False,
-                      assumehaveparentrevisions=False,
-                      deltamode=CG_DELTAMODE_STD):
+    def emitrevisions(
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=CG_DELTAMODE_STD,
+    ):
         """Produce ``irevisiondelta`` for revisions.
 
         Given an iterable of nodes, emits objects conforming to the
@@ -681,6 +712,7 @@
         1st parent.
         """
 
+
 class ifilemutation(interfaceutil.Interface):
     """Storage interface for mutation events of a tracked file."""
 
@@ -695,8 +727,16 @@
         May no-op if a revision matching the supplied data is already stored.
         """
 
-    def addrevision(revisiondata, transaction, linkrev, p1, p2, node=None,
-                    flags=0, cachedelta=None):
+    def addrevision(
+        revisiondata,
+        transaction,
+        linkrev,
+        p1,
+        p2,
+        node=None,
+        flags=0,
+        cachedelta=None,
+    ):
         """Add a new revision to the store.
 
         This is similar to ``add()`` except it operates at a lower level.
@@ -713,8 +753,13 @@
         applying raw data from a peer repo.
         """
 
-    def addgroup(deltas, linkmapper, transaction, addrevisioncb=None,
-                 maybemissingparents=False):
+    def addgroup(
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        maybemissingparents=False,
+    ):
         """Process a series of deltas for storage.
 
         ``deltas`` is an iterable of 7-tuples of
@@ -774,6 +819,7 @@
         higher-level deletion API.
         """
 
+
 class ifilestorage(ifileindex, ifiledata, ifilemutation):
     """Complete storage interface for a single tracked file."""
 
@@ -784,9 +830,13 @@
         be a better API for that.
         """
 
-    def storageinfo(exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         """Obtain information about storage for this file's data.
 
         Returns a dict describing storage for this tracked path. The keys
@@ -834,6 +884,7 @@
         interface.
         """
 
+
 class idirs(interfaceutil.Interface):
     """Interface representing a collection of directories from paths.
 
@@ -860,6 +911,7 @@
     def __contains__(path):
         """Whether a specific directory is in this collection."""
 
+
 class imanifestdict(interfaceutil.Interface):
     """Interface representing a manifest data structure.
 
@@ -1009,6 +1061,7 @@
         delta between ``base`` and this manifest.
         """
 
+
 class imanifestrevisionbase(interfaceutil.Interface):
     """Base interface representing a single revision of a manifest.
 
@@ -1038,6 +1091,7 @@
         The returned object conforms to the ``imanifestdict`` interface.
         """
 
+
 class imanifestrevisionstored(imanifestrevisionbase):
     """Interface representing a manifest revision committed to storage."""
 
@@ -1069,6 +1123,7 @@
         Returns a 2-tuple of ``(node, flags)`` or raises ``KeyError``.
         """
 
+
 class imanifestrevisionwritable(imanifestrevisionbase):
     """Interface representing a manifest revision that can be committed."""
 
@@ -1087,6 +1142,7 @@
         Returns the binary node of the created revision.
         """
 
+
 class imanifeststorage(interfaceutil.Interface):
     """Storage interface for manifest data."""
 
@@ -1094,40 +1150,47 @@
         """The path to the directory this manifest tracks.
 
         The empty bytestring represents the root manifest.
-        """)
+        """
+    )
 
     index = interfaceutil.Attribute(
-        """An ``ifilerevisionssequence`` instance.""")
+        """An ``ifilerevisionssequence`` instance."""
+    )
 
     indexfile = interfaceutil.Attribute(
         """Path of revlog index file.
 
         TODO this is revlog specific and should not be exposed.
-        """)
+        """
+    )
 
     opener = interfaceutil.Attribute(
         """VFS opener to use to access underlying files used for storage.
 
         TODO this is revlog specific and should not be exposed.
-        """)
+        """
+    )
 
     version = interfaceutil.Attribute(
         """Revlog version number.
 
         TODO this is revlog specific and should not be exposed.
-        """)
+        """
+    )
 
     _generaldelta = interfaceutil.Attribute(
         """Whether generaldelta storage is being used.
 
         TODO this is revlog specific and should not be exposed.
-        """)
+        """
+    )
 
     fulltextcache = interfaceutil.Attribute(
         """Dict with cache of fulltexts.
 
         TODO this doesn't feel appropriate for the storage interface.
-        """)
+        """
+    )
 
     def __len__():
         """Obtain the number of revisions stored for this manifest."""
@@ -1187,10 +1250,12 @@
         Returns True if the fulltext is different from what is stored.
         """
 
-    def emitrevisions(nodes,
-                      nodesorder=None,
-                      revisiondata=False,
-                      assumehaveparentrevisions=False):
+    def emitrevisions(
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+    ):
         """Produce ``irevisiondelta`` describing revisions.
 
         See the documentation for ``ifiledata`` for more.
@@ -1251,8 +1316,9 @@
     def dirlog(d):
         """Obtain a manifest storage instance for a tree."""
 
-    def add(m, transaction, link, p1, p2, added, removed, readtree=None,
-            match=None):
+    def add(
+        m, transaction, link, p1, p2, added, removed, readtree=None, match=None
+    ):
         """Add a revision to storage.
 
         ``m`` is an object conforming to ``imanifestdict``.
@@ -1274,15 +1340,20 @@
         manifest including files that did not match.
         """
 
-    def storageinfo(exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         """Obtain information about storage for this manifest's data.
 
         See ``ifilestorage.storageinfo()`` for a description of this method.
         This one behaves the same way, except for manifest data.
         """
 
+
 class imanifestlog(interfaceutil.Interface):
     """Interface representing a collection of manifest snapshots.
 
@@ -1336,6 +1407,7 @@
         Raises ``error.LookupError`` if the node is not known.
         """
 
+
 class ilocalrepositoryfilestorage(interfaceutil.Interface):
     """Local repository sub-interface providing access to tracked file storage.
 
@@ -1349,6 +1421,7 @@
         The returned type conforms to the ``ifilestorage`` interface.
         """
 
+
 class ilocalrepositorymain(interfaceutil.Interface):
     """Main interface for local repositories.
 
@@ -1359,13 +1432,16 @@
         """Set of requirements that apply to stream clone.
 
         This is actually a class attribute and is shared among all instances.
-        """)
+        """
+    )
 
     supported = interfaceutil.Attribute(
-        """Set of requirements that this repo is capable of opening.""")
+        """Set of requirements that this repo is capable of opening."""
+    )
 
     requirements = interfaceutil.Attribute(
-        """Set of requirements this repo uses.""")
+        """Set of requirements this repo uses."""
+    )
 
     features = interfaceutil.Attribute(
         """Set of "features" this repository supports.
@@ -1383,35 +1459,41 @@
         requirements are stored on-disk and represent requirements to open the
         repository. Features are more run-time capabilities of the repository
         and more granular capabilities (which may be derived from requirements).
-        """)
+        """
+    )
 
     filtername = interfaceutil.Attribute(
-        """Name of the repoview that is active on this repo.""")
+        """Name of the repoview that is active on this repo."""
+    )
 
     wvfs = interfaceutil.Attribute(
-        """VFS used to access the working directory.""")
+        """VFS used to access the working directory."""
+    )
 
     vfs = interfaceutil.Attribute(
         """VFS rooted at the .hg directory.
 
         Used to access repository data not in the store.
-        """)
+        """
+    )
 
     svfs = interfaceutil.Attribute(
         """VFS rooted at the store.
 
         Used to access repository data in the store. Typically .hg/store.
         But can point elsewhere if the store is shared.
-        """)
+        """
+    )
 
     root = interfaceutil.Attribute(
-        """Path to the root of the working directory.""")
-
-    path = interfaceutil.Attribute(
-        """Path to the .hg directory.""")
+        """Path to the root of the working directory."""
+    )
+
+    path = interfaceutil.Attribute("""Path to the .hg directory.""")
 
     origroot = interfaceutil.Attribute(
-        """The filesystem path that was used to construct the repo.""")
+        """The filesystem path that was used to construct the repo."""
+    )
 
     auditor = interfaceutil.Attribute(
         """A pathauditor for the working directory.
@@ -1419,49 +1501,51 @@
         This checks if a path refers to a nested repository.
 
         Operates on the filesystem.
-        """)
+        """
+    )
 
     nofsauditor = interfaceutil.Attribute(
         """A pathauditor for the working directory.
 
         This is like ``auditor`` except it doesn't do filesystem checks.
-        """)
+        """
+    )
 
     baseui = interfaceutil.Attribute(
-        """Original ui instance passed into constructor.""")
-
-    ui = interfaceutil.Attribute(
-        """Main ui instance for this instance.""")
+        """Original ui instance passed into constructor."""
+    )
+
+    ui = interfaceutil.Attribute("""Main ui instance for this instance.""")
 
     sharedpath = interfaceutil.Attribute(
-        """Path to the .hg directory of the repo this repo was shared from.""")
-
-    store = interfaceutil.Attribute(
-        """A store instance.""")
-
-    spath = interfaceutil.Attribute(
-        """Path to the store.""")
-
-    sjoin = interfaceutil.Attribute(
-        """Alias to self.store.join.""")
+        """Path to the .hg directory of the repo this repo was shared from."""
+    )
+
+    store = interfaceutil.Attribute("""A store instance.""")
+
+    spath = interfaceutil.Attribute("""Path to the store.""")
+
+    sjoin = interfaceutil.Attribute("""Alias to self.store.join.""")
 
     cachevfs = interfaceutil.Attribute(
         """A VFS used to access the cache directory.
 
         Typically .hg/cache.
-        """)
+        """
+    )
 
     wcachevfs = interfaceutil.Attribute(
         """A VFS used to access the cache directory dedicated to working copy
 
         Typically .hg/wcache.
-        """)
+        """
+    )
 
     filteredrevcache = interfaceutil.Attribute(
-        """Holds sets of revisions to be filtered.""")
-
-    names = interfaceutil.Attribute(
-        """A ``namespaces`` instance.""")
+        """Holds sets of revisions to be filtered."""
+    )
+
+    names = interfaceutil.Attribute("""A ``namespaces`` instance.""")
 
     def close():
         """Close the handle on this repository."""
@@ -1475,23 +1559,22 @@
     def filtered(name, visibilityexceptions=None):
         """Obtain a named view of this repository."""
 
-    obsstore = interfaceutil.Attribute(
-        """A store of obsolescence data.""")
-
-    changelog = interfaceutil.Attribute(
-        """A handle on the changelog revlog.""")
+    obsstore = interfaceutil.Attribute("""A store of obsolescence data.""")
+
+    changelog = interfaceutil.Attribute("""A handle on the changelog revlog.""")
 
     manifestlog = interfaceutil.Attribute(
         """An instance conforming to the ``imanifestlog`` interface.
 
         Provides access to manifests for the repository.
-        """)
-
-    dirstate = interfaceutil.Attribute(
-        """Working directory state.""")
+        """
+    )
+
+    dirstate = interfaceutil.Attribute("""Working directory state.""")
 
     narrowpats = interfaceutil.Attribute(
-        """Matcher patterns for this repository's narrowspec.""")
+        """Matcher patterns for this repository's narrowspec."""
+    )
 
     def narrowmatch(match=None, includeexact=False):
         """Obtain a matcher for the narrowspec."""
@@ -1662,8 +1745,15 @@
     def checkcommitpatterns(wctx, vdirs, match, status, fail):
         pass
 
-    def commit(text='', user=None, date=None, match=None, force=False,
-               editor=False, extra=None):
+    def commit(
+        text='',
+        user=None,
+        date=None,
+        match=None,
+        force=False,
+        editor=False,
+        extra=None,
+    ):
         """Add a new revision to the repository."""
 
     def commitctx(ctx, error=False, origctx=None):
@@ -1675,8 +1765,15 @@
     def destroyed():
         """Inform the repository that nodes have been destroyed."""
 
-    def status(node1='.', node2=None, match=None, ignored=False,
-               clean=False, unknown=False, listsubrepos=False):
+    def status(
+        node1='.',
+        node2=None,
+        match=None,
+        ignored=False,
+        clean=False,
+        unknown=False,
+        listsubrepos=False,
+    ):
         """Convenience method to call repo[x].status()."""
 
     def addpostdsstatus(ps):
@@ -1703,8 +1800,7 @@
     def checkpush(pushop):
         pass
 
-    prepushoutgoinghooks = interfaceutil.Attribute(
-        """util.hooks instance.""")
+    prepushoutgoinghooks = interfaceutil.Attribute("""util.hooks instance.""")
 
     def pushkey(namespace, key, old, new):
         pass
@@ -1718,10 +1814,13 @@
     def savecommitmessage(text):
         pass
 
-class completelocalrepository(ilocalrepositorymain,
-                              ilocalrepositoryfilestorage):
+
+class completelocalrepository(
+    ilocalrepositorymain, ilocalrepositoryfilestorage
+):
     """Complete interface for a local repository."""
 
+
 class iwireprotocolcommandcacher(interfaceutil.Interface):
     """Represents a caching backend for wire protocol commands.
 
@@ -1804,6 +1903,7 @@
     could wrap the encoded object data in ``wireprototypes.encodedresponse``
     instances to avoid this overhead.
     """
+
     def __enter__():
         """Marks the instance as active.
 
--- a/mercurial/interfaces/util.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/interfaces/util.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,26 +11,24 @@
 
 from __future__ import absolute_import
 
-from .. import (
-    encoding,
-)
+from .. import encoding
 
 if encoding.environ.get('HGREALINTERFACES'):
-    from ..thirdparty.zope import (
-        interface as zi,
-    )
+    from ..thirdparty.zope import interface as zi
 
     Attribute = zi.Attribute
     Interface = zi.Interface
     implementer = zi.implementer
 else:
+
     class Attribute(object):
         def __init__(self, __name__, __doc__=''):
             pass
 
     class Interface(object):
-        def __init__(self, name, bases=(), attrs=None, __doc__=None,
-                 __module__=None):
+        def __init__(
+            self, name, bases=(), attrs=None, __doc__=None, __module__=None
+        ):
             pass
 
     def implementer(*ifaces):
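
This interfaces/util.py hunk keeps zope.interface optional: unless HGREALINTERFACES is set, Attribute, Interface, and implementer are replaced by inert stand-ins, so interface declarations cost nothing at runtime. A sketch of how those stand-ins behave; implementer()'s body is cut off above, so returning the class unchanged is an assumption:

# Sketch of the no-op fallbacks: Attribute is an inert marker and
# implementer() is assumed to hand the class back untouched.
class Attribute(object):
    def __init__(self, __name__, __doc__=''):
        pass

def implementer(*ifaces):
    def wrapper(cls):
        return cls  # assumed: no registration, no enforcement
    return wrapper

@implementer('ianything')  # accepts any placeholder "interface"
class peer(object):
    ui = Attribute("""ui.ui instance""")

assert isinstance(peer.ui, Attribute)  # declarations survive as markers
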
--- a/mercurial/keepalive.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/keepalive.py	Sun Oct 06 09:45:02 2019 -0400
@@ -98,9 +98,7 @@
     urllibcompat,
     util,
 )
-from .utils import (
-    procutil,
-)
+from .utils import procutil
 
 httplib = util.httplib
 urlerr = util.urlerr
@@ -108,16 +106,18 @@
 
 DEBUG = None
 
+
 class ConnectionManager(object):
     """
     The connection manager must be able to:
       * keep track of all existing
       """
+
     def __init__(self):
         self._lock = threading.Lock()
-        self._hostmap = collections.defaultdict(list) # host -> [connection]
-        self._connmap = {} # map connections to host
-        self._readymap = {} # map connection to ready state
+        self._hostmap = collections.defaultdict(list)  # host -> [connection]
+        self._connmap = {}  # map connections to host
+        self._readymap = {}  # map connection to ready state
 
     def add(self, host, connection, ready):
         self._lock.acquire()
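
ConnectionManager above keeps three structures in step under one lock: host to list of connections, connection back to host, and connection to ready state. A sketch of just that bookkeeping, with locking omitted and strings standing in for connection objects:

# Sketch of ConnectionManager's bookkeeping (locking omitted): one
# host fans out to many connections, each connection maps back.
import collections

hostmap = collections.defaultdict(list)  # host -> [connection]
connmap = {}  # connection -> host
readymap = {}  # connection -> ready state

def add(host, connection, ready):
    hostmap[host].append(connection)
    connmap[connection] = host
    readymap[connection] = ready

add('example.com', 'conn1', ready=True)
add('example.com', 'conn2', ready=False)
assert connmap['conn2'] == 'example.com'
assert [c for c in hostmap['example.com'] if readymap[c]] == ['conn1']
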
@@ -169,6 +169,7 @@
         else:
             return dict(self._hostmap)
 
+
 class KeepAliveHandler(object):
     def __init__(self, timeout=None):
         self._cm = ConnectionManager()
@@ -235,8 +236,9 @@
                 # no (working) free connections were found.  Create a new one.
                 h = http_class(host, timeout=self._timeout)
                 if DEBUG:
-                    DEBUG.info("creating new connection to %s (%d)",
-                               host, id(h))
+                    DEBUG.info(
+                        "creating new connection to %s (%d)", host, id(h)
+                    )
                 self._cm.add(host, h, False)
                 self._start_transaction(h, req)
                 r = h.getresponse()
@@ -244,7 +246,8 @@
         # to make the error message slightly more useful.
         except httplib.BadStatusLine as err:
             raise urlerr.urlerror(
-                _('bad HTTP status line: %s') % pycompat.sysbytes(err.line))
+                _('bad HTTP status line: %s') % pycompat.sysbytes(err.line)
+            )
         except (socket.error, httplib.HTTPException) as err:
             raise urlerr.urlerror(err)
 
@@ -280,7 +283,7 @@
             # worked.  We'll check the version below, too.
         except (socket.error, httplib.HTTPException):
             r = None
-        except: # re-raises
+        except:  # re-raises
             # adding this block just in case we've missed
             # something we will still raise the exception, but
             # lets try and close the connection and remove it
@@ -291,8 +294,11 @@
             # that it's now possible this call will raise
             # a DIFFERENT exception
             if DEBUG:
-                DEBUG.error("unexpected exception - closing "
-                            "connection to %s (%d)", host, id(h))
+                DEBUG.error(
+                    "unexpected exception - closing " "connection to %s (%d)",
+                    host,
+                    id(h),
+                )
             self._cm.remove(h)
             h.close()
             raise
@@ -303,8 +309,9 @@
             # the socket has been closed by the server since we
             # last used the connection.
             if DEBUG:
-                DEBUG.info("failed to re-use connection to %s (%d)",
-                           host, id(h))
+                DEBUG.info(
+                    "failed to re-use connection to %s (%d)", host, id(h)
+                )
             r = None
         else:
             if DEBUG:
@@ -330,17 +337,22 @@
             if urllibcompat.hasdata(req):
                 data = urllibcompat.getdata(req)
                 h.putrequest(
-                    req.get_method(), urllibcompat.getselector(req),
-                    **skipheaders)
+                    req.get_method(),
+                    urllibcompat.getselector(req),
+                    **skipheaders
+                )
                 if r'content-type' not in headers:
-                    h.putheader(r'Content-type',
-                                r'application/x-www-form-urlencoded')
+                    h.putheader(
+                        r'Content-type', r'application/x-www-form-urlencoded'
+                    )
                 if r'content-length' not in headers:
                     h.putheader(r'Content-length', r'%d' % len(data))
             else:
                 h.putrequest(
-                    req.get_method(), urllibcompat.getselector(req),
-                    **skipheaders)
+                    req.get_method(),
+                    urllibcompat.getselector(req),
+                    **skipheaders
+                )
         except socket.error as err:
             raise urlerr.urlerror(err)
         for k, v in headers.items():
@@ -356,13 +368,16 @@
         try:
             self.parent.requestscount += 1
             self.parent.sentbytescount += (
-                getattr(h, 'sentbytescount', 0) - oldbytescount)
+                getattr(h, 'sentbytescount', 0) - oldbytescount
+            )
         except AttributeError:
             pass
 
+
 class HTTPHandler(KeepAliveHandler, urlreq.httphandler):
     pass
 
+
 class HTTPResponse(httplib.HTTPResponse):
     # we need to subclass HTTPResponse in order to
     # 1) add readline(), readlines(), and readinto() methods
@@ -382,23 +397,23 @@
     # Both readline and readlines have been stolen with almost no
     # modification from socket.py
 
-
     def __init__(self, sock, debuglevel=0, strict=0, method=None):
         extrakw = {}
         if not pycompat.ispy3:
             extrakw[r'strict'] = True
             extrakw[r'buffering'] = True
-        httplib.HTTPResponse.__init__(self, sock, debuglevel=debuglevel,
-                                      method=method, **extrakw)
+        httplib.HTTPResponse.__init__(
+            self, sock, debuglevel=debuglevel, method=method, **extrakw
+        )
         self.fileno = sock.fileno
         self.code = None
         self.receivedbytescount = 0
         self._rbuf = ''
         self._rbufsize = 8096
-        self._handler = None # inserted by the handler later
-        self._host = None    # (same)
-        self._url = None     # (same)
-        self._connection = None # (same)
+        self._handler = None  # inserted by the handler later
+        self._host = None  # (same)
+        self._url = None  # (same)
+        self._connection = None  # (same)
 
     _raw_read = httplib.HTTPResponse.read
     _raw_readinto = getattr(httplib.HTTPResponse, 'readinto', None)
@@ -413,8 +428,9 @@
             self.fp.close()
             self.fp = None
             if self._handler:
-                self._handler._request_closed(self, self._host,
-                                              self._connection)
+                self._handler._request_closed(
+                    self, self._host, self._connection
+                )
 
     def _close_conn(self):
         self.close()
@@ -470,7 +486,7 @@
                 line = self.fp.readline()
                 i = line.find(';')
                 if i >= 0:
-                    line = line[:i] # strip chunk-extensions
+                    line = line[:i]  # strip chunk-extensions
                 try:
                     chunk_left = int(line, 16)
                 except ValueError:
@@ -496,7 +512,7 @@
                 amt -= chunk_left
 
             # we read the whole chunk, get another
-            self._safe_read(2)      # toss the CRLF at the end of the chunk
+            self._safe_read(2)  # toss the CRLF at the end of the chunk
             chunk_left = None
 
         # read and discard trailer up to the CRLF terminator
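
These two hunks carry the core of chunked transfer decoding: each chunk opens with a hex length line (';'-separated chunk-extensions are stripped), the payload follows, the trailing CRLF is tossed, and a zero length ends the body. A self-contained sketch of that loop over an in-memory stream; plain fp.read stands in for _safe_read, and the buffering and trailer handling above are omitted:

# Sketch of the chunk parsing in the hunks above: strip extensions,
# parse the hex size, read the payload, toss the CRLF, stop at 0.
import io

def read_chunked(fp):
    body = b''
    while True:
        line = fp.readline()
        i = line.find(b';')
        if i >= 0:
            line = line[:i]  # strip chunk-extensions
        chunk_left = int(line, 16)
        if chunk_left == 0:
            break  # last-chunk; any trailers would follow here
        body += fp.read(chunk_left)
        fp.read(2)  # toss the CRLF at the end of the chunk
    return body

wire = io.BytesIO(b'4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n')
assert read_chunked(wire) == b'Wikipedia'
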
@@ -575,7 +591,7 @@
             res = self.read(len(dest))
             if not res:
                 return 0
-            dest[0:len(res)] = res
+            dest[0 : len(res)] = res
             return len(res)
         total = len(dest)
         have = len(self._rbuf)
@@ -598,6 +614,7 @@
         self._rbuf = ''
         return got
 
+
 def safesend(self, str):
     """Send `str' to the server.
 
@@ -642,7 +659,7 @@
             self.sentbytescount += len(str)
     except socket.error as v:
         reraise = True
-        if v.args[0] == errno.EPIPE:      # Broken pipe
+        if v.args[0] == errno.EPIPE:  # Broken pipe
             if self._HTTPConnection__state == httplib._CS_REQ_SENT:
                 self._broken_pipe_resp = None
                 self._broken_pipe_resp = self.getresponse()
@@ -651,9 +668,11 @@
         if reraise:
             raise
 
+
 def wrapgetresponse(cls):
     """Wraps getresponse in cls with a broken-pipe sane version.
     """
+
     def safegetresponse(self):
         # In safesend() we might set the _broken_pipe_resp
         # attribute, in which case the socket has already
@@ -663,9 +682,11 @@
         if r is not None:
             return r
         return cls.getresponse(self)
+
     safegetresponse.__doc__ = cls.getresponse.__doc__
     return safegetresponse
 
+
 class HTTPConnection(httplib.HTTPConnection):
     # url.httpsconnection inherits from this. So when adding/removing
     # attributes, be sure to audit httpsconnection() for unintended
@@ -681,6 +702,7 @@
         self.sentbytescount = 0
         self.receivedbytescount = 0
 
+
 #########################################################################
 #####   TEST FUNCTIONS
 #########################################################################
@@ -721,6 +743,7 @@
     m = md5(foo)
     print(format % ('keepalive readline', node.hex(m.digest())))
 
+
 def comp(N, url):
     print('  making %i connections to:\n  %s' % (N, url))
 
@@ -739,8 +762,10 @@
     print('  TIME: %.3f s' % t2)
     print('  improvement factor: %.2f' % (t1 / t2))
 
+
 def fetch(N, url, delay=0):
     import time
+
     lens = []
     starttime = time.time()
     for i in range(N):
@@ -760,13 +785,17 @@
 
     return diff
 
+
 def test_timeout(url):
     global DEBUG
     dbbackup = DEBUG
+
     class FakeLogger(object):
         def debug(self, msg, *args):
             print(msg % args)
+
         info = warning = error = debug
+
     DEBUG = FakeLogger()
     print("  fetching the file to establish a connection")
     fo = urlreq.urlopen(url)
@@ -805,8 +834,10 @@
     print("performing dropped-connection check")
     test_timeout(url)
 
+
 if __name__ == '__main__':
     import time
+
     try:
         N = int(sys.argv[1])
         url = sys.argv[2]
--- a/mercurial/linelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/linelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,18 +23,16 @@
 import abc
 import struct
 
-from .thirdparty import (
-    attr,
-)
-from . import (
-    pycompat,
-)
+from .thirdparty import attr
+from . import pycompat
 
 _llentry = struct.Struct('>II')
 
+
 class LineLogError(Exception):
     """Error raised when something bad happens internally in linelog."""
 
+
 @attr.s
 class lineinfo(object):
     # Introducing revision of this line.
@@ -44,6 +42,7 @@
     # Private. Offset in the linelog program of this line. Used internally.
     _offset = attr.ib()
 
+
 @attr.s
 class annotateresult(object):
     rev = attr.ib()
@@ -53,6 +52,7 @@
     def __iter__(self):
         return iter(self.lines)
 
+
 class _llinstruction(object):
 
     __metaclass__ = abc.ABCMeta
@@ -90,6 +90,7 @@
           (that is, we've found the end of the file.)
         """
 
+
 class _jge(_llinstruction):
     """If the current rev is greater than or equal to op1, jump to op2."""
 
@@ -101,9 +102,11 @@
         return r'JGE %d %d' % (self._cmprev, self._target)
 
     def __eq__(self, other):
-        return (type(self) == type(other)
-                and self._cmprev == other._cmprev
-                and self._target == other._target)
+        return (
+            type(self) == type(other)
+            and self._cmprev == other._cmprev
+            and self._target == other._target
+        )
 
     def encode(self):
         return _llentry.pack(self._cmprev << 2, self._target)
@@ -113,6 +116,7 @@
             return self._target
         return pc + 1
 
+
 class _jump(_llinstruction):
     """Unconditional jumps are expressed as a JGE with op1 set to 0."""
 
@@ -125,8 +129,7 @@
         return r'JUMP %d' % (self._target)
 
     def __eq__(self, other):
-        return (type(self) == type(other)
-                and self._target == other._target)
+        return type(self) == type(other) and self._target == other._target
 
     def encode(self):
         return _llentry.pack(0, self._target)
@@ -134,6 +137,7 @@
     def execute(self, rev, pc, emit):
         return self._target
 
+
 class _eof(_llinstruction):
     """EOF is expressed as a JGE that always jumps to 0."""
 
@@ -155,6 +159,7 @@
     def execute(self, rev, pc, emit):
         return None
 
+
 class _jl(_llinstruction):
     """If the current rev is less than op1, jump to op2."""
 
@@ -166,9 +171,11 @@
         return r'JL %d %d' % (self._cmprev, self._target)
 
     def __eq__(self, other):
-        return (type(self) == type(other)
-                and self._cmprev == other._cmprev
-                and self._target == other._target)
+        return (
+            type(self) == type(other)
+            and self._cmprev == other._cmprev
+            and self._target == other._target
+        )
 
     def encode(self):
         return _llentry.pack(1 | (self._cmprev << 2), self._target)
@@ -178,6 +185,7 @@
             return self._target
         return pc + 1
 
+
 class _line(_llinstruction):
     """Emit a line."""
 
@@ -191,9 +199,11 @@
         return r'LINE %d %d' % (self._rev, self._origlineno)
 
     def __eq__(self, other):
-        return (type(self) == type(other)
-                and self._rev == other._rev
-                and self._origlineno == other._origlineno)
+        return (
+            type(self) == type(other)
+            and self._rev == other._rev
+            and self._origlineno == other._origlineno
+        )
 
     def encode(self):
         return _llentry.pack(2 | (self._rev << 2), self._origlineno)
@@ -202,6 +212,7 @@
         emit(lineinfo(self._rev, self._origlineno, pc))
         return pc + 1
 
+
 def _decodeone(data, offset):
     """Decode a single linelog instruction from an offset in a buffer."""
     try:
@@ -222,6 +233,7 @@
         return _line(op1, op2)
     raise NotImplementedError('Unimplemented opcode %r' % opcode)
 
+
 class linelog(object):
     """Efficient cache for per-line history information."""
 
@@ -236,25 +248,32 @@
         self._maxrev = maxrev
 
     def __eq__(self, other):
-        return (type(self) == type(other)
-                and self._program == other._program
-                and self._maxrev == other._maxrev)
+        return (
+            type(self) == type(other)
+            and self._program == other._program
+            and self._maxrev == other._maxrev
+        )
 
     def __repr__(self):
         return '<linelog at %s: maxrev=%d size=%d>' % (
-            hex(id(self)), self._maxrev, len(self._program))
+            hex(id(self)),
+            self._maxrev,
+            len(self._program),
+        )
 
     def debugstr(self):
         fmt = r'%%%dd %%s' % len(str(len(self._program)))
         return pycompat.sysstr('\n').join(
-            fmt % (idx, i) for idx, i in enumerate(self._program[1:], 1))
+            fmt % (idx, i) for idx, i in enumerate(self._program[1:], 1)
+        )
 
     @classmethod
     def fromdata(cls, buf):
         if len(buf) % _llentry.size != 0:
             raise LineLogError(
-                "invalid linelog buffer size %d (must be a multiple of %d)" % (
-                    len(buf), _llentry.size))
+                "invalid linelog buffer size %d (must be a multiple of %d)"
+                % (len(buf), _llentry.size)
+            )
         expected = len(buf) / _llentry.size
         fakejge = _decodeone(buf, 0)
         if isinstance(fakejge, _jump):
@@ -263,9 +282,11 @@
             maxrev = fakejge._cmprev
         numentries = fakejge._target
         if expected != numentries:
-            raise LineLogError("corrupt linelog data: claimed"
-                               " %d entries but given data for %d entries" % (
-                                   expected, numentries))
+            raise LineLogError(
+                "corrupt linelog data: claimed"
+                " %d entries but given data for %d entries"
+                % (expected, numentries)
+            )
         instructions = [_eof(0, 0)]
         for offset in pycompat.xrange(1, numentries):
             instructions.append(_decodeone(buf, offset * _llentry.size))
@@ -281,8 +302,9 @@
         self._lastannotate = None
 
     def replacelines_vec(self, rev, a1, a2, blines):
-        return self.replacelines(rev, a1, a2, 0, len(blines),
-                                 _internal_blines=blines)
+        return self.replacelines(
+            rev, a1, a2, 0, len(blines), _internal_blines=blines
+        )
 
     def replacelines(self, rev, a1, a2, b1, b2, _internal_blines=None):
         """Replace lines [a1, a2) with lines [b1, b2)."""
@@ -298,8 +320,9 @@
             #        ar = self.annotate(self._maxrev)
         if a1 > len(ar.lines):
             raise LineLogError(
-                '%d contains %d lines, tried to access line %d' % (
-                    rev, len(ar.lines), a1))
+                '%d contains %d lines, tried to access line %d'
+                % (rev, len(ar.lines), a1)
+            )
         elif a1 == len(ar.lines):
             # Simulated EOF instruction since we're at EOF, which
             # doesn't have a "real" line.
@@ -333,8 +356,9 @@
         if a1 < a2:
             if a2 > len(ar.lines):
                 raise LineLogError(
-                    '%d contains %d lines, tried to access line %d' % (
-                        rev, len(ar.lines), a2))
+                    '%d contains %d lines, tried to access line %d'
+                    % (rev, len(ar.lines), a2)
+                )
             elif a2 == len(ar.lines):
                 endaddr = ar._eof
             else:
@@ -384,8 +408,9 @@
             executed += 1
         if pc is not None:
             raise LineLogError(
-                r'Probably hit an infinite loop in linelog. Program:\n' +
-                self.debugstr())
+                r'Probably hit an infinite loop in linelog. Program:\n'
+                + self.debugstr()
+            )
         ar = annotateresult(rev, lines, lastpc)
         self._lastannotate = ar
         return ar
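
Across the linelog hunks above, every instruction packs into one _llentry: two big-endian 32-bit words, with the opcode in the low two bits of the first word (0 for JGE/JUMP/EOF, 1 for JL, 2 for LINE) and the operand shifted left by two. A round-trip sketch for the LINE form; the pack side mirrors _line.encode() verbatim, while the mask and shift on the decode side are assumed from _decodeone():

# Sketch of the linelog entry packing: '>II' with the opcode in the
# low 2 bits of op1. The decode mask/shift is assumed, not shown above.
import struct

_llentry = struct.Struct('>II')

def encode_line(rev, origlineno):
    return _llentry.pack(2 | (rev << 2), origlineno)  # opcode 2 == LINE

def decode_one(data, offset):
    op1, op2 = _llentry.unpack_from(data, offset)
    return op1 & 0b11, op1 >> 2, op2  # (opcode, operand, target/line)

buf = encode_line(rev=7, origlineno=42)
assert len(buf) == _llentry.size == 8
assert decode_one(buf, 0) == (2, 7, 42)  # LINE 7 42
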
--- a/mercurial/localrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/localrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -74,9 +74,7 @@
     stringutil,
 )
 
-from .revlogutils import (
-    constants as revlogconst,
-)
+from .revlogutils import constants as revlogconst
 
 release = lockmod.release
 urlerr = util.urlerr
@@ -87,9 +85,11 @@
 # - '' for svfs relative paths
 _cachedfiles = set()
 
+
 class _basefilecache(scmutil.filecache):
     """All filecache usage on repo are done for logic that should be unfiltered
     """
+
     def __get__(self, repo, type=None):
         if repo is None:
             return self
@@ -104,8 +104,10 @@
     def set(self, repo, value):
         return super(_basefilecache, self).set(repo.unfiltered(), value)
 
+
 class repofilecache(_basefilecache):
     """filecache for files in .hg but outside of .hg/store"""
+
     def __init__(self, *paths):
         super(repofilecache, self).__init__(*paths)
         for path in paths:
@@ -114,8 +116,10 @@
     def join(self, obj, fname):
         return obj.vfs.join(fname)
 
+
 class storecache(_basefilecache):
     """filecache for files in the store"""
+
     def __init__(self, *paths):
         super(storecache, self).__init__(*paths)
         for path in paths:
@@ -124,8 +128,10 @@
     def join(self, obj, fname):
         return obj.sjoin(fname)
 
+
 class mixedrepostorecache(_basefilecache):
     """filecache for a mix files in .hg/store and outside"""
+
     def __init__(self, *pathsandlocations):
         # scmutil.filecache only uses the path for passing back into our
         # join(), so we can safely pass a list of paths and locations
@@ -138,10 +144,12 @@
             return obj.vfs.join(fname)
         else:
             if location != '':
-                raise error.ProgrammingError('unexpected location: %s' %
-                                             location)
+                raise error.ProgrammingError(
+                    'unexpected location: %s' % location
+                )
             return obj.sjoin(fname)
 
+
 def isfilecached(repo, name):
     """check if a repo has already cached "name" filecache-ed property
 
@@ -152,6 +160,7 @@
         return None, False
     return cacheentry.obj, True
 
+
 class unfilteredpropertycache(util.propertycache):
     """propertycache that apply to unfiltered repo only"""
 
@@ -161,6 +170,7 @@
             return super(unfilteredpropertycache, self).__get__(unfi)
         return getattr(unfi, self.name)
 
+
 class filteredpropertycache(util.propertycache):
     """propertycache that must take filtering in account"""
 
@@ -172,16 +182,27 @@
     """check if a repo has an unfilteredpropertycache value for <name>"""
     return name in vars(repo.unfiltered())
 
+
 def unfilteredmethod(orig):
     """decorate method that always need to be run on unfiltered version"""
+
     def wrapper(repo, *args, **kwargs):
         return orig(repo.unfiltered(), *args, **kwargs)
+
     return wrapper
 
-moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
-              'unbundle'}
+
+moderncaps = {
+    'lookup',
+    'branchmap',
+    'pushkey',
+    'known',
+    'getbundle',
+    'unbundle',
+}
 legacycaps = moderncaps.union({'changegroupsubset'})
 
+
 @interfaceutil.implementer(repository.ipeercommandexecutor)
 class localcommandexecutor(object):
     def __init__(self, peer):
@@ -197,12 +218,14 @@
 
     def callcommand(self, command, args):
         if self._sent:
-            raise error.ProgrammingError('callcommand() cannot be used after '
-                                         'sendcommands()')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used after ' 'sendcommands()'
+            )
 
         if self._closed:
-            raise error.ProgrammingError('callcommand() cannot be used after '
-                                         'close()')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used after ' 'close()'
+            )
 
         # We don't need to support anything fancy. Just call the named
         # method on the peer and return a resolved future.
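
What the hunk above does not change is also worth noting: adjacent
string literals are left unmerged, so a message that was split across
two source lines survives as two literals on one line. Behavior is
unchanged because the compiler concatenates them:

    # Adjacent string literals are joined at compile time, so the
    # split message is a single string at runtime.
    msg = 'callcommand() cannot be used after ' 'sendcommands()'
    assert msg == 'callcommand() cannot be used after sendcommands()'
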
@@ -225,6 +248,7 @@
     def close(self):
         self._closed = True
 
+
 @interfaceutil.implementer(repository.ipeercommands)
 class localpeer(repository.peer):
     '''peer for a local repo; reflects only the most recent API'''
@@ -270,15 +294,25 @@
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
         """Used to test argument passing over the wire"""
-        return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
-                                   pycompat.bytestr(four),
-                                   pycompat.bytestr(five))
-
-    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
-                  **kwargs):
-        chunks = exchange.getbundlechunks(self._repo, source, heads=heads,
-                                          common=common, bundlecaps=bundlecaps,
-                                          **kwargs)[1]
+        return "%s %s %s %s %s" % (
+            one,
+            two,
+            pycompat.bytestr(three),
+            pycompat.bytestr(four),
+            pycompat.bytestr(five),
+        )
+
+    def getbundle(
+        self, source, heads=None, common=None, bundlecaps=None, **kwargs
+    ):
+        chunks = exchange.getbundlechunks(
+            self._repo,
+            source,
+            heads=heads,
+            common=common,
+            bundlecaps=bundlecaps,
+            **kwargs
+        )[1]
         cb = util.chunkbuffer(chunks)
 
         if exchange.bundle2requested(bundlecaps):
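
In the exploded getbundle(...) call above, each argument moves to its
own line, yet **kwargs does not get the trailing comma other call
sites receive. A plausible reason is compatibility: a trailing comma
after **kwargs in a call is a syntax error on Python 2 (and on Python 3
before 3.6), which this codebase still supported. A sketch with
stand-in names:

    def getbundle(fetch, source, heads=None, **kwargs):
        # One argument per line, but no comma after **kwargs:
        # 'fetch(..., **kwargs,)' would not parse on Python 2.
        return fetch(
            source,
            heads=heads,
            **kwargs
        )
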
@@ -305,8 +339,9 @@
         return self._repo.pushkey(namespace, key, old, new)
 
     def stream_out(self):
-        raise error.Abort(_('cannot perform stream clone against local '
-                            'peer'))
+        raise error.Abort(
+            _('cannot perform stream clone against local ' 'peer')
+        )
 
     def unbundle(self, bundle, heads, url):
         """apply a bundle on a repo
@@ -341,8 +376,9 @@
                     bundle2.processbundle(self._repo, b)
                 raise
         except error.PushRaced as exc:
-            raise error.ResponseError(_('push failed:'),
-                                      stringutil.forcebytestr(exc))
+            raise error.ResponseError(
+                _('push failed:'), stringutil.forcebytestr(exc)
+            )
 
     # End of _basewirecommands interface.
 
@@ -353,6 +389,7 @@
 
     # End of peer interface.
 
+
 @interfaceutil.implementer(repository.ipeerlegacycommands)
 class locallegacypeer(localpeer):
     '''peer extension which implements legacy methods too; used for tests with
@@ -370,17 +407,20 @@
         return self._repo.branches(nodes)
 
     def changegroup(self, nodes, source):
-        outgoing = discovery.outgoing(self._repo, missingroots=nodes,
-                                      missingheads=self._repo.heads())
+        outgoing = discovery.outgoing(
+            self._repo, missingroots=nodes, missingheads=self._repo.heads()
+        )
         return changegroup.makechangegroup(self._repo, outgoing, '01', source)
 
     def changegroupsubset(self, bases, heads, source):
-        outgoing = discovery.outgoing(self._repo, missingroots=bases,
-                                      missingheads=heads)
+        outgoing = discovery.outgoing(
+            self._repo, missingroots=bases, missingheads=heads
+        )
         return changegroup.makechangegroup(self._repo, outgoing, '01', source)
 
     # End of baselegacywirecommands interface.
 
+
 # Increment the sub-version when the revlog v2 format changes to lock out old
 # clients.
 REVLOGV2_REQUIREMENT = 'exp-revlogv2.1'
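
Many of the lone '+' lines in this file, like the one before the
comment block above, add no code at all: they restore the two blank
lines PEP 8 expects around top-level definitions. Schematically:

    import os


    def makemain():  # two blank lines above every top-level def ...
        return os.name


    class localthing(object):  # ... and above every top-level class.
        pass
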
@@ -407,6 +447,7 @@
 # set to reflect that the extension knows how to handle that requirements.
 featuresetupfuncs = set()
 
+
 def makelocalrepository(baseui, path, intents=None):
     """Create a local repository object.
 
@@ -522,8 +563,10 @@
         sharedvfs = vfsmod.vfs(sharedpath, realpath=True)
 
         if not sharedvfs.exists():
-            raise error.RepoError(_(b'.hg/sharedpath points to nonexistent '
-                                    b'directory %s') % sharedvfs.base)
+            raise error.RepoError(
+                _(b'.hg/sharedpath points to nonexistent ' b'directory %s')
+                % sharedvfs.base
+            )
 
         features.add(repository.REPO_FEATURE_SHARED_STORAGE)
 
@@ -534,12 +577,14 @@
         cachepath = hgvfs.join(b'cache')
     wcachepath = hgvfs.join(b'wcache')
 
-
     # The store has changed over time and the exact layout is dictated by
     # requirements. The store interface abstracts differences across all
     # of them.
-    store = makestore(requirements, storebasepath,
-                      lambda base: vfsmod.vfs(base, cacheaudited=True))
+    store = makestore(
+        requirements,
+        storebasepath,
+        lambda base: vfsmod.vfs(base, cacheaudited=True),
+    )
     hgvfs.createmode = store.createmode
 
     storevfs = store.vfs
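
The rewritten makestore(...) call above also shows the magic trailing
comma: once a call or collection is split one element per line, the
last element gets a comma as well, so appending an element later
touches a single line. A sketch with stand-in values:

    def makestore(requirements, path, vfstype):  # stand-in signature
        return (requirements, path, vfstype)

    store = makestore(
        {'store', 'fncache', 'dotencode'},
        '/tmp/repo/.hg/store',
        lambda base: base,  # trailing comma after the last argument
    )
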
@@ -563,33 +608,36 @@
     for iface, fn in REPO_INTERFACES:
         # We pass all potentially useful state to give extensions tons of
         # flexibility.
-        typ = fn()(ui=ui,
-                 intents=intents,
-                 requirements=requirements,
-                 features=features,
-                 wdirvfs=wdirvfs,
-                 hgvfs=hgvfs,
-                 store=store,
-                 storevfs=storevfs,
-                 storeoptions=storevfs.options,
-                 cachevfs=cachevfs,
-                 wcachevfs=wcachevfs,
-                 extensionmodulenames=extensionmodulenames,
-                 extrastate=extrastate,
-                 baseclasses=bases)
+        typ = fn()(
+            ui=ui,
+            intents=intents,
+            requirements=requirements,
+            features=features,
+            wdirvfs=wdirvfs,
+            hgvfs=hgvfs,
+            store=store,
+            storevfs=storevfs,
+            storeoptions=storevfs.options,
+            cachevfs=cachevfs,
+            wcachevfs=wcachevfs,
+            extensionmodulenames=extensionmodulenames,
+            extrastate=extrastate,
+            baseclasses=bases,
+        )
 
         if not isinstance(typ, type):
-            raise error.ProgrammingError('unable to construct type for %s' %
-                                         iface)
+            raise error.ProgrammingError(
+                'unable to construct type for %s' % iface
+            )
 
         bases.append(typ)
 
     # type() allows you to use characters in type names that wouldn't be
     # recognized as Python symbols in source code. We abuse that to add
     # rich information about our constructed repo.
-    name = pycompat.sysstr(b'derivedrepo:%s<%s>' % (
-        wdirvfs.base,
-        b','.join(sorted(requirements))))
+    name = pycompat.sysstr(
+        b'derivedrepo:%s<%s>' % (wdirvfs.base, b','.join(sorted(requirements)))
+    )
 
     cls = type(name, tuple(bases), {})
 
@@ -606,7 +654,9 @@
         cachevfs=cachevfs,
         wcachevfs=wcachevfs,
         features=features,
-        intents=intents)
+        intents=intents,
+    )
+
 
 def loadhgrc(ui, wdirvfs, hgvfs, requirements):
     """Load hgrc files/content into a ui instance.
@@ -626,6 +676,7 @@
     except IOError:
         return False
 
+
 def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
     """Perform additional actions after .hg/hgrc is loaded.
 
@@ -651,6 +702,7 @@
             if not ui.hasconfig(b'extensions', name):
                 ui.setconfig(b'extensions', name, b'', source='autoload')
 
+
 def gathersupportedrequirements(ui):
     """Determine the complete set of recognized requirements."""
     # Start with all requirements supported by this file.
@@ -674,6 +726,7 @@
 
     return supported
 
+
 def ensurerequirementsrecognized(requirements, supported):
     """Validate that a set of local requirements is recognized.
 
@@ -696,10 +749,14 @@
 
     if missing:
         raise error.RequirementError(
-            _(b'repository requires features unknown to this Mercurial: %s') %
-            b' '.join(sorted(missing)),
-            hint=_(b'see https://mercurial-scm.org/wiki/MissingRequirement '
-                   b'for more information'))
+            _(b'repository requires features unknown to this Mercurial: %s')
+            % b' '.join(sorted(missing)),
+            hint=_(
+                b'see https://mercurial-scm.org/wiki/MissingRequirement '
+                b'for more information'
+            ),
+        )
+
 
 def ensurerequirementscompatible(ui, requirements):
     """Validates that a set of recognized requirements is mutually compatible.
@@ -715,21 +772,28 @@
     ``error.RepoError`` should be raised on failure.
     """
     if b'exp-sparse' in requirements and not sparse.enabled:
-        raise error.RepoError(_(b'repository is using sparse feature but '
-                                b'sparse is not enabled; enable the '
-                                b'"sparse" extensions to access'))
+        raise error.RepoError(
+            _(
+                b'repository is using sparse feature but '
+                b'sparse is not enabled; enable the '
+                b'"sparse" extensions to access'
+            )
+        )
+
 
 def makestore(requirements, path, vfstype):
     """Construct a storage object for a repository."""
     if b'store' in requirements:
         if b'fncache' in requirements:
-            return storemod.fncachestore(path, vfstype,
-                                         b'dotencode' in requirements)
+            return storemod.fncachestore(
+                path, vfstype, b'dotencode' in requirements
+            )
 
         return storemod.encodedstore(path, vfstype)
 
     return storemod.basicstore(path, vfstype)
 
+
 def resolvestorevfsoptions(ui, requirements, features):
     """Resolve the options to pass to the store vfs opener.
 
@@ -752,16 +816,17 @@
     # meaningful on such old repos.
     if b'revlogv1' in requirements or REVLOGV2_REQUIREMENT in requirements:
         options.update(resolverevlogstorevfsoptions(ui, requirements, features))
-    else: # explicitly mark repo as using revlogv0
+    else:  # explicitly mark repo as using revlogv0
         options['revlogv0'] = True
 
     writecopiesto = ui.config('experimental', 'copies.write-to')
     copiesextramode = ('changeset-only', 'compatibility')
-    if (writecopiesto in copiesextramode):
+    if writecopiesto in copiesextramode:
         options['copies-storage'] = 'extra'
 
     return options
 
+
 def resolverevlogstorevfsoptions(ui, requirements, features):
     """Resolve opener options specific to revlogs."""
 
@@ -781,15 +846,17 @@
     if chunkcachesize is not None:
         options[b'chunkcachesize'] = chunkcachesize
 
-    deltabothparents = ui.configbool(b'storage',
-                                     b'revlog.optimize-delta-parent-choice')
+    deltabothparents = ui.configbool(
+        b'storage', b'revlog.optimize-delta-parent-choice'
+    )
     options[b'deltabothparents'] = deltabothparents
 
     lazydelta = ui.configbool(b'storage', b'revlog.reuse-external-delta')
     lazydeltabase = False
     if lazydelta:
-        lazydeltabase = ui.configbool(b'storage',
-                                      b'revlog.reuse-external-delta-parent')
+        lazydeltabase = ui.configbool(
+            b'storage', b'revlog.reuse-external-delta-parent'
+        )
     if lazydeltabase is None:
         lazydeltabase = not scmutil.gddeltaconfig(ui)
     options[b'lazydelta'] = lazydelta
@@ -799,16 +866,15 @@
     if 0 <= chainspan:
         options[b'maxdeltachainspan'] = chainspan
 
-    mmapindexthreshold = ui.configbytes(b'experimental',
-                                        b'mmapindexthreshold')
+    mmapindexthreshold = ui.configbytes(b'experimental', b'mmapindexthreshold')
     if mmapindexthreshold is not None:
         options[b'mmapindexthreshold'] = mmapindexthreshold
 
     withsparseread = ui.configbool(b'experimental', b'sparse-read')
-    srdensitythres = float(ui.config(b'experimental',
-                                     b'sparse-read.density-threshold'))
-    srmingapsize = ui.configbytes(b'experimental',
-                                  b'sparse-read.min-gap-size')
+    srdensitythres = float(
+        ui.config(b'experimental', b'sparse-read.density-threshold')
+    )
+    srmingapsize = ui.configbytes(b'experimental', b'sparse-read.min-gap-size')
     options[b'with-sparse-read'] = withsparseread
     options[b'sparse-read-density-threshold'] = srdensitythres
     options[b'sparse-read-min-gap-size'] = srmingapsize
@@ -854,10 +920,12 @@
 
     return options
 
+
 def makemain(**kwargs):
     """Produce a type conforming to ``ilocalrepositorymain``."""
     return localrepository
 
+
 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
 class revlogfilestorage(object):
     """File storage when using revlogs."""
@@ -868,6 +936,7 @@
 
         return filelog.filelog(self.svfs, path)
 
+
 @interfaceutil.implementer(repository.ilocalrepositoryfilestorage)
 class revlognarrowfilestorage(object):
     """File storage when using revlogs and narrow files."""
@@ -878,6 +947,7 @@
 
         return filelog.narrowfilelog(self.svfs, path, self._storenarrowmatch)
 
+
 def makefilestorage(requirements, features, **kwargs):
     """Produce a type conforming to ``ilocalrepositoryfilestorage``."""
     features.add(repository.REPO_FEATURE_REVLOG_FILE_STORAGE)
@@ -888,6 +958,7 @@
     else:
         return revlogfilestorage
 
+
 # List of repository interfaces and factory functions for them. Each
 # will be called in order during ``makelocalrepository()`` to iteratively
 # derive the final type for a local repository instance. We capture the
@@ -898,6 +969,7 @@
     (repository.ilocalrepositoryfilestorage, lambda: makefilestorage),
 ]
 
+
 @interfaceutil.implementer(repository.ilocalrepositorymain)
 class localrepository(object):
     """Main class for representing local repositories.
@@ -935,7 +1007,7 @@
         'relshared',
         'dotencode',
         'exp-sparse',
-        'internal-phase'
+        'internal-phase',
     }
 
     # list of prefix for file which can be written without 'wlock'
@@ -958,9 +1030,22 @@
         'bisect.state',
     }
 
-    def __init__(self, baseui, ui, origroot, wdirvfs, hgvfs, requirements,
-                 supportedrequirements, sharedpath, store, cachevfs, wcachevfs,
-                 features, intents=None):
+    def __init__(
+        self,
+        baseui,
+        ui,
+        origroot,
+        wdirvfs,
+        hgvfs,
+        requirements,
+        supportedrequirements,
+        sharedpath,
+        store,
+        cachevfs,
+        wcachevfs,
+        features,
+        intents=None,
+    ):
         """Create a new local repository instance.
 
         Most callers should use ``hg.repository()``, ``localrepo.instance()``,
@@ -1032,8 +1117,9 @@
 
         self.filtername = None
 
-        if (self.ui.configbool('devel', 'all-warnings') or
-            self.ui.configbool('devel', 'check-locks')):
+        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
+            'devel', 'check-locks'
+        ):
             self.vfs.audit = self._getvfsward(self.vfs.audit)
         # A list of callback to shape the phase if no data were found.
         # Callback are in the form: func(repo, roots) --> processed root.
@@ -1045,11 +1131,12 @@
         self.spath = self.store.path
         self.svfs = self.store.vfs
         self.sjoin = self.store.join
-        if (self.ui.configbool('devel', 'all-warnings') or
-            self.ui.configbool('devel', 'check-locks')):
-            if util.safehasattr(self.svfs, 'vfs'): # this is filtervfs
+        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
+            'devel', 'check-locks'
+        ):
+            if util.safehasattr(self.svfs, 'vfs'):  # this is filtervfs
                 self.svfs.vfs.audit = self._getsvfsward(self.svfs.vfs.audit)
-            else: # standard vfs
+            else:  # standard vfs
                 self.svfs.audit = self._getsvfsward(self.svfs.audit)
 
         self._dirstatevalidatewarned = False
@@ -1091,26 +1178,32 @@
     def _getvfsward(self, origfunc):
         """build a ward for self.vfs"""
         rref = weakref.ref(self)
+
         def checkvfs(path, mode=None):
             ret = origfunc(path, mode=mode)
             repo = rref()
-            if (repo is None
+            if (
+                repo is None
                 or not util.safehasattr(repo, '_wlockref')
-                or not util.safehasattr(repo, '_lockref')):
+                or not util.safehasattr(repo, '_lockref')
+            ):
                 return
             if mode in (None, 'r', 'rb'):
                 return
             if path.startswith(repo.path):
                 # truncate name relative to the repository (.hg)
-                path = path[len(repo.path) + 1:]
+                path = path[len(repo.path) + 1 :]
             if path.startswith('cache/'):
                 msg = 'accessing cache with vfs instead of cachevfs: "%s"'
                 repo.ui.develwarn(msg % path, stacklevel=3, config="cache-vfs")
             if path.startswith('journal.') or path.startswith('undo.'):
                 # journal is covered by 'lock'
                 if repo._currentlock(repo._lockref) is None:
-                    repo.ui.develwarn('write with no lock: "%s"' % path,
-                                      stacklevel=3, config='check-locks')
+                    repo.ui.develwarn(
+                        'write with no lock: "%s"' % path,
+                        stacklevel=3,
+                        config='check-locks',
+                    )
             elif repo._currentlock(repo._wlockref) is None:
                 # rest of vfs files are covered by 'wlock'
                 #
@@ -1118,14 +1211,19 @@
                 for prefix in self._wlockfreeprefix:
                     if path.startswith(prefix):
                         return
-                repo.ui.develwarn('write with no wlock: "%s"' % path,
-                                  stacklevel=3, config='check-locks')
+                repo.ui.develwarn(
+                    'write with no wlock: "%s"' % path,
+                    stacklevel=3,
+                    config='check-locks',
+                )
             return ret
+
         return checkvfs
 
     def _getsvfsward(self, origfunc):
         """build a ward for self.svfs"""
         rref = weakref.ref(self)
+
         def checksvfs(path, mode=None):
             ret = origfunc(path, mode=mode)
             repo = rref()
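
The '+ 1 :' slices above look unusual but follow PEP 8: when a slice
bound is a compound expression, the colon is treated like a binary
operator and spaced symmetrically. For example:

    path = 'cache/branch2-served'
    base = 'cache'
    # Compound lower bound, so the colon gets a space on each side.
    rel = path[len(base) + 1 :]
    assert rel == 'branch2-served'
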
@@ -1135,11 +1233,13 @@
                 return
             if path.startswith(repo.sharedpath):
                 # truncate name relative to the repository (.hg)
-                path = path[len(repo.sharedpath) + 1:]
+                path = path[len(repo.sharedpath) + 1 :]
             if repo._currentlock(repo._lockref) is None:
-                repo.ui.develwarn('write with no lock: "%s"' % path,
-                                  stacklevel=4)
+                repo.ui.develwarn(
+                    'write with no lock: "%s"' % path, stacklevel=4
+                )
             return ret
+
         return checksvfs
 
     def close(self):
@@ -1152,8 +1252,9 @@
     def _restrictcapabilities(self, caps):
         if self.ui.configbool('experimental', 'bundle2-advertise'):
             caps = set(caps)
-            capsblob = bundle2.encodecaps(bundle2.getrepocaps(self,
-                                                              role='client'))
+            capsblob = bundle2.encodecaps(
+                bundle2.getrepocaps(self, role='client')
+            )
             caps.add('bundle2=' + urlreq.quote(capsblob))
         return caps
 
@@ -1173,14 +1274,15 @@
     def nofsauditor(self):
         # This is only used by context.basectx.match in order to detect
         # files in subrepos.
-        return pathutil.pathauditor(self.root, callback=self._checknested,
-                                    realfs=False, cached=True)
+        return pathutil.pathauditor(
+            self.root, callback=self._checknested, realfs=False, cached=True
+        )
 
     def _checknested(self, path):
         """Determine if path is a legal nested repository."""
         if not path.startswith(self.root):
             return False
-        subpath = path[len(self.root) + 1:]
+        subpath = path[len(self.root) + 1 :]
         normsubpath = util.pconvert(subpath)
 
         # XXX: Checking against the current working copy is wrong in
@@ -1209,13 +1311,13 @@
                     return True
                 else:
                     sub = ctx.sub(prefix)
-                    return sub.checknested(subpath[len(prefix) + 1:])
+                    return sub.checknested(subpath[len(prefix) + 1 :])
             else:
                 parts.pop()
         return False
 
     def peer(self):
-        return localpeer(self) # not cached to avoid reference cycle
+        return localpeer(self)  # not cached to avoid reference cycle
 
     def unfiltered(self):
         """Return unfiltered version of the repository
@@ -1236,13 +1338,17 @@
         In other words, there is always only one level of `repoview` "filtering".
         """
         if self._extrafilterid is not None and '%' not in name:
-            name = name + '%'  + self._extrafilterid
+            name = name + '%' + self._extrafilterid
 
         cls = repoview.newtype(self.unfiltered().__class__)
         return cls(self, name, visibilityexceptions)
 
-    @mixedrepostorecache(('bookmarks', 'plain'), ('bookmarks.current', 'plain'),
-                         ('bookmarks', ''), ('00changelog.i', ''))
+    @mixedrepostorecache(
+        ('bookmarks', 'plain'),
+        ('bookmarks.current', 'plain'),
+        ('bookmarks', ''),
+        ('00changelog.i', ''),
+    )
     def _bookmarks(self):
         # Since the multiple files involved in the transaction cannot be
         # written atomically (with current repository format), there is a race
@@ -1297,7 +1403,7 @@
 
     def _refreshchangelog(self):
         """make sure the in memory changelog match the on-disk one"""
-        if ('changelog' in vars(self) and self.currenttransaction() is None):
+        if 'changelog' in vars(self) and self.currenttransaction() is None:
             del self.changelog
 
     @property
@@ -1331,8 +1437,9 @@
         """Extension point for wrapping the dirstate per-repo."""
         sparsematchfn = lambda: sparse.matcher(self)
 
-        return dirstate.dirstate(self.vfs, self.ui, self.root,
-                                 self._dirstatevalidate, sparsematchfn)
+        return dirstate.dirstate(
+            self.vfs, self.ui, self.root, self._dirstatevalidate, sparsematchfn
+        )
 
     def _dirstatevalidate(self, node):
         try:
@@ -1341,8 +1448,10 @@
         except error.LookupError:
             if not self._dirstatevalidatewarned:
                 self._dirstatevalidatewarned = True
-                self.ui.warn(_("warning: ignoring unknown"
-                               " working parent %s!\n") % short(node))
+                self.ui.warn(
+                    _("warning: ignoring unknown" " working parent %s!\n")
+                    % short(node)
+                )
             return nullid
 
     @storecache(narrowspec.FILENAME)
@@ -1398,9 +1507,11 @@
             return changeid
         if isinstance(changeid, slice):
             # wdirrev isn't contiguous so the slice shouldn't include it
-            return [self[i]
-                    for i in pycompat.xrange(*changeid.indices(len(self)))
-                    if i not in self.changelog.filteredrevs]
+            return [
+                self[i]
+                for i in pycompat.xrange(*changeid.indices(len(self)))
+                if i not in self.changelog.filteredrevs
+            ]
         try:
             if isinstance(changeid, int):
                 node = self.changelog.node(changeid)
@@ -1421,18 +1532,20 @@
                     node = changeid
                     rev = self.changelog.rev(changeid)
                 except error.FilteredLookupError:
-                    changeid = hex(changeid) # for the error message
+                    changeid = hex(changeid)  # for the error message
                     raise
                 except LookupError:
                     # check if it might have come from damaged dirstate
                     #
                     # XXX we could avoid the unfiltered if we had a recognizable
                     # exception for filtered changeset access
-                    if (self.local()
-                        and changeid in self.unfiltered().dirstate.parents()):
+                    if (
+                        self.local()
+                        and changeid in self.unfiltered().dirstate.parents()
+                    ):
                         msg = _("working directory has unknown parent '%s'!")
                         raise error.Abort(msg % short(changeid))
-                    changeid = hex(changeid) # for the error message
+                    changeid = hex(changeid)  # for the error message
                     raise
 
             elif len(changeid) == 40:
@@ -1440,17 +1553,20 @@
                 rev = self.changelog.rev(node)
             else:
                 raise error.ProgrammingError(
-                        "unsupported changeid '%s' of type %s" %
-                        (changeid, type(changeid)))
+                    "unsupported changeid '%s' of type %s"
+                    % (changeid, type(changeid))
+                )
 
             return context.changectx(self, rev, node)
 
         except (error.FilteredIndexError, error.FilteredLookupError):
-            raise error.FilteredRepoLookupError(_("filtered revision '%s'")
-                                                % pycompat.bytestr(changeid))
+            raise error.FilteredRepoLookupError(
+                _("filtered revision '%s'") % pycompat.bytestr(changeid)
+            )
         except (IndexError, LookupError):
             raise error.RepoLookupError(
-                _("unknown revision '%s'") % pycompat.bytestr(changeid))
+                _("unknown revision '%s'") % pycompat.bytestr(changeid)
+            )
         except error.WdirUnsupported:
             return context.workingctx(self)
 
@@ -1516,9 +1632,12 @@
         ``{name: definitionstring}``.
         '''
         if user:
-            m = revset.matchany(self.ui, specs,
-                                lookup=revset.lookupfn(self),
-                                localalias=localalias)
+            m = revset.matchany(
+                self.ui,
+                specs,
+                lookup=revset.lookupfn(self),
+                localalias=localalias,
+            )
         else:
             m = revset.matchany(None, specs, localalias=localalias)
         return m(self)
@@ -1589,7 +1708,6 @@
         # be one tagtype for all such "virtual" tags?  Or is the status
         # quo fine?
 
-
         # map tag name to (node, hist)
         alltags = tagsmod.findglobaltags(self.ui, self)
         # map tag name to tag type
@@ -1606,8 +1724,12 @@
             if node != nullid:
                 tags[encoding.tolocal(name)] = node
         tags['tip'] = self.changelog.tip()
-        tagtypes = dict([(encoding.tolocal(name), value)
-                         for (name, value) in tagtypes.iteritems()])
+        tagtypes = dict(
+            [
+                (encoding.tolocal(name), value)
+                for (name, value) in tagtypes.iteritems()
+            ]
+        )
         return (tags, tagtypes)
 
     def tagtype(self, tagname):
@@ -1741,8 +1863,9 @@
     def filectx(self, path, changeid=None, fileid=None, changectx=None):
         """changeid must be a changeset revision, if specified.
            fileid can be a file revision or node."""
-        return context.filectx(self, path, changeid, fileid,
-                               changectx=changectx)
+        return context.filectx(
+            self, path, changeid, fileid, changectx=changectx
+        )
 
     def getcwd(self):
         return self.dirstate.getcwd()
@@ -1762,7 +1885,7 @@
                 for name, filterfn in self._datafilters.iteritems():
                     if cmd.startswith(name):
                         fn = filterfn
-                        params = cmd[len(name):].lstrip()
+                        params = cmd[len(name) :].lstrip()
                         break
                 if not fn:
                     fn = lambda s, c, **kwargs: procutil.filter(s, c)
@@ -1810,8 +1933,9 @@
         if 'l' in flags:
             self.wvfs.symlink(data, filename)
         else:
-            self.wvfs.write(filename, data, backgroundclose=backgroundclose,
-                            **kwargs)
+            self.wvfs.write(
+                filename, data, backgroundclose=backgroundclose, **kwargs
+            )
             if 'x' in flags:
                 self.wvfs.setflags(filename, False, True)
             else:
@@ -1833,8 +1957,9 @@
         return None
 
     def transaction(self, desc, report=None):
-        if (self.ui.configbool('devel', 'all-warnings')
-                or self.ui.configbool('devel', 'check-locks')):
+        if self.ui.configbool('devel', 'all-warnings') or self.ui.configbool(
+            'devel', 'check-locks'
+        ):
             if self._currentlock(self._lockref) is None:
                 raise error.ProgrammingError('transaction requires locking')
         tr = self.currenttransaction()
@@ -1845,7 +1970,8 @@
         if self.svfs.exists("journal"):
             raise error.RepoError(
                 _("abandoned transaction found"),
-                hint=_("run 'hg recover' to clean up transaction"))
+                hint=_("run 'hg recover' to clean up transaction"),
+            )
 
         idbase = "%.40f#%f" % (random.random(), time.time())
         ha = hex(hashlib.sha1(idbase).digest())
@@ -1858,7 +1984,7 @@
             rp = report
         else:
             rp = self.ui.warn
-        vfsmap = {'plain': self.vfs, 'store': self.svfs} # root of .hg/
+        vfsmap = {'plain': self.vfs, 'store': self.svfs}  # root of .hg/
         # we must avoid cyclic reference between repo and transaction.
         reporef = weakref.ref(self)
         # Code to track tag movement
@@ -1899,6 +2025,7 @@
         shouldtracktags = self.ui.configbool('experimental', 'hook-track-tags')
         if desc != 'strip' and shouldtracktags:
             oldheads = self.changelog.headrevs()
+
             def tracktags(tr2):
                 repo = reporef()
                 oldfnodes = tagsmod.fnoderevs(repo.ui, repo, oldheads)
@@ -1909,12 +2036,14 @@
                 changes = tagsmod.difftags(repo.ui, repo, oldfnodes, newfnodes)
                 if changes:
                     tr2.hookargs['tag_moved'] = '1'
-                    with repo.vfs('changes/tags.changes', 'w',
-                                  atomictemp=True) as changesfile:
+                    with repo.vfs(
+                        'changes/tags.changes', 'w', atomictemp=True
+                    ) as changesfile:
                         # note: we do not register the file with the
                         # transaction because we need it to still exist
                         # when the transaction is closed (for txnclose hooks)
                         tagsmod.writediff(changesfile, changes)
+
         def validate(tr2):
             """will run pre-closing hooks"""
             # XXX the transaction API is a bit lacking here so we take a hacky
@@ -1936,8 +2065,9 @@
             tracktags(tr2)
             repo = reporef()
 
-            r = repo.ui.configsuboptions('experimental',
-                                         'single-head-per-branch')
+            r = repo.ui.configsuboptions(
+                'experimental', 'single-head-per-branch'
+            )
             singlehead, singleheadsub = r
             if singlehead:
                 accountclosed = singleheadsub.get("account-closed-heads", False)
@@ -1946,19 +2076,27 @@
                 for name, (old, new) in sorted(tr.changes['bookmarks'].items()):
                     args = tr.hookargs.copy()
                     args.update(bookmarks.preparehookargs(name, old, new))
-                    repo.hook('pretxnclose-bookmark', throw=True,
-                              **pycompat.strkwargs(args))
+                    repo.hook(
+                        'pretxnclose-bookmark',
+                        throw=True,
+                        **pycompat.strkwargs(args)
+                    )
             if hook.hashook(repo.ui, 'pretxnclose-phase'):
                 cl = repo.unfiltered().changelog
                 for rev, (old, new) in tr.changes['phases'].items():
                     args = tr.hookargs.copy()
                     node = hex(cl.node(rev))
                     args.update(phases.preparehookargs(node, old, new))
-                    repo.hook('pretxnclose-phase', throw=True,
-                              **pycompat.strkwargs(args))
-
-            repo.hook('pretxnclose', throw=True,
-                      **pycompat.strkwargs(tr.hookargs))
+                    repo.hook(
+                        'pretxnclose-phase',
+                        throw=True,
+                        **pycompat.strkwargs(args)
+                    )
+
+            repo.hook(
+                'pretxnclose', throw=True, **pycompat.strkwargs(tr.hookargs)
+            )
+
         def releasefn(tr, success):
             repo = reporef()
             if repo is None:
@@ -1983,15 +2121,19 @@
 
                 repo.invalidate(clearfilecache=True)
 
-        tr = transaction.transaction(rp, self.svfs, vfsmap,
-                                     "journal",
-                                     "undo",
-                                     aftertrans(renames),
-                                     self.store.createmode,
-                                     validator=validate,
-                                     releasefn=releasefn,
-                                     checkambigfiles=_cachedfiles,
-                                     name=desc)
+        tr = transaction.transaction(
+            rp,
+            self.svfs,
+            vfsmap,
+            "journal",
+            "undo",
+            aftertrans(renames),
+            self.store.createmode,
+            validator=validate,
+            releasefn=releasefn,
+            checkambigfiles=_cachedfiles,
+            name=desc,
+        )
         tr.changes['origrepolen'] = len(self)
         tr.changes['obsmarkers'] = set()
         tr.changes['phases'] = {}
@@ -2003,6 +2145,7 @@
         # outdated when running hooks. As fncache is used for streaming clone,
         # this is not expected to break anything that happens during the hooks.
         tr.addfinalize('flush-fncache', self.store.write)
+
         def txnclosehook(tr2):
             """To be run if transaction is successful, will schedule a hook run
             """
@@ -2019,8 +2162,11 @@
                     for name, (old, new) in bmchanges:
                         args = tr.hookargs.copy()
                         args.update(bookmarks.preparehookargs(name, old, new))
-                        repo.hook('txnclose-bookmark', throw=False,
-                                  **pycompat.strkwargs(args))
+                        repo.hook(
+                            'txnclose-bookmark',
+                            throw=False,
+                            **pycompat.strkwargs(args)
+                        )
 
                 if hook.hashook(repo.ui, 'txnclose-phase'):
                     cl = repo.unfiltered().changelog
@@ -2029,23 +2175,32 @@
                         args = tr.hookargs.copy()
                         node = hex(cl.node(rev))
                         args.update(phases.preparehookargs(node, old, new))
-                        repo.hook('txnclose-phase', throw=False,
-                                  **pycompat.strkwargs(args))
-
-                repo.hook('txnclose', throw=False,
-                          **pycompat.strkwargs(hookargs))
+                        repo.hook(
+                            'txnclose-phase',
+                            throw=False,
+                            **pycompat.strkwargs(args)
+                        )
+
+                repo.hook(
+                    'txnclose', throw=False, **pycompat.strkwargs(hookargs)
+                )
+
             reporef()._afterlock(hookfunc)
+
         tr.addfinalize('txnclose-hook', txnclosehook)
         # Include a leading "-" to make it happen before the transaction summary
         # reports registered via scmutil.registersummarycallback() whose names
         # are 00-txnreport etc. That way, the caches will be warm when the
         # callbacks run.
         tr.addpostclose('-warm-cache', self._buildcacheupdater(tr))
+
         def txnaborthook(tr2):
             """To be run if transaction is aborted
             """
-            reporef().hook('txnabort', throw=False,
-                           **pycompat.strkwargs(tr2.hookargs))
+            reporef().hook(
+                'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
+            )
+
         tr.addabort('txnabort-hook', txnaborthook)
         # avoid eager cache invalidation. in-memory data should be identical
         # to stored data if transaction has no error.
@@ -2055,14 +2210,16 @@
         return tr
 
     def _journalfiles(self):
-        return ((self.svfs, 'journal'),
-                (self.svfs, 'journal.narrowspec'),
-                (self.vfs, 'journal.narrowspec.dirstate'),
-                (self.vfs, 'journal.dirstate'),
-                (self.vfs, 'journal.branch'),
-                (self.vfs, 'journal.desc'),
-                (bookmarks.bookmarksvfs(self), 'journal.bookmarks'),
-                (self.svfs, 'journal.phaseroots'))
+        return (
+            (self.svfs, 'journal'),
+            (self.svfs, 'journal.narrowspec'),
+            (self.vfs, 'journal.narrowspec.dirstate'),
+            (self.vfs, 'journal.dirstate'),
+            (self.vfs, 'journal.branch'),
+            (self.vfs, 'journal.desc'),
+            (bookmarks.bookmarksvfs(self), 'journal.bookmarks'),
+            (self.svfs, 'journal.phaseroots'),
+        )
 
     def undofiles(self):
         return [(vfs, undoname(x)) for vfs, x in self._journalfiles()]
@@ -2072,25 +2229,31 @@
         self.dirstate.savebackup(None, 'journal.dirstate')
         narrowspec.savewcbackup(self, 'journal.narrowspec.dirstate')
         narrowspec.savebackup(self, 'journal.narrowspec')
-        self.vfs.write("journal.branch",
-                          encoding.fromlocal(self.dirstate.branch()))
-        self.vfs.write("journal.desc",
-                          "%d\n%s\n" % (len(self), desc))
+        self.vfs.write(
+            "journal.branch", encoding.fromlocal(self.dirstate.branch())
+        )
+        self.vfs.write("journal.desc", "%d\n%s\n" % (len(self), desc))
         bookmarksvfs = bookmarks.bookmarksvfs(self)
-        bookmarksvfs.write("journal.bookmarks",
-                           bookmarksvfs.tryread("bookmarks"))
-        self.svfs.write("journal.phaseroots",
-                           self.svfs.tryread("phaseroots"))
+        bookmarksvfs.write(
+            "journal.bookmarks", bookmarksvfs.tryread("bookmarks")
+        )
+        self.svfs.write("journal.phaseroots", self.svfs.tryread("phaseroots"))
 
     def recover(self):
         with self.lock():
             if self.svfs.exists("journal"):
                 self.ui.status(_("rolling back interrupted transaction\n"))
-                vfsmap = {'': self.svfs,
-                          'plain': self.vfs,}
-                transaction.rollback(self.svfs, vfsmap, "journal",
-                                     self.ui.warn,
-                                     checkambigfiles=_cachedfiles)
+                vfsmap = {
+                    '': self.svfs,
+                    'plain': self.vfs,
+                }
+                transaction.rollback(
+                    self.svfs,
+                    vfsmap,
+                    "journal",
+                    self.ui.warn,
+                    checkambigfiles=_cachedfiles,
+                )
                 self.invalidate()
                 return True
             else:
@@ -2112,7 +2275,7 @@
         finally:
             release(dsguard, lock, wlock)
 
-    @unfilteredmethod # Until we get smarter cache management
+    @unfilteredmethod  # Until we get smarter cache management
     def _rollback(self, dryrun, force, dsguard):
         ui = self.ui
         try:
@@ -2123,21 +2286,26 @@
             oldtip = oldlen - 1
 
             if detail and ui.verbose:
-                msg = (_('repository tip rolled back to revision %d'
-                         ' (undo %s: %s)\n')
-                       % (oldtip, desc, detail))
+                msg = _(
+                    'repository tip rolled back to revision %d'
+                    ' (undo %s: %s)\n'
+                ) % (oldtip, desc, detail)
             else:
-                msg = (_('repository tip rolled back to revision %d'
-                         ' (undo %s)\n')
-                       % (oldtip, desc))
+                msg = _(
+                    'repository tip rolled back to revision %d' ' (undo %s)\n'
+                ) % (oldtip, desc)
         except IOError:
             msg = _('rolling back unknown transaction\n')
             desc = None
 
         if not force and self['.'] != self['tip'] and desc == 'commit':
             raise error.Abort(
-                _('rollback of last commit while not checked out '
-                  'may lose data'), hint=_('use -f to force'))
+                _(
+                    'rollback of last commit while not checked out '
+                    'may lose data'
+                ),
+                hint=_('use -f to force'),
+            )
 
         ui.status(msg)
         if dryrun:
@@ -2146,8 +2314,9 @@
         parents = self.dirstate.parents()
         self.destroying()
         vfsmap = {'plain': self.vfs, '': self.svfs}
-        transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn,
-                             checkambigfiles=_cachedfiles)
+        transaction.rollback(
+            self.svfs, vfsmap, 'undo', ui.warn, checkambigfiles=_cachedfiles
+        )
         bookmarksvfs = bookmarks.bookmarksvfs(self)
         if bookmarksvfs.exists('undo.bookmarks'):
             bookmarksvfs.rename('undo.bookmarks', 'bookmarks', checkambig=True)
@@ -2167,17 +2336,25 @@
                 branch = self.vfs.read('undo.branch')
                 self.dirstate.setbranch(encoding.tolocal(branch))
             except IOError:
-                ui.warn(_('named branch could not be reset: '
-                          'current branch is still \'%s\'\n')
-                        % self.dirstate.branch())
+                ui.warn(
+                    _(
+                        'named branch could not be reset: '
+                        'current branch is still \'%s\'\n'
+                    )
+                    % self.dirstate.branch()
+                )
 
             parents = tuple([p.rev() for p in self[None].parents()])
             if len(parents) > 1:
-                ui.status(_('working directory now based on '
-                            'revisions %d and %d\n') % parents)
+                ui.status(
+                    _('working directory now based on ' 'revisions %d and %d\n')
+                    % parents
+                )
             else:
-                ui.status(_('working directory now based on '
-                            'revision %d\n') % parents)
+                ui.status(
+                    _('working directory now based on ' 'revision %d\n')
+                    % parents
+                )
             mergemod.mergestate.clean(self, self['.'].node())
 
         # TODO: if we know which new heads may result from this rollback, pass
@@ -2195,9 +2372,11 @@
         """
         # we must avoid cyclic reference between repo and transaction.
         reporef = weakref.ref(self)
+
         def updater(tr):
             repo = reporef()
             repo.updatecaches(tr)
+
         return updater
 
     @unfilteredmethod
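
Formatting aside, the hunk above preserves the idiom this file uses to
avoid a repository/transaction reference cycle: the updater closure
holds a weakref.ref to the repo rather than the repo itself. A
self-contained sketch of the pattern, with illustrative names:

    import weakref

    class repo(object):
        def updatecaches(self, tr):
            pass

        def _buildcacheupdater(self):
            # A weak reference, so the closure does not keep the
            # repository alive (and vice versa) across the transaction.
            reporef = weakref.ref(self)

            def updater(tr):
                r = reporef()  # None once the repository is collected
                if r is not None:
                    r.updatecaches(tr)

            return updater
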
@@ -2288,14 +2467,16 @@
         (e.g. incomplete fncache causes unintentional failure, but
         redundant one doesn't).
         '''
-        unfiltered = self.unfiltered() # all file caches are stored unfiltered
+        unfiltered = self.unfiltered()  # all file caches are stored unfiltered
         for k in list(self._filecache.keys()):
             # dirstate is invalidated separately in invalidatedirstate()
             if k == 'dirstate':
                 continue
-            if (k == 'changelog' and
-                self.currenttransaction() and
-                self.changelog._delayed):
+            if (
+                k == 'changelog'
+                and self.currenttransaction()
+                and self.changelog._delayed
+            ):
                 # The changelog object may store unwritten revisions. We don't
                 # want to lose them.
                 # TODO: Solve the problem instead of working around it.
@@ -2330,8 +2511,17 @@
                 continue
             ce.refresh()
 
-    def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc,
-              inheritchecker=None, parentenvvar=None):
+    def _lock(
+        self,
+        vfs,
+        lockname,
+        wait,
+        releasefn,
+        acquirefn,
+        desc,
+        inheritchecker=None,
+        parentenvvar=None,
+    ):
         parentlock = None
         # the contents of parentenvvar are used by the underlying lock to
         # determine whether it can be inherited
@@ -2346,12 +2536,19 @@
         # internal config: ui.signal-safe-lock
         signalsafe = self.ui.configbool('ui', 'signal-safe-lock')
 
-        l = lockmod.trylock(self.ui, vfs, lockname, timeout, warntimeout,
-                            releasefn=releasefn,
-                            acquirefn=acquirefn, desc=desc,
-                            inheritchecker=inheritchecker,
-                            parentlock=parentlock,
-                            signalsafe=signalsafe)
+        l = lockmod.trylock(
+            self.ui,
+            vfs,
+            lockname,
+            timeout,
+            warntimeout,
+            releasefn=releasefn,
+            acquirefn=acquirefn,
+            desc=desc,
+            inheritchecker=inheritchecker,
+            parentlock=parentlock,
+            signalsafe=signalsafe,
+        )
         return l
 
     def _afterlock(self, callback):
@@ -2364,7 +2561,7 @@
             if l and l.held:
                 l.postrelease.append(callback)
                 break
-        else: # no lock have been found.
+        else:  # no lock has been found.
             callback()
 
     def lock(self, wait=True):
@@ -2379,19 +2576,22 @@
             l.lock()
             return l
 
-        l = self._lock(vfs=self.svfs,
-                       lockname="lock",
-                       wait=wait,
-                       releasefn=None,
-                       acquirefn=self.invalidate,
-                       desc=_('repository %s') % self.origroot)
+        l = self._lock(
+            vfs=self.svfs,
+            lockname="lock",
+            wait=wait,
+            releasefn=None,
+            acquirefn=self.invalidate,
+            desc=_('repository %s') % self.origroot,
+        )
         self._lockref = weakref.ref(l)
         return l
 
     def _wlockchecktransaction(self):
         if self.currenttransaction() is not None:
             raise error.LockInheritanceContractViolation(
-                'wlock cannot be inherited in the middle of a transaction')
+                'wlock cannot be inherited in the middle of a transaction'
+            )
 
     def wlock(self, wait=True):
         '''Lock the non-store parts of the repository (everything under
@@ -2408,8 +2608,10 @@
 
         # We do not need to check for non-waiting lock acquisition.  Such
         # acquisition would not cause dead-lock as they would just fail.
-        if wait and (self.ui.configbool('devel', 'all-warnings')
-                     or self.ui.configbool('devel', 'check-locks')):
+        if wait and (
+            self.ui.configbool('devel', 'all-warnings')
+            or self.ui.configbool('devel', 'check-locks')
+        ):
             if self._currentlock(self._lockref) is not None:
                 self.ui.develwarn('"wlock" acquired after "lock"')
 
@@ -2421,11 +2623,16 @@
 
             self._filecache['dirstate'].refresh()
 
-        l = self._lock(self.vfs, "wlock", wait, unlock,
-                       self.invalidatedirstate, _('working directory of %s') %
-                       self.origroot,
-                       inheritchecker=self._wlockchecktransaction,
-                       parentenvvar='HG_WLOCK_LOCKER')
+        l = self._lock(
+            self.vfs,
+            "wlock",
+            wait,
+            unlock,
+            self.invalidatedirstate,
+            _('working directory of %s') % self.origroot,
+            inheritchecker=self._wlockchecktransaction,
+            parentenvvar='HG_WLOCK_LOCKER',
+        )
         self._wlockref = weakref.ref(l)
         return l
 
@@ -2442,8 +2649,16 @@
         """Returns the wlock if it's held, or None if it's not."""
         return self._currentlock(self._wlockref)
 
-    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist,
-                    includecopymeta):
+    def _filecommit(
+        self,
+        fctx,
+        manifest1,
+        manifest2,
+        linkrev,
+        tr,
+        changelist,
+        includecopymeta,
+    ):
         """
         commit an individual file as part of a larger transaction
         """
@@ -2455,10 +2670,13 @@
             node = fctx.filenode()
             if node in [fparent1, fparent2]:
                 self.ui.debug('reusing %s filelog entry\n' % fname)
-                if ((fparent1 != nullid and
-                     manifest1.flags(fname) != fctx.flags()) or
-                    (fparent2 != nullid and
-                     manifest2.flags(fname) != fctx.flags())):
+                if (
+                    fparent1 != nullid
+                    and manifest1.flags(fname) != fctx.flags()
+                ) or (
+                    fparent2 != nullid
+                    and manifest2.flags(fname) != fctx.flags()
+                ):
                     changelist.append(fname)
                 return node
 
@@ -2488,8 +2706,8 @@
             cnode = manifest1.get(cfname)
             newfparent = fparent2
 
-            if manifest2: # branch merge
-                if fparent2 == nullid or cnode is None: # copied on remote side
+            if manifest2:  # branch merge
+                if fparent2 == nullid or cnode is None:  # copied on remote side
                     if cfname in manifest2:
                         cnode = manifest2[cfname]
                         newfparent = fparent1
@@ -2510,8 +2728,13 @@
                     meta["copyrev"] = hex(cnode)
                 fparent1, fparent2 = nullid, newfparent
             else:
-                self.ui.warn(_("warning: can't find ancestor for '%s' "
-                               "copied from '%s'!\n") % (fname, cfname))
+                self.ui.warn(
+                    _(
+                        "warning: can't find ancestor for '%s' "
+                        "copied from '%s'!\n"
+                    )
+                    % (fname, cfname)
+                )
 
         elif fparent1 == nullid:
             fparent1, fparent2 = fparent2, nullid
@@ -2545,7 +2768,7 @@
                     continue
                 if f in status.deleted:
                     fail(f, _('file not found!'))
-                if f in vdirs: # visited directory
+                if f in vdirs:  # visited directory
                     d = f + '/'
                     for mf in matched:
                         if mf.startswith(d):
@@ -2556,8 +2779,16 @@
                     fail(f, _("file not tracked!"))
 
     @unfilteredmethod
-    def commit(self, text="", user=None, date=None, match=None, force=False,
-               editor=False, extra=None):
+    def commit(
+        self,
+        text="",
+        user=None,
+        date=None,
+        match=None,
+        force=False,
+        editor=False,
+        extra=None,
+    ):
         """Add a new revision to current repository.
 
         Revision information is gathered from the working directory,
@@ -2584,28 +2815,40 @@
             merge = len(wctx.parents()) > 1
 
             if not force and merge and not match.always():
-                raise error.Abort(_('cannot partially commit a merge '
-                                   '(do not specify files or patterns)'))
+                raise error.Abort(
+                    _(
+                        'cannot partially commit a merge '
+                        '(do not specify files or patterns)'
+                    )
+                )
 
             status = self.status(match=match, clean=force)
             if force:
-                status.modified.extend(status.clean) # mq may commit clean files
+                status.modified.extend(
+                    status.clean
+                )  # mq may commit clean files
 
             # check subrepos
             subs, commitsubs, newstate = subrepoutil.precommit(
-                self.ui, wctx, status, match, force=force)
+                self.ui, wctx, status, match, force=force
+            )
 
             # make sure all explicit patterns are matched
             if not force:
                 self.checkcommitpatterns(wctx, vdirs, match, status, fail)
 
-            cctx = context.workingcommitctx(self, status,
-                                            text, user, date, extra)
+            cctx = context.workingcommitctx(
+                self, status, text, user, date, extra
+            )
 
             # internal config: ui.allowemptycommit
-            allowemptycommit = (wctx.branch() != wctx.p1().branch()
-                                or extra.get('close') or merge or cctx.files()
-                                or self.ui.configbool('ui', 'allowemptycommit'))
+            allowemptycommit = (
+                wctx.branch() != wctx.p1().branch()
+                or extra.get('close')
+                or merge
+                or cctx.files()
+                or self.ui.configbool('ui', 'allowemptycommit')
+            )
             if not allowemptycommit:
                 return None
 
@@ -2617,7 +2860,7 @@
 
             if editor:
                 cctx._text = editor(self, cctx, subs)
-            edited = (text != cctx._text)
+            edited = text != cctx._text
 
             # Save commit message in case this transaction gets rolled back
             # (e.g. by a pretxncommit hook).  Leave the content alone on
@@ -2629,8 +2872,10 @@
                 uipathfn = scmutil.getuipathfn(self)
                 for s in sorted(commitsubs):
                     sub = wctx.sub(s)
-                    self.ui.status(_('committing subrepository %s\n') %
-                                   uipathfn(subrepoutil.subrelpath(sub)))
+                    self.ui.status(
+                        _('committing subrepository %s\n')
+                        % uipathfn(subrepoutil.subrelpath(sub))
+                    )
                     sr = sub.commit(cctx._text, user, date)
                     newstate[s] = (newstate[s][0], sr)
                 subrepoutil.writestate(self, newstate)
@@ -2638,26 +2883,30 @@
             p1, p2 = self.dirstate.parents()
             hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
             try:
-                self.hook("precommit", throw=True, parent1=hookp1,
-                          parent2=hookp2)
+                self.hook(
+                    "precommit", throw=True, parent1=hookp1, parent2=hookp2
+                )
                 with self.transaction('commit'):
                     ret = self.commitctx(cctx, True)
                     # update bookmarks, dirstate and mergestate
                     bookmarks.update(self, [p1, p2], ret)
                     cctx.markcommitted(ret)
                     ms.reset()
-            except: # re-raises
+            except:  # re-raises
                 if edited:
                     self.ui.write(
-                        _('note: commit message saved in %s\n') % msgfn)
+                        _('note: commit message saved in %s\n') % msgfn
+                    )
                 raise
 
         def commithook():
             # hack for commands that use a temporary commit (e.g. histedit)
             # temporary commit got stripped before hook release
             if self.changelog.hasnode(ret):
-                self.hook("commit", node=hex(ret), parent1=hookp1,
-                          parent2=hookp2)
+                self.hook(
+                    "commit", node=hex(ret), parent1=hookp1, parent2=hookp2
+                )
+
         self._afterlock(commithook)
         return ret
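
The commithook above is handed to _afterlock() so the 'commit' hook only fires
once the repository locks are released. A minimal standalone model of that
queue-then-run pattern (lockish is an invented stand-in, not the real
localrepository plumbing):

    class lockish(object):
        def __init__(self):
            self._afterlockcallbacks = []

        def _afterlock(self, callback):
            # queue the callback while the lock is (notionally) held
            self._afterlockcallbacks.append(callback)

        def release(self):
            callbacks, self._afterlockcallbacks = self._afterlockcallbacks, []
            for cb in callbacks:
                cb()  # hooks run here, after the lock is gone

    def commithook():
        print('commit hook fires after release')

    repo = lockish()
    repo._afterlock(commithook)
    repo.release()
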
 
@@ -2683,8 +2932,10 @@
 
         writecopiesto = self.ui.config('experimental', 'copies.write-to')
         writefilecopymeta = writecopiesto != 'changeset-only'
-        writechangesetcopy = (writecopiesto in
-                              ('changeset-only', 'compatibility'))
+        writechangesetcopy = writecopiesto in (
+            'changeset-only',
+            'compatibility',
+        )
         p1copies, p2copies = None, None
         if writechangesetcopy:
             p1copies = ctx.p1copies()
@@ -2725,19 +2976,27 @@
                             removed.append(f)
                         else:
                             added.append(f)
-                            m[f] = self._filecommit(fctx, m1, m2, linkrev,
-                                                    trp, changed,
-                                                    writefilecopymeta)
+                            m[f] = self._filecommit(
+                                fctx,
+                                m1,
+                                m2,
+                                linkrev,
+                                trp,
+                                changed,
+                                writefilecopymeta,
+                            )
                             m.setflag(f, fctx.flags())
                     except OSError:
-                        self.ui.warn(_("trouble committing %s!\n") %
-                                     uipathfn(f))
+                        self.ui.warn(
+                            _("trouble committing %s!\n") % uipathfn(f)
+                        )
                         raise
                     except IOError as inst:
                         errcode = getattr(inst, 'errno', errno.ENOENT)
                         if error or errcode and errcode != errno.ENOENT:
-                            self.ui.warn(_("trouble committing %s!\n") %
-                                         uipathfn(f))
+                            self.ui.warn(
+                                _("trouble committing %s!\n") % uipathfn(f)
+                            )
                         raise
 
                 # update manifest
@@ -2746,6 +3005,7 @@
                 for f in drop:
                     del m[f]
                 if p2.rev() != nullrev:
+
                     @util.cachefunc
                     def mas():
                         p1n = p1.node()
@@ -2754,6 +3014,7 @@
                         if not cahs:
                             cahs = [nullrev]
                         return [self[r].manifest() for r in cahs]
+
                     def deletionfromparent(f):
                         # When a file is removed relative to p1 in a merge, this
                         # function determines whether the absence is due to a
@@ -2776,14 +3037,18 @@
                         # it does something very similar by comparing filelog
                         # nodes.
                         if f in m1:
-                            return (f not in m2
-                                    and all(f in ma and ma.find(f) == m1.find(f)
-                                            for ma in mas()))
+                            return f not in m2 and all(
+                                f in ma and ma.find(f) == m1.find(f)
+                                for ma in mas()
+                            )
                         elif f in m2:
-                            return all(f in ma and ma.find(f) == m2.find(f)
-                                       for ma in mas())
+                            return all(
+                                f in ma and ma.find(f) == m2.find(f)
+                                for ma in mas()
+                            )
                         else:
                             return True
+
                     removed = [f for f in removed if not deletionfromparent(f)]
 
                 files = changed + removed
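
A toy model of the deletionfromparent() test above, with plain dicts standing
in for manifest objects and bare node strings standing in for the
(node, flags) pairs returned by find():

    def deletionfromparent(f, m1, m2, ancestors):
        # True means f's absence from the merge result is inherited from a
        # parent, so the commit should not list f as removed.
        if f in m1:
            return f not in m2 and all(
                f in ma and ma[f] == m1[f] for ma in ancestors
            )
        elif f in m2:
            return all(f in ma and ma[f] == m2[f] for ma in ancestors)
        else:
            return True

    m1 = {'a': 'node1'}           # p1 still has the file...
    m2 = {}                       # ...p2 deleted it...
    ancestors = [{'a': 'node1'}]  # ...and p1 matches the common ancestor,
    assert deletionfromparent('a', m1, m2, ancestors)  # so not "removed" here
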
@@ -2794,8 +3059,10 @@
                     # exact same commit can be reproduced later on convert.
                     md = m1.diff(m, scmutil.matchfiles(self, ctx.files()))
                 if not files and md:
-                    self.ui.debug('not reusing manifest (no file change in '
-                                  'changelog, but manifest differs)\n')
+                    self.ui.debug(
+                        'not reusing manifest (no file change in '
+                        'changelog, but manifest differs)\n'
+                    )
                 if files or md:
                     self.ui.note(_("committing manifest\n"))
                     # we're using narrowmatch here since it's already applied at
@@ -2805,17 +3072,26 @@
                     # at this point is merges, and we already error out in the
                     # case where the merge has files outside of the narrowspec,
                     # so this is safe.
-                    mn = mctx.write(trp, linkrev,
-                                    p1.manifestnode(), p2.manifestnode(),
-                                    added, drop, match=self.narrowmatch())
+                    mn = mctx.write(
+                        trp,
+                        linkrev,
+                        p1.manifestnode(),
+                        p2.manifestnode(),
+                        added,
+                        drop,
+                        match=self.narrowmatch(),
+                    )
 
                     if writechangesetcopy:
-                        filesadded = [f for f in changed
-                                      if not (f in m1 or f in m2)]
+                        filesadded = [
+                            f for f in changed if not (f in m1 or f in m2)
+                        ]
                         filesremoved = removed
                 else:
-                    self.ui.debug('reusing manifest from p1 (listed files '
-                                  'actually unchanged)\n')
+                    self.ui.debug(
+                        'reusing manifest from p1 (listed files '
+                        'actually unchanged)\n'
+                    )
                     mn = p1.manifestnode()
             else:
                 self.ui.debug('reusing manifest from p1 (no file change)\n')
@@ -2838,13 +3114,29 @@
             # update changelog
             self.ui.note(_("committing changelog\n"))
             self.changelog.delayupdate(tr)
-            n = self.changelog.add(mn, files, ctx.description(),
-                                   trp, p1.node(), p2.node(),
-                                   user, ctx.date(), ctx.extra().copy(),
-                                   p1copies, p2copies, filesadded, filesremoved)
+            n = self.changelog.add(
+                mn,
+                files,
+                ctx.description(),
+                trp,
+                p1.node(),
+                p2.node(),
+                user,
+                ctx.date(),
+                ctx.extra().copy(),
+                p1copies,
+                p2copies,
+                filesadded,
+                filesremoved,
+            )
             xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
-            self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
-                      parent2=xp2)
+            self.hook(
+                'pretxncommit',
+                throw=True,
+                node=hex(n),
+                parent1=xp1,
+                parent2=xp2,
+            )
             # set the new commit in its proper phase
             targetphase = subrepoutil.newcommitphase(self.ui, ctx)
             if targetphase:
@@ -2906,12 +3198,20 @@
         # tag cache retrieval" case to work.
         self.invalidate()
 
-    def status(self, node1='.', node2=None, match=None,
-               ignored=False, clean=False, unknown=False,
-               listsubrepos=False):
+    def status(
+        self,
+        node1='.',
+        node2=None,
+        match=None,
+        ignored=False,
+        clean=False,
+        unknown=False,
+        listsubrepos=False,
+    ):
         '''a convenience method that calls node1.status(node2)'''
-        return self[node1].status(node2, match, ignored, clean, unknown,
-                                  listsubrepos)
+        return self[node1].status(
+            node2, match, ignored, clean, unknown, listsubrepos
+        )
 
     def addpostdsstatus(self, ps):
         """Add a callback to run within the wlock, at the point at which status
@@ -3039,9 +3339,17 @@
             return False
         self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
         ret = pushkey.push(self, namespace, key, old, new)
+
         def runhook():
-            self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
-                      ret=ret)
+            self.hook(
+                'pushkey',
+                namespace=namespace,
+                key=key,
+                old=old,
+                new=new,
+                ret=ret,
+            )
+
         self._afterlock(runhook)
         return ret
 
@@ -3054,9 +3362,13 @@
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
         '''used to test argument passing over the wire'''
-        return "%s %s %s %s %s" % (one, two, pycompat.bytestr(three),
-                                   pycompat.bytestr(four),
-                                   pycompat.bytestr(five))
+        return "%s %s %s %s %s" % (
+            one,
+            two,
+            pycompat.bytestr(three),
+            pycompat.bytestr(four),
+            pycompat.bytestr(five),
+        )
 
     def savecommitmessage(self, text):
         fp = self.vfs('last-message.txt', 'wb')
@@ -3064,11 +3376,13 @@
             fp.write(text)
         finally:
             fp.close()
-        return self.pathto(fp.name[len(self.root) + 1:])
+        return self.pathto(fp.name[len(self.root) + 1 :])
+
 
 # used to avoid circular references so destructors work
 def aftertrans(files):
     renamefiles = [tuple(t) for t in files]
+
     def a():
         for vfs, src, dest in renamefiles:
             # if src and dest refer to the same file, vfs.rename is a no-op,
@@ -3077,15 +3391,18 @@
             vfs.tryunlink(dest)
             try:
                 vfs.rename(src, dest)
-            except OSError: # journal file does not yet exist
+            except OSError:  # journal file does not yet exist
                 pass
+
     return a
 
+
 def undoname(fn):
     base, name = os.path.split(fn)
     assert name.startswith('journal')
     return os.path.join(base, name.replace('journal', 'undo', 1))
 
+
 def instance(ui, path, create, intents=None, createopts=None):
     localpath = util.urllocalpath(path)
     if create:
@@ -3093,9 +3410,11 @@
 
     return makelocalrepository(ui, localpath, intents=intents)
 
+
 def islocal(path):
     return True
 
+
 def defaultcreateopts(ui, createopts=None):
     """Populate the default creation options for a repository.
 
@@ -3110,6 +3429,7 @@
 
     return createopts
 
+
 def newreporequirements(ui, createopts):
     """Determine the set of requirements for a new local repository.
 
@@ -3128,12 +3448,19 @@
         return requirements
 
     if 'backend' not in createopts:
-        raise error.ProgrammingError('backend key not present in createopts; '
-                                     'was defaultcreateopts() called?')
+        raise error.ProgrammingError(
+            'backend key not present in createopts; '
+            'was defaultcreateopts() called?'
+        )
 
     if createopts['backend'] != 'revlogv1':
-        raise error.Abort(_('unable to determine repository requirements for '
-                            'storage backend: %s') % createopts['backend'])
+        raise error.Abort(
+            _(
+                'unable to determine repository requirements for '
+                'storage backend: %s'
+            )
+            % createopts['backend']
+        )
 
     requirements = {'revlogv1'}
     if ui.configbool('format', 'usestore'):
@@ -3145,11 +3472,16 @@
 
     compengine = ui.config('format', 'revlog-compression')
     if compengine not in util.compengines:
-        raise error.Abort(_('compression engine %s defined by '
-                            'format.revlog-compression not available') %
-                          compengine,
-                          hint=_('run "hg debuginstall" to list available '
-                                 'compression engines'))
+        raise error.Abort(
+            _(
+                'compression engine %s defined by '
+                'format.revlog-compression not available'
+            )
+            % compengine,
+            hint=_(
+                'run "hg debuginstall" to list available ' 'compression engines'
+            ),
+        )
 
     # zlib is the historical default and doesn't need an explicit requirement.
     elif compengine == 'zstd':
@@ -3189,6 +3521,7 @@
 
     return requirements
 
+
 def filterknowncreateopts(ui, createopts):
     """Filters a dict of repo creation options against options that are known.
 
@@ -3215,6 +3548,7 @@
 
     return {k: v for k, v in createopts.items() if k not in known}
 
+
 def createrepository(ui, path, createopts=None):
     """Create a new repository in a vfs.
 
@@ -3247,14 +3581,19 @@
     unknownopts = filterknowncreateopts(ui, createopts)
 
     if not isinstance(unknownopts, dict):
-        raise error.ProgrammingError('filterknowncreateopts() did not return '
-                                     'a dict')
+        raise error.ProgrammingError(
+            'filterknowncreateopts() did not return ' 'a dict'
+        )
 
     if unknownopts:
-        raise error.Abort(_('unable to create repository because of unknown '
-                            'creation option: %s') %
-                          ', '.join(sorted(unknownopts)),
-                          hint=_('is a required extension not loaded?'))
+        raise error.Abort(
+            _(
+                'unable to create repository because of unknown '
+                'creation option: %s'
+            )
+            % ', '.join(sorted(unknownopts)),
+            hint=_('is a required extension not loaded?'),
+        )
 
     requirements = newreporequirements(ui, createopts=createopts)
 
@@ -3273,8 +3612,10 @@
             except (IOError, ValueError) as e:
                 # ValueError is raised on Windows if the drive letters differ
                 # on each path.
-                raise error.Abort(_('cannot calculate relative path'),
-                                  hint=stringutil.forcebytestr(e))
+                raise error.Abort(
+                    _('cannot calculate relative path'),
+                    hint=stringutil.forcebytestr(e),
+                )
 
     if not wdirvfs.exists():
         wdirvfs.makedirs()
@@ -3295,9 +3636,11 @@
         #
         # The revlog header has version 2, which won't be recognized by
         # such old clients.
-        hgvfs.append(b'00changelog.i',
-                     b'\0\0\0\2 dummy changelog to prevent using the old repo '
-                     b'layout')
+        hgvfs.append(
+            b'00changelog.i',
+            b'\0\0\0\2 dummy changelog to prevent using the old repo '
+            b'layout',
+        )
 
     scmutil.writerequires(hgvfs, requirements)
 
@@ -3309,6 +3652,7 @@
         shared = b'\n'.join(sorted(createopts['shareditems'])) + b'\n'
         hgvfs.write(b'shared', shared)
 
+
 def poisonrepository(repo):
     """Poison a repository instance so it can no longer be used."""
     # Perform any cleanup on the instance.
@@ -3324,8 +3668,9 @@
             if item == r'close':
                 return object.__getattribute__(self, item)
 
-            raise error.ProgrammingError('repo instances should not be used '
-                                         'after unshare')
+            raise error.ProgrammingError(
+                'repo instances should not be used ' 'after unshare'
+            )
 
         def close(self):
             pass
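
The poisoning above answers every attribute lookup except close() with an
error. A self-contained demo of the same trick; swapping __class__ on a live
instance is assumed here for illustration (the diff only shows the poisoned
class body), and RuntimeError stands in for error.ProgrammingError:

    class poisoned(object):
        def __getattribute__(self, item):
            if item == 'close':
                return object.__getattribute__(self, item)
            raise RuntimeError('instance should not be used any more')

        def close(self):
            pass

    class victim(object):
        def ping(self):
            return 'pong'

    v = victim()
    v.__class__ = poisoned
    v.close()  # still allowed
    try:
        v.ping()
    except RuntimeError as e:
        print(e)
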
--- a/mercurial/lock.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/lock.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,9 +24,8 @@
     util,
 )
 
-from .utils import (
-    procutil,
-)
+from .utils import procutil
+
 
 def _getlockprefix():
     """Return a string which is used to differentiate pid namespaces
@@ -44,6 +43,7 @@
                 raise
     return result
 
+
 @contextlib.contextmanager
 def _delayedinterrupt():
     """Block signal interrupt while doing something critical
@@ -60,11 +60,13 @@
     orighandlers = {}
 
     def raiseinterrupt(num):
-        if (num == getattr(signal, 'SIGINT', None) or
-            num == getattr(signal, 'CTRL_C_EVENT', None)):
+        if num == getattr(signal, 'SIGINT', None) or num == getattr(
+            signal, 'CTRL_C_EVENT', None
+        ):
             raise KeyboardInterrupt
         else:
             raise error.SignalInterrupt
+
     def catchterm(num, frame):
         if blocked:
             assertedsigs.append(num)
@@ -82,7 +84,7 @@
             for num in orighandlers:
                 signal.signal(num, catchterm)
         except ValueError:
-            pass # in a thread? no luck
+            pass  # in a thread? no luck
 
         blocked = True
         yield
@@ -95,13 +97,14 @@
             for num, handler in orighandlers.items():
                 signal.signal(num, handler)
         except ValueError:
-            pass # in a thread?
+            pass  # in a thread?
 
     # re-raise interrupt exception if any, which may be shadowed by a new
     # interrupt that occurred while re-raising the first one
     if assertedsigs:
         raiseinterrupt(assertedsigs[0])
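
A compact standalone variant of the idea behind _delayedinterrupt(),
simplified to SIGINT only (the real version also covers CTRL_C_EVENT,
termination signals, and being called from a non-main thread):

    import contextlib
    import signal

    @contextlib.contextmanager
    def delayedinterrupt():
        pending = []

        def record(num, frame):
            pending.append(num)  # note the signal, do not raise yet

        orig = signal.signal(signal.SIGINT, record)
        try:
            yield
        finally:
            signal.signal(signal.SIGINT, orig)
        if pending:  # re-raise only once the critical section completed
            raise KeyboardInterrupt

    with delayedinterrupt():
        pass  # critical section; Ctrl-C here is deferred, not lost
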
 
+
 def trylock(ui, vfs, lockname, timeout, warntimeout, *args, **kwargs):
     """return an acquired lock or raise an a LockHeld exception
 
@@ -113,12 +116,18 @@
         # show more details for new-style locks
         if ':' in locker:
             host, pid = locker.split(":", 1)
-            msg = (_("waiting for lock on %s held by process %r on host %r\n")
-                   % (pycompat.bytestr(l.desc), pycompat.bytestr(pid),
-                      pycompat.bytestr(host)))
+            msg = _(
+                "waiting for lock on %s held by process %r on host %r\n"
+            ) % (
+                pycompat.bytestr(l.desc),
+                pycompat.bytestr(pid),
+                pycompat.bytestr(host),
+            )
         else:
-            msg = (_("waiting for lock on %s held by %r\n")
-                   % (l.desc, pycompat.bytestr(locker)))
+            msg = _("waiting for lock on %s held by %r\n") % (
+                l.desc,
+                pycompat.bytestr(locker),
+            )
         printer(msg)
 
     l = lock(vfs, lockname, 0, *args, dolock=False, **kwargs)
@@ -141,8 +150,9 @@
             if delay == warningidx:
                 printwarning(ui.warn, inst.locker)
             if timeout <= delay:
-                raise error.LockHeld(errno.ETIMEDOUT, inst.filename,
-                                     l.desc, inst.locker)
+                raise error.LockHeld(
+                    errno.ETIMEDOUT, inst.filename, l.desc, inst.locker
+                )
             time.sleep(1)
             delay += 1
 
@@ -156,6 +166,7 @@
         l.acquirefn()
     return l
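
Schematically, trylock() is a poll loop: try a nonblocking acquisition, print
a "waiting for lock" message once warntimeout seconds have passed, and give up
after timeout seconds. A toy rendering with an invented acquire callable and
exception type:

    import time

    class LockHeld(Exception):
        pass

    def trylock(acquire, timeout, warntimeout):
        delay = 0
        while True:
            try:
                return acquire()
            except LockHeld:
                if delay == warntimeout:
                    print('waiting for lock held by another process')
                if timeout <= delay:
                    raise  # mirrors the ETIMEDOUT LockHeld above
                time.sleep(1)
                delay += 1

    attempts = iter([LockHeld(), LockHeld(), 'the lock'])

    def acquire():
        a = next(attempts)
        if isinstance(a, Exception):
            raise a
        return a

    print(trylock(acquire, timeout=10, warntimeout=1))
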
 
+
 class lock(object):
     '''An advisory lock held by one process to control access to a set
     of files.  Non-cooperating processes or incorrectly written scripts
@@ -176,9 +187,19 @@
 
     _host = None
 
-    def __init__(self, vfs, fname, timeout=-1, releasefn=None, acquirefn=None,
-                 desc=None, inheritchecker=None, parentlock=None,
-                 signalsafe=True, dolock=True):
+    def __init__(
+        self,
+        vfs,
+        fname,
+        timeout=-1,
+        releasefn=None,
+        acquirefn=None,
+        desc=None,
+        inheritchecker=None,
+        parentlock=None,
+        signalsafe=True,
+        dolock=True,
+    ):
         self.vfs = vfs
         self.f = fname
         self.held = 0
@@ -194,7 +215,7 @@
             self._maybedelayedinterrupt = _delayedinterrupt
         else:
             self._maybedelayedinterrupt = util.nullcontextmanager
-        self.postrelease  = []
+        self.postrelease = []
         self.pid = self._getpid()
         if dolock:
             self.delay = self.lock()
@@ -209,9 +230,11 @@
 
     def __del__(self):
         if self.held:
-            warnings.warn(r"use lock.release instead of del lock",
-                    category=DeprecationWarning,
-                    stacklevel=2)
+            warnings.warn(
+                r"use lock.release instead of del lock",
+                category=DeprecationWarning,
+                stacklevel=2,
+            )
 
             # ensure the lock will be removed
             # even if recursive locking did occur
@@ -235,8 +258,9 @@
                     if timeout > 0:
                         timeout -= 1
                     continue
-                raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc,
-                                     inst.locker)
+                raise error.LockHeld(
+                    errno.ETIMEDOUT, inst.filename, self.desc, inst.locker
+                )
 
     def _trylock(self):
         if self.held:
@@ -268,18 +292,23 @@
                         return
                     locker = self._testlock(locker)
                     if locker is not None:
-                        raise error.LockHeld(errno.EAGAIN,
-                                             self.vfs.join(self.f), self.desc,
-                                             locker)
+                        raise error.LockHeld(
+                            errno.EAGAIN,
+                            self.vfs.join(self.f),
+                            self.desc,
+                            locker,
+                        )
                 else:
-                    raise error.LockUnavailable(why.errno, why.strerror,
-                                                why.filename, self.desc)
+                    raise error.LockUnavailable(
+                        why.errno, why.strerror, why.filename, self.desc
+                    )
 
         if not self.held:
             # use empty locker to mean "busy for frequent lock/unlock
             # by many processes"
-            raise error.LockHeld(errno.EAGAIN,
-                                 self.vfs.join(self.f), self.desc, "")
+            raise error.LockHeld(
+                errno.EAGAIN, self.vfs.join(self.f), self.desc, ""
+            )
 
     def _readlock(self):
         """read lock and return its value
@@ -342,10 +371,12 @@
         """
         if not self.held:
             raise error.LockInheritanceContractViolation(
-                'inherit can only be called while lock is held')
+                'inherit can only be called while lock is held'
+            )
         if self._inherited:
             raise error.LockInheritanceContractViolation(
-                'inherit cannot be called while lock is already inherited')
+                'inherit cannot be called while lock is already inherited'
+            )
         if self._inheritchecker is not None:
             self._inheritchecker()
         if self.releasefn:
@@ -391,6 +422,7 @@
                 # Prevent double usage and help clear cycles.
                 self.postrelease = None
 
+
 def release(*locks):
     for lock in locks:
         if lock is not None:
--- a/mercurial/logcmdutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/logcmdutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -41,6 +41,7 @@
     stringutil,
 )
 
+
 def getlimit(opts):
     """get the log limit according to option -l/--limit"""
     limit = opts.get('limit')
@@ -55,9 +56,23 @@
         limit = None
     return limit
 
-def diffordiffstat(ui, repo, diffopts, node1, node2, match,
-                   changes=None, stat=False, fp=None, graphwidth=0,
-                   prefix='', root='', listsubrepos=False, hunksfilterfn=None):
+
+def diffordiffstat(
+    ui,
+    repo,
+    diffopts,
+    node1,
+    node2,
+    match,
+    changes=None,
+    stat=False,
+    fp=None,
+    graphwidth=0,
+    prefix='',
+    root='',
+    listsubrepos=False,
+    hunksfilterfn=None,
+):
     '''show diff or diffstat.'''
     ctx1 = repo[node1]
     ctx2 = repo[node2]
@@ -66,10 +81,13 @@
     else:
         relroot = ''
     copysourcematch = None
+
     def compose(f, g):
         return lambda x: f(g(x))
+
     def pathfn(f):
         return posixpath.join(prefix, f)
+
     if relroot != '':
         # XXX relative roots currently don't work if the root is within a
         # subrepo
@@ -78,20 +96,26 @@
         relroot += '/'
         for matchroot in match.files():
             if not matchroot.startswith(relroot):
-                ui.warn(_('warning: %s not inside relative root %s\n') %
-                        (uipathfn(pathfn(matchroot)), uirelroot))
+                ui.warn(
+                    _('warning: %s not inside relative root %s\n')
+                    % (uipathfn(pathfn(matchroot)), uirelroot)
+                )
 
         relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
         match = matchmod.intersectmatchers(match, relrootmatch)
         copysourcematch = relrootmatch
 
-        checkroot = (repo.ui.configbool('devel', 'all-warnings') or
-                     repo.ui.configbool('devel', 'check-relroot'))
+        checkroot = repo.ui.configbool(
+            'devel', 'all-warnings'
+        ) or repo.ui.configbool('devel', 'check-relroot')
+
         def relrootpathfn(f):
             if checkroot and not f.startswith(relroot):
                 raise AssertionError(
-                    "file %s doesn't start with relroot %s" % (f, relroot))
-            return f[len(relroot):]
+                    "file %s doesn't start with relroot %s" % (f, relroot)
+                )
+            return f[len(relroot) :]
+
         pathfn = compose(relrootpathfn, pathfn)
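
The --root handling above builds each displayed path in two stages and chains
them with compose(). A runnable miniature with made-up values:

    import posixpath

    def compose(f, g):
        return lambda x: f(g(x))

    prefix = ''        # subrepo prefix; empty at the top level
    relroot = 'src/'   # normalized --root, with trailing slash

    def pathfn(f):
        return posixpath.join(prefix, f)

    def relrootpathfn(f):
        assert f.startswith(relroot), f
        return f[len(relroot):]

    showpath = compose(relrootpathfn, pathfn)
    print(showpath('src/module.py'))  # -> 'module.py'
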
 
     if stat:
@@ -103,9 +127,15 @@
         if not relroot:
             pathfn = compose(scmutil.getuipathfn(repo), pathfn)
 
-    chunks = ctx2.diff(ctx1, match, changes, opts=diffopts, pathfn=pathfn,
-                       copysourcematch=copysourcematch,
-                       hunksfilterfn=hunksfilterfn)
+    chunks = ctx2.diff(
+        ctx1,
+        match,
+        changes,
+        opts=diffopts,
+        pathfn=pathfn,
+        copysourcematch=copysourcematch,
+        hunksfilterfn=hunksfilterfn,
+    )
 
     if fp is not None or ui.canwritewithoutlabels():
         out = fp or ui
@@ -117,12 +147,15 @@
         if stat:
             chunks = patch.diffstatui(util.iterlines(chunks), width=width)
         else:
-            chunks = patch.difflabel(lambda chunks, **kwargs: chunks, chunks,
-                                     opts=diffopts)
+            chunks = patch.difflabel(
+                lambda chunks, **kwargs: chunks, chunks, opts=diffopts
+            )
         if ui.canbatchlabeledwrites():
+
             def gen():
                 for chunk, label in chunks:
                     yield ui.label(chunk, label=label)
+
             for chunk in util.filechunkiter(util.chunkbuffer(gen())):
                 ui.write(chunk)
         else:
@@ -142,8 +175,17 @@
         submatch = matchmod.subdirmatcher(subpath, match)
         subprefix = repo.wvfs.reljoin(prefix, subpath)
         if listsubrepos or match.exact(subpath) or any(submatch.files()):
-            sub.diff(ui, diffopts, tempnode2, submatch, changes=changes,
-                     stat=stat, fp=fp, prefix=subprefix)
+            sub.diff(
+                ui,
+                diffopts,
+                tempnode2,
+                submatch,
+                changes=changes,
+                stat=stat,
+                fp=fp,
+                prefix=subprefix,
+            )
+
 
 class changesetdiffer(object):
     """Generate diff of changeset with pre-configured filtering functions"""
@@ -158,10 +200,18 @@
         repo = ctx.repo()
         node = ctx.node()
         prev = ctx.p1().node()
-        diffordiffstat(ui, repo, diffopts, prev, node,
-                       match=self._makefilematcher(ctx), stat=stat,
-                       graphwidth=graphwidth,
-                       hunksfilterfn=self._makehunksfilter(ctx))
+        diffordiffstat(
+            ui,
+            repo,
+            diffopts,
+            prev,
+            node,
+            match=self._makefilematcher(ctx),
+            stat=stat,
+            graphwidth=graphwidth,
+            hunksfilterfn=self._makehunksfilter(ctx),
+        )
+
 
 def changesetlabels(ctx):
     labels = ['log.changeset', 'changeset.%s' % ctx.phasestr()]
@@ -173,6 +223,7 @@
             labels.append('instability.%s' % instability)
     return ' '.join(labels)
 
+
 class changesetprinter(object):
     '''show changeset information when templating is not requested.'''
 
@@ -221,13 +272,16 @@
         graphwidth = props.get('graphwidth', 0)
 
         if self.ui.quiet:
-            self.ui.write("%s\n" % scmutil.formatchangeid(ctx),
-                          label='log.node')
+            self.ui.write(
+                "%s\n" % scmutil.formatchangeid(ctx), label='log.node'
+            )
             return
 
         columns = self._columns
-        self.ui.write(columns['changeset'] % scmutil.formatchangeid(ctx),
-                      label=changesetlabels(ctx))
+        self.ui.write(
+            columns['changeset'] % scmutil.formatchangeid(ctx),
+            label=changesetlabels(ctx),
+        )
 
         # branches are shown first before any other names due to backwards
         # compatibility
@@ -244,14 +298,14 @@
             # we will use the templatename as the color name since those two
             # should be the same
             for name in ns.names(self.repo, changenode):
-                self.ui.write(ns.logfmt % name,
-                              label='log.%s' % ns.colorname)
+                self.ui.write(ns.logfmt % name, label='log.%s' % ns.colorname)
         if self.ui.debugflag:
             self.ui.write(columns['phase'] % ctx.phasestr(), label='log.phase')
         for pctx in scmutil.meaningfulparents(self.repo, ctx):
             label = 'log.parent changeset.%s' % pctx.phasestr()
-            self.ui.write(columns['parent'] % scmutil.formatchangeid(pctx),
-                          label=label)
+            self.ui.write(
+                columns['parent'] % scmutil.formatchangeid(pctx), label=label
+            )
 
         if self.ui.debugflag:
             mnode = ctx.manifestnode()
@@ -260,17 +314,22 @@
                 mrev = wdirrev
             else:
                 mrev = self.repo.manifestlog.rev(mnode)
-            self.ui.write(columns['manifest']
-                          % scmutil.formatrevnode(self.ui, mrev, mnode),
-                          label='ui.debug log.manifest')
+            self.ui.write(
+                columns['manifest']
+                % scmutil.formatrevnode(self.ui, mrev, mnode),
+                label='ui.debug log.manifest',
+            )
         self.ui.write(columns['user'] % ctx.user(), label='log.user')
-        self.ui.write(columns['date'] % dateutil.datestr(ctx.date()),
-                      label='log.date')
+        self.ui.write(
+            columns['date'] % dateutil.datestr(ctx.date()), label='log.date'
+        )
 
         if ctx.isunstable():
             instabilities = ctx.instabilities()
-            self.ui.write(columns['instability'] % ', '.join(instabilities),
-                          label='log.instability')
+            self.ui.write(
+                columns['instability'] % ', '.join(instabilities),
+                label='log.instability',
+            )
 
         elif ctx.obsolete():
             self._showobsfate(ctx)
@@ -281,34 +340,42 @@
             files = ctx.p1().status(ctx)[:3]
             for key, value in zip(['files', 'files+', 'files-'], files):
                 if value:
-                    self.ui.write(columns[key] % " ".join(value),
-                                  label='ui.debug log.files')
+                    self.ui.write(
+                        columns[key] % " ".join(value),
+                        label='ui.debug log.files',
+                    )
         elif ctx.files() and self.ui.verbose:
-            self.ui.write(columns['files'] % " ".join(ctx.files()),
-                          label='ui.note log.files')
+            self.ui.write(
+                columns['files'] % " ".join(ctx.files()),
+                label='ui.note log.files',
+            )
         if copies and self.ui.verbose:
             copies = ['%s (%s)' % c for c in copies]
-            self.ui.write(columns['copies'] % ' '.join(copies),
-                          label='ui.note log.copies')
+            self.ui.write(
+                columns['copies'] % ' '.join(copies), label='ui.note log.copies'
+            )
 
         extra = ctx.extra()
         if extra and self.ui.debugflag:
             for key, value in sorted(extra.items()):
-                self.ui.write(columns['extra']
-                              % (key, stringutil.escapestr(value)),
-                              label='ui.debug log.extra')
+                self.ui.write(
+                    columns['extra'] % (key, stringutil.escapestr(value)),
+                    label='ui.debug log.extra',
+                )
 
         description = ctx.description().strip()
         if description:
             if self.ui.verbose:
-                self.ui.write(_("description:\n"),
-                              label='ui.note log.description')
-                self.ui.write(description,
-                              label='ui.note log.description')
+                self.ui.write(
+                    _("description:\n"), label='ui.note log.description'
+                )
+                self.ui.write(description, label='ui.note log.description')
                 self.ui.write("\n\n")
             else:
-                self.ui.write(columns['summary'] % description.splitlines()[0],
-                              label='log.summary')
+                self.ui.write(
+                    columns['summary'] % description.splitlines()[0],
+                    label='log.summary',
+                )
         self.ui.write("\n")
 
         self._showpatch(ctx, graphwidth)
@@ -316,15 +383,19 @@
     def _showobsfate(self, ctx):
         # TODO: do not depend on templater
         tres = formatter.templateresources(self.repo.ui, self.repo)
-        t = formatter.maketemplater(self.repo.ui, '{join(obsfate, "\n")}',
-                                    defaults=templatekw.keywords,
-                                    resources=tres)
+        t = formatter.maketemplater(
+            self.repo.ui,
+            '{join(obsfate, "\n")}',
+            defaults=templatekw.keywords,
+            resources=tres,
+        )
         obsfate = t.renderdefault({'ctx': ctx}).splitlines()
 
         if obsfate:
             for obsfateline in obsfate:
-                self.ui.write(self._columns['obsolete'] % obsfateline,
-                              label='log.obsfate')
+                self.ui.write(
+                    self._columns['obsolete'] % obsfateline, label='log.obsfate'
+                )
 
     def _exthook(self, ctx):
         '''empty method used by extensions as a hook point
@@ -332,21 +403,25 @@
 
     def _showpatch(self, ctx, graphwidth=0):
         if self._includestat:
-            self._differ.showdiff(self.ui, ctx, self._diffopts,
-                                  graphwidth, stat=True)
+            self._differ.showdiff(
+                self.ui, ctx, self._diffopts, graphwidth, stat=True
+            )
         if self._includestat and self._includediff:
             self.ui.write("\n")
         if self._includediff:
-            self._differ.showdiff(self.ui, ctx, self._diffopts,
-                                  graphwidth, stat=False)
+            self._differ.showdiff(
+                self.ui, ctx, self._diffopts, graphwidth, stat=False
+            )
         if self._includestat or self._includediff:
             self.ui.write("\n")
 
+
 class changesetformatter(changesetprinter):
     """Format changeset information by generic formatter"""
 
-    def __init__(self, ui, repo, fm, differ=None, diffopts=None,
-                 buffered=False):
+    def __init__(
+        self, ui, repo, fm, differ=None, diffopts=None, buffered=False
+    ):
         changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
         self._diffopts = patch.difffeatureopts(ui, diffopts, git=True)
         self._fm = fm
@@ -359,36 +434,43 @@
         fm = self._fm
         fm.startitem()
         fm.context(ctx=ctx)
-        fm.data(rev=scmutil.intrev(ctx),
-                node=fm.hexfunc(scmutil.binnode(ctx)))
+        fm.data(rev=scmutil.intrev(ctx), node=fm.hexfunc(scmutil.binnode(ctx)))
 
         if self.ui.quiet:
             return
 
-        fm.data(branch=ctx.branch(),
-                phase=ctx.phasestr(),
-                user=ctx.user(),
-                date=fm.formatdate(ctx.date()),
-                desc=ctx.description(),
-                bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'),
-                tags=fm.formatlist(ctx.tags(), name='tag'),
-                parents=fm.formatlist([fm.hexfunc(c.node())
-                                       for c in ctx.parents()], name='node'))
+        fm.data(
+            branch=ctx.branch(),
+            phase=ctx.phasestr(),
+            user=ctx.user(),
+            date=fm.formatdate(ctx.date()),
+            desc=ctx.description(),
+            bookmarks=fm.formatlist(ctx.bookmarks(), name='bookmark'),
+            tags=fm.formatlist(ctx.tags(), name='tag'),
+            parents=fm.formatlist(
+                [fm.hexfunc(c.node()) for c in ctx.parents()], name='node'
+            ),
+        )
 
         if self.ui.debugflag:
-            fm.data(manifest=fm.hexfunc(ctx.manifestnode() or wdirid),
-                    extra=fm.formatdict(ctx.extra()))
+            fm.data(
+                manifest=fm.hexfunc(ctx.manifestnode() or wdirid),
+                extra=fm.formatdict(ctx.extra()),
+            )
 
             files = ctx.p1().status(ctx)
-            fm.data(modified=fm.formatlist(files[0], name='file'),
-                    added=fm.formatlist(files[1], name='file'),
-                    removed=fm.formatlist(files[2], name='file'))
+            fm.data(
+                modified=fm.formatlist(files[0], name='file'),
+                added=fm.formatlist(files[1], name='file'),
+                removed=fm.formatlist(files[2], name='file'),
+            )
 
         elif self.ui.verbose:
             fm.data(files=fm.formatlist(ctx.files(), name='file'))
             if copies:
-                fm.data(copies=fm.formatdict(copies,
-                                             key='name', value='source'))
+                fm.data(
+                    copies=fm.formatdict(copies, key='name', value='source')
+                )
 
         if self._includestat:
             self.ui.pushbuffer()
@@ -399,6 +481,7 @@
             self._differ.showdiff(self.ui, ctx, self._diffopts, stat=False)
             fm.data(diff=self.ui.popbuffer())
 
+
 class changesettemplater(changesetprinter):
     '''format changeset information.
 
@@ -410,22 +493,30 @@
 
     # Arguments before "buffered" used to be positional. Consider not
     # adding/removing arguments before "buffered" to not break callers.
-    def __init__(self, ui, repo, tmplspec, differ=None, diffopts=None,
-                 buffered=False):
+    def __init__(
+        self, ui, repo, tmplspec, differ=None, diffopts=None, buffered=False
+    ):
         changesetprinter.__init__(self, ui, repo, differ, diffopts, buffered)
         # tres is shared with _graphnodeformatter()
         self._tresources = tres = formatter.templateresources(ui, repo)
-        self.t = formatter.loadtemplater(ui, tmplspec,
-                                         defaults=templatekw.keywords,
-                                         resources=tres,
-                                         cache=templatekw.defaulttempl)
+        self.t = formatter.loadtemplater(
+            ui,
+            tmplspec,
+            defaults=templatekw.keywords,
+            resources=tres,
+            cache=templatekw.defaulttempl,
+        )
         self._counter = itertools.count()
 
         self._tref = tmplspec.ref
-        self._parts = {'header': '', 'footer': '',
-                       tmplspec.ref: tmplspec.ref,
-                       'docheader': '', 'docfooter': '',
-                       'separator': ''}
+        self._parts = {
+            'header': '',
+            'footer': '',
+            tmplspec.ref: tmplspec.ref,
+            'docheader': '',
+            'docfooter': '',
+            'separator': '',
+        }
         if tmplspec.mapfile:
             # find correct templates for current mode, for backward
             # compatibility with 'log -v/-q/--debug' using a mapfile
@@ -488,6 +579,7 @@
             if not self.footer:
                 self.footer = self.t.render(self._parts['footer'], props)
 
+
 def templatespec(tmpl, mapfile):
     if pycompat.ispy3:
         assert not isinstance(tmpl, str), 'tmpl must not be a str'
@@ -496,6 +588,7 @@
     else:
         return formatter.templatespec('', tmpl, None)
 
+
 def _lookuptemplate(ui, tmpl, style):
     """Find the template matching the given template spec or style
 
@@ -503,7 +596,7 @@
     """
 
     # ui settings
-    if not tmpl and not style: # template are stronger than style
+    if not tmpl and not style:  # templates are stronger than style
         tmpl = ui.config('ui', 'logtemplate')
         if tmpl:
             return templatespec(templater.unquotestring(tmpl), None)
@@ -513,8 +606,9 @@
     if not tmpl and style:
         mapfile = style
         if not os.path.split(mapfile)[0]:
-            mapname = (templater.templatepath('map-cmdline.' + mapfile)
-                       or templater.templatepath(mapfile))
+            mapname = templater.templatepath(
+                'map-cmdline.' + mapfile
+            ) or templater.templatepath(mapfile)
             if mapname:
                 mapfile = mapname
         return templatespec(None, mapfile)
@@ -524,12 +618,14 @@
 
     return formatter.lookuptemplate(ui, 'changeset', tmpl)
 
+
 def maketemplater(ui, repo, tmpl, buffered=False):
     """Create a changesettemplater from a literal template 'tmpl'
     byte-string."""
     spec = templatespec(tmpl, None)
     return changesettemplater(ui, repo, spec, buffered=buffered)
 
+
 def changesetdisplayer(ui, repo, opts, differ=None, buffered=False):
     """show one changeset using template or regular display.
 
@@ -553,6 +649,7 @@
 
     return changesettemplater(ui, repo, spec, *postargs)
 
+
 def _makematcher(repo, revs, pats, opts):
     """Build matcher and expanded patterns from log options
 
@@ -589,15 +686,18 @@
                     slowpath = True
                     continue
                 else:
-                    raise error.Abort(_('cannot follow file not in parent '
-                                        'revision: "%s"') % f)
+                    raise error.Abort(
+                        _('cannot follow file not in parent ' 'revision: "%s"')
+                        % f
+                    )
             filelog = repo.file(f)
             if not filelog:
                 # A zero count may be a directory or deleted file, so
                 # try to find matching entries on the slow path.
                 if follow:
                     raise error.Abort(
-                        _('cannot follow nonexistent file: "%s"') % f)
+                        _('cannot follow nonexistent file: "%s"') % f
+                    )
                 slowpath = True
 
         # We decided to fall back to the slowpath because at least one
@@ -613,6 +713,7 @@
 
     return match, pats, slowpath
 
+
 def _fileancestors(repo, revs, match, followfirst):
     fctxs = []
     for r in revs:
@@ -625,6 +726,7 @@
     # revision, stored in "fcache". "fcache" is populated as a side effect
     # of the graph traversal.
     fcache = {}
+
     def filematcher(ctx):
         return scmutil.matchfiles(repo, fcache.get(ctx.rev(), []))
 
@@ -632,24 +734,28 @@
         for rev, cs in dagop.filectxancestors(fctxs, followfirst=followfirst):
             fcache[rev] = [c.path() for c in cs]
             yield rev
+
     return smartset.generatorset(revgen(), iterasc=False), filematcher
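
The fcache trick above is a generator that fills a cache as a side effect of
being consumed, paired with a closure that reads the cache afterwards.
Stripped of the smartset and filectx machinery, the pattern looks like this
(data invented):

    fcache = {}

    def revgen():
        for rev, files in [(2, ['a']), (1, ['a', 'b'])]:
            fcache[rev] = files  # populated as a side effect of iteration
            yield rev

    def filematcher(rev):
        return fcache.get(rev, [])

    for rev in revgen():              # consuming the generator fills fcache
        print(rev, filematcher(rev))  # safe: rev was just cached
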
 
+
 def _makenofollowfilematcher(repo, pats, opts):
     '''hook for extensions to override the filematcher for non-follow cases'''
     return None
 
+
 _opt2logrevset = {
-    'no_merges':        ('not merge()', None),
-    'only_merges':      ('merge()', None),
-    '_matchfiles':      (None, '_matchfiles(%ps)'),
-    'date':             ('date(%s)', None),
-    'branch':           ('branch(%s)', '%lr'),
-    '_patslog':         ('filelog(%s)', '%lr'),
-    'keyword':          ('keyword(%s)', '%lr'),
-    'prune':            ('ancestors(%s)', 'not %lr'),
-    'user':             ('user(%s)', '%lr'),
+    'no_merges': ('not merge()', None),
+    'only_merges': ('merge()', None),
+    '_matchfiles': (None, '_matchfiles(%ps)'),
+    'date': ('date(%s)', None),
+    'branch': ('branch(%s)', '%lr'),
+    '_patslog': ('filelog(%s)', '%lr'),
+    'keyword': ('keyword(%s)', '%lr'),
+    'prune': ('ancestors(%s)', 'not %lr'),
+    'user': ('user(%s)', '%lr'),
 }
 
+
 def _makerevset(repo, match, pats, slowpath, opts):
     """Return a revset string built from log options and file patterns"""
     opts = dict(opts)
@@ -703,6 +809,7 @@
         expr = None
     return expr
 
+
 def _initialrevs(repo, opts):
     """Return the initial set of revisions to be filtered or followed"""
     follow = opts.get('follow') or opts.get('follow_first')
@@ -717,6 +824,7 @@
         revs.reverse()
     return revs
 
+
 def getrevs(repo, pats, opts):
     """Return (revs, differ) where revs is a smartset
 
@@ -739,6 +847,7 @@
     if filematcher is None:
         filematcher = _makenofollowfilematcher(repo, pats, opts)
     if filematcher is None:
+
         def filematcher(ctx):
             return match
 
@@ -763,6 +872,7 @@
     differ._makefilematcher = filematcher
     return revs, differ
 
+
 def _parselinerangeopt(repo, opts):
     """Parse --line-range log option and return a list of tuples (filename,
     (fromline, toline)).
@@ -780,9 +890,11 @@
         msg = _("line range pattern '%s' must match exactly one file") % pat
         fname = scmutil.parsefollowlinespattern(repo, None, pat, msg)
         linerangebyfname.append(
-            (fname, util.processlinerange(fromline, toline)))
+            (fname, util.processlinerange(fromline, toline))
+        )
     return linerangebyfname
 
+
 def getlinerangerevs(repo, userrevs, opts):
     """Return (revs, differ).
 
@@ -798,16 +910,17 @@
     linerangesbyrev = {}
     for fname, (fromline, toline) in _parselinerangeopt(repo, opts):
         if fname not in wctx:
-            raise error.Abort(_('cannot follow file not in parent '
-                                'revision: "%s"') % fname)
+            raise error.Abort(
+                _('cannot follow file not in parent ' 'revision: "%s"') % fname
+            )
         fctx = wctx.filectx(fname)
         for fctx, linerange in dagop.blockancestors(fctx, fromline, toline):
             rev = fctx.introrev()
             if rev not in userrevs:
                 continue
-            linerangesbyrev.setdefault(
-                rev, {}).setdefault(
-                    fctx.path(), []).append(linerange)
+            linerangesbyrev.setdefault(rev, {}).setdefault(
+                fctx.path(), []
+            ).append(linerange)
 
     def nofilterhunksfn(fctx, hunks):
         return hunks
@@ -821,11 +934,10 @@
             lineranges = fctxlineranges.get(fctx.path())
             if lineranges is not None:
                 for hr, lines in hunks:
-                    if hr is None: # binary
+                    if hr is None:  # binary
                         yield hr, lines
                         continue
-                    if any(mdiff.hunkinrange(hr[2:], lr)
-                           for lr in lineranges):
+                    if any(mdiff.hunkinrange(hr[2:], lr) for lr in lineranges):
                         yield hr, lines
             else:
                 for hunk in hunks:
@@ -844,6 +956,7 @@
     differ._makehunksfilter = hunksfilter
     return revs, differ
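
hunksfilter() above keeps a hunk when mdiff.hunkinrange() reports an overlap
with one of the requested line ranges. Reconstructed from its usage here (so
treat the signature as an assumption), the check is a plain half-open interval
intersection:

    def hunkinrange(hunk, linerange):
        start, length = hunk  # hr[2:]: start and length on the new side
        lo, hi = linerange
        return lo < start + length and start < hi

    assert hunkinrange((10, 5), (12, 20))      # hunk 10..14 meets 12..19
    assert not hunkinrange((10, 5), (15, 20))  # hunk ends before the range
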
 
+
 def _graphnodeformatter(ui, displayer):
     spec = ui.config('ui', 'graphnodetemplate')
     if not spec:
@@ -855,13 +968,17 @@
         tres = displayer._tresources
     else:
         tres = formatter.templateresources(ui)
-    templ = formatter.maketemplater(ui, spec, defaults=templatekw.keywords,
-                                    resources=tres)
+    templ = formatter.maketemplater(
+        ui, spec, defaults=templatekw.keywords, resources=tres
+    )
+
     def formatnode(repo, ctx):
         props = {'ctx': ctx, 'repo': repo}
         return templ.renderdefault(props)
+
     return formatnode
 
+
 def displaygraph(ui, repo, dag, displayer, edgefn, getcopies=None, props=None):
     props = props or {}
     formatnode = _graphnodeformatter(ui, displayer)
@@ -876,12 +993,13 @@
         edgetypes = {
             'parent': graphmod.PARENT,
             'grandparent': graphmod.GRANDPARENT,
-            'missing': graphmod.MISSINGPARENT
+            'missing': graphmod.MISSINGPARENT,
         }
         for name, key in edgetypes.items():
             # experimental config: experimental.graphstyle.*
-            styles[key] = ui.config('experimental', 'graphstyle.%s' % name,
-                                    styles[key])
+            styles[key] = ui.config(
+                'experimental', 'graphstyle.%s' % name, styles[key]
+            )
             if not styles[key]:
                 styles[key] = None
 
@@ -894,8 +1012,9 @@
         edges = edgefn(type, char, state, rev, parents)
         firstedge = next(edges)
         width = firstedge[2]
-        displayer.show(ctx, copies=copies,
-                       graphwidth=width, **pycompat.strkwargs(props))
+        displayer.show(
+            ctx, copies=copies, graphwidth=width, **pycompat.strkwargs(props)
+        )
         lines = displayer.hunk.pop(rev).split('\n')
         if not lines[-1]:
             del lines[-1]
@@ -905,10 +1024,12 @@
             lines = []
     displayer.close()
 
+
 def displaygraphrevs(ui, repo, revs, displayer, getrenamed):
     revdag = graphmod.dagwalker(repo, revs)
     displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed)
 
+
 def displayrevs(ui, repo, revs, displayer, getcopies):
     for rev in revs:
         ctx = repo[rev]
@@ -917,11 +1038,15 @@
         displayer.flush(ctx)
     displayer.close()
 
+
 def checkunsupportedgraphflags(pats, opts):
     for op in ["newest_first"]:
         if op in opts and opts[op]:
-            raise error.Abort(_("-G/--graph option is incompatible with --%s")
-                             % op.replace("_", "-"))
+            raise error.Abort(
+                _("-G/--graph option is incompatible with --%s")
+                % op.replace("_", "-")
+            )
+
 
 def graphrevs(repo, nodes, opts):
     limit = getlimit(opts)
--- a/mercurial/logexchange.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/logexchange.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
 # directory name in .hg/ in which remotenames files will be present
 remotenamedir = 'logexchange'
 
+
 def readremotenamefile(repo, filename):
     """
     reads a file from the .hg/logexchange/ directory and yields its content
@@ -45,6 +46,7 @@
 
     f.close()
 
+
 def readremotenames(repo):
     """
     read the details about the remotenames stored in .hg/logexchange/ and
@@ -58,6 +60,7 @@
     for branchentry in readremotenamefile(repo, 'branches'):
         yield branchentry
 
+
 def writeremotenamefile(repo, remotepath, names, nametype):
     vfs = vfsmod.vfs(repo.vfs.join(remotenamedir))
     f = vfs(nametype, 'w', atomictemp=True)
@@ -81,6 +84,7 @@
 
     f.close()
 
+
 def saveremotenames(repo, remotepath, branches=None, bookmarks=None):
     """
     save remotenames i.e. remotebookmarks and remotebranches in their
@@ -95,6 +99,7 @@
     finally:
         wlock.release()
 
+
 def activepath(repo, remote):
     """returns remote path"""
     # is the remote a local peer
@@ -123,6 +128,7 @@
 
     return rpath
 
+
 def pullremotenames(localrepo, remoterepo):
     """
     pulls bookmarks and branches information of the remote repo during a
@@ -133,9 +139,9 @@
     remotepath = activepath(localrepo, remoterepo)
 
     with remoterepo.commandexecutor() as e:
-        bookmarks = e.callcommand('listkeys', {
-            'namespace': 'bookmarks',
-        }).result()
+        bookmarks = e.callcommand(
+            'listkeys', {'namespace': 'bookmarks',}
+        ).result()
 
     # on a push, we don't want to keep obsolete heads since
     # they won't show up as heads on the next pull, so we
--- a/mercurial/loggingutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/loggingutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,9 +10,7 @@
 
 import errno
 
-from . import (
-    pycompat,
-)
+from . import pycompat
 
 from .utils import (
     dateutil,
@@ -20,25 +18,31 @@
     stringutil,
 )
 
+
 def openlogfile(ui, vfs, name, maxfiles=0, maxsize=0):
     """Open log file in append mode, with optional rotation
 
     If maxsize > 0, the log file will be rotated up to maxfiles.
     """
+
     def rotate(oldpath, newpath):
         try:
             vfs.unlink(newpath)
         except OSError as err:
             if err.errno != errno.ENOENT:
-                ui.debug("warning: cannot remove '%s': %s\n" %
-                         (newpath, err.strerror))
+                ui.debug(
+                    "warning: cannot remove '%s': %s\n"
+                    % (newpath, err.strerror)
+                )
         try:
             if newpath:
                 vfs.rename(oldpath, newpath)
         except OSError as err:
             if err.errno != errno.ENOENT:
-                ui.debug("warning: cannot rename '%s' to '%s': %s\n" %
-                         (newpath, oldpath, err.strerror))
+                ui.debug(
+                    "warning: cannot rename '%s' to '%s': %s\n"
+                    % (newpath, oldpath, err.strerror)
+                )
 
     if maxsize > 0:
         try:
@@ -49,20 +53,24 @@
             if st.st_size >= maxsize:
                 path = vfs.join(name)
                 for i in pycompat.xrange(maxfiles - 1, 1, -1):
-                    rotate(oldpath='%s.%d' % (path, i - 1),
-                           newpath='%s.%d' % (path, i))
-                rotate(oldpath=path,
-                       newpath=maxfiles > 0 and path + '.1')
+                    rotate(
+                        oldpath='%s.%d' % (path, i - 1),
+                        newpath='%s.%d' % (path, i),
+                    )
+                rotate(oldpath=path, newpath=maxfiles > 0 and path + '.1')
     return vfs(name, 'a', makeparentdirs=False)
 
+
 def _formatlogline(msg):
     date = dateutil.datestr(format=b'%Y/%m/%d %H:%M:%S')
     pid = procutil.getpid()
     return b'%s (%d)> %s' % (date, pid, msg)
 
+
 def _matchevent(event, tracked):
     return b'*' in tracked or event in tracked
 
+
 class filelogger(object):
     """Basic logger backed by physical file with optional rotation"""
 
@@ -79,13 +87,20 @@
     def log(self, ui, event, msg, opts):
         line = _formatlogline(msg)
         try:
-            with openlogfile(ui, self._vfs, self._name,
-                             maxfiles=self._maxfiles,
-                             maxsize=self._maxsize) as fp:
+            with openlogfile(
+                ui,
+                self._vfs,
+                self._name,
+                maxfiles=self._maxfiles,
+                maxsize=self._maxsize,
+            ) as fp:
                 fp.write(line)
         except IOError as err:
-            ui.debug(b'cannot write to %s: %s\n'
-                     % (self._name, stringutil.forcebytestr(err)))
+            ui.debug(
+                b'cannot write to %s: %s\n'
+                % (self._name, stringutil.forcebytestr(err))
+            )
+
 
 class fileobjectlogger(object):
     """Basic logger backed by file-like object"""
@@ -103,9 +118,14 @@
             self._fp.write(line)
             self._fp.flush()
         except IOError as err:
-            ui.debug(b'cannot write to %s: %s\n'
-                     % (stringutil.forcebytestr(self._fp.name),
-                        stringutil.forcebytestr(err)))
+            ui.debug(
+                b'cannot write to %s: %s\n'
+                % (
+                    stringutil.forcebytestr(self._fp.name),
+                    stringutil.forcebytestr(err),
+                )
+            )
+
 
 class proxylogger(object):
     """Forward log events to another logger to be set later"""
--- a/mercurial/lsprof.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/lsprof.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,6 +10,7 @@
 
 __all__ = ['profile', 'Stats']
 
+
 def profile(f, *args, **kwds):
     """XXX docstring"""
     p = Profiler()
@@ -50,21 +51,44 @@
             d = d[:top]
         cols = "% 12d %12d %11.4f %11.4f   %s\n"
         hcols = "% 12s %12s %12s %12s %s\n"
-        file.write(hcols % ("CallCount", "Recursive", "Total(s)",
-                            "Inline(s)", "module:lineno(function)"))
+        file.write(
+            hcols
+            % (
+                "CallCount",
+                "Recursive",
+                "Total(s)",
+                "Inline(s)",
+                "module:lineno(function)",
+            )
+        )
         count = 0
         for e in d:
-            file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
-                               e.inlinetime, label(e.code)))
+            file.write(
+                cols
+                % (
+                    e.callcount,
+                    e.reccallcount,
+                    e.totaltime,
+                    e.inlinetime,
+                    label(e.code),
+                )
+            )
             count += 1
             if limit is not None and count == limit:
                 return
             ccount = 0
             if climit and e.calls:
                 for se in e.calls:
-                    file.write(cols % (se.callcount, se.reccallcount,
-                                       se.totaltime, se.inlinetime,
-                                       "    %s" % label(se.code)))
+                    file.write(
+                        cols
+                        % (
+                            se.callcount,
+                            se.reccallcount,
+                            se.totaltime,
+                            se.inlinetime,
+                            "    %s" % label(se.code),
+                        )
+                    )
                     count += 1
                     ccount += 1
                     if limit is not None and count == limit:
@@ -87,8 +111,10 @@
                     if not isinstance(se.code, str):
                         e.calls[j] = type(se)((label(se.code),) + se[1:])
 
+
 _fn2mod = {}
 
+
 def label(code):
     if isinstance(code, str):
         if sys.version_info.major >= 3:
@@ -115,8 +141,10 @@
 
     return res
 
+
 if __name__ == '__main__':
     import os
+
     sys.argv = sys.argv[1:]
     if not sys.argv:
         print("usage: lsprof.py <script> <arguments...>", file=sys.stderr)
--- a/mercurial/lsprofcalltree.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/lsprofcalltree.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,18 +12,20 @@
 
 from __future__ import absolute_import
 
-from . import (
-    pycompat,
-)
+from . import pycompat
+
 
 def label(code):
     if isinstance(code, str):
         # built-in functions ('~' sorts at the end)
         return '~' + pycompat.sysbytes(code)
     else:
-        return '%s %s:%d' % (pycompat.sysbytes(code.co_name),
-                             pycompat.sysbytes(code.co_filename),
-                             code.co_firstlineno)
+        return '%s %s:%d' % (
+            pycompat.sysbytes(code.co_name),
+            pycompat.sysbytes(code.co_filename),
+            code.co_firstlineno,
+        )
+
 
 class KCacheGrind(object):
     def __init__(self, profiler):
@@ -86,8 +88,9 @@
             out_file.write(b'calls=%d 0\n' % subentry.callcount)
         else:
             out_file.write(b'cfi=%s\n' % pycompat.sysbytes(code.co_filename))
-            out_file.write(b'calls=%d %d\n' % (
-                subentry.callcount, code.co_firstlineno))
+            out_file.write(
+                b'calls=%d %d\n' % (subentry.callcount, code.co_firstlineno)
+            )
 
         totaltime = int(subentry.totaltime * 1000)
         out_file.write(b'%d %d\n' % (lineno, totaltime))
--- a/mercurial/mail.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/mail.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,11 +31,13 @@
     stringutil,
 )
 
+
 class STARTTLS(smtplib.SMTP):
     '''Derived class to verify the peer certificate for STARTTLS.
 
     This class allows passing any keyword arguments to SSL socket creation.
     '''
+
     def __init__(self, ui, host=None, **kwargs):
         smtplib.SMTP.__init__(self, **kwargs)
         self._ui = ui
@@ -47,9 +49,13 @@
             raise smtplib.SMTPException(msg)
         (resp, reply) = self.docmd("STARTTLS")
         if resp == 220:
-            self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile,
-                                           ui=self._ui,
-                                           serverhostname=self._host)
+            self.sock = sslutil.wrapsocket(
+                self.sock,
+                keyfile,
+                certfile,
+                ui=self._ui,
+                serverhostname=self._host,
+            )
             self.file = smtplib.SSLFakeFile(self.sock)
             self.helo_resp = None
             self.ehlo_resp = None
@@ -57,13 +63,14 @@
             self.does_esmtp = 0
         return (resp, reply)
 
+
 class SMTPS(smtplib.SMTP):
     '''Derived class to verify the peer certificate for SMTPS.
 
     This class allows passing any keyword arguments to SSL socket creation.
     '''
-    def __init__(self, ui, keyfile=None, certfile=None, host=None,
-                 **kwargs):
+
+    def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
         self.keyfile = keyfile
         self.certfile = certfile
         smtplib.SMTP.__init__(self, **kwargs)
@@ -75,22 +82,28 @@
         if self.debuglevel > 0:
             self._ui.debug('connect: %r\n' % ((host, port),))
         new_socket = socket.create_connection((host, port), timeout)
-        new_socket = sslutil.wrapsocket(new_socket,
-                                        self.keyfile, self.certfile,
-                                        ui=self._ui,
-                                        serverhostname=self._host)
+        new_socket = sslutil.wrapsocket(
+            new_socket,
+            self.keyfile,
+            self.certfile,
+            ui=self._ui,
+            serverhostname=self._host,
+        )
         self.file = new_socket.makefile(r'rb')
         return new_socket
 
+
 def _pyhastls():
     """Returns true iff Python has TLS support, false otherwise."""
     try:
         import ssl
+
         getattr(ssl, 'HAS_TLS', False)
         return True
     except ImportError:
         return False
 
+
 def _smtp(ui):
     '''build an smtp connection and return a function to send mail'''
     local_hostname = ui.config('smtp', 'local_hostname')
@@ -115,8 +128,7 @@
     else:
         defaultport = 25
     mailport = util.getport(ui.config('smtp', 'port', defaultport))
-    ui.note(_('sending mail: smtp host %s, port %d\n') %
-            (mailhost, mailport))
+    ui.note(_('sending mail: smtp host %s, port %d\n') % (mailhost, mailport))
     s.connect(host=mailhost, port=mailport)
     if starttls:
         ui.note(_('(using starttls)\n'))
@@ -131,8 +143,7 @@
     if username and not password:
         password = ui.getpass()
     if username and password:
-        ui.note(_('(authenticating to mail server as %s)\n') %
-                  (username))
+        ui.note(_('(authenticating to mail server as %s)\n') % username)
         try:
             s.login(username, password)
         except smtplib.SMTPException as inst:
@@ -149,21 +160,31 @@
 
     return send
 
+
 def _sendmail(ui, sender, recipients, msg):
     '''send mail using sendmail.'''
     program = ui.config('email', 'method')
     stremail = lambda x: (
-        procutil.quote(stringutil.email(encoding.strtolocal(x))))
-    cmdline = '%s -f %s %s' % (program, stremail(sender),
-                               ' '.join(map(stremail, recipients)))
+        procutil.quote(stringutil.email(encoding.strtolocal(x)))
+    )
+    cmdline = '%s -f %s %s' % (
+        program,
+        stremail(sender),
+        ' '.join(map(stremail, recipients)),
+    )
     ui.note(_('sending mail: %s\n') % cmdline)
     fp = procutil.popen(cmdline, 'wb')
     fp.write(util.tonativeeol(msg))
     ret = fp.close()
     if ret:
-        raise error.Abort('%s %s' % (
-            os.path.basename(program.split(None, 1)[0]),
-            procutil.explainexit(ret)))
+        raise error.Abort(
+            '%s %s'
+            % (
+                os.path.basename(program.split(None, 1)[0]),
+                procutil.explainexit(ret),
+            )
+        )
+
 
 def _mbox(mbox, sender, recipients, msg):
     '''write mails to mbox'''
@@ -171,12 +192,15 @@
     # Should be time.asctime(), but Windows prints 2-characters day
     # of month instead of one. Make them print the same thing.
     date = time.strftime(r'%a %b %d %H:%M:%S %Y', time.localtime())
-    fp.write('From %s %s\n' % (encoding.strtolocal(sender),
-                               encoding.strtolocal(date)))
+    fp.write(
+        'From %s %s\n'
+        % (encoding.strtolocal(sender), encoding.strtolocal(date))
+    )
     fp.write(msg)
     fp.write('\n\n')
     fp.close()
 
+
 def connect(ui, mbox=None):
     '''make a mail connection. return a function to send mail.
     call as sendmail(sender, list-of-recipients, msg).'''
@@ -187,21 +211,30 @@
         return _smtp(ui)
     return lambda s, r, m: _sendmail(ui, s, r, m)
 
+
 def sendmail(ui, sender, recipients, msg, mbox=None):
     send = connect(ui, mbox=mbox)
     return send(sender, recipients, msg)
 
+
 def validateconfig(ui):
     '''determine if we have enough config data to try sending email.'''
     method = ui.config('email', 'method')
     if method == 'smtp':
         if not ui.config('smtp', 'host'):
-            raise error.Abort(_('smtp specified as email transport, '
-                               'but no smtp host configured'))
+            raise error.Abort(
+                _(
+                    'smtp specified as email transport, '
+                    'but no smtp host configured'
+                )
+            )
     else:
         if not procutil.findexe(method):
-            raise error.Abort(_('%r specified as email transport, '
-                               'but not in PATH') % method)
+            raise error.Abort(
+                _('%r specified as email transport, ' 'but not in PATH')
+                % method
+            )
+
 
 def codec2iana(cs):
     ''''''
@@ -212,6 +245,7 @@
         return "iso-" + cs[3:]
     return cs
 
+
 def mimetextpatch(s, subtype='plain', display=False):
     '''Return MIME message suitable for a patch.
     Charset will be detected by first trying to decode as us-ascii, then utf-8,
@@ -231,6 +265,7 @@
 
     return mimetextqp(s, subtype, "iso-8859-1")
 
+
 def mimetextqp(body, subtype, charset):
     '''Return MIME message.
     Quoted-printable transfer encoding will be used if necessary.
@@ -255,16 +290,21 @@
 
     return msg
 
+
 def _charsets(ui):
     '''Obtains charsets to send mail parts not containing patches.'''
     charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
-    fallbacks = [encoding.fallbackencoding.lower(),
-                 encoding.encoding.lower(), 'utf-8']
-    for cs in fallbacks: # find unique charsets while keeping order
+    fallbacks = [
+        encoding.fallbackencoding.lower(),
+        encoding.encoding.lower(),
+        'utf-8',
+    ]
+    for cs in fallbacks:  # find unique charsets while keeping order
         if cs not in charsets:
             charsets.append(cs)
     return [cs for cs in charsets if not cs.endswith('ascii')]
 
+
 def _encode(ui, s, charsets):
     '''Returns (converted) string, charset tuple.
     Finds out best charset by cycling through sendcharsets in descending
@@ -307,6 +347,7 @@
     # if ascii, or all conversion attempts fail, send (broken) ascii
     return s, 'us-ascii'
 
+
 def headencode(ui, s, charsets=None, display=False):
     '''Returns RFC-2047 compliant header from given string.'''
     if not display:
@@ -315,6 +356,7 @@
         return str(email.header.Header(s, cs))
     return s
 
+
 def _addressencode(ui, name, addr, charsets=None):
     assert isinstance(addr, bytes)
     name = headencode(ui, name, charsets)
@@ -332,7 +374,9 @@
         except UnicodeDecodeError:
             raise error.Abort(_('invalid local address: %s') % addr)
     return pycompat.bytesurl(
-        email.utils.formataddr((name, encoding.strfromlocal(addr))))
+        email.utils.formataddr((name, encoding.strfromlocal(addr)))
+    )
+
 
 def addressencode(ui, address, charsets=None, display=False):
     '''Turns address into RFC-2047 compliant header.'''
@@ -341,23 +385,26 @@
     name, addr = email.utils.parseaddr(encoding.strfromlocal(address))
     return _addressencode(ui, name, encoding.strtolocal(addr), charsets)
 
+
 def addrlistencode(ui, addrs, charsets=None, display=False):
     '''Turns a list of addresses into a list of RFC-2047 compliant headers.
     A single element of the input list may contain multiple addresses, but the
     output always has one address per item'''
     for a in addrs:
-        assert isinstance(a, bytes), (r'%r unexpectedly not a bytestr' % a)
+        assert isinstance(a, bytes), r'%r unexpectedly not a bytestr' % a
     if display:
         return [a.strip() for a in addrs if a.strip()]
 
     result = []
     for name, addr in email.utils.getaddresses(
-            [encoding.strfromlocal(a) for a in addrs]):
+        [encoding.strfromlocal(a) for a in addrs]
+    ):
         if name or addr:
             r = _addressencode(ui, name, encoding.strtolocal(addr), charsets)
             result.append(r)
     return result
 
+
 def mimeencode(ui, s, charsets=None, display=False):
     '''creates mime text object, encodes it if needed, and sets
     charset and transfer-encoding accordingly.'''
@@ -366,23 +413,30 @@
         s, cs = _encode(ui, s, charsets)
     return mimetextqp(s, 'plain', cs)
 
+
 if pycompat.ispy3:
+
     def parse(fp):
         ep = email.parser.Parser()
         # disable the "universal newlines" mode, which isn't binary safe.
         # I have no idea if ascii/surrogateescape is correct, but that's
         # what the standard Python email parser does.
-        fp = io.TextIOWrapper(fp, encoding=r'ascii',
-                              errors=r'surrogateescape', newline=chr(10))
+        fp = io.TextIOWrapper(
+            fp, encoding=r'ascii', errors=r'surrogateescape', newline=chr(10)
+        )
         try:
             return ep.parse(fp)
         finally:
             fp.detach()
+
+
 else:
+
     def parse(fp):
         ep = email.parser.Parser()
         return ep.parse(fp)
 
+
 def headdecode(s):
     '''Decodes RFC-2047 header'''
     uparts = []
--- a/mercurial/manifest.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/manifest.py	Sun Oct 06 09:45:02 2019 -0400
@@ -38,6 +38,7 @@
 # Allow tests to more easily test the alternate path in manifestdict.fastdelta()
 FASTDELTA_TEXTDIFF_THRESHOLD = 1000
 
+
 def _parse(data):
     # This method does a little bit of excessive-looking
     # precondition checking. This is so that the behavior of this
@@ -57,6 +58,7 @@
         else:
             yield f, bin(n), ''
 
+
 def _text(it):
     files = []
     lines = []
@@ -69,6 +71,7 @@
     _checkforbidden(files)
     return ''.join(lines)
 
+
 class lazymanifestiter(object):
     def __init__(self, lm):
         self.pos = 0
@@ -91,6 +94,7 @@
 
     __next__ = next
 
+
 class lazymanifestiterentries(object):
     def __init__(self, lm):
         self.lm = lm
@@ -108,23 +112,25 @@
             self.pos += 1
             return data
         zeropos = data.find('\x00', pos)
-        hashval = unhexlify(data, self.lm.extrainfo[self.pos],
-                            zeropos + 1, 40)
+        hashval = unhexlify(data, self.lm.extrainfo[self.pos], zeropos + 1, 40)
         flags = self.lm._getflags(data, self.pos, zeropos)
         self.pos += 1
         return (data[pos:zeropos], hashval, flags)
 
     __next__ = next
 
+
 def unhexlify(data, extra, pos, length):
-    s = bin(data[pos:pos + length])
+    s = bin(data[pos : pos + length])
     if extra:
-        s += chr(extra & 0xff)
+        s += chr(extra & 0xFF)
     return s
 
+
 def _cmp(a, b):
     return (a > b) - (a < b)
 
+
 class _lazymanifest(object):
     """A pure python manifest backed by a byte string.  It is supplimented with
     internal lists as it is modified, until it is compacted back to a pure byte
@@ -142,8 +148,15 @@
     ``extradata`` is a list of (key, hash, flags) for entries that were added or
     modified since the manifest was created or compacted.
     """
-    def __init__(self, data, positions=None, extrainfo=None, extradata=None,
-                 hasremovals=False):
+
+    def __init__(
+        self,
+        data,
+        positions=None,
+        extrainfo=None,
+        extradata=None,
+        hasremovals=False,
+    ):
         if positions is None:
             self.positions = self.findlines(data)
             self.extrainfo = [0] * len(self.positions)
@@ -164,10 +177,10 @@
         if pos == -1 or data[-1:] != '\n':
             raise ValueError("Manifest did not end in a newline.")
         positions = [0]
-        prev = data[:data.find('\x00')]
+        prev = data[: data.find('\x00')]
         while pos < len(data) - 1 and pos != -1:
             positions.append(pos + 1)
-            nexts = data[pos + 1:data.find('\x00', pos + 1)]
+            nexts = data[pos + 1 : data.find('\x00', pos + 1)]
             if nexts < prev:
                 raise ValueError("Manifest lines not in sorted order.")
             prev = nexts
@@ -185,7 +198,7 @@
 
     def _getkey(self, pos):
         if pos >= 0:
-            return self.data[pos:self.data.find('\x00', pos + 1)]
+            return self.data[pos : self.data.find('\x00', pos + 1)]
         return self.extradata[-pos - 1][0]
 
     def bsearch(self, key):
@@ -193,7 +206,7 @@
         last = len(self.positions) - 1
 
         while first <= last:
-            midpoint = (first + last)//2
+            midpoint = (first + last) // 2
             nextpos = self.positions[midpoint]
             candidate = self._getkey(nextpos)
             r = _cmp(key, candidate)
@@ -213,7 +226,7 @@
         last = len(self.positions) - 1
 
         while first <= last:
-            midpoint = (first + last)//2
+            midpoint = (first + last) // 2
             nextpos = self.positions[midpoint]
             candidate = self._getkey(nextpos)
             r = _cmp(key, candidate)
@@ -259,12 +272,12 @@
         if not found:
             raise KeyError
         cur = self.positions[needle]
-        self.positions = self.positions[:needle] + self.positions[needle + 1:]
-        self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1:]
+        self.positions = self.positions[:needle] + self.positions[needle + 1 :]
+        self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1 :]
         if cur >= 0:
             # This does NOT unsort the list as far as the search functions are
             # concerned, as they only examine lines mapped by self.positions.
-            self.data = self.data[:cur] + '\x00' + self.data[cur + 1:]
+            self.data = self.data[:cur] + '\x00' + self.data[cur + 1 :]
             self.hasremovals = True
 
     def __setitem__(self, key, value):
@@ -293,15 +306,24 @@
         else:
             # not found, put it in with extra positions
             self.extradata.append((key, hashval, value[1]))
-            self.positions = (self.positions[:needle] + [-len(self.extradata)]
-                              + self.positions[needle:])
-            self.extrainfo = (self.extrainfo[:needle] + [0] +
-                              self.extrainfo[needle:])
+            self.positions = (
+                self.positions[:needle]
+                + [-len(self.extradata)]
+                + self.positions[needle:]
+            )
+            self.extrainfo = (
+                self.extrainfo[:needle] + [0] + self.extrainfo[needle:]
+            )
 
     def copy(self):
         # XXX call _compact like in C?
-        return _lazymanifest(self.data, self.positions, self.extrainfo,
-            self.extradata, self.hasremovals)
+        return _lazymanifest(
+            self.data,
+            self.positions,
+            self.extrainfo,
+            self.extradata,
+            self.hasremovals,
+        )
 
     def _compact(self):
         # hopefully not called TOO often
@@ -329,9 +351,11 @@
                     # overwritten first byte.  Break out and find the end of the
                     # current good entry/entries if there is a removed file
                     # before the next position.
-                    if (self.hasremovals
-                        and self.data.find('\n\x00', cur,
-                                           self.positions[i]) != -1):
+                    if (
+                        self.hasremovals
+                        and self.data.find('\n\x00', cur, self.positions[i])
+                        != -1
+                    ):
                         break
 
                     offset += self.positions[i] - cur
@@ -403,11 +427,13 @@
                 c[f] = n, fl
         return c
 
+
 try:
     _lazymanifest = parsers.lazymanifest
 except AttributeError:
     pass
 
+
 @interfaceutil.implementer(repository.imanifestdict)
 class manifestdict(object):
     def __init__(self, data=''):
@@ -456,9 +482,11 @@
             m2 = m2.matches(match)
             return m1.filesnotin(m2)
         diff = self.diff(m2)
-        files = set(filepath
-                    for filepath, hashflags in diff.iteritems()
-                    if hashflags[1][0] is None)
+        files = set(
+            filepath
+            for filepath, hashflags in diff.iteritems()
+            if hashflags[1][0] is None
+        )
         return files
 
     @propertycache
@@ -475,8 +503,10 @@
         '''Checks whether we can correctly and quickly iterate over matcher
         files instead of over manifest files.'''
         files = match.files()
-        return (len(files) < 100 and (match.isexact() or
-            (match.prefix() and all(fn in self for fn in files))))
+        return len(files) < 100 and (
+            match.isexact()
+            or (match.prefix() and all(fn in self for fn in files))
+        )
 
     def walk(self, match):
         '''Generates matching file names.
@@ -611,7 +641,8 @@
                     if start == end:
                         # item we want to delete was not found, error out
                         raise AssertionError(
-                                _("failed to remove %s from manifest") % f)
+                            _("failed to remove %s from manifest") % f
+                        )
                     l = ""
                 if dstart is not None and dstart <= start and dend >= start:
                     if dend < end:
@@ -634,10 +665,12 @@
             # diff it.
             arraytext = bytearray(self.text())
             deltatext = mdiff.textdiff(
-                util.buffer(base), util.buffer(arraytext))
+                util.buffer(base), util.buffer(arraytext)
+            )
 
         return arraytext, deltatext
 
+
 def _msearch(m, s, lo=0, hi=None):
     '''return a tuple (start, end) that says where to find s within m.
 
@@ -647,10 +680,12 @@
 
     m should be a buffer, a memoryview or a byte string.
     s is a byte string'''
+
     def advance(i, c):
-        while i < lenm and m[i:i + 1] != c:
+        while i < lenm and m[i : i + 1] != c:
             i += 1
         return i
+
     if not s:
         return (lo, lo)
     lenm = len(m)
@@ -659,7 +694,7 @@
     while lo < hi:
         mid = (lo + hi) // 2
         start = mid
-        while start > 0 and m[start - 1:start] != '\n':
+        while start > 0 and m[start - 1 : start] != '\n':
             start -= 1
         end = advance(start, '\0')
         if bytes(m[start:end]) < s:
@@ -678,13 +713,15 @@
     else:
         return (lo, lo)
 
+
 def _checkforbidden(l):
     """Check filenames for illegal characters."""
     for f in l:
         if '\n' in f or '\r' in f:
             raise error.StorageError(
                 _("'\\n' and '\\r' disallowed in filenames: %r")
-                % pycompat.bytestr(f))
+                % pycompat.bytestr(f)
+            )
 
 
 # apply the changes collected during the bisect loop to our addlist
@@ -704,10 +741,13 @@
 
     newaddlist += addlist[currentposition:]
 
-    deltatext = "".join(struct.pack(">lll", start, end, len(content))
-                   + content for start, end, content in x)
+    deltatext = "".join(
+        struct.pack(">lll", start, end, len(content)) + content
+        for start, end, content in x
+    )
     return deltatext, newaddlist
 
+
 def _splittopdir(f):
     if '/' in f:
         dir, subpath = f.split('/', 1)
@@ -715,8 +755,10 @@
     else:
         return '', f
 
+
 _noop = lambda s: None
 
+
 class treemanifest(object):
     def __init__(self, dir='', text=''):
         self._dir = dir
@@ -730,11 +772,14 @@
         self._files = {}
         self._flags = {}
         if text:
+
             def readsubtree(subdir, subm):
-                raise AssertionError('treemanifest constructor only accepts '
-                                     'flat manifests')
+                raise AssertionError(
+                    'treemanifest constructor only accepts ' 'flat manifests'
+                )
+
             self.parse(text, readsubtree)
-            self._dirty = True # Mark flat manifest dirty after parsing
+            self._dirty = True  # Mark flat manifest dirty after parsing
 
     def _subpath(self, path):
         return self._dir + path
@@ -807,20 +852,23 @@
     __bool__ = __nonzero__
 
     def _isempty(self):
-        self._load() # for consistency; already loaded by all callers
+        self._load()  # for consistency; already loaded by all callers
         # See if we can skip loading everything.
-        if self._files or (self._dirs and
-                           any(not m._isempty() for m in self._dirs.values())):
+        if self._files or (
+            self._dirs and any(not m._isempty() for m in self._dirs.values())
+        ):
             return False
         self._loadalllazy()
-        return (not self._dirs or
-                all(m._isempty() for m in self._dirs.values()))
+        return not self._dirs or all(m._isempty() for m in self._dirs.values())
 
     def __repr__(self):
-        return ('<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' %
-                (self._dir, hex(self._node),
-                 bool(self._loadfunc is _noop),
-                 self._dirty, id(self)))
+        return '<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>' % (
+            self._dir,
+            hex(self._node),
+            bool(self._loadfunc is _noop),
+            self._dirty,
+            id(self),
+        )
 
     def dir(self):
         '''The directory that this tree manifest represents, including a
@@ -841,8 +889,9 @@
     def iterentries(self):
         self._load()
         self._loadalllazy()
-        for p, n in sorted(itertools.chain(self._dirs.items(),
-                                           self._files.items())):
+        for p, n in sorted(
+            itertools.chain(self._dirs.items(), self._files.items())
+        ):
             if p in self._files:
                 yield self._subpath(p), n, self._flags.get(p, '')
             else:
@@ -852,8 +901,9 @@
     def items(self):
         self._load()
         self._loadalllazy()
-        for p, n in sorted(itertools.chain(self._dirs.items(),
-                                           self._files.items())):
+        for p, n in sorted(
+            itertools.chain(self._dirs.items(), self._files.items())
+        ):
             if p in self._files:
                 yield self._subpath(p), n
             else:
@@ -965,7 +1015,7 @@
                 self._dirs[dir] = treemanifest(self._subpath(dir))
             self._dirs[dir].__setitem__(subpath, n)
         else:
-            self._files[f] = n[:21] # to match manifestdict's behavior
+            self._files[f] = n[:21]  # to match manifestdict's behavior
         self._dirty = True
 
     def _load(self):
@@ -994,15 +1044,19 @@
         copy._node = self._node
         copy._dirty = self._dirty
         if self._copyfunc is _noop:
+
             def _copyfunc(s):
                 self._load()
-                s._lazydirs = {d: (p, n, r, True) for
-                               d, (p, n, r, c) in self._lazydirs.iteritems()}
+                s._lazydirs = {
+                    d: (p, n, r, True)
+                    for d, (p, n, r, c) in self._lazydirs.iteritems()
+                }
                 sdirs = s._dirs
                 for d, v in self._dirs.iteritems():
                     sdirs[d] = v.copy()
                 s._files = dict.copy(self._files)
                 s._flags = dict.copy(self._flags)
+
             if self._loadfunc is _noop:
                 _copyfunc(copy)
             else:
@@ -1019,6 +1073,7 @@
             return m1.filesnotin(m2)
 
         files = set()
+
         def _filesnotin(t1, t2):
             if t1._node == t2._node and not t1._dirty and not t2._dirty:
                 return
@@ -1261,13 +1316,15 @@
         def _load_for_read(s):
             s.parse(gettext(), readsubtree)
             s._dirty = False
+
         self._loadfunc = _load_for_read
 
     def writesubtrees(self, m1, m2, writesubtree, match):
-        self._load() # for consistency; should never have any effect here
+        self._load()  # for consistency; should never have any effect here
         m1._load()
         m2._load()
         emptytree = treemanifest()
+
         def getnode(m, d):
             ld = m._lazydirs.get(d)
             if ld:
@@ -1306,6 +1363,7 @@
             for subtree in subm.walksubtrees(matcher=matcher):
                 yield subtree
 
+
 class manifestfulltextcache(util.lrucachedict):
     """File-backed LRU cache for the manifest cache
 
@@ -1356,8 +1414,9 @@
         if not self._dirty or self._opener is None:
             return
         # rotate backwards to the first used node
-        with self._opener(self._file, 'w', atomictemp=True, checkambig=True
-            ) as fp:
+        with self._opener(
+            self._file, 'w', atomictemp=True, checkambig=True
+        ) as fp:
             node = self._head.prev
             while True:
                 if node.key in self._cache:
@@ -1417,17 +1476,26 @@
             self.write()
         self._read = False
 
+
 # an upper bound of what we expect from compression
 # (real live value seems to be "3")
 MAXCOMPRESSION = 3
 
+
 @interfaceutil.implementer(repository.imanifeststorage)
 class manifestrevlog(object):
     '''A revlog that stores manifest texts. This is responsible for caching the
     full-text manifest contents.
     '''
-    def __init__(self, opener, tree='', dirlogcache=None, indexfile=None,
-                 treemanifest=False):
+
+    def __init__(
+        self,
+        opener,
+        tree='',
+        dirlogcache=None,
+        indexfile=None,
+        treemanifest=False,
+    ):
         """Constructs a new manifest revlog
 
         `indexfile` - used by extensions to have two manifests at once, like
@@ -1468,11 +1536,14 @@
         else:
             self._dirlogcache = {'': self}
 
-        self._revlog = revlog.revlog(opener, indexfile,
-                                     # only root indexfile is cached
-                                     checkambig=not bool(tree),
-                                     mmaplargeindex=True,
-                                     upperboundcomp=MAXCOMPRESSION)
+        self._revlog = revlog.revlog(
+            opener,
+            indexfile,
+            # only root indexfile is cached
+            checkambig=not bool(tree),
+            mmaplargeindex=True,
+            upperboundcomp=MAXCOMPRESSION,
+        )
 
         self.index = self._revlog.index
         self.version = self._revlog.version
@@ -1515,14 +1586,24 @@
         if d:
             assert self._treeondisk
         if d not in self._dirlogcache:
-            mfrevlog = manifestrevlog(self.opener, d,
-                                      self._dirlogcache,
-                                      treemanifest=self._treeondisk)
+            mfrevlog = manifestrevlog(
+                self.opener, d, self._dirlogcache, treemanifest=self._treeondisk
+            )
             self._dirlogcache[d] = mfrevlog
         return self._dirlogcache[d]
 
-    def add(self, m, transaction, link, p1, p2, added, removed, readtree=None,
-            match=None):
+    def add(
+        self,
+        m,
+        transaction,
+        link,
+        p1,
+        p2,
+        added,
+        removed,
+        readtree=None,
+        match=None,
+    ):
         if p1 in self.fulltextcache and util.safehasattr(m, 'fastdelta'):
             # If our first parent is in the manifest cache, we can
             # compute a delta here using properties we know about the
@@ -1531,14 +1612,17 @@
 
             _checkforbidden(added)
             # combine the changed lists into one sorted iterator
-            work = heapq.merge([(x, False) for x in sorted(added)],
-                               [(x, True) for x in sorted(removed)])
+            work = heapq.merge(
+                [(x, False) for x in sorted(added)],
+                [(x, True) for x in sorted(removed)],
+            )
 
             arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
             cachedelta = self._revlog.rev(p1), deltatext
             text = util.buffer(arraytext)
-            n = self._revlog.addrevision(text, transaction, link, p1, p2,
-                                         cachedelta)
+            n = self._revlog.addrevision(
+                text, transaction, link, p1, p2, cachedelta
+            )
         else:
             # The first parent manifest isn't already loaded, so we'll
             # just encode a fulltext of the manifest and pass that
@@ -1549,8 +1633,9 @@
                 assert match, "match must be specified for treemanifest writes"
                 m1 = readtree(self.tree, p1)
                 m2 = readtree(self.tree, p2)
-                n = self._addtree(m, transaction, link, m1, m2, readtree,
-                                  match=match)
+                n = self._addtree(
+                    m, transaction, link, m1, m2, readtree, match=match
+                )
                 arraytext = None
             else:
                 text = m.text()
@@ -1565,13 +1650,23 @@
     def _addtree(self, m, transaction, link, m1, m2, readtree, match):
         # If the manifest is unchanged compared to one parent,
         # don't write a new revision
-        if self.tree != '' and (m.unmodifiedsince(m1) or m.unmodifiedsince(
-            m2)):
+        if self.tree != '' and (m.unmodifiedsince(m1) or m.unmodifiedsince(m2)):
             return m.node()
+
         def writesubtree(subm, subp1, subp2, match):
             sublog = self.dirlog(subm.dir())
-            sublog.add(subm, transaction, link, subp1, subp2, None, None,
-                       readtree=readtree, match=match)
+            sublog.add(
+                subm,
+                transaction,
+                link,
+                subp1,
+                subp2,
+                None,
+                None,
+                readtree=readtree,
+                match=match,
+            )
+
         m.writesubtrees(m1, m2, writesubtree, match)
         text = m.dirtext()
         n = None
@@ -1583,8 +1678,9 @@
                 n = m2.node()
 
         if not n:
-            n = self._revlog.addrevision(text, transaction, link, m1.node(),
-                                         m2.node())
+            n = self._revlog.addrevision(
+                text, transaction, link, m1.node(), m2.node()
+            )
 
         # Save nodeid so parent manifest can calculate its nodeid
         m.setnode(n)
@@ -1632,17 +1728,26 @@
     def deltaparent(self, rev):
         return self._revlog.deltaparent(rev)
 
-    def emitrevisions(self, nodes, nodesorder=None,
-                      revisiondata=False, assumehaveparentrevisions=False,
-                      deltamode=repository.CG_DELTAMODE_STD):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=repository.CG_DELTAMODE_STD,
+    ):
         return self._revlog.emitrevisions(
-            nodes, nodesorder=nodesorder, revisiondata=revisiondata,
+            nodes,
+            nodesorder=nodesorder,
+            revisiondata=revisiondata,
             assumehaveparentrevisions=assumehaveparentrevisions,
-            deltamode=deltamode)
+            deltamode=deltamode,
+        )
 
     def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
-        return self._revlog.addgroup(deltas, linkmapper, transaction,
-                                     addrevisioncb=addrevisioncb)
+        return self._revlog.addgroup(
+            deltas, linkmapper, transaction, addrevisioncb=addrevisioncb
+        )
 
     def rawsize(self, rev):
         return self._revlog.rawsize(rev)
@@ -1662,13 +1767,21 @@
 
         return self._revlog.clone(tr, destrevlog._revlog, **kwargs)
 
-    def storageinfo(self, exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        self,
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         return self._revlog.storageinfo(
-            exclusivefiles=exclusivefiles, sharedfiles=sharedfiles,
-            revisionscount=revisionscount, trackedsize=trackedsize,
-            storedsize=storedsize)
+            exclusivefiles=exclusivefiles,
+            sharedfiles=sharedfiles,
+            revisionscount=revisionscount,
+            trackedsize=trackedsize,
+            storedsize=storedsize,
+        )
 
     @property
     def indexfile(self):
@@ -1686,6 +1799,7 @@
     def opener(self, value):
         self._revlog.opener = value
 
+
 @interfaceutil.implementer(repository.imanifestlog)
 class manifestlog(object):
     """A collection class representing the collection of manifest snapshots
@@ -1695,6 +1809,7 @@
     of the list of files in the given commit. Consumers of the output of this
     class do not care about the implementation details of the actual manifests
     they receive (i.e. tree or flat or lazily loaded, etc)."""
+
     def __init__(self, opener, repo, rootstore, narrowmatch):
         usetreemanifest = False
         cachesize = 4
@@ -1745,8 +1860,12 @@
                 m = treemanifestctx(self, tree, node)
             else:
                 raise error.Abort(
-                        _("cannot ask for manifest directory '%s' in a flat "
-                          "manifest") % tree)
+                    _(
+                        "cannot ask for manifest directory '%s' in a flat "
+                        "manifest"
+                    )
+                    % tree
+                )
         else:
             if verify:
                 # Side-effect is LookupError is raised if node doesn't exist.
@@ -1775,6 +1894,7 @@
     def rev(self, node):
         return self._rootstore.rev(node)
 
+
 @interfaceutil.implementer(repository.imanifestrevisionwritable)
 class memmanifestctx(object):
     def __init__(self, manifestlog):
@@ -1796,14 +1916,24 @@
         return self._manifestdict
 
     def write(self, transaction, link, p1, p2, added, removed, match=None):
-        return self._storage().add(self._manifestdict, transaction, link,
-                                   p1, p2, added, removed, match=match)
+        return self._storage().add(
+            self._manifestdict,
+            transaction,
+            link,
+            p1,
+            p2,
+            added,
+            removed,
+            match=match,
+        )
+
 
 @interfaceutil.implementer(repository.imanifestrevisionstored)
 class manifestctx(object):
     """A class representing a single revision of a manifest, including its
     contents, its parent revs, and its linkrev.
     """
+
     def __init__(self, manifestlog, node):
         self._manifestlog = manifestlog
         self._data = None
@@ -1813,9 +1943,9 @@
         # TODO: We eventually want p1, p2, and linkrev exposed on this class,
         # but let's add it later when something needs it and we can load it
         # lazily.
-        #self.p1, self.p2 = store.parents(node)
-        #rev = store.rev(node)
-        #self.linkrev = store.linkrev(rev)
+        # self.p1, self.p2 = store.parents(node)
+        # rev = store.rev(node)
+        # self.linkrev = store.linkrev(rev)
 
     def _storage(self):
         return self._manifestlog.getstorage(b'')
@@ -1879,6 +2009,7 @@
     def find(self, key):
         return self.read().find(key)
 
+
 @interfaceutil.implementer(repository.imanifestrevisionwritable)
 class memtreemanifestctx(object):
     def __init__(self, manifestlog, dir=''):
@@ -1903,9 +2034,19 @@
     def write(self, transaction, link, p1, p2, added, removed, match=None):
         def readtree(dir, node):
             return self._manifestlog.get(dir, node).read()
-        return self._storage().add(self._treemanifest, transaction, link,
-                                   p1, p2, added, removed, readtree=readtree,
-                                   match=match)
+
+        return self._storage().add(
+            self._treemanifest,
+            transaction,
+            link,
+            p1,
+            p2,
+            added,
+            removed,
+            readtree=readtree,
+            match=match,
+        )
+
 
 @interfaceutil.implementer(repository.imanifestrevisionstored)
 class treemanifestctx(object):
@@ -1919,9 +2060,9 @@
         # TODO: Load p1/p2/linkrev lazily. They need to be lazily loaded so that
         # we can instantiate treemanifestctx objects for directories we don't
         # have on disk.
-        #self.p1, self.p2 = store.parents(node)
-        #rev = store.rev(node)
-        #self.linkrev = store.linkrev(rev)
+        # self.p1, self.p2 = store.parents(node)
+        # rev = store.rev(node)
+        # self.linkrev = store.linkrev(rev)
 
     def _storage(self):
         narrowmatch = self._manifestlog._narrowmatch
@@ -1938,12 +2079,15 @@
             # TODO accessing non-public API
             elif store._treeondisk:
                 m = treemanifest(dir=self._dir)
+
                 def gettext():
                     return store.revision(self._node)
+
                 def readsubtree(dir, subm):
                     # Set verify to False since we need to be able to create
                     # subtrees for trees that don't exist on disk.
                     return self._manifestlog.get(dir, subm, verify=False).read()
+
                 m.read(gettext, readsubtree)
                 m.setnode(self._node)
                 self._data = m
@@ -2013,8 +2157,7 @@
         store = self._storage()
         r = store.rev(self._node)
         deltaparent = store.deltaparent(r)
-        if (deltaparent != nullrev and
-            deltaparent in store.parentrevs(r)):
+        if deltaparent != nullrev and deltaparent in store.parentrevs(r):
             return self.readdelta(shallow=shallow)
 
         if shallow:
@@ -2025,6 +2168,7 @@
     def find(self, key):
         return self.read().find(key)
 
+
 class excludeddir(treemanifest):
     """Stand-in for a directory that is excluded from the repository.
 
@@ -2036,6 +2180,7 @@
     class is: it stands in for a directory whose node is known, but
     whose contents are unknown.
     """
+
     def __init__(self, dir, node):
         super(excludeddir, self).__init__(dir)
         self._node = node
@@ -2052,8 +2197,10 @@
     def copy(self):
         return self
 
+
 class excludeddirmanifestctx(treemanifestctx):
     """context wrapper for excludeddir - see that docstring for rationale"""
+
     def __init__(self, dir, node):
         self._dir = dir
         self._node = node
@@ -2063,7 +2210,9 @@
 
     def write(self, *args):
         raise error.ProgrammingError(
-            'attempt to write manifest from excluded dir %s' % self._dir)
+            'attempt to write manifest from excluded dir %s' % self._dir
+        )
+
 
 class excludedmanifestrevlog(manifestrevlog):
     """Stand-in for excluded treemanifest revlogs.
@@ -2080,19 +2229,23 @@
 
     def __len__(self):
         raise error.ProgrammingError(
-            'attempt to get length of excluded dir %s' % self._dir)
+            'attempt to get length of excluded dir %s' % self._dir
+        )
 
     def rev(self, node):
         raise error.ProgrammingError(
-            'attempt to get rev from excluded dir %s' % self._dir)
+            'attempt to get rev from excluded dir %s' % self._dir
+        )
 
     def linkrev(self, node):
         raise error.ProgrammingError(
-            'attempt to get linkrev from excluded dir %s' % self._dir)
+            'attempt to get linkrev from excluded dir %s' % self._dir
+        )
 
     def node(self, rev):
         raise error.ProgrammingError(
-            'attempt to get node from excluded dir %s' % self._dir)
+            'attempt to get node from excluded dir %s' % self._dir
+        )
 
     def add(self, *args, **kwargs):
         # We should never write entries in dirlogs outside the narrow clone.
--- a/mercurial/match.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/match.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,20 +21,30 @@
     pycompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 rustmod = policy.importrust(r'filepatterns')
 
-allpatternkinds = ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
-                   'rootglob',
-                   'listfile', 'listfile0', 'set', 'include', 'subinclude',
-                   'rootfilesin')
+allpatternkinds = (
+    're',
+    'glob',
+    'path',
+    'relglob',
+    'relpath',
+    'relre',
+    'rootglob',
+    'listfile',
+    'listfile0',
+    'set',
+    'include',
+    'subinclude',
+    'rootfilesin',
+)
 cwdrelativepatternkinds = ('relpath', 'glob')
 
 propertycache = util.propertycache
 
+
 def _rematcher(regex):
     '''compile the regexp with the best available regexp engine and return a
     matcher function'''
@@ -45,6 +55,7 @@
     except AttributeError:
         return m.match
 
+
 def _expandsets(kindpats, ctx=None, listsubrepos=False, badfn=None):
     '''Returns the kindpats list with the 'set' patterns expanded to matchers'''
     matchers = []
@@ -53,8 +64,9 @@
     for kind, pat, source in kindpats:
         if kind == 'set':
             if ctx is None:
-                raise error.ProgrammingError("fileset expression with no "
-                                             "context")
+                raise error.ProgrammingError(
+                    "fileset expression with no " "context"
+                )
             matchers.append(ctx.matchfileset(pat, badfn=badfn))
 
             if listsubrepos:
@@ -67,6 +79,7 @@
         other.append((kind, pat, source))
     return matchers, other
 
+
 def _expandsubinclude(kindpats, root):
     '''Returns the list of subinclude matcher args and the kindpats without the
     subincludes in it.'''
@@ -91,6 +104,7 @@
 
     return relmatchers, other
 
+
 def _kindpatsalwaysmatch(kindpats):
     """"Checks whether the kindspats match everything, as e.g.
     'relpath:.' does.
@@ -100,11 +114,14 @@
             return False
     return True
 
-def _buildkindpatsmatcher(matchercls, root, kindpats, ctx=None,
-                          listsubrepos=False, badfn=None):
+
+def _buildkindpatsmatcher(
+    matchercls, root, kindpats, ctx=None, listsubrepos=False, badfn=None
+):
     matchers = []
-    fms, kindpats = _expandsets(kindpats, ctx=ctx,
-                                listsubrepos=listsubrepos, badfn=badfn)
+    fms, kindpats = _expandsets(
+        kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn
+    )
     if kindpats:
         m = matchercls(root, kindpats, badfn=badfn)
         matchers.append(m)
@@ -116,9 +133,21 @@
         return matchers[0]
     return unionmatcher(matchers)
 
-def match(root, cwd, patterns=None, include=None, exclude=None, default='glob',
-          auditor=None, ctx=None, listsubrepos=False, warn=None,
-          badfn=None, icasefs=False):
+
+def match(
+    root,
+    cwd,
+    patterns=None,
+    include=None,
+    exclude=None,
+    default='glob',
+    auditor=None,
+    ctx=None,
+    listsubrepos=False,
+    warn=None,
+    badfn=None,
+    icasefs=False,
+):
     r"""build an object to match a set of file patterns
 
     arguments:
@@ -223,8 +252,14 @@
         if _kindpatsalwaysmatch(kindpats):
             m = alwaysmatcher(badfn)
         else:
-            m = _buildkindpatsmatcher(patternmatcher, root, kindpats, ctx=ctx,
-                                      listsubrepos=listsubrepos, badfn=badfn)
+            m = _buildkindpatsmatcher(
+                patternmatcher,
+                root,
+                kindpats,
+                ctx=ctx,
+                listsubrepos=listsubrepos,
+                badfn=badfn,
+            )
     else:
         # It's a little strange that no patterns means to match everything.
         # Consider changing this to match nothing (probably using nevermatcher).
@@ -232,25 +267,41 @@
 
     if include:
         kindpats = normalize(include, 'glob', root, cwd, auditor, warn)
-        im = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx,
-                                   listsubrepos=listsubrepos, badfn=None)
+        im = _buildkindpatsmatcher(
+            includematcher,
+            root,
+            kindpats,
+            ctx=ctx,
+            listsubrepos=listsubrepos,
+            badfn=None,
+        )
         m = intersectmatchers(m, im)
     if exclude:
         kindpats = normalize(exclude, 'glob', root, cwd, auditor, warn)
-        em = _buildkindpatsmatcher(includematcher, root, kindpats, ctx=ctx,
-                                   listsubrepos=listsubrepos, badfn=None)
+        em = _buildkindpatsmatcher(
+            includematcher,
+            root,
+            kindpats,
+            ctx=ctx,
+            listsubrepos=listsubrepos,
+            badfn=None,
+        )
         m = differencematcher(m, em)
     return m
 
+
 def exact(files, badfn=None):
     return exactmatcher(files, badfn=badfn)
 
+
 def always(badfn=None):
     return alwaysmatcher(badfn)
 
+
 def never(badfn=None):
     return nevermatcher(badfn)
 
+
 def badmatch(match, badfn):
     """Make a copy of the given matcher, replacing its bad method with the given
     one.
@@ -259,6 +310,7 @@
     m.bad = badfn
     return m
 
+
 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
     '''Convert 'kind:pat' from the patterns list to tuples with kind and
     normalized and rooted patterns and with listfiles expanded.'''
@@ -278,36 +330,41 @@
                 files = [f for f in files if f]
             except EnvironmentError:
                 raise error.Abort(_("unable to read file list (%s)") % pat)
-            for k, p, source in _donormalize(files, default, root, cwd,
-                                             auditor, warn):
+            for k, p, source in _donormalize(
+                files, default, root, cwd, auditor, warn
+            ):
                 kindpats.append((k, p, pat))
             continue
         elif kind == 'include':
             try:
                 fullpath = os.path.join(root, util.localpath(pat))
                 includepats = readpatternfile(fullpath, warn)
-                for k, p, source in _donormalize(includepats, default,
-                                                 root, cwd, auditor, warn):
+                for k, p, source in _donormalize(
+                    includepats, default, root, cwd, auditor, warn
+                ):
                     kindpats.append((k, p, source or pat))
             except error.Abort as inst:
                 raise error.Abort('%s: %s' % (pat, inst[0]))
             except IOError as inst:
                 if warn:
-                    warn(_("skipping unreadable pattern file '%s': %s\n") %
-                         (pat, stringutil.forcebytestr(inst.strerror)))
+                    warn(
+                        _("skipping unreadable pattern file '%s': %s\n")
+                        % (pat, stringutil.forcebytestr(inst.strerror))
+                    )
             continue
         # else: re or relre - which cannot be normalized
         kindpats.append((kind, pat, ''))
     return kindpats
 
+
 class basematcher(object):
-
     def __init__(self, badfn=None):
         if badfn is not None:
             self.bad = badfn
 
     def __call__(self, fn):
         return self.matchfn(fn)
+
     # Callbacks related to how the matcher is used by dirstate.walk.
     # Subscribers to these events must monkeypatch the matcher object.
     def bad(self, f, msg):
@@ -419,6 +476,7 @@
         optimizations will be difficult.'''
         return not self.always() and not self.isexact() and not self.prefix()
 
+
 class alwaysmatcher(basematcher):
     '''Matches everything.'''
 
@@ -440,6 +498,7 @@
     def __repr__(self):
         return r'<alwaysmatcher>'
 
+
 class nevermatcher(basematcher):
     '''Matches nothing.'''
 
@@ -466,6 +525,7 @@
     def __repr__(self):
         return r'<nevermatcher>'
 
+
 class predicatematcher(basematcher):
     """A matcher adapter for a simple boolean function"""
 
@@ -476,14 +536,18 @@
 
     @encoding.strmethod
     def __repr__(self):
-        s = (stringutil.buildrepr(self._predrepr)
-             or pycompat.byterepr(self.matchfn))
+        s = stringutil.buildrepr(self._predrepr) or pycompat.byterepr(
+            self.matchfn
+        )
         return '<predicatenmatcher pred=%s>' % s
 
+
 def normalizerootdir(dir, funcname):
     if dir == '.':
-        util.nouideprecwarn("match.%s() no longer accepts "
-                            "'.', use '' instead." % funcname, '5.1')
+        util.nouideprecwarn(
+            "match.%s() no longer accepts " "'.', use '' instead." % funcname,
+            '5.1',
+        )
         return ''
     return dir
 
@@ -536,10 +600,13 @@
         dir = normalizerootdir(dir, 'visitdir')
         if self._prefix and dir in self._fileset:
             return 'all'
-        return (dir in self._fileset or
-                dir in self._dirs or
-                any(parentdir in self._fileset
-                    for parentdir in util.finddirs(dir)))
+        return (
+            dir in self._fileset
+            or dir in self._dirs
+            or any(
+                parentdir in self._fileset for parentdir in util.finddirs(dir)
+            )
+        )
 
     def visitchildrenset(self, dir):
         ret = self.visitdir(dir)
@@ -555,7 +622,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats))
+        return '<patternmatcher patterns=%r>' % pycompat.bytestr(self._pats)
+
 
 # This is basically a reimplementation of util.dirs that stores the children
 # instead of just a count of them, plus a small optional optimization to avoid
@@ -588,7 +656,7 @@
         oldpos = len(path)
         pos = path.rfind('/')
         while pos != -1:
-            yield path[:pos], path[pos + 1:oldpos]
+            yield path[:pos], path[pos + 1 : oldpos]
             oldpos = pos
             pos = path.rfind('/', 0, pos)
         yield '', path[:oldpos]
@@ -596,8 +664,8 @@
     def get(self, path):
         return self._dirs.get(path, set())
 
+
 class includematcher(basematcher):
-
     def __init__(self, root, kindpats, badfn=None):
         super(includematcher, self).__init__(badfn)
 
@@ -616,11 +684,12 @@
         dir = normalizerootdir(dir, 'visitdir')
         if self._prefix and dir in self._roots:
             return 'all'
-        return (dir in self._roots or
-                dir in self._dirs or
-                dir in self._parents or
-                any(parentdir in self._roots
-                    for parentdir in util.finddirs(dir)))
+        return (
+            dir in self._roots
+            or dir in self._dirs
+            or dir in self._parents
+            or any(parentdir in self._roots for parentdir in util.finddirs(dir))
+        )
 
     @propertycache
     def _allparentschildren(self):
@@ -631,19 +700,21 @@
         # if we asked for the children of 'foo', but had only added
         # self._parents, we wouldn't be able to respond ['bar'].
         return _dirchildren(
-                itertools.chain(self._dirs, self._roots, self._parents),
-                onlyinclude=self._parents)
+            itertools.chain(self._dirs, self._roots, self._parents),
+            onlyinclude=self._parents,
+        )
 
     def visitchildrenset(self, dir):
         if self._prefix and dir in self._roots:
             return 'all'
         # Note: this does *not* include the 'dir in self._parents' case from
         # visitdir, that's handled below.
-        if ('' in self._roots or
-            dir in self._roots or
-            dir in self._dirs or
-            any(parentdir in self._roots
-                for parentdir in util.finddirs(dir))):
+        if (
+            '' in self._roots
+            or dir in self._roots
+            or dir in self._dirs
+            or any(parentdir in self._roots for parentdir in util.finddirs(dir))
+        ):
             return 'this'
 
         if dir in self._parents:
@@ -652,7 +723,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<includematcher includes=%r>' % pycompat.bytestr(self._pats))
+        return '<includematcher includes=%r>' % pycompat.bytestr(self._pats)
+
 
 class exactmatcher(basematcher):
     r'''Matches the input files exactly. They are interpreted as paths, not
@@ -702,8 +774,7 @@
         candidates = self._fileset | self._dirs - {''}
         if dir != '':
             d = dir + '/'
-            candidates = set(c[len(d):] for c in candidates if
-                             c.startswith(d))
+            candidates = set(c[len(d) :] for c in candidates if c.startswith(d))
         # self._dirs includes all of the directories, recursively, so if
         # we're attempting to match foo/bar/baz.txt, it'll have '', 'foo',
         # 'foo/bar' in it. Thus we can safely ignore a candidate that has a
@@ -720,7 +791,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<exactmatcher files=%r>' % self._files)
+        return '<exactmatcher files=%r>' % self._files
+
 
 class differencematcher(basematcher):
     '''Composes two matchers by matching if the first matches and the second
@@ -729,6 +801,7 @@
     The second matcher's non-matching-attributes (bad, explicitdir,
     traversedir) are ignored.
     '''
+
     def __init__(self, m1, m2):
         super(differencematcher, self).__init__()
         self._m1 = m1
@@ -789,7 +862,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2))
+        return '<differencematcher m1=%r, m2=%r>' % (self._m1, self._m2)
+
 
 def intersectmatchers(m1, m2):
     '''Composes two matchers by matching if both of them match.
@@ -812,6 +886,7 @@
         return m
     return intersectionmatcher(m1, m2)
 
+
 class intersectionmatcher(basematcher):
     def __init__(self, m1, m2):
         super(intersectionmatcher, self).__init__()
@@ -871,7 +946,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2))
+        return '<intersectionmatcher m1=%r, m2=%r>' % (self._m1, self._m2)
+
 
 class subdirmatcher(basematcher):
     """Adapt a matcher to work on a subdirectory only.
@@ -906,8 +982,11 @@
         self._matcher = matcher
         self._always = matcher.always()
 
-        self._files = [f[len(path) + 1:] for f in matcher._files
-                       if f.startswith(path + "/")]
+        self._files = [
+            f[len(path) + 1 :]
+            for f in matcher._files
+            if f.startswith(path + "/")
+        ]
 
         # If the parent repo had a path to this subrepo and the matcher is
         # a prefix matcher, this submatcher always matches.
@@ -948,8 +1027,11 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<subdirmatcher path=%r, matcher=%r>' %
-                (self._path, self._matcher))
+        return '<subdirmatcher path=%r, matcher=%r>' % (
+            self._path,
+            self._matcher,
+        )
+
 
 class prefixdirmatcher(basematcher):
     """Adapt a matcher to work on a parent directory.
@@ -999,7 +1081,7 @@
     def matchfn(self, f):
         if not f.startswith(self._pathprefix):
             return False
-        return self._matcher.matchfn(f[len(self._pathprefix):])
+        return self._matcher.matchfn(f[len(self._pathprefix) :])
 
     @propertycache
     def _pathdirs(self):
@@ -1009,14 +1091,14 @@
         if dir == self._path:
             return self._matcher.visitdir('')
         if dir.startswith(self._pathprefix):
-            return self._matcher.visitdir(dir[len(self._pathprefix):])
+            return self._matcher.visitdir(dir[len(self._pathprefix) :])
         return dir in self._pathdirs
 
     def visitchildrenset(self, dir):
         if dir == self._path:
             return self._matcher.visitchildrenset('')
         if dir.startswith(self._pathprefix):
-            return self._matcher.visitchildrenset(dir[len(self._pathprefix):])
+            return self._matcher.visitchildrenset(dir[len(self._pathprefix) :])
         if dir in self._pathdirs:
             return 'this'
         return set()
@@ -1029,8 +1111,11 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<prefixdirmatcher path=%r, matcher=%r>'
-                % (pycompat.bytestr(self._path), self._matcher))
+        return '<prefixdirmatcher path=%r, matcher=%r>' % (
+            pycompat.bytestr(self._path),
+            self._matcher,
+        )
+
 
 class unionmatcher(basematcher):
     """A matcher that is the union of several matchers.
@@ -1082,7 +1167,8 @@
 
     @encoding.strmethod
     def __repr__(self):
-        return ('<unionmatcher matchers=%r>' % self._matchers)
+        return '<unionmatcher matchers=%r>' % self._matchers
+
 
 def patkind(pattern, default=None):
     '''If pattern is 'kind:pat' with a known kind, return kind.
@@ -1099,6 +1185,7 @@
     '''
     return _patsplit(pattern, default)[0]
 
+
 def _patsplit(pattern, default):
     """Split a string into the optional pattern kind prefix and the actual
     pattern."""
@@ -1108,6 +1195,7 @@
             return kind, pat
     return default, pattern
 
+
 def _globre(pat):
     r'''Convert an extended glob string to a regexp string.
 
@@ -1135,10 +1223,12 @@
     res = ''
     group = 0
     escape = util.stringutil.regexbytesescapemap.get
+
     def peek():
-        return i < n and pat[i:i + 1]
+        return i < n and pat[i : i + 1]
+
     while i < n:
-        c = pat[i:i + 1]
+        c = pat[i : i + 1]
         i += 1
         if c not in '*?[{},\\':
             res += escape(c, c)
@@ -1156,14 +1246,14 @@
             res += '.'
         elif c == '[':
             j = i
-            if j < n and pat[j:j + 1] in '!]':
+            if j < n and pat[j : j + 1] in '!]':
                 j += 1
-            while j < n and pat[j:j + 1] != ']':
+            while j < n and pat[j : j + 1] != ']':
                 j += 1
             if j >= n:
                 res += '\\['
             else:
-                stuff = pat[i:j].replace('\\','\\\\')
+                stuff = pat[i:j].replace('\\', '\\\\')
                 i = j + 1
                 if stuff[0:1] == '!':
                     stuff = '^' + stuff[1:]
@@ -1189,6 +1279,7 @@
             res += escape(c, c)
     return res
 
+
 def _regex(kind, pat, globsuffix):
     '''Convert a (normalized) pattern of any kind into a
     regular expression.
@@ -1196,11 +1287,7 @@
 
     if rustmod is not None:
         try:
-            return rustmod.build_single_regex(
-                kind,
-                pat,
-                globsuffix
-            )
+            return rustmod.build_single_regex(kind, pat, globsuffix)
         except rustmod.PatternError:
             raise error.ProgrammingError(
                 'not a regex pattern: %s:%s' % (kind, pat)
@@ -1227,7 +1314,7 @@
         if globre.startswith('[^/]*'):
             # When pat has the form *XYZ (common), make the returned regex more
             # legible by returning the regex for **XYZ instead of **/*XYZ.
-            return '.*' + globre[len('[^/]*'):] + globsuffix
+            return '.*' + globre[len('[^/]*') :] + globsuffix
         return '(?:|.*/)' + globre + globsuffix
     if kind == 'relre':
         if pat.startswith('^'):
@@ -1237,6 +1324,7 @@
         return _globre(pat) + globsuffix
     raise error.ProgrammingError('not a regex pattern: %s:%s' % (kind, pat))
 
+
 def _buildmatch(kindpats, globsuffix, root):
     '''Return regexp string and a matcher function for kindpats.
     globsuffix is appended to the regexp of globs.'''
@@ -1245,6 +1333,7 @@
     subincludes, kindpats = _expandsubinclude(kindpats, root)
     if subincludes:
         submatchers = {}
+
         def matchsubinclude(f):
             for prefix, matcherargs in subincludes:
                 if f.startswith(prefix):
@@ -1253,15 +1342,17 @@
                         mf = match(*matcherargs)
                         submatchers[prefix] = mf
 
-                    if mf(f[len(prefix):]):
+                    if mf(f[len(prefix) :]):
                         return True
             return False
+
         matchfuncs.append(matchsubinclude)
 
     regex = ''
     if kindpats:
         if all(k == 'rootfilesin' for k, p, s in kindpats):
             dirs = {p for k, p, s in kindpats}
+
             def mf(f):
                 i = f.rfind('/')
                 if i >= 0:
@@ -1269,6 +1360,7 @@
                 else:
                     dir = '.'
                 return dir in dirs
+
             regex = b'rootfilesin: %s' % stringutil.pprint(list(sorted(dirs)))
             matchfuncs.append(mf)
         else:
@@ -1280,12 +1372,15 @@
     else:
         return regex, lambda f: any(mf(f) for mf in matchfuncs)
 
+
 MAX_RE_SIZE = 20000
 
+
 def _joinregexes(regexps):
     """gather multiple regular expressions into a single one"""
     return '|'.join(regexps)
 
+
 def _buildregexmatch(kindpats, globsuffix):
     """Build a match function from a list of kinds and kindpats,
     return regexp string and a matcher function.
@@ -1332,12 +1427,14 @@
                 _rematcher(_regex(k, p, globsuffix))
             except re.error:
                 if s:
-                    raise error.Abort(_("%s: invalid pattern (%s): %s") %
-                                      (s, k, p))
+                    raise error.Abort(
+                        _("%s: invalid pattern (%s): %s") % (s, k, p)
+                    )
                 else:
                     raise error.Abort(_("invalid pattern (%s): %s") % (k, p))
         raise error.Abort(_("invalid pattern"))
 
+
 def _patternrootsanddirs(kindpats):
     '''Returns roots and directories corresponding to each pattern.
 
@@ -1349,7 +1446,7 @@
     r = []
     d = []
     for kind, pat, source in kindpats:
-        if kind in ('glob', 'rootglob'): # find the non-glob prefix
+        if kind in ('glob', 'rootglob'):  # find the non-glob prefix
             root = []
             for p in pat.split('/'):
                 if '[' in p or '{' in p or '*' in p or '?' in p:
@@ -1364,15 +1461,17 @@
             if pat == '.':
                 pat = ''
             d.append(pat)
-        else: # relglob, re, relre
+        else:  # relglob, re, relre
             r.append('')
     return r, d
 
+
 def _roots(kindpats):
     '''Returns root directories to match recursively from the given patterns.'''
     roots, dirs = _patternrootsanddirs(kindpats)
     return roots
 
+
 def _rootsdirsandparents(kindpats):
     '''Returns roots and exact directories from patterns.
 
@@ -1416,6 +1515,7 @@
     # 'dirs' to also be in 'parents', consider removing them before returning.
     return r, d, p
 
+
 def _explicitfiles(kindpats):
     '''Returns the potential explicit filenames from the patterns.
 
@@ -1429,6 +1529,7 @@
     filable = [kp for kp in kindpats if kp[0] not in ('rootfilesin',)]
     return _roots(filable)
 
+
 def _prefix(kindpats):
     '''Whether all the patterns match a prefix (i.e. recursively)'''
     for kind, pat, source in kindpats:
@@ -1436,8 +1537,10 @@
             return False
     return True
 
+
 _commentre = None
 
+
 def readpatternfile(filepath, warn, sourceinfo=False):
     '''parse a pattern file, returning a list of
     patterns. These patterns should be given to compile()
@@ -1464,9 +1567,7 @@
 
     if rustmod is not None:
         result, warnings = rustmod.read_pattern_file(
-            filepath,
-            bool(warn),
-            sourceinfo,
+            filepath, bool(warn), sourceinfo,
         )
 
         for warning_params in warnings:
@@ -1496,7 +1597,7 @@
             # remove comments prefixed by an even number of escapes
             m = _commentre.search(line)
             if m:
-                line = line[:m.end(1)]
+                line = line[: m.end(1)]
             # fixup properly escaped comments that survived the above
             line = line.replace("\\#", "#")
         line = line.rstrip()
@@ -1509,19 +1610,20 @@
                 syntax = syntaxes[s]
             except KeyError:
                 if warn:
-                    warn(_("%s: ignoring invalid syntax '%s'\n") %
-                         (filepath, s))
+                    warn(
+                        _("%s: ignoring invalid syntax '%s'\n") % (filepath, s)
+                    )
             continue
 
         linesyntax = syntax
         for s, rels in syntaxes.iteritems():
             if line.startswith(rels):
                 linesyntax = rels
-                line = line[len(rels):]
+                line = line[len(rels) :]
                 break
-            elif line.startswith(s+':'):
+            elif line.startswith(s + ':'):
                 linesyntax = rels
-                line = line[len(s) + 1:]
+                line = line[len(s) + 1 :]
                 break
         if sourceinfo:
             patterns.append((linesyntax + line, lineno, line))
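
The match.py hunks above are dominated by one mechanical change: black inserts spaces around the slice colon whenever an index expression is anything more than a bare name or literal, following PEP 8. The spacing is purely cosmetic, so every touched slice selects exactly the bytes it did before. A minimal doctest-style sketch, reusing the pat/prefix names from the hunks above (the concrete values are stand-ins):

    >>> pat = b'foo/bar'
    >>> i = 3
    >>> pat[i : i + 1] == pat[i:i + 1]
    True
    >>> prefix = b'foo/'
    >>> pat[len(prefix) :] == pat[len(prefix):]
    True
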
--- a/mercurial/mdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/mdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -33,6 +33,7 @@
 textdiff = bdiff.bdiff
 splitnewlines = bdiff.splitnewlines
 
+
 class diffopts(object):
     '''context is the number of context lines
     text treats all files as text
@@ -64,7 +65,7 @@
         'showsimilarity': False,
         'worddiff': False,
         'xdiff': False,
-        }
+    }
 
     def __init__(self, **opts):
         opts = pycompat.byteskwargs(opts)
@@ -77,9 +78,10 @@
         try:
             self.context = int(self.context)
         except ValueError:
-            raise error.Abort(_('diff context lines count must be '
-                                'an integer, not %r') %
-                              pycompat.bytestr(self.context))
+            raise error.Abort(
+                _('diff context lines count must be ' 'an integer, not %r')
+                % pycompat.bytestr(self.context)
+            )
 
     def copy(self, **kwargs):
         opts = dict((k, getattr(self, k)) for k in self.defaults)
@@ -87,8 +89,10 @@
         opts.update(kwargs)
         return diffopts(**opts)
 
+
 defaultopts = diffopts()
 
+
 def wsclean(opts, text, blank=True):
     if opts.ignorews:
         text = bdiff.fixws(text, 1)
@@ -100,6 +104,7 @@
         text = re.sub(br'[ \t\r\f]+\n', br'\n', text)
     return text
 
+
 def splitblock(base1, lines1, base2, lines2, opts):
     # The input lines matches except for interwoven blank lines. We
     # transform it into a sequence of matching blocks and blank blocks.
@@ -109,8 +114,7 @@
     s2, e2 = 0, len(lines2)
     while s1 < e1 or s2 < e2:
         i1, i2, btype = s1, s2, '='
-        if (i1 >= e1 or lines1[i1] == 0
-            or i2 >= e2 or lines2[i2] == 0):
+        if i1 >= e1 or lines1[i1] == 0 or i2 >= e2 or lines2[i2] == 0:
             # Consume the block of blank lines
             btype = '~'
             while i1 < e1 and lines1[i1] == 0:
@@ -126,6 +130,7 @@
         s1 = i1
         s2 = i2
 
+
 def hunkinrange(hunk, linerange):
     """Return True if `hunk` defined as (start, length) is in `linerange`
     defined as (lowerbound, upperbound).
@@ -151,6 +156,7 @@
     lowerbound, upperbound = linerange
     return lowerbound < start + length and start < upperbound
 
+
 def blocksinrange(blocks, rangeb):
     """filter `blocks` like (a1, a2, b1, b2) from items outside line range
     `rangeb` from ``(b1, b2)`` point of view.
@@ -190,13 +196,18 @@
         raise error.Abort(_('line range exceeds file size'))
     return filteredblocks, (lba, uba)
 
+
 def chooseblocksfunc(opts=None):
-    if (opts is None or not opts.xdiff
-        or not util.safehasattr(bdiff, 'xdiffblocks')):
+    if (
+        opts is None
+        or not opts.xdiff
+        or not util.safehasattr(bdiff, 'xdiffblocks')
+    ):
         return bdiff.blocks
     else:
         return bdiff.xdiffblocks
 
+
 def allblocks(text1, text2, opts=None, lines1=None, lines2=None):
     """Return (block, type) tuples, where block is an mdiff.blocks
     line entry. type is '=' for blocks matching exactly one another
@@ -231,13 +242,14 @@
                     lines1 = splitnewlines(text1)
                 if lines2 is None:
                     lines2 = splitnewlines(text2)
-                old = wsclean(opts, "".join(lines1[s[0]:s[1]]))
-                new = wsclean(opts, "".join(lines2[s[2]:s[3]]))
+                old = wsclean(opts, "".join(lines1[s[0] : s[1]]))
+                new = wsclean(opts, "".join(lines2[s[2] : s[3]]))
                 if old == new:
                     type = '~'
             yield s, type
         yield s1, '='
 
+
 def unidiff(a, ad, b, bd, fn1, fn2, binary, opts=defaultopts):
     """Return a unified diff as a (headers, hunks) tuple.
 
@@ -248,6 +260,7 @@
 
     Set binary=True if either a or b should be taken as a binary file.
     """
+
     def datetag(date, fn=None):
         if not opts.git and not opts.nodates:
             return '\t%s' % date
@@ -274,7 +287,7 @@
         if a and b and len(a) == len(b) and a == b:
             return sentinel
         headerlines = []
-        hunks = (None, ['Binary file %s has changed\n' % fn1]),
+        hunks = ((None, ['Binary file %s has changed\n' % fn1]),)
     elif not a:
         without_newline = not b.endswith('\n')
         b = splitnewlines(b)
@@ -290,7 +303,7 @@
         if without_newline:
             hunklines[-1] += '\n'
             hunklines.append(_missing_newline_marker)
-        hunks = (hunkrange, hunklines),
+        hunks = ((hunkrange, hunklines),)
     elif not b:
         without_newline = not a.endswith('\n')
         a = splitnewlines(a)
@@ -306,7 +319,7 @@
         if without_newline:
             hunklines[-1] += '\n'
             hunklines.append(_missing_newline_marker)
-        hunks = (hunkrange, hunklines),
+        hunks = ((hunkrange, hunklines),)
     else:
         hunks = _unidiff(a, b, opts=opts)
         if not next(hunks):
@@ -319,6 +332,7 @@
 
     return headerlines, hunks
 
+
 def _unidiff(t1, t2, opts=defaultopts):
     """Yield hunks of a headerless unified diff from t1 and t2 texts.
 
@@ -332,6 +346,7 @@
     """
     l1 = splitnewlines(t1)
     l2 = splitnewlines(t2)
+
     def contextend(l, len):
         ret = l + opts.context
         if ret > len:
@@ -345,6 +360,7 @@
         return ret
 
     lastfunc = [0, '']
+
     def yieldhunk(hunk):
         (astart, a2, bstart, b2, delta) = hunk
         aend = contextend(a2, len(l1))
@@ -452,9 +468,9 @@
             # create a new hunk
             hunk = [astart, a2, bstart, b2, delta]
 
-        delta[len(delta):] = [' ' + x for x in l1[astart:a1]]
-        delta[len(delta):] = ['-' + x for x in old]
-        delta[len(delta):] = ['+' + x for x in new]
+        delta[len(delta) :] = [' ' + x for x in l1[astart:a1]]
+        delta[len(delta) :] = ['-' + x for x in old]
+        delta[len(delta) :] = ['+' + x for x in new]
 
     if hunk:
         if not has_hunks:
@@ -465,8 +481,10 @@
     elif not has_hunks:
         yield False
 
+
 def b85diff(to, tn):
     '''print base85-encoded binary diff'''
+
     def fmtline(line):
         l = len(line)
         if l <= 26:
@@ -479,7 +497,7 @@
         l = len(text)
         i = 0
         while i < l:
-            yield text[i:i + csize]
+            yield text[i : i + csize]
             i += csize
 
     if to is None:
@@ -500,28 +518,33 @@
 
     return ''.join(ret)
 
+
 def patchtext(bin):
     pos = 0
     t = []
     while pos < len(bin):
-        p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
+        p1, p2, l = struct.unpack(">lll", bin[pos : pos + 12])
         pos += 12
-        t.append(bin[pos:pos + l])
+        t.append(bin[pos : pos + l])
         pos += l
     return "".join(t)
 
+
 def patch(a, bin):
     if len(a) == 0:
         # skip over trivial delta header
         return util.buffer(bin, 12)
     return mpatch.patches(a, [bin])
 
+
 # similar to difflib.SequenceMatcher.get_matching_blocks
 def get_matching_blocks(a, b):
     return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)]
 
+
 def trivialdiffheader(length):
     return struct.pack(">lll", 0, 0, length) if length else ''
 
+
 def replacediffheader(oldlen, newlen):
     return struct.pack(">lll", 0, oldlen, newlen)
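lll", 0, old">

The only subtle-looking rewrite in the mdiff.py hunks is hunks = (hunkrange, hunklines), becoming ((hunkrange, hunklines),): the trailing comma already built a one-element tuple, and black merely adds the parentheses that make that explicit. A short sketch confirming the two spellings are equal (the hunkrange/hunklines values are placeholders):

    >>> hunkrange, hunklines = (0, 0, 1, 1), ['+new\n']
    >>> implicit = (hunkrange, hunklines),
    >>> explicit = ((hunkrange, hunklines),)
    >>> implicit == explicit and len(explicit) == 1
    True
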
--- a/mercurial/merge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/merge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,9 +23,7 @@
     nullid,
     nullrev,
 )
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from . import (
     copies,
     encoding,
@@ -43,12 +41,14 @@
 _pack = struct.pack
 _unpack = struct.unpack
 
+
 def _droponode(data):
     # used for compatibility for v1
     bits = data.split('\0')
     bits = bits[:-2] + bits[-1:]
     return '\0'.join(bits)
 
+
 # Merge state record types. See ``mergestate`` docs for more.
 RECORD_LOCAL = b'L'
 RECORD_OTHER = b'O'
@@ -90,6 +90,7 @@
 ACTION_EXEC = b'e'
 ACTION_CREATED_MERGE = b'cm'
 
+
 class mergestate(object):
     '''track 3-way merge state of individual files
 
@@ -136,6 +137,7 @@
     The resolve command transitions between 'u' and 'r' for conflicts and
     'pu' and 'pr' for path conflicts.
     '''
+
     statepathv1 = 'merge/state'
     statepathv2 = 'merge/state2'
 
@@ -209,15 +211,21 @@
                 bits = record.split('\0', 1)
                 mdstate = bits[1]
                 if len(mdstate) != 1 or mdstate not in (
-                    MERGE_DRIVER_STATE_UNMARKED, MERGE_DRIVER_STATE_MARKED,
-                    MERGE_DRIVER_STATE_SUCCESS):
+                    MERGE_DRIVER_STATE_UNMARKED,
+                    MERGE_DRIVER_STATE_MARKED,
+                    MERGE_DRIVER_STATE_SUCCESS,
+                ):
                     # the merge driver should be idempotent, so just rerun it
                     mdstate = MERGE_DRIVER_STATE_UNMARKED
 
                 self._readmergedriver = bits[0]
                 self._mdstate = mdstate
-            elif rtype in (RECORD_MERGED, RECORD_CHANGEDELETE_CONFLICT,
-                           RECORD_PATH_CONFLICT, RECORD_MERGE_DRIVER_MERGE):
+            elif rtype in (
+                RECORD_MERGED,
+                RECORD_CHANGEDELETE_CONFLICT,
+                RECORD_PATH_CONFLICT,
+                RECORD_MERGE_DRIVER_MERGE,
+            ):
                 bits = record.split('\0')
                 self._state[bits[0]] = bits[1:]
             elif rtype == RECORD_FILE_VALUES:
@@ -276,7 +284,7 @@
             return v1records
 
     def _v1v2match(self, v1records, v2records):
-        oldv2 = set() # old format version of v2 record
+        oldv2 = set()  # old format version of v2 record
         for rec in v2records:
             if rec[0] == RECORD_LOCAL:
                 oldv2.add(rec)
@@ -336,11 +344,11 @@
             off = 0
             end = len(data)
             while off < end:
-                rtype = data[off:off + 1]
+                rtype = data[off : off + 1]
                 off += 1
-                length = _unpack('>I', data[off:(off + 4)])[0]
+                length = _unpack('>I', data[off : (off + 4)])[0]
                 off += 4
-                record = data[off:(off + length)]
+                record = data[off : (off + length)]
                 off += length
                 if rtype == RECORD_OVERRIDE:
                     rtype, record = record[0:1], record[1:]
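
For readers skimming the reindented slices in the _readrecordsv2 hunk just above: each v2 mergestate record is framed as a one-byte type, a big-endian 32-bit length, then that many payload bytes. A hedged sketch of that framing using the same struct format ('>I') as the hunk; RECORD_LOCAL (b'L') is a real record type, but the b'hello' payload is a stand-in (the real payload is a hex node):

    >>> import struct
    >>> data = b'L' + struct.pack('>I', 5) + b'hello'
    >>> off = 0
    >>> rtype = data[off : off + 1]
    >>> off += 1
    >>> length = struct.unpack('>I', data[off : off + 4])[0]
    >>> off += 4
    >>> (rtype, data[off : off + length]) == (b'L', b'hello')
    True
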
@@ -362,11 +370,14 @@
         # - B then continues the merge and the malicious merge driver
         #  gets invoked
         configmergedriver = self._repo.ui.config('experimental', 'mergedriver')
-        if (self._readmergedriver is not None
-            and self._readmergedriver != configmergedriver):
+        if (
+            self._readmergedriver is not None
+            and self._readmergedriver != configmergedriver
+        ):
             raise error.ConfigError(
                 _("merge driver changed since merge started"),
-                hint=_("revert merge driver change or abort merge"))
+                hint=_("revert merge driver change or abort merge"),
+            )
 
         return configmergedriver
 
@@ -392,9 +403,12 @@
         """
         # Check local variables before looking at filesystem for performance
         # reasons.
-        return (bool(self._local) or bool(self._state) or
-                self._repo.vfs.exists(self.statepathv1) or
-                self._repo.vfs.exists(self.statepathv2))
+        return (
+            bool(self._local)
+            or bool(self._state)
+            or self._repo.vfs.exists(self.statepathv1)
+            or self._repo.vfs.exists(self.statepathv2)
+        )
 
     def commit(self):
         """Write current state on disk (if necessary)"""
@@ -408,8 +422,12 @@
         records.append((RECORD_LOCAL, hex(self._local)))
         records.append((RECORD_OTHER, hex(self._other)))
         if self.mergedriver:
-            records.append((RECORD_MERGE_DRIVER_STATE, '\0'.join([
-                self.mergedriver, self._mdstate])))
+            records.append(
+                (
+                    RECORD_MERGE_DRIVER_STATE,
+                    '\0'.join([self.mergedriver, self._mdstate]),
+                )
+            )
         # Write out state items. In all cases, the value of the state map entry
         # is written as the contents of the record. The record type depends on
         # the type of state that is stored, and capital-letter records are used
@@ -418,30 +436,36 @@
         for filename, v in self._state.iteritems():
             if v[0] == MERGE_RECORD_DRIVER_RESOLVED:
                 # Driver-resolved merge. These are stored in 'D' records.
-                records.append((RECORD_MERGE_DRIVER_MERGE,
-                                '\0'.join([filename] + v)))
-            elif v[0] in (MERGE_RECORD_UNRESOLVED_PATH,
-                          MERGE_RECORD_RESOLVED_PATH):
+                records.append(
+                    (RECORD_MERGE_DRIVER_MERGE, '\0'.join([filename] + v))
+                )
+            elif v[0] in (
+                MERGE_RECORD_UNRESOLVED_PATH,
+                MERGE_RECORD_RESOLVED_PATH,
+            ):
                 # Path conflicts. These are stored in 'P' records.  The current
                 # resolution state ('pu' or 'pr') is stored within the record.
-                records.append((RECORD_PATH_CONFLICT,
-                                '\0'.join([filename] + v)))
+                records.append(
+                    (RECORD_PATH_CONFLICT, '\0'.join([filename] + v))
+                )
             elif v[1] == nullhex or v[6] == nullhex:
                 # Change/Delete or Delete/Change conflicts. These are stored in
                 # 'C' records. v[1] is the local file, and is nullhex when the
                 # file is deleted locally ('dc'). v[6] is the remote file, and
                 # is nullhex when the file is deleted remotely ('cd').
-                records.append((RECORD_CHANGEDELETE_CONFLICT,
-                                '\0'.join([filename] + v)))
+                records.append(
+                    (RECORD_CHANGEDELETE_CONFLICT, '\0'.join([filename] + v))
+                )
             else:
                 # Normal files.  These are stored in 'F' records.
-                records.append((RECORD_MERGED,
-                                '\0'.join([filename] + v)))
+                records.append((RECORD_MERGED, '\0'.join([filename] + v)))
         for filename, extras in sorted(self._stateextras.iteritems()):
-            rawextras = '\0'.join('%s\0%s' % (k, v) for k, v in
-                                  extras.iteritems())
-            records.append((RECORD_FILE_VALUES,
-                            '%s\0%s' % (filename, rawextras)))
+            rawextras = '\0'.join(
+                '%s\0%s' % (k, v) for k, v in extras.iteritems()
+            )
+            records.append(
+                (RECORD_FILE_VALUES, '%s\0%s' % (filename, rawextras))
+            )
         if self._labels is not None:
             labels = '\0'.join(self._labels)
             records.append((RECORD_LABELS, labels))
@@ -500,10 +524,16 @@
         else:
             localkey = mergestate.getlocalkey(fcl.path())
             self._repo.vfs.write('merge/' + localkey, fcl.data())
-        self._state[fd] = [MERGE_RECORD_UNRESOLVED, localkey, fcl.path(),
-                           fca.path(), hex(fca.filenode()),
-                           fco.path(), hex(fco.filenode()),
-                           fcl.flags()]
+        self._state[fd] = [
+            MERGE_RECORD_UNRESOLVED,
+            localkey,
+            fcl.path(),
+            fca.path(),
+            hex(fca.filenode()),
+            fco.path(),
+            hex(fco.filenode()),
+            fcl.flags(),
+        ]
         self._stateextras[fd] = {'ancestorlinknode': hex(fca.node())}
         self._dirty = True
 
@@ -539,8 +569,10 @@
         """Obtain the paths of unresolved files."""
 
         for f, entry in self._state.iteritems():
-            if entry[0] in (MERGE_RECORD_UNRESOLVED,
-                            MERGE_RECORD_UNRESOLVED_PATH):
+            if entry[0] in (
+                MERGE_RECORD_UNRESOLVED,
+                MERGE_RECORD_UNRESOLVED_PATH,
+            ):
                 yield f
 
     def driverresolved(self):
@@ -555,8 +587,7 @@
 
     def _resolve(self, preresolve, dfile, wctx):
         """rerun merge process for file path `dfile`"""
-        if self[dfile] in (MERGE_RECORD_RESOLVED,
-                           MERGE_RECORD_DRIVER_RESOLVED):
+        if self[dfile] in (MERGE_RECORD_RESOLVED, MERGE_RECORD_DRIVER_RESOLVED):
             return True, 0
         stateentry = self._state[dfile]
         state, localkey, lfile, afile, anode, ofile, onode, flags = stateentry
@@ -578,9 +609,12 @@
             if fca.node() == nullid and flags != flo:
                 if preresolve:
                     self._repo.ui.warn(
-                        _('warning: cannot merge flags for %s '
-                          'without common ancestor - keeping local flags\n')
-                        % afile)
+                        _(
+                            'warning: cannot merge flags for %s '
+                            'without common ancestor - keeping local flags\n'
+                        )
+                        % afile
+                    )
             elif flags == fla:
                 flags = flo
         if preresolve:
@@ -591,15 +625,27 @@
                 f.close()
             else:
                 wctx[dfile].remove(ignoremissing=True)
-            complete, r, deleted = filemerge.premerge(self._repo, wctx,
-                                                      self._local, lfile, fcd,
-                                                      fco, fca,
-                                                      labels=self._labels)
+            complete, r, deleted = filemerge.premerge(
+                self._repo,
+                wctx,
+                self._local,
+                lfile,
+                fcd,
+                fco,
+                fca,
+                labels=self._labels,
+            )
         else:
-            complete, r, deleted = filemerge.filemerge(self._repo, wctx,
-                                                       self._local, lfile, fcd,
-                                                       fco, fca,
-                                                       labels=self._labels)
+            complete, r, deleted = filemerge.filemerge(
+                self._repo,
+                wctx,
+                self._local,
+                lfile,
+                fcd,
+                fco,
+                fca,
+                labels=self._labels,
+            )
         if r is None:
             # no real conflict
             del self._state[dfile]
@@ -619,9 +665,9 @@
                     # cd: remote picked (or otherwise deleted)
                     action = ACTION_REMOVE
             else:
-                if fcd.isabsent(): # dc: remote picked
+                if fcd.isabsent():  # dc: remote picked
                     action = ACTION_GET
-                elif fco.isabsent(): # cd: local picked
+                elif fco.isabsent():  # cd: local picked
                     if dfile in self.localctx:
                         action = ACTION_ADD_MODIFIED
                     else:
@@ -704,16 +750,19 @@
         Meant for use by custom merge drivers."""
         self._results[f] = 0, ACTION_GET
 
+
 def _getcheckunknownconfig(repo, section, name):
     config = repo.ui.config(section, name)
     valid = ['abort', 'ignore', 'warn']
     if config not in valid:
         validstr = ', '.join(["'" + v + "'" for v in valid])
-        raise error.ConfigError(_("%s.%s not valid "
-                                  "('%s' is none of %s)")
-                                % (section, name, config, validstr))
+        raise error.ConfigError(
+            _("%s.%s not valid " "('%s' is none of %s)")
+            % (section, name, config, validstr)
+        )
     return config
 
+
 def _checkunknownfile(repo, wctx, mctx, f, f2=None):
     if wctx.isinmemory():
         # Nothing to do in IMM because nothing in the "working copy" can be an
@@ -725,10 +774,13 @@
 
     if f2 is None:
         f2 = f
-    return (repo.wvfs.audit.check(f)
+    return (
+        repo.wvfs.audit.check(f)
         and repo.wvfs.isfileorlink(f)
         and repo.dirstate.normalize(f) not in repo.dirstate
-        and mctx[f2].cmp(wctx[f]))
+        and mctx[f2].cmp(wctx[f])
+    )
+
 
 class _unknowndirschecker(object):
     """
@@ -740,6 +792,7 @@
     Returns the shortest path at which a conflict occurs, or None if there is
     no conflict.
     """
+
     def __init__(self):
         # A set of paths known to be good.  This prevents repeated checking of
         # dirs.  It will be updated with any new dirs that are checked and found
@@ -763,8 +816,10 @@
             if p in self._unknowndircache:
                 continue
             if repo.wvfs.audit.check(p):
-                if (repo.wvfs.isfileorlink(p)
-                        and repo.dirstate.normalize(p) not in repo.dirstate):
+                if (
+                    repo.wvfs.isfileorlink(p)
+                    and repo.dirstate.normalize(p) not in repo.dirstate
+                ):
                     return p
                 if not repo.wvfs.lexists(p):
                     self._missingdircache.add(p)
@@ -782,6 +837,7 @@
                         return f
         return None
 
+
 def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
     """
     Considers any actions that care about the presence of conflicting unknown
@@ -796,6 +852,7 @@
     ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored')
     pathconfig = repo.ui.configbool('experimental', 'merge.checkpathconflicts')
     if not force:
+
         def collectconflicts(conflicts, config):
             if config == 'abort':
                 abortconflicts.update(conflicts)
@@ -816,8 +873,7 @@
                     fileconflicts.add(f)
 
         allconflicts = fileconflicts | pathconflicts
-        ignoredconflicts = {c for c in allconflicts
-                            if repo.dirstate._ignore(c)}
+        ignoredconflicts = {c for c in allconflicts if repo.dirstate._ignore(c)}
         unknownconflicts = allconflicts - ignoredconflicts
         collectconflicts(ignoredconflicts, ignoredconfig)
         collectconflicts(unknownconflicts, unknownconfig)
@@ -846,8 +902,11 @@
                 if not different:
                     actions[f] = (ACTION_GET, (fl2, False), 'remote created')
                 elif mergeforce or config == 'abort':
-                    actions[f] = (ACTION_MERGE, (f, f, None, False, anc),
-                                  'remote differs from untracked local')
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f, f, None, False, anc),
+                        'remote differs from untracked local',
+                    )
                 elif config == 'abort':
                     abortconflicts.add(f)
                 else:
@@ -865,8 +924,12 @@
         else:
             warn(_("%s: untracked file differs\n") % f)
     if abortconflicts:
-        raise error.Abort(_("untracked files in working directory "
-                            "differ from files in requested revision"))
+        raise error.Abort(
+            _(
+                "untracked files in working directory "
+                "differ from files in requested revision"
+            )
+        )
 
     for f in sorted(warnconflicts):
         if repo.wvfs.isfileorlink(f):
@@ -876,11 +939,15 @@
 
     for f, (m, args, msg) in actions.iteritems():
         if m == ACTION_CREATED:
-            backup = (f in fileconflicts or f in pathconflicts or
-                      any(p in pathconflicts for p in util.finddirs(f)))
-            flags, = args
+            backup = (
+                f in fileconflicts
+                or f in pathconflicts
+                or any(p in pathconflicts for p in util.finddirs(f))
+            )
+            (flags,) = args
             actions[f] = (ACTION_GET, (flags, backup), msg)
 
+
 def _forgetremoved(wctx, mctx, branchmerge):
     """
     Forget removed files
@@ -911,6 +978,7 @@
 
     return actions
 
+
 def _checkcollision(repo, wmf, actions):
     """
     Check for case-folding collisions.
@@ -934,8 +1002,14 @@
 
     if actions:
         # KEEP and EXEC are no-op
-        for m in (ACTION_ADD, ACTION_ADD_MODIFIED, ACTION_FORGET, ACTION_GET,
-                  ACTION_CHANGED_DELETED, ACTION_DELETED_CHANGED):
+        for m in (
+            ACTION_ADD,
+            ACTION_ADD_MODIFIED,
+            ACTION_FORGET,
+            ACTION_GET,
+            ACTION_CHANGED_DELETED,
+            ACTION_DELETED_CHANGED,
+        ):
             for f, args, msg in actions[m]:
                 pmmf.add(f)
         for f, args, msg in actions[ACTION_REMOVE]:
@@ -957,8 +1031,10 @@
     for f in pmmf:
         fold = util.normcase(f)
         if fold in foldmap:
-            raise error.Abort(_("case-folding collision between %s and %s")
-                             % (f, foldmap[fold]))
+            raise error.Abort(
+                _("case-folding collision between %s and %s")
+                % (f, foldmap[fold])
+            )
         foldmap[fold] = f
 
     # check case-folding of directories
@@ -966,24 +1042,29 @@
     for fold, f in sorted(foldmap.items()):
         if fold.startswith(foldprefix) and not f.startswith(unfoldprefix):
             # the folded prefix matches but actual casing is different
-            raise error.Abort(_("case-folding collision between "
-                                "%s and directory of %s") % (lastfull, f))
+            raise error.Abort(
+                _("case-folding collision between " "%s and directory of %s")
+                % (lastfull, f)
+            )
         foldprefix = fold + '/'
         unfoldprefix = f + '/'
         lastfull = f
 
+
 def driverpreprocess(repo, ms, wctx, labels=None):
     """run the preprocess step of the merge driver, if any
 
     This is currently not implemented -- it's an extension point."""
     return True
 
+
 def driverconclude(repo, ms, wctx, labels=None):
     """run the conclude step of the merge driver, if any
 
     This is currently not implemented -- it's an extension point."""
     return True
 
+
 def _filesindirs(repo, manifest, dirs):
     """
     Generator that yields pairs of all the files in the manifest that are found
@@ -996,6 +1077,7 @@
                 yield f, p
                 break
 
+
 def checkpathconflicts(repo, wctx, mctx, actions):
     """
     Check if any actions introduce path conflicts in the repository, updating
@@ -1022,8 +1104,12 @@
     deletedfiles = set()
 
     for f, (m, args, msg) in actions.items():
-        if m in (ACTION_CREATED, ACTION_DELETED_CHANGED, ACTION_MERGE,
-                 ACTION_CREATED_MERGE):
+        if m in (
+            ACTION_CREATED,
+            ACTION_DELETED_CHANGED,
+            ACTION_MERGE,
+            ACTION_CREATED_MERGE,
+        ):
             # This action may create a new local file.
             createdfiledirs.update(util.finddirs(f))
             if mf.hasdir(f):
@@ -1054,10 +1140,12 @@
                 # A file is in a directory which aliases a local file.
                 # We will need to rename the local file.
                 localconflicts.add(p)
-        if p in actions and actions[p][0] in (ACTION_CREATED,
-                                              ACTION_DELETED_CHANGED,
-                                              ACTION_MERGE,
-                                              ACTION_CREATED_MERGE):
+        if p in actions and actions[p][0] in (
+            ACTION_CREATED,
+            ACTION_DELETED_CHANGED,
+            ACTION_MERGE,
+            ACTION_CREATED_MERGE,
+        ):
             # The file is in a directory which aliases a remote file.
             # This is an internal inconsistency within the remote
             # manifest.
@@ -1068,10 +1156,12 @@
         if p not in deletedfiles:
             ctxname = bytes(wctx).rstrip('+')
             pnew = util.safename(p, ctxname, wctx, set(actions.keys()))
-            actions[pnew] = (ACTION_PATH_CONFLICT_RESOLVE, (p,),
-                             'local path conflict')
-            actions[p] = (ACTION_PATH_CONFLICT, (pnew, 'l'),
-                          'path conflict')
+            actions[pnew] = (
+                ACTION_PATH_CONFLICT_RESOLVE,
+                (p,),
+                'local path conflict',
+            )
+            actions[p] = (ACTION_PATH_CONFLICT, (pnew, 'l'), 'path conflict')
 
     if remoteconflicts:
         # Check if all files in the conflicting directories have been removed.
@@ -1086,10 +1176,16 @@
                 else:
                     # Action was create, change to renamed get action.
                     fl = args[0]
-                    actions[pnew] = (ACTION_LOCAL_DIR_RENAME_GET, (p, fl),
-                                     'remote path conflict')
-                actions[p] = (ACTION_PATH_CONFLICT, (pnew, ACTION_REMOVE),
-                              'path conflict')
+                    actions[pnew] = (
+                        ACTION_LOCAL_DIR_RENAME_GET,
+                        (p, fl),
+                        'remote path conflict',
+                    )
+                actions[p] = (
+                    ACTION_PATH_CONFLICT,
+                    (pnew, ACTION_REMOVE),
+                    'path conflict',
+                )
                 remoteconflicts.remove(p)
                 break
 
@@ -1098,6 +1194,7 @@
             repo.ui.warn(_("%s: is both a file and a directory\n") % p)
         raise error.Abort(_("destination manifest contains path conflicts"))
 
+
 def _filternarrowactions(narrowmatch, branchmerge, actions):
     """
     Filters out actions that can ignored because the repo is narrowed.
@@ -1105,7 +1202,7 @@
     Raise an exception if the merge cannot be completed because the repo is
     narrowed.
     """
-    nooptypes = {'k'} # TODO: handle with nonconflicttypes
+    nooptypes = {'k'}  # TODO: handle with nonconflicttypes
     nonconflicttypes = set('a am c cm f g r e'.split())
     # We mutate the items in the dict during iteration, so iterate
     # over a copy.
@@ -1113,20 +1210,36 @@
         if narrowmatch(f):
             pass
         elif not branchmerge:
-            del actions[f] # just updating, ignore changes outside clone
+            del actions[f]  # just updating, ignore changes outside clone
         elif action[0] in nooptypes:
-            del actions[f] # merge does not affect file
+            del actions[f]  # merge does not affect file
         elif action[0] in nonconflicttypes:
-            raise error.Abort(_('merge affects file \'%s\' outside narrow, '
-                                'which is not yet supported') % f,
-                              hint=_('merging in the other direction '
-                                     'may work'))
+            raise error.Abort(
+                _(
+                    'merge affects file \'%s\' outside narrow, '
+                    'which is not yet supported'
+                )
+                % f,
+                hint=_('merging in the other direction ' 'may work'),
+            )
         else:
-            raise error.Abort(_('conflict in file \'%s\' is outside '
-                                'narrow clone') % f)
+            raise error.Abort(
+                _('conflict in file \'%s\' is outside ' 'narrow clone') % f
+            )
+
 
-def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher,
-                  acceptremote, followcopies, forcefulldiff=False):
+def manifestmerge(
+    repo,
+    wctx,
+    p2,
+    pa,
+    branchmerge,
+    force,
+    matcher,
+    acceptremote,
+    followcopies,
+    forcefulldiff=False,
+):
     """
     Merge wctx and p2 with ancestor pa and generate merge action list
 
@@ -1140,8 +1253,10 @@
     copy, movewithdir, diverge, renamedelete, dirmove = {}, {}, {}, {}, {}
 
     # manifests fetched in order are going to be faster, so prime the caches
-    [x.manifest() for x in
-     sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)]
+    [
+        x.manifest()
+        for x in sorted(wctx.parents() + [p2, pa], key=scmutil.intrev)
+    ]
 
     if followcopies:
         ret = copies.mergecopies(repo, wctx, p2, pa)
@@ -1151,8 +1266,9 @@
     boolf = pycompat.bytestr(bool(force))
     boolm = pycompat.bytestr(bool(matcher))
     repo.ui.note(_("resolving manifests\n"))
-    repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
-                  % (boolbm, boolf, boolm))
+    repo.ui.debug(
+        " branchmerge: %s, force: %s, partial: %s\n" % (boolbm, boolf, boolm)
+    )
     repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
 
     m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
@@ -1170,7 +1286,7 @@
     # - ma is the same as m1 or m2, which we're just going to diff again later
     # - The caller specifically asks for a full diff, which is useful during bid
     #   merge.
-    if (pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff):
+    if pa not in ([wctx, p2] + wctx.parents()) and not forcefulldiff:
         # Identify which files are relevant to the merge, so we can limit the
         # total m1-vs-m2 diff to just those files. This has significant
         # performance benefits in large repositories.
@@ -1189,57 +1305,80 @@
 
     actions = {}
     for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
-        if n1 and n2: # file exists on both local and remote side
+        if n1 and n2:  # file exists on both local and remote side
             if f not in ma:
                 fa = copy.get(f, None)
                 if fa is not None:
-                    actions[f] = (ACTION_MERGE, (f, f, fa, False, pa.node()),
-                                  'both renamed from %s' % fa)
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f, f, fa, False, pa.node()),
+                        'both renamed from %s' % fa,
+                    )
                 else:
-                    actions[f] = (ACTION_MERGE, (f, f, None, False, pa.node()),
-                                  'both created')
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f, f, None, False, pa.node()),
+                        'both created',
+                    )
             else:
                 a = ma[f]
                 fla = ma.flags(f)
                 nol = 'l' not in fl1 + fl2 + fla
                 if n2 == a and fl2 == fla:
                     actions[f] = (ACTION_KEEP, (), 'remote unchanged')
-                elif n1 == a and fl1 == fla: # local unchanged - use remote
-                    if n1 == n2: # optimization: keep local content
+                elif n1 == a and fl1 == fla:  # local unchanged - use remote
+                    if n1 == n2:  # optimization: keep local content
                         actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
                     else:
-                        actions[f] = (ACTION_GET, (fl2, False),
-                                      'remote is newer')
-                elif nol and n2 == a: # remote only changed 'x'
+                        actions[f] = (
+                            ACTION_GET,
+                            (fl2, False),
+                            'remote is newer',
+                        )
+                elif nol and n2 == a:  # remote only changed 'x'
                     actions[f] = (ACTION_EXEC, (fl2,), 'update permissions')
-                elif nol and n1 == a: # local only changed 'x'
+                elif nol and n1 == a:  # local only changed 'x'
                     actions[f] = (ACTION_GET, (fl1, False), 'remote is newer')
-                else: # both changed something
-                    actions[f] = (ACTION_MERGE, (f, f, f, False, pa.node()),
-                                  'versions differ')
-        elif n1: # file exists only on local side
+                else:  # both changed something
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f, f, f, False, pa.node()),
+                        'versions differ',
+                    )
+        elif n1:  # file exists only on local side
             if f in copied:
-                pass # we'll deal with it on m2 side
-            elif f in movewithdir: # directory rename, move local
+                pass  # we'll deal with it on m2 side
+            elif f in movewithdir:  # directory rename, move local
                 f2 = movewithdir[f]
                 if f2 in m2:
-                    actions[f2] = (ACTION_MERGE, (f, f2, None, True, pa.node()),
-                                   'remote directory rename, both created')
+                    actions[f2] = (
+                        ACTION_MERGE,
+                        (f, f2, None, True, pa.node()),
+                        'remote directory rename, both created',
+                    )
                 else:
-                    actions[f2] = (ACTION_DIR_RENAME_MOVE_LOCAL, (f, fl1),
-                                   'remote directory rename - move from %s' % f)
+                    actions[f2] = (
+                        ACTION_DIR_RENAME_MOVE_LOCAL,
+                        (f, fl1),
+                        'remote directory rename - move from %s' % f,
+                    )
             elif f in copy:
                 f2 = copy[f]
-                actions[f] = (ACTION_MERGE, (f, f2, f2, False, pa.node()),
-                              'local copied/moved from %s' % f2)
-            elif f in ma: # clean, a different, no remote
+                actions[f] = (
+                    ACTION_MERGE,
+                    (f, f2, f2, False, pa.node()),
+                    'local copied/moved from %s' % f2,
+                )
+            elif f in ma:  # clean, a different, no remote
                 if n1 != ma[f]:
                     if acceptremote:
                         actions[f] = (ACTION_REMOVE, None, 'remote delete')
                     else:
-                        actions[f] = (ACTION_CHANGED_DELETED,
-                                      (f, None, f, False, pa.node()),
-                                      'prompt changed/deleted')
+                        actions[f] = (
+                            ACTION_CHANGED_DELETED,
+                            (f, None, f, False, pa.node()),
+                            'prompt changed/deleted',
+                        )
                 elif n1 == addednodeid:
                    # This extra 'a' is added by the working copy manifest to mark
                     # the file as locally added. We should forget it instead of
@@ -1247,26 +1386,37 @@
                     actions[f] = (ACTION_FORGET, None, 'remote deleted')
                 else:
                     actions[f] = (ACTION_REMOVE, None, 'other deleted')
-        elif n2: # file exists only on remote side
+        elif n2:  # file exists only on remote side
             if f in copied:
-                pass # we'll deal with it on m1 side
+                pass  # we'll deal with it on m1 side
             elif f in movewithdir:
                 f2 = movewithdir[f]
                 if f2 in m1:
-                    actions[f2] = (ACTION_MERGE,
-                                   (f2, f, None, False, pa.node()),
-                                   'local directory rename, both created')
+                    actions[f2] = (
+                        ACTION_MERGE,
+                        (f2, f, None, False, pa.node()),
+                        'local directory rename, both created',
+                    )
                 else:
-                    actions[f2] = (ACTION_LOCAL_DIR_RENAME_GET, (f, fl2),
-                                   'local directory rename - get from %s' % f)
+                    actions[f2] = (
+                        ACTION_LOCAL_DIR_RENAME_GET,
+                        (f, fl2),
+                        'local directory rename - get from %s' % f,
+                    )
             elif f in copy:
                 f2 = copy[f]
                 if f2 in m2:
-                    actions[f] = (ACTION_MERGE, (f2, f, f2, False, pa.node()),
-                                  'remote copied from %s' % f2)
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f2, f, f2, False, pa.node()),
+                        'remote copied from %s' % f2,
+                    )
                 else:
-                    actions[f] = (ACTION_MERGE, (f2, f, f2, True, pa.node()),
-                                  'remote moved from %s' % f2)
+                    actions[f] = (
+                        ACTION_MERGE,
+                        (f2, f, f2, True, pa.node()),
+                        'remote moved from %s' % f2,
+                    )
             elif f not in ma:
                 # local unknown, remote created: the logic is described by the
                 # following table:
@@ -1284,25 +1434,32 @@
                 elif not branchmerge:
                     actions[f] = (ACTION_CREATED, (fl2,), 'remote created')
                 else:
-                    actions[f] = (ACTION_CREATED_MERGE, (fl2, pa.node()),
-                                  'remote created, get or merge')
+                    actions[f] = (
+                        ACTION_CREATED_MERGE,
+                        (fl2, pa.node()),
+                        'remote created, get or merge',
+                    )
             elif n2 != ma[f]:
                 df = None
                 for d in dirmove:
                     if f.startswith(d):
                         # new file added in a directory that was moved
-                        df = dirmove[d] + f[len(d):]
+                        df = dirmove[d] + f[len(d) :]
                         break
                 if df is not None and df in m1:
-                    actions[df] = (ACTION_MERGE, (df, f, f, False, pa.node()),
-                                   'local directory rename - respect move '
-                                   'from %s' % f)
+                    actions[df] = (
+                        ACTION_MERGE,
+                        (df, f, f, False, pa.node()),
+                        'local directory rename - respect move from %s' % f,
+                    )
                 elif acceptremote:
                     actions[f] = (ACTION_CREATED, (fl2,), 'remote recreating')
                 else:
-                    actions[f] = (ACTION_DELETED_CHANGED,
-                                  (None, f, f, False, pa.node()),
-                                  'prompt deleted/changed')
+                    actions[f] = (
+                        ACTION_DELETED_CHANGED,
+                        (None, f, f, False, pa.node()),
+                        'prompt deleted/changed',
+                    )
 
     if repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
         # If we are merging, look for path conflicts.
@@ -1315,48 +1472,88 @@
 
     return actions, diverge, renamedelete
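Every entry recorded above has the same shape: ``actions[f]`` is an
(ACTION_*, args, message) triple, which black now prints one element per
line. A minimal sketch of that shape (file name and message invented for
illustration; ``pa`` is the common ancestor context as in the code above):

    # hypothetical manifestmerge decision for one file
    actions = {}
    actions['foo.txt'] = (
        ACTION_MERGE,
        ('foo.txt', 'foo.txt', 'foo.txt', False, pa.node()),
        'versions differ',
    )
    m, (f1, f2, fa, move, anc), msg = actions['foo.txt']  # consumer unpack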
 
+
 def _resolvetrivial(repo, wctx, mctx, ancestor, actions):
     """Resolves false conflicts where the nodeid changed but the content
        remained the same."""
     # We force a copy of actions.items() because we're going to mutate
     # actions as we resolve trivial conflicts.
     for f, (m, args, msg) in list(actions.items()):
-        if (m == ACTION_CHANGED_DELETED and f in ancestor
-            and not wctx[f].cmp(ancestor[f])):
+        if (
+            m == ACTION_CHANGED_DELETED
+            and f in ancestor
+            and not wctx[f].cmp(ancestor[f])
+        ):
             # local did change but ended up with same content
             actions[f] = ACTION_REMOVE, None, 'prompt same'
-        elif (m == ACTION_DELETED_CHANGED and f in ancestor
-              and not mctx[f].cmp(ancestor[f])):
+        elif (
+            m == ACTION_DELETED_CHANGED
+            and f in ancestor
+            and not mctx[f].cmp(ancestor[f])
+        ):
             # remote did change but ended up with same content
-            del actions[f] # don't get = keep local deleted
+            del actions[f]  # don't get = keep local deleted
+
 
-def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force,
-                     acceptremote, followcopies, matcher=None,
-                     mergeforce=False):
+def calculateupdates(
+    repo,
+    wctx,
+    mctx,
+    ancestors,
+    branchmerge,
+    force,
+    acceptremote,
+    followcopies,
+    matcher=None,
+    mergeforce=False,
+):
     """Calculate the actions needed to merge mctx into wctx using ancestors"""
     # Avoid cycle.
     from . import sparse
 
-    if len(ancestors) == 1: # default
+    if len(ancestors) == 1:  # default
         actions, diverge, renamedelete = manifestmerge(
-            repo, wctx, mctx, ancestors[0], branchmerge, force, matcher,
-            acceptremote, followcopies)
+            repo,
+            wctx,
+            mctx,
+            ancestors[0],
+            branchmerge,
+            force,
+            matcher,
+            acceptremote,
+            followcopies,
+        )
         _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
 
-    else: # only when merge.preferancestor=* - the default
+    else:  # only when merge.preferancestor=* - the default
         repo.ui.note(
-            _("note: merging %s and %s using bids from ancestors %s\n") %
-            (wctx, mctx, _(' and ').join(pycompat.bytestr(anc)
-                                            for anc in ancestors)))
+            _("note: merging %s and %s using bids from ancestors %s\n")
+            % (
+                wctx,
+                mctx,
+                _(' and ').join(pycompat.bytestr(anc) for anc in ancestors),
+            )
+        )
 
         # Call for bids
-        fbids = {} # mapping filename to bids (action method to list af actions)
+        # mapping filename to bids (action method to list of actions)
+        fbids = {}
         diverge, renamedelete = None, None
         for ancestor in ancestors:
             repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor)
             actions, diverge1, renamedelete1 = manifestmerge(
-                repo, wctx, mctx, ancestor, branchmerge, force, matcher,
-                acceptremote, followcopies, forcefulldiff=True)
+                repo,
+                wctx,
+                mctx,
+                ancestor,
+                branchmerge,
+                force,
+                matcher,
+                acceptremote,
+                followcopies,
+                forcefulldiff=True,
+            )
             _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce)
 
             # Track the shortest set of warning on the theory that bid
@@ -1384,9 +1581,9 @@
         for f, bids in sorted(fbids.items()):
             # bids is a mapping from action method to list of actions
             # Consensus?
-            if len(bids) == 1: # all bids are the same kind of method
+            if len(bids) == 1:  # all bids are the same kind of method
                 m, l = list(bids.items())[0]
-                if all(a == l[0] for a in l[1:]): # len(bids) is > 1
+                if all(a == l[0] for a in l[1:]):  # len(bids) is > 1
                     repo.ui.note(_(" %s: consensus for %s\n") % (f, m))
                     actions[f] = l[0]
                     continue
@@ -1410,8 +1607,9 @@
                     repo.ui.note('  %s -> %s\n' % (msg, m))
             # Pick random action. TODO: Instead, prompt user when resolving
             m, l = list(bids.items())[0]
-            repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') %
-                         (f, m))
+            repo.ui.warn(
+                _(' %s: ambiguous merge - picked %s action\n') % (f, m)
+            )
             actions[f] = l[0]
             continue
         repo.ui.note(_('end of auction\n\n'))
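While reading the auction above: ``fbids`` maps each filename to a dict
keyed by action method, whose values are the (ACTION_*, args, message)
bids collected from each ancestor. A sketch of the consensus case handled
first (values invented for illustration):

    fbids = {
        'a.txt': {
            ACTION_GET: [
                (ACTION_GET, ('', False), 'remote is newer'),
                (ACTION_GET, ('', False), 'remote is newer'),
            ],
        },
    }
    # one method, identical bids -> consensus; otherwise an action is
    # picked arbitrarily and a warning is emitted, as above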
@@ -1420,12 +1618,14 @@
         fractions = _forgetremoved(wctx, mctx, branchmerge)
         actions.update(fractions)
 
-    prunedactions = sparse.filterupdatesactions(repo, wctx, mctx, branchmerge,
-                                                actions)
+    prunedactions = sparse.filterupdatesactions(
+        repo, wctx, mctx, branchmerge, actions
+    )
     _resolvetrivial(repo, wctx, mctx, ancestors[0], actions)
 
     return prunedactions, diverge, renamedelete
 
+
 def _getcwd():
     try:
         return encoding.getcwd()
@@ -1434,6 +1634,7 @@
             return None
         raise
 
+
 def batchremove(repo, wctx, actions):
     """apply removes to the working directory
 
@@ -1450,8 +1651,9 @@
         try:
             wctx[f].remove(ignoremissing=True)
         except OSError as inst:
-            repo.ui.warn(_("update failed to remove %s: %s!\n") %
-                         (f, inst.strerror))
+            repo.ui.warn(
+                _("update failed to remove %s: %s!\n") % (f, inst.strerror)
+            )
         if i == 100:
             yield i, f
             i = 0
@@ -1462,8 +1664,14 @@
     if cwd and not _getcwd():
         # cwd was removed in the course of removing files; print a helpful
         # warning.
-        repo.ui.warn(_("current directory was removed\n"
-                       "(consider changing to repo root: %s)\n") % repo.root)
+        repo.ui.warn(
+            _(
+                "current directory was removed\n"
+                "(consider changing to repo root: %s)\n"
+            )
+            % repo.root
+        )
+
 
 def batchget(repo, mctx, wctx, wantfiledata, actions):
     """apply gets to the working directory
@@ -1502,14 +1710,17 @@
             wfctx = wctx[f]
             wfctx.clearunknown()
             atomictemp = ui.configbool("experimental", "update.atomic-file")
-            size = wfctx.write(fctx(f).data(), flags,
-                               backgroundclose=True,
-                               atomictemp=atomictemp)
+            size = wfctx.write(
+                fctx(f).data(),
+                flags,
+                backgroundclose=True,
+                atomictemp=atomictemp,
+            )
             if wantfiledata:
                 s = wfctx.lstat()
                 mode = s.st_mode
                 mtime = s[stat.ST_MTIME]
-                filedata[f] = ((mode, size, mtime)) # for dirstate.normal
+                filedata[f] = (mode, size, mtime)  # for dirstate.normal
             if i == 100:
                 yield False, (i, f)
                 i = 0
@@ -1518,6 +1729,7 @@
         yield False, (i, f)
     yield True, filedata
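A note on the protocol black is rewrapping here: batchget yields
(False, (count, filename)) progress tuples while writing, then one final
(True, filedata) carrying the (mode, size, mtime) map; hasretval=True on
worker.worker surfaces that last tuple. The consuming side, mirroring the
applyupdates loop later in this patch:

    getfiledata = {}
    for final, res in batchget(repo, mctx, wctx, True, actions[ACTION_GET]):
        if final:
            getfiledata = res  # per-file (mode, size, mtime) for dirstate
        else:
            i, f = res
            progress.increment(step=i, item=f)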
 
+
 def _prefetchfiles(repo, ctx, actions):
     """Invoke ``scmutil.prefetchfiles()`` for the files relevant to the dict
     of merge actions.  ``ctx`` is the context being merged in."""
@@ -1525,13 +1737,23 @@
     # Skipping 'a', 'am', 'f', 'r', 'dm', 'e', 'k', 'p' and 'pr', because they
     # don't touch the context to be merged in.  'cd' is skipped, because
     # changed/deleted never resolves to something from the remote side.
-    oplist = [actions[a] for a in (ACTION_GET, ACTION_DELETED_CHANGED,
-                                   ACTION_LOCAL_DIR_RENAME_GET, ACTION_MERGE)]
+    oplist = [
+        actions[a]
+        for a in (
+            ACTION_GET,
+            ACTION_DELETED_CHANGED,
+            ACTION_LOCAL_DIR_RENAME_GET,
+            ACTION_MERGE,
+        )
+    ]
     prefetch = scmutil.prefetchfiles
     matchfiles = scmutil.matchfiles
-    prefetch(repo, [ctx.rev()],
-             matchfiles(repo,
-                        [f for sublist in oplist for f, args, msg in sublist]))
+    prefetch(
+        repo,
+        [ctx.rev()],
+        matchfiles(repo, [f for sublist in oplist for f, args, msg in sublist]),
+    )
+
 
 @attr.s(frozen=True)
 class updateresult(object):
@@ -1541,30 +1763,40 @@
     unresolvedcount = attr.ib()
 
     def isempty(self):
-        return not (self.updatedcount or self.mergedcount
-                    or self.removedcount or self.unresolvedcount)
+        return not (
+            self.updatedcount
+            or self.mergedcount
+            or self.removedcount
+            or self.unresolvedcount
+        )
+
 
 def emptyactions():
     """create an actions dict, to be populated and passed to applyupdates()"""
-    return dict((m, [])
-                for m in (
-                    ACTION_ADD,
-                    ACTION_ADD_MODIFIED,
-                    ACTION_FORGET,
-                    ACTION_GET,
-                    ACTION_CHANGED_DELETED,
-                    ACTION_DELETED_CHANGED,
-                    ACTION_REMOVE,
-                    ACTION_DIR_RENAME_MOVE_LOCAL,
-                    ACTION_LOCAL_DIR_RENAME_GET,
-                    ACTION_MERGE,
-                    ACTION_EXEC,
-                    ACTION_KEEP,
-                    ACTION_PATH_CONFLICT,
-                    ACTION_PATH_CONFLICT_RESOLVE))
+    return dict(
+        (m, [])
+        for m in (
+            ACTION_ADD,
+            ACTION_ADD_MODIFIED,
+            ACTION_FORGET,
+            ACTION_GET,
+            ACTION_CHANGED_DELETED,
+            ACTION_DELETED_CHANGED,
+            ACTION_REMOVE,
+            ACTION_DIR_RENAME_MOVE_LOCAL,
+            ACTION_LOCAL_DIR_RENAME_GET,
+            ACTION_MERGE,
+            ACTION_EXEC,
+            ACTION_KEEP,
+            ACTION_PATH_CONFLICT,
+            ACTION_PATH_CONFLICT_RESOLVE,
+        )
+    )
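For orientation: the dict built here is the accumulator applyupdates
consumes, mapping each ACTION_* constant to a list of (file, args,
message) tuples. A typical population sketch (file names and messages
invented for illustration):

    actions = emptyactions()
    actions[ACTION_REMOVE].append(('stale.txt', None, 'other deleted'))
    actions[ACTION_GET].append(('new.txt', ('', False), 'remote created'))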
 
-def applyupdates(repo, actions, wctx, mctx, overwrite, wantfiledata,
-                 labels=None):
+
+def applyupdates(
+    repo, actions, wctx, mctx, overwrite, wantfiledata, labels=None
+):
     """apply the merge action list to the working directory
 
     wctx is the working copy context
@@ -1590,7 +1822,7 @@
     mergeactions.extend(actions[ACTION_MERGE])
     for f, args, msg in mergeactions:
         f1, f2, fa, move, anc = args
-        if f == '.hgsubstate': # merged internally
+        if f == '.hgsubstate':  # merged internally
             continue
         if f1 is None:
             fcl = filemerge.absentfilectx(wctx, fa)
@@ -1618,10 +1850,10 @@
             wctx[f].audit()
             wctx[f].remove()
 
-    numupdates = sum(len(l) for m, l in actions.items()
-                     if m != ACTION_KEEP)
-    progress = repo.ui.makeprogress(_('updating'), unit=_('files'),
-                                    total=numupdates)
+    numupdates = sum(len(l) for m, l in actions.items() if m != ACTION_KEEP)
+    progress = repo.ui.makeprogress(
+        _('updating'), unit=_('files'), total=numupdates
+    )
 
     if [a for a in actions[ACTION_REMOVE] if a[0] == '.hgsubstate']:
         subrepoutil.submerge(repo, wctx, mctx, wctx, overwrite, labels)
@@ -1630,8 +1862,13 @@
     for f, args, msg in actions[ACTION_PATH_CONFLICT]:
         f1, fo = args
         s = repo.ui.status
-        s(_("%s: path conflict - a file or link has the same name as a "
-            "directory\n") % f)
+        s(
+            _(
+                "%s: path conflict - a file or link has the same name as a "
+                "directory\n"
+            )
+            % f
+        )
         if fo == 'l':
             s(_("the local file has been renamed to %s\n") % f1)
         else:
@@ -1645,8 +1882,9 @@
     cost = 0 if wctx.isinmemory() else 0.001
 
     # remove in parallel (must come before resolving path conflicts and getting)
-    prog = worker.worker(repo.ui, cost, batchremove, (repo, wctx),
-                         actions[ACTION_REMOVE])
+    prog = worker.worker(
+        repo.ui, cost, batchremove, (repo, wctx), actions[ACTION_REMOVE]
+    )
     for i, item in prog:
         progress.increment(step=i, item=item)
     removed = len(actions[ACTION_REMOVE])
@@ -1654,7 +1892,7 @@
     # resolve path conflicts (must come before getting)
     for f, args, msg in actions[ACTION_PATH_CONFLICT_RESOLVE]:
         repo.ui.debug(" %s: %s -> pr\n" % (f, msg))
-        f0, = args
+        (f0,) = args
         if wctx[f0].lexists():
             repo.ui.note(_("moving %s to %s\n") % (f0, f))
             wctx[f].audit()
@@ -1663,13 +1901,18 @@
         progress.increment(item=f)
 
     # get in parallel.
-    threadsafe = repo.ui.configbool('experimental',
-                                    'worker.wdir-get-thread-safe')
-    prog = worker.worker(repo.ui, cost, batchget,
-                         (repo, mctx, wctx, wantfiledata),
-                         actions[ACTION_GET],
-                         threadsafe=threadsafe,
-                         hasretval=True)
+    threadsafe = repo.ui.configbool(
+        'experimental', 'worker.wdir-get-thread-safe'
+    )
+    prog = worker.worker(
+        repo.ui,
+        cost,
+        batchget,
+        (repo, mctx, wctx, wantfiledata),
+        actions[ACTION_GET],
+        threadsafe=threadsafe,
+        hasretval=True,
+    )
     getfiledata = {}
     for final, res in prog:
         if final:
@@ -1726,7 +1969,7 @@
     for f, args, msg in actions[ACTION_EXEC]:
         repo.ui.debug(" %s: %s -> e\n" % (f, msg))
         progress.increment(item=f)
-        flags, = args
+        (flags,) = args
         wctx[f].audit()
         wctx[f].setflags('l' in flags, 'x' in flags)
         updated += 1
@@ -1738,8 +1981,9 @@
 
     if usemergedriver:
         if wctx.isinmemory():
-            raise error.InMemoryMergeConflictsError("in-memory merge does not "
-                                                    "support mergedriver")
+            raise error.InMemoryMergeConflictsError(
+                "in-memory merge does not support mergedriver"
+            )
         ms.commit()
         proceed = driverpreprocess(repo, ms, wctx, labels=labels)
         # the driver might leave some files unresolved
@@ -1747,8 +1991,9 @@
         if not proceed:
             # XXX setting unresolved to at least 1 is a hack to make sure we
             # error out
-            return updateresult(updated, merged, removed,
-                                max(len(unresolvedf), 1))
+            return updateresult(
+                updated, merged, removed, max(len(unresolvedf), 1)
+            )
         newactions = []
         for f, args, msg in mergeactions:
             if f in unresolvedf:
@@ -1761,9 +2006,10 @@
         for f, args, msg in mergeactions:
             repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg))
             progress.increment(item=f)
-            if f == '.hgsubstate': # subrepo states need updating
-                subrepoutil.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
-                                     overwrite, labels)
+            if f == '.hgsubstate':  # subrepo states need updating
+                subrepoutil.submerge(
+                    repo, wctx, mctx, wctx.ancestor(mctx), overwrite, labels
+                )
                 continue
             wctx[f].audit()
             complete, r = ms.preresolve(f, wctx)
@@ -1782,8 +2028,11 @@
 
     unresolved = ms.unresolvedcount()
 
-    if (usemergedriver and not unresolved
-        and ms.mdstate() != MERGE_DRIVER_STATE_SUCCESS):
+    if (
+        usemergedriver
+        and not unresolved
+        and ms.mdstate() != MERGE_DRIVER_STATE_SUCCESS
+    ):
         if not driverconclude(repo, ms, wctx, labels=labels):
             # XXX setting unresolved to at least 1 is a hack to make sure we
             # error out
@@ -1823,13 +2072,15 @@
             # those lists aren't consulted again.
             mfiles.difference_update(a[0] for a in acts)
 
-        actions[ACTION_MERGE] = [a for a in actions[ACTION_MERGE]
-                                 if a[0] in mfiles]
+        actions[ACTION_MERGE] = [
+            a for a in actions[ACTION_MERGE] if a[0] in mfiles
+        ]
 
     progress.complete()
     assert len(getfiledata) == (len(actions[ACTION_GET]) if wantfiledata else 0)
     return updateresult(updated, merged, removed, unresolved), getfiledata
 
+
 def recordupdates(repo, actions, branchmerge, getfiledata):
     "record merge actions to the dirstate"
     # remove (must come first)
@@ -1845,7 +2096,7 @@
 
     # resolve path conflicts
     for f, args, msg in actions.get(ACTION_PATH_CONFLICT_RESOLVE, []):
-        f0, = args
+        (f0,) = args
         origf0 = repo.dirstate.copied(f0) or f0
         repo.dirstate.add(f)
         repo.dirstate.copy(origf0, f)
@@ -1888,7 +2139,7 @@
             # We've done a branch merge, mark this file as merged
             # so that we properly record the merger later
             repo.dirstate.merge(f)
-            if f1 != f2: # copy/rename
+            if f1 != f2:  # copy/rename
                 if move:
                     repo.dirstate.remove(f1)
                 if f1 != f:
@@ -1901,7 +2152,7 @@
             # of that file some time in the past. Thus our
             # merge will appear as a normal local file
             # modification.
-            if f2 == f: # file not locally copied/moved
+            if f2 == f:  # file not locally copied/moved
                 repo.dirstate.normallookup(f)
             if move:
                 repo.dirstate.drop(f1)
@@ -1926,14 +2177,26 @@
         else:
             repo.dirstate.normal(f)
 
+
 UPDATECHECK_ABORT = 'abort'  # handled at higher layers
 UPDATECHECK_NONE = 'none'
 UPDATECHECK_LINEAR = 'linear'
 UPDATECHECK_NO_CONFLICT = 'noconflict'
 
-def update(repo, node, branchmerge, force, ancestor=None,
-           mergeancestor=False, labels=None, matcher=None, mergeforce=False,
-           updatecheck=None, wc=None):
+
+def update(
+    repo,
+    node,
+    branchmerge,
+    force,
+    ancestor=None,
+    mergeancestor=False,
+    labels=None,
+    matcher=None,
+    mergeforce=False,
+    updatecheck=None,
+    wc=None,
+):
     """
     Perform a merge between the working directory and the given node
 
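For orientation before the body below: the UPDATECHECK_* constants gate
which working-copy moves update() permits. A minimal call sketch (values
assumed from the signature above, not taken from a real call site):

    # plain 'hg update'-style move; refuse non-linear updates when the
    # working copy is dirty
    stats = update(repo, node, branchmerge=False, force=False,
                   updatecheck=UPDATECHECK_LINEAR)
    if stats.unresolvedcount:
        pass  # conflicts left for 'hg resolve'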
@@ -1998,15 +2261,22 @@
         # updatecheck='abort' to better support some of these callers.
         if updatecheck is None:
             updatecheck = UPDATECHECK_LINEAR
-        if updatecheck not in (UPDATECHECK_NONE,
-                               UPDATECHECK_LINEAR,
-                               UPDATECHECK_NO_CONFLICT,
+        if updatecheck not in (
+            UPDATECHECK_NONE,
+            UPDATECHECK_LINEAR,
+            UPDATECHECK_NO_CONFLICT,
         ):
-            raise ValueError(r'Invalid updatecheck %r (can accept %r)' % (
-                updatecheck, (UPDATECHECK_NONE,
-                               UPDATECHECK_LINEAR,
-                               UPDATECHECK_NO_CONFLICT,
-                )))
+            raise ValueError(
+                r'Invalid updatecheck %r (can accept %r)'
+                % (
+                    updatecheck,
+                    (
+                        UPDATECHECK_NONE,
+                        UPDATECHECK_LINEAR,
+                        UPDATECHECK_NO_CONFLICT,
+                    ),
+                )
+            )
     # If we're doing a partial update, we need to skip updating
     # the dirstate, so make a note of any partial-ness to the
     # update here.
@@ -2038,33 +2308,44 @@
                 raise error.Abort(_("outstanding uncommitted merge"))
             ms = mergestate.read(repo)
             if list(ms.unresolved()):
-                raise error.Abort(_("outstanding merge conflicts"),
-                                  hint=_("use 'hg resolve' to resolve"))
+                raise error.Abort(
+                    _("outstanding merge conflicts"),
+                    hint=_("use 'hg resolve' to resolve"),
+                )
         if branchmerge:
             if pas == [p2]:
-                raise error.Abort(_("merging with a working directory ancestor"
-                                   " has no effect"))
+                raise error.Abort(
+                    _(
+                        "merging with a working directory ancestor"
+                        " has no effect"
+                    )
+                )
             elif pas == [p1]:
                 if not mergeancestor and wc.branch() == p2.branch():
-                    raise error.Abort(_("nothing to merge"),
-                                     hint=_("use 'hg update' "
-                                            "or check 'hg heads'"))
+                    raise error.Abort(
+                        _("nothing to merge"),
+                        hint=_("use 'hg update' or check 'hg heads'"),
+                    )
             if not force and (wc.files() or wc.deleted()):
-                raise error.Abort(_("uncommitted changes"),
-                                 hint=_("use 'hg status' to list changes"))
+                raise error.Abort(
+                    _("uncommitted changes"),
+                    hint=_("use 'hg status' to list changes"),
+                )
             if not wc.isinmemory():
                 for s in sorted(wc.substate):
                     wc.sub(s).bailifchanged()
 
         elif not overwrite:
-            if p1 == p2: # no-op update
+            if p1 == p2:  # no-op update
                 # call the hooks and exit early
                 repo.hook('preupdate', throw=True, parent1=xp2, parent2='')
                 repo.hook('update', parent1=xp2, parent2='', error=0)
                 return updateresult(0, 0, 0, 0)
 
-            if (updatecheck == UPDATECHECK_LINEAR and
-                    pas not in ([p1], [p2])):  # nonlinear
+            if updatecheck == UPDATECHECK_LINEAR and pas not in (
+                [p1],
+                [p2],
+            ):  # nonlinear
                 dirty = wc.dirty(missing=True)
                 if dirty:
                     # Branching is a bit strange to ensure we do the minimal
@@ -2072,7 +2353,7 @@
                     foreground = obsutil.foreground(repo, [p1.node()])
                     # note: the <node> variable contains a random identifier
                     if repo[node].node() in foreground:
-                        pass # allow updating to successors
+                        pass  # allow updating to successors
                     else:
                         msg = _("uncommitted changes")
                         hint = _("commit or update --clean to discard changes")
@@ -2097,13 +2378,27 @@
 
         ### calculate phase
         actionbyfile, diverge, renamedelete = calculateupdates(
-            repo, wc, p2, pas, branchmerge, force, mergeancestor,
-            followcopies, matcher=matcher, mergeforce=mergeforce)
+            repo,
+            wc,
+            p2,
+            pas,
+            branchmerge,
+            force,
+            mergeancestor,
+            followcopies,
+            matcher=matcher,
+            mergeforce=mergeforce,
+        )
 
         if updatecheck == UPDATECHECK_NO_CONFLICT:
             for f, (m, args, msg) in actionbyfile.iteritems():
-                if m not in (ACTION_GET, ACTION_KEEP, ACTION_EXEC,
-                             ACTION_REMOVE, ACTION_PATH_CONFLICT_RESOLVE):
+                if m not in (
+                    ACTION_GET,
+                    ACTION_KEEP,
+                    ACTION_EXEC,
+                    ACTION_REMOVE,
+                    ACTION_PATH_CONFLICT_RESOLVE,
+                ):
                     msg = _("conflicting changes")
                     hint = _("commit or update --clean to discard changes")
                     raise error.Abort(msg, hint=hint)
@@ -2118,9 +2413,14 @@
             prompts['f'] = f
             if m == ACTION_CHANGED_DELETED:
                 if repo.ui.promptchoice(
-                    _("local%(l)s changed %(f)s which other%(o)s deleted\n"
-                      "use (c)hanged version or (d)elete?"
-                      "$$ &Changed $$ &Delete") % prompts, 0):
+                    _(
+                        "local%(l)s changed %(f)s which other%(o)s deleted\n"
+                        "use (c)hanged version or (d)elete?"
+                        "$$ &Changed $$ &Delete"
+                    )
+                    % prompts,
+                    0,
+                ):
                     actionbyfile[f] = (ACTION_REMOVE, None, 'prompt delete')
                 elif f in p1:
                     actionbyfile[f] = (ACTION_ADD_MODIFIED, None, 'prompt keep')
@@ -2129,12 +2429,23 @@
             elif m == ACTION_DELETED_CHANGED:
                 f1, f2, fa, move, anc = args
                 flags = p2[f2].flags()
-                if repo.ui.promptchoice(
-                    _("other%(o)s changed %(f)s which local%(l)s deleted\n"
-                      "use (c)hanged version or leave (d)eleted?"
-                      "$$ &Changed $$ &Deleted") % prompts, 0) == 0:
-                    actionbyfile[f] = (ACTION_GET, (flags, False),
-                                       'prompt recreating')
+                if (
+                    repo.ui.promptchoice(
+                        _(
+                            "other%(o)s changed %(f)s which local%(l)s deleted\n"
+                            "use (c)hanged version or leave (d)eleted?"
+                            "$$ &Changed $$ &Deleted"
+                        )
+                        % prompts,
+                        0,
+                    )
+                    == 0
+                ):
+                    actionbyfile[f] = (
+                        ACTION_GET,
+                        (flags, False),
+                        'prompt recreating',
+                    )
                 else:
                     del actionbyfile[f]
 
@@ -2147,28 +2458,39 @@
 
         if not util.fscasesensitive(repo.path):
             # check collision between files only in p2 for clean update
-            if (not branchmerge and
-                (force or not wc.dirty(missing=True, branch=False))):
+            if not branchmerge and (
+                force or not wc.dirty(missing=True, branch=False)
+            ):
                 _checkcollision(repo, p2.manifest(), None)
             else:
                 _checkcollision(repo, wc.manifest(), actions)
 
         # divergent renames
         for f, fl in sorted(diverge.iteritems()):
-            repo.ui.warn(_("note: possible conflict - %s was renamed "
-                           "multiple times to:\n") % f)
+            repo.ui.warn(
+                _(
+                    "note: possible conflict - %s was renamed "
+                    "multiple times to:\n"
+                )
+                % f
+            )
             for nf in sorted(fl):
                 repo.ui.warn(" %s\n" % nf)
 
         # rename and delete
         for f, fl in sorted(renamedelete.iteritems()):
-            repo.ui.warn(_("note: possible conflict - %s was deleted "
-                           "and renamed to:\n") % f)
+            repo.ui.warn(
+                _(
+                    "note: possible conflict - %s was deleted "
+                    "and renamed to:\n"
+                )
+                % f
+            )
             for nf in sorted(fl):
                 repo.ui.warn(" %s\n" % nf)
 
         ### apply phase
-        if not branchmerge: # just jump to the new rev
+        if not branchmerge:  # just jump to the new rev
             fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
         if not partial and not wc.isinmemory():
             repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
@@ -2186,11 +2508,13 @@
         # We only allow on Linux and MacOS because that's where fsmonitor is
         # considered stable.
         fsmonitorwarning = repo.ui.configbool('fsmonitor', 'warn_when_unused')
-        fsmonitorthreshold = repo.ui.configint('fsmonitor',
-                                               'warn_update_file_count')
+        fsmonitorthreshold = repo.ui.configint(
+            'fsmonitor', 'warn_update_file_count'
+        )
         try:
             # avoid cycle: extensions -> cmdutil -> merge
             from . import extensions
+
             extensions.find('fsmonitor')
             fsmonitorenabled = repo.ui.config('fsmonitor', 'mode') != 'off'
             # We intentionally don't look at whether fsmonitor has disabled
@@ -2199,20 +2523,26 @@
         except KeyError:
             fsmonitorenabled = False
 
-        if (fsmonitorwarning
-                and not fsmonitorenabled
-                and p1.node() == nullid
-                and len(actions[ACTION_GET]) >= fsmonitorthreshold
-                and pycompat.sysplatform.startswith(('linux', 'darwin'))):
+        if (
+            fsmonitorwarning
+            and not fsmonitorenabled
+            and p1.node() == nullid
+            and len(actions[ACTION_GET]) >= fsmonitorthreshold
+            and pycompat.sysplatform.startswith(('linux', 'darwin'))
+        ):
             repo.ui.warn(
-                _('(warning: large working directory being used without '
-                  'fsmonitor enabled; enable fsmonitor to improve performance; '
-                  'see "hg help -e fsmonitor")\n'))
+                _(
+                    '(warning: large working directory being used without '
+                    'fsmonitor enabled; enable fsmonitor to improve performance; '
+                    'see "hg help -e fsmonitor")\n'
+                )
+            )
 
         updatedirstate = not partial and not wc.isinmemory()
         wantfiledata = updatedirstate and not branchmerge
-        stats, getfiledata = applyupdates(repo, actions, wc, p2, overwrite,
-                                          wantfiledata, labels=labels)
+        stats, getfiledata = applyupdates(
+            repo, actions, wc, p2, overwrite, wantfiledata, labels=labels
+        )
 
         if updatedirstate:
             with repo.dirstate.parentchange():
@@ -2230,12 +2560,15 @@
         sparse.prunetemporaryincludes(repo)
 
     if not partial:
-        repo.hook('update', parent1=xp1, parent2=xp2,
-                  error=stats.unresolvedcount)
+        repo.hook(
+            'update', parent1=xp1, parent2=xp2, error=stats.unresolvedcount
+        )
     return stats
 
-def graft(repo, ctx, pctx, labels=None, keepparent=False,
-          keepconflictparent=False):
+
+def graft(
+    repo, ctx, pctx, labels=None, keepparent=False, keepconflictparent=False
+):
     """Do a graft-like merge.
 
     This is a merge where the merge ancestor is chosen such that one
@@ -2259,9 +2592,15 @@
     # which local deleted".
     mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node())
 
-    stats = update(repo, ctx.node(), True, True, pctx.node(),
-                   mergeancestor=mergeancestor, labels=labels)
-
+    stats = update(
+        repo,
+        ctx.node(),
+        True,
+        True,
+        pctx.node(),
+        mergeancestor=mergeancestor,
+        labels=labels,
+    )
 
     if keepconflictparent and stats.unresolvedcount:
         pother = ctx.node()
@@ -2279,8 +2618,16 @@
         copies.duplicatecopies(repo, repo[None], ctx.rev(), pctx.rev())
     return stats
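For context, the usual caller shape (hedged, since the call site is
outside this hunk): 'hg graft' passes the grafted changeset's first
parent as pctx, so only that changeset's own delta is replayed onto the
working copy:

    stats = graft(repo, ctx, ctx.p1(), labels=['local', 'graft'])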
 
-def purge(repo, matcher, ignored=False, removeemptydirs=True,
-          removefiles=True, abortonerror=False, noop=False):
+
+def purge(
+    repo,
+    matcher,
+    ignored=False,
+    removeemptydirs=True,
+    removefiles=True,
+    abortonerror=False,
+    noop=False,
+):
     """Purge the working directory of untracked files.
 
     ``matcher`` is a matcher configured to scan the working directory -
--- a/mercurial/mergeutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/mergeutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,14 +9,16 @@
 
 from .i18n import _
 
-from . import (
-    error,
-)
+from . import error
+
 
 def checkunresolved(ms):
     if list(ms.unresolved()):
-        raise error.Abort(_("unresolved merge conflicts "
-                            "(see 'hg help resolve')"))
+        raise error.Abort(
+            _("unresolved merge conflicts (see 'hg help resolve')")
+        )
     if ms.mdstate() != 's' or list(ms.driverresolved()):
-        raise error.Abort(_('driver-resolved merge conflicts'),
-                          hint=_('run "hg resolve --all" to resolve'))
+        raise error.Abort(
+            _('driver-resolved merge conflicts'),
+            hint=_('run "hg resolve --all" to resolve'),
+        )
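checkunresolved is the guard callers run before committing a merge. A
minimal usage sketch (assumes a loaded repo object; mergestate lives in
mercurial.merge, as seen earlier in this patch):

    from mercurial import merge as mergemod, mergeutil

    ms = mergemod.mergestate.read(repo)
    mergeutil.checkunresolved(ms)  # raises error.Abort while conflicts remain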
--- a/mercurial/minirst.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/minirst.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,25 +28,29 @@
     pycompat,
     url,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 def section(s):
     return "%s\n%s\n\n" % (s, "\"" * encoding.colwidth(s))
 
+
 def subsection(s):
     return "%s\n%s\n\n" % (s, '=' * encoding.colwidth(s))
 
+
 def subsubsection(s):
     return "%s\n%s\n\n" % (s, "-" * encoding.colwidth(s))
 
+
 def subsubsubsection(s):
     return "%s\n%s\n\n" % (s, "." * encoding.colwidth(s))
 
+
 def subsubsubsubsection(s):
     return "%s\n%s\n\n" % (s, "'" * encoding.colwidth(s))
 
+
 def replace(text, substs):
     '''
     Apply a list of (find, replace) pairs to a text.
@@ -70,8 +74,10 @@
         utext = utext.replace(f.decode("ascii"), t.decode("ascii"))
     return utext.encode(pycompat.sysstr(encoding.encoding))
 
+
 _blockre = re.compile(br"\n(?:\s*\n)+")
 
+
 def findblocks(text):
     """Find continuous blocks of lines in text.
 
@@ -87,6 +93,7 @@
             blocks.append({'indent': indent, 'lines': lines})
     return blocks
 
+
 def findliteralblocks(blocks):
     """Finds literal blocks and adds a 'type' field to the blocks.
 
@@ -117,9 +124,11 @@
                 # Partially minimized form: remove space and both
                 # colons.
                 blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3]
-            elif (len(blocks[i]['lines']) == 1 and
-                  blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and
-                  blocks[i]['lines'][0].find(' ', 3) == -1):
+            elif (
+                len(blocks[i]['lines']) == 1
+                and blocks[i]['lines'][0].lstrip(' ').startswith('.. ')
+                and blocks[i]['lines'][0].find(' ', 3) == -1
+            ):
                 # directive on its own line, not a literal block
                 i += 1
                 continue
@@ -144,22 +153,27 @@
         i += 1
     return blocks
 
+
 _bulletre = re.compile(br'(\*|-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ')
-_optionre = re.compile(br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)'
-                       br'((.*)  +)(.*)$')
+_optionre = re.compile(br'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)((.*)  +)(.*)$')
 _fieldre = re.compile(br':(?![: ])([^:]*)(?<! ):[ ]+(.*)')
 _definitionre = re.compile(br'[^ ]')
 _tablere = re.compile(br'(=+\s+)*=+')
 
+
 def splitparagraphs(blocks):
     """Split paragraphs into lists."""
     # Tuples with (list type, item regexp, single line items?). Order
     # matters: definition lists has the least specific regexp and must
     # come last.
-    listtypes = [('bullet', _bulletre, True),
-                 ('option', _optionre, True),
-                 ('field', _fieldre, True),
-                 ('definition', _definitionre, False)]
+    listtypes = [
+        ('bullet', _bulletre, True),
+        ('option', _optionre, True),
+        ('field', _fieldre, True),
+        ('definition', _definitionre, False),
+    ]
 
     def match(lines, i, itemre, singleline):
         """Does itemre match an item at line i?
@@ -185,16 +199,23 @@
                     items = []
                     for j, line in enumerate(lines):
                         if match(lines, j, itemre, singleline):
-                            items.append({'type': type, 'lines': [],
-                                          'indent': blocks[i]['indent']})
+                            items.append(
+                                {
+                                    'type': type,
+                                    'lines': [],
+                                    'indent': blocks[i]['indent'],
+                                }
+                            )
                         items[-1]['lines'].append(line)
-                    blocks[i:i + 1] = items
+                    blocks[i : i + 1] = items
                     break
         i += 1
     return blocks
 
+
 _fieldwidth = 14
 
+
 def updatefieldlists(blocks):
     """Find key for field lists."""
     i = 0
@@ -215,6 +236,7 @@
 
     return blocks
 
+
 def updateoptionlists(blocks):
     i = 0
     while i < len(blocks):
@@ -238,9 +260,10 @@
             if not shortoption:
                 noshortop = '   '
 
-            opt = "%s%s" %   (shortoption and "-%s " % shortoption or '',
-                            ("%s--%s %s") % (noshortop, longoption,
-                                             longoptionarg))
+            opt = "%s%s" % (
+                shortoption and "-%s " % shortoption or '',
+                "%s--%s %s" % (noshortop, longoption, longoptionarg),
+            )
             opt = opt.rstrip()
             blocks[j]['optstr'] = opt
             optstrwidth = max(optstrwidth, encoding.colwidth(opt))
@@ -251,6 +274,7 @@
         i = j + 1
     return blocks
 
+
 def prunecontainers(blocks, keep):
     """Prune unwanted containers.
 
@@ -267,8 +291,9 @@
         # +---+                               |
         #     | blocks                        |
         #     +-------------------------------+
-        if (blocks[i]['type'] == 'paragraph' and
-            blocks[i]['lines'][0].startswith('.. container::')):
+        if blocks[i]['type'] == 'paragraph' and blocks[i]['lines'][0].startswith(
+            '.. container::'
+        ):
             indent = blocks[i]['indent']
             adjustment = blocks[i + 1]['indent'] - indent
             containertype = blocks[i]['lines'][0][15:]
@@ -292,8 +317,10 @@
         i += 1
     return blocks, pruned
 
+
 _sectionre = re.compile(br"""^([-=`:.'"~^_*+#])\1+$""")
 
+
 def findtables(blocks):
     '''Find simple tables
 
@@ -309,19 +336,23 @@
         #  1    2   3
         #  x    y   z
         # === ==== ===
-        if (block['type'] == 'paragraph' and
-            len(block['lines']) > 2 and
-            _tablere.match(block['lines'][0]) and
-            block['lines'][0] == block['lines'][-1]):
+        if (
+            block['type'] == 'paragraph'
+            and len(block['lines']) > 2
+            and _tablere.match(block['lines'][0])
+            and block['lines'][0] == block['lines'][-1]
+        ):
             block['type'] = 'table'
             block['header'] = False
             div = block['lines'][0]
 
             # column markers are ASCII so we can calculate column
             # position in bytes
-            columns = [x for x in pycompat.xrange(len(div))
-                       if div[x:x + 1] == '=' and (x == 0 or
-                                                   div[x - 1:x] == ' ')]
+            columns = [
+                x
+                for x in pycompat.xrange(len(div))
+                if div[x : x + 1] == '=' and (x == 0 or div[x - 1 : x] == ' ')
+            ]
             rows = []
             for l in block['lines'][1:-1]:
                 if l == div:
@@ -330,12 +361,12 @@
                 row = []
                 # we measure columns not in bytes or characters but in
                 # colwidth which makes things tricky
-                pos = columns[0] # leading whitespace is bytes
+                pos = columns[0]  # leading whitespace is bytes
                 for n, start in enumerate(columns):
                     if n + 1 < len(columns):
                         width = columns[n + 1] - start
-                        v = encoding.getcols(l, pos, width) # gather columns
-                        pos += len(v) # calculate byte position of end
+                        v = encoding.getcols(l, pos, width)  # gather columns
+                        pos += len(v)  # calculate byte position of end
                         row.append(v.strip())
                     else:
                         row.append(l[pos:].strip())
@@ -345,6 +376,7 @@
 
     return blocks
 
+
 def findsections(blocks):
     """Finds sections.
 
@@ -358,15 +390,18 @@
         # | Section title                |
         # | -------------                |
         # +------------------------------+
-        if (block['type'] == 'paragraph' and
-            len(block['lines']) == 2 and
-            encoding.colwidth(block['lines'][0]) == len(block['lines'][1]) and
-            _sectionre.match(block['lines'][1])):
+        if (
+            block['type'] == 'paragraph'
+            and len(block['lines']) == 2
+            and encoding.colwidth(block['lines'][0]) == len(block['lines'][1])
+            and _sectionre.match(block['lines'][1])
+        ):
             block['underline'] = block['lines'][1][0:1]
             block['type'] = 'section'
             del block['lines'][1]
     return blocks
 
+
 def inlineliterals(blocks):
     substs = [('``', '"')]
     for b in blocks:
@@ -374,6 +409,7 @@
             b['lines'] = [replace(l, substs) for l in b['lines']]
     return blocks
 
+
 def hgrole(blocks):
     substs = [(':hg:`', "'hg "), ('`', "'")]
     for b in blocks:
@@ -385,6 +421,7 @@
             b['lines'] = [replace(l, substs) for l in b['lines']]
     return blocks
 
+
 def addmargins(blocks):
     """Adds empty blocks for vertical spacing.
 
@@ -393,8 +430,11 @@
     """
     i = 1
     while i < len(blocks):
-        if (blocks[i]['type'] == blocks[i - 1]['type'] and
-            blocks[i]['type'] in ('bullet', 'option', 'field')):
+        if blocks[i]['type'] == blocks[i - 1]['type'] and blocks[i]['type'] in (
+            'bullet',
+            'option',
+            'field',
+        ):
             i += 1
         elif not blocks[i - 1]['lines']:
             # no lines in previous block, do not separate
@@ -404,13 +444,15 @@
             i += 2
     return blocks
 
+
 def prunecomments(blocks):
     """Remove comments."""
     i = 0
     while i < len(blocks):
         b = blocks[i]
-        if b['type'] == 'paragraph' and (b['lines'][0].startswith('.. ') or
-                                         b['lines'] == ['..']):
+        if b['type'] == 'paragraph' and (
+            b['lines'][0].startswith('.. ') or b['lines'] == ['..']
+        ):
             del blocks[i]
             if i < len(blocks) and blocks[i]['type'] == 'margin':
                 del blocks[i]
@@ -426,17 +468,18 @@
     """
     admonitions = admonitions or _admonitiontitles.keys()
 
-    admonitionre = re.compile(br'\.\. (%s)::' % '|'.join(sorted(admonitions)),
-                              flags=re.IGNORECASE)
+    admonitionre = re.compile(
+        br'\.\. (%s)::' % '|'.join(sorted(admonitions)), flags=re.IGNORECASE
+    )
 
     i = 0
     while i < len(blocks):
         m = admonitionre.match(blocks[i]['lines'][0])
         if m:
             blocks[i]['type'] = 'admonition'
-            admonitiontitle = blocks[i]['lines'][0][3:m.end() - 2].lower()
+            admonitiontitle = blocks[i]['lines'][0][3 : m.end() - 2].lower()
 
-            firstline = blocks[i]['lines'][0][m.end() + 1:]
+            firstline = blocks[i]['lines'][0][m.end() + 1 :]
             if firstline:
                 blocks[i]['lines'].insert(1, '   ' + firstline)
 
@@ -445,6 +488,7 @@
         i = i + 1
     return blocks
 
+
 _admonitiontitles = {
     'attention': _('Attention:'),
     'caution': _('Caution:'),
@@ -457,6 +501,7 @@
     'warning': _('Warning!'),
 }
 
+
 def formatoption(block, width):
     desc = ' '.join(map(bytes.strip, block['lines']))
     colwidth = encoding.colwidth(block['optstr'])
@@ -464,9 +509,12 @@
     hanging = block['optstrwidth']
     initindent = '%s%s  ' % (block['optstr'], ' ' * ((hanging - colwidth)))
     hangindent = ' ' * (encoding.colwidth(initindent) + 1)
-    return ' %s\n' % (stringutil.wrap(desc, usablewidth,
-                                      initindent=initindent,
-                                      hangindent=hangindent))
+    return ' %s\n' % (
+        stringutil.wrap(
+            desc, usablewidth, initindent=initindent, hangindent=hangindent
+        )
+    )
+
 
 def formatblock(block, width):
     """Format a block according to width."""
@@ -481,10 +529,12 @@
 
         defindent = indent + hang * ' '
         text = ' '.join(map(bytes.strip, block['lines']))
-        return '%s\n%s\n' % (indent + admonition,
-                             stringutil.wrap(text, width=width,
-                                             initindent=defindent,
-                                             hangindent=defindent))
+        return '%s\n%s\n' % (
+            indent + admonition,
+            stringutil.wrap(
+                text, width=width, initindent=defindent, hangindent=defindent
+            ),
+        )
     if block['type'] == 'margin':
         return '\n'
     if block['type'] == 'literal':
@@ -492,7 +542,7 @@
         return indent + ('\n' + indent).join(block['lines']) + '\n'
     if block['type'] == 'section':
         underline = encoding.colwidth(block['lines'][0]) * block['underline']
-        return "%s%s\n%s%s\n" % (indent, block['lines'][0],indent, underline)
+        return "%s%s\n%s%s\n" % (indent, block['lines'][0], indent, underline)
     if block['type'] == 'table':
         table = block['table']
         # compute column widths
@@ -508,9 +558,9 @@
                 pad = ' ' * (w - encoding.colwidth(v))
                 l.append(v + pad)
             l = ' '.join(l)
-            l = stringutil.wrap(l, width=width,
-                                initindent=indent,
-                                hangindent=hang)
+            l = stringutil.wrap(
+                l, width=width, initindent=indent, hangindent=hang
+            )
             if not text and block['header']:
                 text = l + '\n' + indent + '-' * (min(width, span)) + '\n'
             else:
@@ -521,9 +571,12 @@
         hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
         defindent = indent + hang * ' '
         text = ' '.join(map(bytes.strip, block['lines'][1:]))
-        return '%s\n%s\n' % (term, stringutil.wrap(text, width=width,
-                                                   initindent=defindent,
-                                                   hangindent=defindent))
+        return '%s\n%s\n' % (
+            term,
+            stringutil.wrap(
+                text, width=width, initindent=defindent, hangindent=defindent
+            ),
+        )
     subindent = indent
     if block['type'] == 'bullet':
         if block['lines'][0].startswith('| '):
@@ -547,9 +600,13 @@
         return formatoption(block, width)
 
     text = ' '.join(map(bytes.strip, block['lines']))
-    return stringutil.wrap(text, width=width,
-                           initindent=indent,
-                           hangindent=subindent) + '\n'
+    return (
+        stringutil.wrap(
+            text, width=width, initindent=indent, hangindent=subindent
+        )
+        + '\n'
+    )
+
 
 def formathtml(blocks):
     """Format RST blocks as HTML"""
@@ -634,14 +691,16 @@
             else:
                 nb = blocks[pos + 1]
                 ni = nb['indent']
-                if (ni < level or
-                    (ni == level and
-                     nb['type'] not in 'definition bullet field option')):
+                if ni < level or (
+                    ni == level
+                    and nb['type'] not in 'definition bullet field option'
+                ):
                     out.append('</%s>\n' % start)
                     listnest.pop()
 
     return ''.join(out)
 
+
 def parse(text, indent=0, keep=None, admonitions=None):
     """Parse text into a list of blocks"""
     blocks = findblocks(text)
@@ -661,14 +720,17 @@
     blocks = prunecomments(blocks)
     return blocks, pruned
 
+
 def formatblocks(blocks, width):
     text = ''.join(formatblock(b, width) for b in blocks)
     return text
 
+
 def formatplain(blocks, width):
     """Format parsed blocks as plain text"""
     return ''.join(formatblock(b, width) for b in blocks)
 
+
 def format(text, width=80, indent=0, keep=None, style='plain', section=None):
     """Parse and format the text according to width."""
     blocks, pruned = parse(text, indent, keep or [])
@@ -679,6 +741,7 @@
     else:
         return formatplain(blocks, width=width)
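format() is the module's entry point: parse() builds the block list, then
formatplain() or formathtml() renders it, depending on style. A small
sketch (input invented for illustration; on Python 3 callers pass bytes
literals, matching this module's use of bytestrings):

    text = format('.. note:: indentation matters\n', width=70)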
 
+
 def filtersections(blocks, section):
     """Select parsed blocks under the specified section
 
@@ -705,8 +768,7 @@
                 s = []
                 for j in pycompat.xrange(3, plen - 1):
                     parent = parents[j]
-                    if (j >= llen or
-                        lastparents[j] != parent):
+                    if j >= llen or lastparents[j] != parent:
                         s.append(len(blocks))
                         sec = sections[parent][2]
                         blocks.append(sec[0])
@@ -728,12 +790,14 @@
             path = [blocks[syn]['lines'][0] for syn in s]
             real = s[-1] + 2
             realline = blocks[real]['lines']
-            realline[0] = ('"%s"' %
-                           '.'.join(path + [realline[0]]).replace('"', ''))
-            del blocks[s[0]:real]
+            realline[0] = '"%s"' % '.'.join(path + [realline[0]]).replace(
+                '"', ''
+            )
+            del blocks[s[0] : real]
 
     return blocks
 
+
 def _getsections(blocks):
     '''return a list of (section path, nesting level, blocks) tuples'''
     nest = ""
@@ -793,8 +857,9 @@
                     if section['type'] != 'margin':
                         sindent = section['indent']
                         if len(section['lines']) > 1:
-                            sindent += (len(section['lines'][1]) -
-                                        len(section['lines'][1].lstrip(' ')))
+                            sindent += len(section['lines'][1]) - len(
+                                section['lines'][1].lstrip(' ')
+                            )
                         if bindent >= sindent:
                             break
                     pointer += 1
@@ -806,6 +871,7 @@
             secs[-1][2].append(b)
     return secs
 
+
 def maketable(data, indent=0, header=False):
     '''Generate an RST table for the given table data as a list of lines'''
 
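The minirst.py hunks above show black's basic call-wrapping rule: once a call no longer fits on one line, the arguments move to their own indented line and the closing parenthesis drops back to the opening indent. A minimal sketch on placeholder names (nothing here is part of the patch):

    # a placeholder standing in for stringutil.wrap
    def wrap(text, width=78, initindent='', hangindent=''):
        return initindent + text

    # before: hand-wrapped, aligned under the opening parenthesis
    # out = wrap(text, width=width,
    #            initindent=indent,
    #            hangindent=subindent)

    # after black: arguments on one indented line, dedented closing paren
    out = wrap(
        'some wrapped text', width=70, initindent='  ', hangindent='    '
    )
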
--- a/mercurial/namespaces.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/namespaces.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,6 +7,7 @@
     util,
 )
 
+
 def tolist(val):
     """
     a convenience method to return an empty list instead of None
@@ -16,6 +17,7 @@
     else:
         return [val]
 
+
 class namespaces(object):
     """provides an interface to register and operate on multiple namespaces. See
     the namespace class below for details on the namespace object.
@@ -33,32 +35,44 @@
         bmknames = lambda repo: repo._bookmarks.keys()
         bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name))
         bmknodemap = lambda repo, node: repo.nodebookmarks(node)
-        n = namespace("bookmarks", templatename="bookmark",
-                      logfmt=columns['bookmark'],
-                      listnames=bmknames,
-                      namemap=bmknamemap, nodemap=bmknodemap,
-                      builtin=True)
+        n = namespace(
+            "bookmarks",
+            templatename="bookmark",
+            logfmt=columns['bookmark'],
+            listnames=bmknames,
+            namemap=bmknamemap,
+            nodemap=bmknodemap,
+            builtin=True,
+        )
         self.addnamespace(n)
 
         tagnames = lambda repo: [t for t, n in repo.tagslist()]
         tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name))
         tagnodemap = lambda repo, node: repo.nodetags(node)
-        n = namespace("tags", templatename="tag",
-                      logfmt=columns['tag'],
-                      listnames=tagnames,
-                      namemap=tagnamemap, nodemap=tagnodemap,
-                      deprecated={'tip'},
-                      builtin=True)
+        n = namespace(
+            "tags",
+            templatename="tag",
+            logfmt=columns['tag'],
+            listnames=tagnames,
+            namemap=tagnamemap,
+            nodemap=tagnodemap,
+            deprecated={'tip'},
+            builtin=True,
+        )
         self.addnamespace(n)
 
         bnames = lambda repo: repo.branchmap().keys()
         bnamemap = lambda repo, name: tolist(repo.branchtip(name, True))
         bnodemap = lambda repo, node: [repo[node].branch()]
-        n = namespace("branches", templatename="branch",
-                      logfmt=columns['branch'],
-                      listnames=bnames,
-                      namemap=bnamemap, nodemap=bnodemap,
-                      builtin=True)
+        n = namespace(
+            "branches",
+            templatename="branch",
+            logfmt=columns['branch'],
+            listnames=bnames,
+            namemap=bnamemap,
+            nodemap=bnodemap,
+            builtin=True,
+        )
         self.addnamespace(n)
 
     def __getitem__(self, namespace):
@@ -89,6 +103,7 @@
         # we only generate a template keyword if one does not already exist
         if namespace.name not in templatekw.keywords:
             templatekeyword = registrar.templatekeyword(templatekw.keywords)
+
             @templatekeyword(namespace.name, requires={'repo', 'ctx'})
             def generatekw(context, mapping):
                 return templatekw.shownames(context, mapping, namespace.name)
@@ -107,6 +122,7 @@
                 return n
         raise KeyError(_('no such name: %s') % name)
 
+
 class namespace(object):
     """provides an interface to a namespace
 
@@ -135,9 +151,20 @@
                  Mercurial.
     """
 
-    def __init__(self, name, templatename=None, logname=None, colorname=None,
-                 logfmt=None, listnames=None, namemap=None, nodemap=None,
-                 deprecated=None, builtin=False, singlenode=None):
+    def __init__(
+        self,
+        name,
+        templatename=None,
+        logname=None,
+        colorname=None,
+        logfmt=None,
+        listnames=None,
+        namemap=None,
+        nodemap=None,
+        deprecated=None,
+        builtin=False,
+        singlenode=None,
+    ):
         """create a namespace
 
         name: the namespace to be registered (in plural form)
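
When even that single indented argument line would overflow, black fully explodes the call, as in the namespace() constructions above: each argument gets its own line and a trailing comma is appended after the last one. Sketched on a placeholder function (not part of the patch):

    def register(name, templatename=None, listnames=None, builtin=False):
        return (name, templatename, listnames, builtin)

    n = register(
        "tags",
        templatename="tag",
        listnames=lambda repo: [],
        builtin=True,  # trailing comma added when black explodes a call
    )

The trailing comma means that appending a future argument touches only one line in later diffs.
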
--- a/mercurial/narrowspec.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/narrowspec.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,9 +8,7 @@
 from __future__ import absolute_import
 
 from .i18n import _
-from .interfaces import (
-    repository,
-)
+from .interfaces import repository
 from . import (
     error,
     match as matchmod,
@@ -36,6 +34,7 @@
     b'rootfilesin:',
 )
 
+
 def normalizesplitpattern(kind, pat):
     """Returns the normalized version of a pattern and kind.
 
@@ -45,6 +44,7 @@
     _validatepattern(pat)
     return kind, pat
 
+
 def _numlines(s):
     """Returns the number of lines in s, including ending empty lines."""
     # We use splitlines because it is Unicode-friendly and thus Python 3
@@ -52,6 +52,7 @@
     # it by adding a character at the end.
     return len((s + 'x').splitlines())
 
+
 def _validatepattern(pat):
     """Validates the pattern and aborts if it is invalid.
 
@@ -68,6 +69,7 @@
     if '.' in components or '..' in components:
         raise error.Abort(_('"." and ".." are not allowed in narrowspec paths'))
 
+
 def normalizepattern(pattern, defaultkind='path'):
     """Returns the normalized version of a text-format pattern.
 
@@ -76,6 +78,7 @@
     kind, pat = matchmod._patsplit(pattern, defaultkind)
     return '%s:%s' % normalizesplitpattern(kind, pat)
 
+
 def parsepatterns(pats):
     """Parses an iterable of patterns into a typed pattern set.
 
@@ -91,6 +94,7 @@
     validatepatterns(res)
     return res
 
+
 def validatepatterns(pats):
     """Validate that patterns are in the expected data structure and format.
 
@@ -102,17 +106,23 @@
     prefixed pattern representation (but can't necessarily be fully trusted).
     """
     if not isinstance(pats, set):
-        raise error.ProgrammingError('narrow patterns should be a set; '
-                                     'got %r' % pats)
+        raise error.ProgrammingError(
+            'narrow patterns should be a set; ' 'got %r' % pats
+        )
 
     for pat in pats:
         if not pat.startswith(VALID_PREFIXES):
             # Use a Mercurial exception because this can happen due to user
             # bugs (e.g. manually updating spec file).
-            raise error.Abort(_('invalid prefix on narrow pattern: %s') % pat,
-                              hint=_('narrow patterns must begin with one of '
-                                     'the following: %s') %
-                                   ', '.join(VALID_PREFIXES))
+            raise error.Abort(
+                _('invalid prefix on narrow pattern: %s') % pat,
+                hint=_(
+                    'narrow patterns must begin with one of '
+                    'the following: %s'
+                )
+                % ', '.join(VALID_PREFIXES),
+            )
+
 
 def format(includes, excludes):
     output = '[include]\n'
@@ -123,43 +133,54 @@
         output += e + '\n'
     return output
 
+
 def match(root, include=None, exclude=None):
     if not include:
         # Passing empty include and empty exclude to matchmod.match()
         # gives a matcher that matches everything, so explicitly use
         # the nevermatcher.
         return matchmod.never()
-    return matchmod.match(root, '', [], include=include or [],
-                          exclude=exclude or [])
+    return matchmod.match(
+        root, '', [], include=include or [], exclude=exclude or []
+    )
+
 
 def parseconfig(ui, spec):
     # maybe we should care about the profiles returned too
     includepats, excludepats, profiles = sparse.parseconfig(ui, spec, 'narrow')
     if profiles:
-        raise error.Abort(_("including other spec files using '%include' is not"
-                            " supported in narrowspec"))
+        raise error.Abort(
+            _(
+                "including other spec files using '%include' is not"
+                " supported in narrowspec"
+            )
+        )
 
     validatepatterns(includepats)
     validatepatterns(excludepats)
 
     return includepats, excludepats
 
+
 def load(repo):
     # Treat "narrowspec does not exist" the same as "narrowspec file exists
     # and is empty".
     spec = repo.svfs.tryread(FILENAME)
     return parseconfig(repo.ui, spec)
 
+
 def save(repo, includepats, excludepats):
     validatepatterns(includepats)
     validatepatterns(excludepats)
     spec = format(includepats, excludepats)
     repo.svfs.write(FILENAME, spec)
 
+
 def copytoworkingcopy(repo):
     spec = repo.svfs.read(FILENAME)
     repo.vfs.write(DIRSTATE_FILENAME, spec)
 
+
 def savebackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
@@ -167,11 +188,13 @@
     svfs.tryunlink(backupname)
     util.copyfile(svfs.join(FILENAME), svfs.join(backupname), hardlink=True)
 
+
 def restorebackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
     util.rename(repo.svfs.join(backupname), repo.svfs.join(FILENAME))
 
+
 def savewcbackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
@@ -179,8 +202,10 @@
     vfs.tryunlink(backupname)
     # It may not exist in old repos
     if vfs.exists(DIRSTATE_FILENAME):
-        util.copyfile(vfs.join(DIRSTATE_FILENAME), vfs.join(backupname),
-                      hardlink=True)
+        util.copyfile(
+            vfs.join(DIRSTATE_FILENAME), vfs.join(backupname), hardlink=True
+        )
+
 
 def restorewcbackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
@@ -189,11 +214,13 @@
     if repo.vfs.exists(backupname):
         util.rename(repo.vfs.join(backupname), repo.vfs.join(DIRSTATE_FILENAME))
 
+
 def clearwcbackup(repo, backupname):
     if repository.NARROW_REQUIREMENT not in repo.requirements:
         return
     repo.vfs.tryunlink(backupname)
 
+
 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
     r""" Restricts the patterns according to repo settings,
     results in a logical AND operation
@@ -247,12 +274,14 @@
         res_includes = set(req_includes)
     return res_includes, res_excludes, invalid_includes
 
+
 # These two are extracted for extensions (specifically for Google's CitC file
 # system)
 def _deletecleanfiles(repo, files):
     for f in files:
         repo.wvfs.unlinkpath(f)
 
+
 def _writeaddedfiles(repo, pctx, files):
     actions = merge.emptyactions()
     addgaction = actions[merge.ACTION_GET].append
@@ -260,8 +289,15 @@
     for f in files:
         if not repo.wvfs.exists(f):
             addgaction((f, (mf.flags(f), False), "narrowspec updated"))
-    merge.applyupdates(repo, actions, wctx=repo[None],
-                       mctx=repo['.'], overwrite=False, wantfiledata=False)
+    merge.applyupdates(
+        repo,
+        actions,
+        wctx=repo[None],
+        mctx=repo['.'],
+        overwrite=False,
+        wantfiledata=False,
+    )
+
 
 def checkworkingcopynarrowspec(repo):
     # Avoid infinite recursion when updating the working copy
@@ -270,8 +306,11 @@
     storespec = repo.svfs.tryread(FILENAME)
     wcspec = repo.vfs.tryread(DIRSTATE_FILENAME)
     if wcspec != storespec:
-        raise error.Abort(_("working copy's narrowspec is stale"),
-                          hint=_("run 'hg tracked --update-working-copy'"))
+        raise error.Abort(
+            _("working copy's narrowspec is stale"),
+            hint=_("run 'hg tracked --update-working-copy'"),
+        )
+
 
 def updateworkingcopy(repo, assumeclean=False):
     """updates the working copy and dirstate from the store narrowspec
@@ -291,8 +330,9 @@
     removedmatch = matchmod.differencematcher(oldmatch, newmatch)
 
     ds = repo.dirstate
-    lookup, status = ds.status(removedmatch, subrepos=[], ignored=True,
-                               clean=True, unknown=True)
+    lookup, status = ds.status(
+        removedmatch, subrepos=[], ignored=True, clean=True, unknown=True
+    )
     trackeddirty = status.modified + status.added
     clean = status.clean
     if assumeclean:
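
Most of the churn in narrowspec.py is vertical whitespace: black enforces the PEP 8 rule of two blank lines between top-level definitions, which is why so many hunks above add a lone '+' blank line between adjacent functions. In isolation:

    def load(repo):  # placeholder bodies; the spacing is the point
        pass


    def save(repo, includepats, excludepats):  # two blank lines above
        pass
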
--- a/mercurial/obsolete.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/obsolete.py	Sun Oct 06 09:45:02 2019 -0400
@@ -98,6 +98,7 @@
 allowunstableopt = 'allowunstable'
 exchangeopt = 'exchange'
 
+
 def _getoptionvalue(repo, option):
     """Returns True if the given repository has the given obsolete option
     enabled.
@@ -127,6 +128,7 @@
 
         return option in result
 
+
 def getoptions(repo):
     """Returns dicts showing state of obsolescence features."""
 
@@ -135,9 +137,13 @@
     exchangevalue = _getoptionvalue(repo, exchangeopt)
 
     # createmarkers must be enabled if other options are enabled
-    if ((unstablevalue or exchangevalue) and not createmarkersvalue):
-        raise error.Abort(_("'createmarkers' obsolete option must be enabled "
-                            "if other obsolete options are enabled"))
+    if (unstablevalue or exchangevalue) and not createmarkersvalue:
+        raise error.Abort(
+            _(
+                "'createmarkers' obsolete option must be enabled "
+                "if other obsolete options are enabled"
+            )
+        )
 
     return {
         createmarkersopt: createmarkersvalue,
@@ -145,12 +151,14 @@
         exchangeopt: exchangevalue,
     }
 
+
 def isenabled(repo, option):
     """Returns True if the given repository has the given obsolete option
     enabled.
     """
     return getoptions(repo)[option]
 
+
 # Creating aliases for marker flags because evolve extension looks for
 # bumpedfix in obsolete.py
 bumpedfix = obsutil.bumpedfix
@@ -177,39 +185,44 @@
 #   additional encoding. Keys cannot contain '\0' or ':' and values
 #   cannot contain '\0'.
 _fm0version = 0
-_fm0fixed   = '>BIB20s'
+_fm0fixed = '>BIB20s'
 _fm0node = '20s'
 _fm0fsize = _calcsize(_fm0fixed)
 _fm0fnodesize = _calcsize(_fm0node)
 
+
 def _fm0readmarkers(data, off, stop):
     # Loop on markers
     while off < stop:
         # read fixed part
-        cur = data[off:off + _fm0fsize]
+        cur = data[off : off + _fm0fsize]
         off += _fm0fsize
         numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur)
         # read replacement
         sucs = ()
         if numsuc:
-            s = (_fm0fnodesize * numsuc)
-            cur = data[off:off + s]
+            s = _fm0fnodesize * numsuc
+            cur = data[off : off + s]
             sucs = _unpack(_fm0node * numsuc, cur)
             off += s
         # read metadata
         # (metadata will be decoded on demand)
-        metadata = data[off:off + mdsize]
+        metadata = data[off : off + mdsize]
         if len(metadata) != mdsize:
-            raise error.Abort(_('parsing obsolete marker: metadata is too '
-                               'short, %d bytes expected, got %d')
-                             % (mdsize, len(metadata)))
+            raise error.Abort(
+                _(
+                    'parsing obsolete marker: metadata is too '
+                    'short, %d bytes expected, got %d'
+                )
+                % (mdsize, len(metadata))
+            )
         off += mdsize
         metadata = _fm0decodemeta(metadata)
         try:
             when, offset = metadata.pop('date', '0 0').split(' ')
             date = float(when), int(offset)
         except ValueError:
-            date = (0., 0)
+            date = (0.0, 0)
         parents = None
         if 'p2' in metadata:
             parents = (metadata.pop('p1', None), metadata.pop('p2', None))
@@ -233,6 +246,7 @@
 
         yield (pre, sucs, flags, metadata, date, parents)
 
+
 def _fm0encodeonemarker(marker):
     pre, sucs, flags, metadata, date, parents = marker
     if flags & usingsha256:
@@ -253,6 +267,7 @@
     data.extend(sucs)
     return _pack(format, *data) + metadata
 
+
 def _fm0encodemeta(meta):
     """Return encoded metadata string to string mapping.
 
@@ -264,6 +279,7 @@
             raise ValueError("':' is forbidden in metadata value'")
     return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
 
+
 def _fm0decodemeta(data):
     """Return string to string dictionary from encoded version."""
     d = {}
@@ -273,6 +289,7 @@
             d[key] = value
     return d
 
+
 ## Parsing and writing of version "1"
 #
 # The header is followed by the markers. Each marker is made of:
@@ -316,10 +333,11 @@
 _fm1fsize = _calcsize(_fm1fixed)
 _fm1parentnone = 3
 _fm1parentshift = 14
-_fm1parentmask = (_fm1parentnone << _fm1parentshift)
+_fm1parentmask = _fm1parentnone << _fm1parentshift
 _fm1metapair = 'BB'
 _fm1metapairsize = _calcsize(_fm1metapair)
 
+
 def _fm1purereadmarkers(data, off, stop):
     # make some global constants local for performance
     noneflag = _fm1parentnone
@@ -394,6 +412,7 @@
 
         yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents)
 
+
 def _fm1encodeonemarker(marker):
     pre, sucs, flags, metadata, date, parents = marker
     # determine node size
@@ -411,7 +430,7 @@
     formatmeta = _fm1metapair * len(metadata)
     format = _fm1fixed + formatnodes + formatmeta
     # tz is stored in minutes so we divide by 60
-    tz = date[1]//60
+    tz = date[1] // 60
     data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre]
     data.extend(sucs)
     if parents is not None:
@@ -421,12 +440,16 @@
         lk = len(key)
         lv = len(value)
         if lk > 255:
-            msg = ('obsstore metadata key cannot be longer than 255 bytes'
-                   ' (key "%s" is %u bytes)') % (key, lk)
+            msg = (
+                'obsstore metadata key cannot be longer than 255 bytes'
+                ' (key "%s" is %u bytes)'
+            ) % (key, lk)
             raise error.ProgrammingError(msg)
         if lv > 255:
-            msg = ('obsstore metadata value cannot be longer than 255 bytes'
-                   ' (value "%s" for key "%s" is %u bytes)') % (value, key, lv)
+            msg = (
+                'obsstore metadata value cannot be longer than 255 bytes'
+                ' (value "%s" for key "%s" is %u bytes)'
+            ) % (value, key, lv)
             raise error.ProgrammingError(msg)
         data.append(lk)
         data.append(lv)
@@ -438,20 +461,26 @@
         data.append(value)
     return ''.join(data)
 
+
 def _fm1readmarkers(data, off, stop):
     native = getattr(parsers, 'fm1readmarkers', None)
     if not native:
         return _fm1purereadmarkers(data, off, stop)
     return native(data, off, stop)
 
+
 # mapping to read/write various marker formats
 # <version> -> (decoder, encoder)
-formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker),
-           _fm1version: (_fm1readmarkers, _fm1encodeonemarker)}
+formats = {
+    _fm0version: (_fm0readmarkers, _fm0encodeonemarker),
+    _fm1version: (_fm1readmarkers, _fm1encodeonemarker),
+}
+
 
 def _readmarkerversion(data):
     return _unpack('>B', data[0:1])[0]
 
+
 @util.nogc
 def _readmarkers(data, off=None, stop=None):
     """Read and enumerate markers from raw data"""
@@ -465,9 +494,11 @@
         raise error.UnknownVersion(msg, version=diskversion)
     return diskversion, formats[diskversion][0](data, off, stop)
 
+
 def encodeheader(version=_fm0version):
     return _pack('>B', version)
 
+
 def encodemarkers(markers, addheader=False, version=_fm0version):
     # Kept separate from flushmarkers(), it will be reused for
     # markers exchange.
@@ -477,17 +508,20 @@
     for marker in markers:
         yield encodeone(marker)
 
+
 @util.nogc
 def _addsuccessors(successors, markers):
     for mark in markers:
         successors.setdefault(mark[0], set()).add(mark)
 
+
 @util.nogc
 def _addpredecessors(predecessors, markers):
     for mark in markers:
         for suc in mark[1]:
             predecessors.setdefault(suc, set()).add(mark)
 
+
 @util.nogc
 def _addchildren(children, markers):
     for mark in markers:
@@ -496,6 +530,7 @@
             for p in parents:
                 children.setdefault(p, set()).add(mark)
 
+
 def _checkinvalidmarkers(markers):
     """search for marker with invalid data and raise error if needed
 
@@ -504,8 +539,13 @@
     """
     for mark in markers:
         if node.nullid in mark[1]:
-            raise error.Abort(_('bad obsolescence marker detected: '
-                               'invalid successors nullid'))
+            raise error.Abort(
+                _(
+                    'bad obsolescence marker detected: '
+                    'invalid successors nullid'
+                )
+            )
+
 
 class obsstore(object):
     """Store obsolete markers
@@ -558,8 +598,17 @@
         Remove me in the future when obsolete markers are always on."""
         return self._readonly
 
-    def create(self, transaction, prec, succs=(), flag=0, parents=None,
-               date=None, metadata=None, ui=None):
+    def create(
+        self,
+        transaction,
+        prec,
+        succs=(),
+        flag=0,
+        parents=None,
+        date=None,
+        metadata=None,
+        ui=None,
+    ):
         """obsolete: add a new obsolete marker
 
         * ensuring it is hashable
@@ -591,7 +640,8 @@
                 raise ValueError(succ)
         if prec in succs:
             raise ValueError(
-                r'in-marker cycle with %s' % pycompat.sysstr(node.hex(prec)))
+                r'in-marker cycle with %s' % pycompat.sysstr(node.hex(prec))
+            )
 
         metadata = tuple(sorted(metadata.iteritems()))
         for k, v in metadata:
@@ -603,7 +653,8 @@
                 raise error.ProgrammingError(
                     'obsstore metadata must be valid UTF-8 sequence '
                     '(key = %r, value = %r)'
-                    % (pycompat.bytestr(k), pycompat.bytestr(v)))
+                    % (pycompat.bytestr(k), pycompat.bytestr(v))
+                )
 
         marker = (bytes(prec), tuple(succs), int(flag), metadata, date, parents)
         return bool(self.add(transaction, [marker]))
@@ -614,8 +665,9 @@
         Take care of filtering duplicates.
         Return the number of new markers."""
         if self._readonly:
-            raise error.Abort(_('creating obsolete markers is not enabled on '
-                              'this repo'))
+            raise error.Abort(
+                _('creating obsolete markers is not enabled on ' 'this repo')
+            )
         known = set()
         getsuccessors = self.successors.get
         new = []
@@ -696,7 +748,7 @@
         return attr in self.__dict__
 
     def _addmarkers(self, markers, rawdata):
-        markers = list(markers) # to allow repeated iteration
+        markers = list(markers)  # to allow repeated iteration
         self._data = self._data + rawdata
         self._all.extend(markers)
         if self._cached(r'successors'):
@@ -740,6 +792,7 @@
             seennodes |= pendingnodes
         return seenmarkers
 
+
 def makestore(ui, repo):
     """Create an obsstore instance from a repo."""
     # read default format for new obsstore.
@@ -752,10 +805,13 @@
     readonly = not isenabled(repo, createmarkersopt)
     store = obsstore(repo.svfs, readonly=readonly, **kwargs)
     if store and readonly:
-        ui.warn(_('obsolete feature not enabled but %i markers found!\n')
-                % len(list(store)))
+        ui.warn(
+            _('obsolete feature not enabled but %i markers found!\n')
+            % len(list(store))
+        )
     return store
 
+
 def commonversion(versions):
     """Return the newest version listed in both versions and our local formats.
 
@@ -768,12 +824,14 @@
             return v
     return None
 
+
 # arbitrary picked to fit into 8K limit from HTTP server
 # you have to take in account:
 # - the version header
 # - the base85 encoding
 _maxpayload = 5300
 
+
 def _pushkeyescape(markers):
     """encode markers into a dict suitable for pushkey exchange
 
@@ -784,7 +842,7 @@
     currentlen = _maxpayload * 2  # ensure we create a new part
     for marker in markers:
         nextdata = _fm0encodeonemarker(marker)
-        if (len(nextdata) + currentlen > _maxpayload):
+        if len(nextdata) + currentlen > _maxpayload:
             currentpart = []
             currentlen = 0
             parts.append(currentpart)
@@ -795,12 +853,14 @@
         keys['dump%i' % idx] = util.b85encode(data)
     return keys
 
+
 def listmarkers(repo):
     """List markers over pushkey"""
     if not repo.obsstore:
         return {}
     return _pushkeyescape(sorted(repo.obsstore))
 
+
 def pushmarker(repo, key, old, new):
     """Push markers over pushkey"""
     if not key.startswith('dump'):
@@ -815,18 +875,24 @@
         repo.invalidatevolatilesets()
         return True
 
+
 # mapping of 'set-name' -> <function to compute this set>
 cachefuncs = {}
+
+
 def cachefor(name):
     """Decorator to register a function as computing the cache for a set"""
+
     def decorator(func):
         if name in cachefuncs:
             msg = "duplicated registration for volatileset '%s' (existing: %r)"
             raise error.ProgrammingError(msg % (name, cachefuncs[name]))
         cachefuncs[name] = func
         return func
+
     return decorator
 
+
 def getrevs(repo, name):
     """Return the set of revision that belong to the <name> set
 
@@ -838,6 +904,7 @@
         repo.obsstore.caches[name] = cachefuncs[name](repo)
     return repo.obsstore.caches[name]
 
+
 # To be simple we need to invalidate obsolescence cache when:
 #
 # - new changeset is added:
@@ -856,10 +923,12 @@
     if 'obsstore' in repo._filecache:
         repo.obsstore.caches.clear()
 
+
 def _mutablerevs(repo):
     """the set of mutable revision in the repository"""
     return repo._phasecache.getrevset(repo, phases.mutablephases)
 
+
 @cachefor('obsolete')
 def _computeobsoleteset(repo):
     """the set of obsolete revisions"""
@@ -869,6 +938,7 @@
     obs = set(r for r in notpublic if isobs(getnode(r)))
     return obs
 
+
 @cachefor('orphan')
 def _computeorphanset(repo):
     """the set of non obsolete revisions with obsolete parents"""
@@ -886,23 +956,26 @@
                 break
     return unstable
 
+
 @cachefor('suspended')
 def _computesuspendedset(repo):
     """the set of obsolete parents with non obsolete descendants"""
     suspended = repo.changelog.ancestors(getrevs(repo, 'orphan'))
     return set(r for r in getrevs(repo, 'obsolete') if r in suspended)
 
+
 @cachefor('extinct')
 def _computeextinctset(repo):
     """the set of obsolete parents without non obsolete descendants"""
     return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended')
 
+
 @cachefor('phasedivergent')
 def _computephasedivergentset(repo):
     """the set of revs trying to obsolete public revisions"""
     bumped = set()
     # util function (avoid attribute lookup in the loop)
-    phase = repo._phasecache.phase # would be faster to grab the full list
+    phase = repo._phasecache.phase  # would be faster to grab the full list
     public = phases.public
     cl = repo.changelog
     torev = cl.nodemap.get
@@ -912,15 +985,17 @@
         # We only evaluate mutable, non-obsolete revision
         node = tonode(rev)
         # (future) A cache of predecessors may be worth it if split is very common
-        for pnode in obsutil.allpredecessors(obsstore, [node],
-                                   ignoreflags=bumpedfix):
-            prev = torev(pnode) # unfiltered! but so is phasecache
+        for pnode in obsutil.allpredecessors(
+            obsstore, [node], ignoreflags=bumpedfix
+        ):
+            prev = torev(pnode)  # unfiltered! but so is phasecache
             if (prev is not None) and (phase(repo, prev) <= public):
                 # we have a public predecessor
                 bumped.add(rev)
-                break # Next draft!
+                break  # Next draft!
     return bumped
 
+
 @cachefor('contentdivergent')
 def _computecontentdivergentset(repo):
     """the set of rev that compete to be the final successors of some revision.
@@ -937,7 +1012,7 @@
         while toprocess:
             prec = toprocess.pop()[0]
             if prec in seen:
-                continue # emergency cycle hanging prevention
+                continue  # emergency cycle hanging prevention
             seen.add(prec)
             if prec not in newermap:
                 obsutil.successorssets(repo, prec, cache=newermap)
@@ -948,6 +1023,7 @@
             toprocess.update(obsstore.predecessors.get(prec, ()))
     return divergent
 
+
 def makefoldid(relation, user):
 
     folddigest = hashlib.sha1(user)
@@ -958,8 +1034,10 @@
     # seems fine to use a small ID. Smaller ID save space.
     return node.hex(folddigest.digest())[:8]
 
-def createmarkers(repo, relations, flag=0, date=None, metadata=None,
-                  operation=None):
+
+def createmarkers(
+    repo, relations, flag=0, date=None, metadata=None, operation=None
+):
     """Add obsolete markers between changesets in a repo
 
     <relations> must be an iterable of ((<old>,...), (<new>, ...)[,{metadata}])
@@ -984,14 +1062,16 @@
         metadata['user'] = encoding.fromlocal(luser)
 
     # Operation metadata handling
-    useoperation = repo.ui.configbool('experimental',
-        'evolution.track-operation')
+    useoperation = repo.ui.configbool(
+        'experimental', 'evolution.track-operation'
+    )
     if useoperation and operation:
         metadata['operation'] = operation
 
     # Effect flag metadata handling
-    saveeffectflag = repo.ui.configbool('experimental',
-                                        'evolution.effect-flags')
+    saveeffectflag = repo.ui.configbool(
+        'experimental', 'evolution.effect-flags'
+    )
 
     with repo.transaction('add-obsolescence-marker') as tr:
         markerargs = []
@@ -1018,17 +1098,19 @@
                     localmetadata['fold-size'] = '%d' % foldsize
 
                 if not prec.mutable():
-                    raise error.Abort(_("cannot obsolete public changeset: %s")
-                                     % prec,
-                                     hint="see 'hg help phases' for details")
+                    raise error.Abort(
+                        _("cannot obsolete public changeset: %s") % prec,
+                        hint="see 'hg help phases' for details",
+                    )
                 nprec = prec.node()
                 nsucs = tuple(s.node() for s in sucs)
                 npare = None
                 if not nsucs:
                     npare = tuple(p.node() for p in prec.parents())
                 if nprec in nsucs:
-                    raise error.Abort(_("changeset %s cannot obsolete itself")
-                                      % prec)
+                    raise error.Abort(
+                        _("changeset %s cannot obsolete itself") % prec
+                    )
 
                 # Effect flag can be different by relation
                 if saveeffectflag:
@@ -1045,7 +1127,14 @@
 
         for args in markerargs:
             nprec, nsucs, npare, localmetadata = args
-            repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare,
-                                 date=date, metadata=localmetadata,
-                                 ui=repo.ui)
+            repo.obsstore.create(
+                tr,
+                nprec,
+                nsucs,
+                flag,
+                parents=npare,
+                date=date,
+                metadata=localmetadata,
+                ui=repo.ui,
+            )
             repo.filteredrevcache.clear()
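
Two smaller rules recur throughout the obsolete.py hunks: slices whose bounds are expressions gain spaces around the colon (PEP 8 treats the slice colon like a low-priority binary operator), simple bounds stay tight, and number literals are normalized, e.g. '0.' becomes '0.0'. A self-contained sketch:

    _fm0fsize = 24                     # placeholder marker size
    data = b'\x00' * 64
    off = 8

    cur = data[off : off + _fm0fsize]  # expression bound: spaced colon
    version = data[0:1]                # simple bounds: left unspaced
    date = (0.0, 0)                    # float literal written out in full
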
--- a/mercurial/obsutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/obsutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,9 +17,7 @@
     phases,
     util,
 )
-from .utils import (
-    dateutil,
-)
+from .utils import dateutil
 
 ### obsolescence marker flag
 
@@ -54,6 +52,7 @@
 bumpedfix = 1
 usingsha256 = 2
 
+
 class marker(object):
     """Wrap obsolete marker raw data"""
 
@@ -95,6 +94,7 @@
         """The flags field of the marker"""
         return self._data[2]
 
+
 def getmarkers(repo, nodes=None, exclusive=False):
     """returns markers known in a repository
 
@@ -110,6 +110,7 @@
     for markerdata in rawmarkers:
         yield marker(repo, markerdata)
 
+
 def closestpredecessors(repo, nodeid):
     """yield the list of next predecessors pointing on visible changectx nodes
 
@@ -138,6 +139,7 @@
             else:
                 stack.append(precnodeid)
 
+
 def allpredecessors(obsstore, nodes, ignoreflags=0):
     """Yield node for every precursors of <nodes>.
 
@@ -161,6 +163,7 @@
                 seen.add(suc)
                 remaining.add(suc)
 
+
 def allsuccessors(obsstore, nodes, ignoreflags=0):
     """Yield node for every successor of <nodes>.
 
@@ -182,10 +185,12 @@
                     seen.add(suc)
                     remaining.add(suc)
 
+
 def _filterprunes(markers):
     """return a set with no prune markers"""
     return set(m for m in markers if m[1])
 
+
 def exclusivemarkers(repo, nodes):
     """set of markers relevant to "nodes" but no other locally-known nodes
 
@@ -307,6 +312,7 @@
 
     return exclmarkers
 
+
 def foreground(repo, nodes):
     """return all nodes in the "foreground" of other node
 
@@ -333,6 +339,7 @@
             foreground = set(repo.set('%ln::', known))
     return set(c.node() for c in foreground)
 
+
 # effectflag field
 #
 # Effect-flag is a 1-byte bit field used to store what changed between a
@@ -350,13 +357,13 @@
 
 EFFECTFLAGFIELD = "ef1"
 
-DESCCHANGED = 1 << 0 # action changed the description
-METACHANGED = 1 << 1 # action change the meta
-DIFFCHANGED = 1 << 3 # action change diff introduced by the changeset
-PARENTCHANGED = 1 << 2 # action change the parent
-USERCHANGED = 1 << 4 # the user changed
-DATECHANGED = 1 << 5 # the date changed
-BRANCHCHANGED = 1 << 6 # the branch changed
+DESCCHANGED = 1 << 0  # action changed the description
+METACHANGED = 1 << 1  # action change the meta
+DIFFCHANGED = 1 << 3  # action change diff introduced by the changeset
+PARENTCHANGED = 1 << 2  # action change the parent
+USERCHANGED = 1 << 4  # the user changed
+DATECHANGED = 1 << 5  # the date changed
+BRANCHCHANGED = 1 << 6  # the branch changed
 
 METABLACKLIST = [
     re.compile('^branch$'),
@@ -365,6 +372,7 @@
     re.compile('^source$'),
 ]
 
+
 def metanotblacklisted(metaitem):
     """ Check that the key of a meta item (extrakey, extravalue) does not
     match at least one of the blacklist pattern
@@ -373,6 +381,7 @@
 
     return not any(pattern.match(metakey) for pattern in METABLACKLIST)
 
+
 def _prepare_hunk(hunk):
     """Drop all information but the username and patch"""
     cleanhunk = []
@@ -383,6 +392,7 @@
             cleanhunk.append(line)
     return cleanhunk
 
+
 def _getdifflines(iterdiff):
     """return a cleaned up lines"""
     lines = next(iterdiff, None)
@@ -392,6 +402,7 @@
 
     return _prepare_hunk(lines)
 
+
 def _cmpdiff(leftctx, rightctx):
     """return True if both ctx introduce the "same diff"
 
@@ -419,6 +430,7 @@
             return False
     return True
 
+
 def geteffectflag(source, successors):
     """ From an obs-marker relation, compute what changed between the
     predecessor and the successor.
@@ -462,6 +474,7 @@
 
     return effects
 
+
 def getobsoleted(repo, tr):
     """return the set of pre-existing revisions obsoleted by a transaction"""
     torev = repo.unfiltered().changelog.nodemap.get
@@ -484,6 +497,7 @@
             obsoleted.add(rev)
     return obsoleted
 
+
 class _succs(list):
     """small class to represent a successors with some metadata about it"""
 
@@ -504,6 +518,7 @@
     def canmerge(self, other):
         return self._set.issubset(other._set)
 
+
 def successorssets(repo, initialnode, closest=False, cache=None):
     """Return set of all latest successors of initial nodes
 
@@ -611,9 +626,9 @@
 
         # case 2 condition is a bit hairy because of closest,
         # we compute it on its own
-        case2condition =  ((current not in succmarkers)
-                           or (closest and current != initialnode
-                               and current in repo))
+        case2condition = (current not in succmarkers) or (
+            closest and current != initialnode and current in repo
+        )
 
         if current in cache:
             # case (1): We already know the successors sets
@@ -720,8 +735,9 @@
                 # remove duplicated and subset
                 seen = []
                 final = []
-                candidates = sorted((s for s in succssets if s),
-                                    key=len, reverse=True)
+                candidates = sorted(
+                    (s for s in succssets if s), key=len, reverse=True
+                )
                 for cand in candidates:
                     for seensuccs in seen:
                         if cand.canmerge(seensuccs):
@@ -730,10 +746,11 @@
                     else:
                         final.append(cand)
                         seen.append(cand)
-                final.reverse() # put small successors set first
+                final.reverse()  # put small successors set first
                 cache[current] = final
     return cache[initialnode]
 
+
 def successorsandmarkers(repo, ctx):
     """compute the raw data needed for computing obsfate
     Returns a list of dicts, one dict per successors set
@@ -750,7 +767,7 @@
 
     # Try to recover pruned markers
     succsmap = repo.obsstore.successors
-    fullsuccessorsets = [] # successor set + markers
+    fullsuccessorsets = []  # successor set + markers
     for sset in ssets:
         if sset:
             fullsuccessorsets.append(sset)
@@ -781,6 +798,7 @@
 
     return values
 
+
 def _getobsfate(successorssets):
     """ Compute a changeset obsolescence fate based on its successorssets.
     Successors can be the tipmost ones or the immediate ones. This function
@@ -807,6 +825,7 @@
         else:
             return 'superseded_split'
 
+
 def obsfateverb(successorset, markers):
     """ Return the verb summarizing the successorset and potentially using
     information from the markers
@@ -819,29 +838,37 @@
         verb = 'split'
     return verb
 
+
 def markersdates(markers):
     """returns the list of dates for a list of markers
     """
     return [m[4] for m in markers]
 
+
 def markersusers(markers):
     """ Returns a sorted list of markers users without duplicates
     """
     markersmeta = [dict(m[3]) for m in markers]
-    users = set(encoding.tolocal(meta['user']) for meta in markersmeta
-                if meta.get('user'))
+    users = set(
+        encoding.tolocal(meta['user'])
+        for meta in markersmeta
+        if meta.get('user')
+    )
 
     return sorted(users)
 
+
 def markersoperations(markers):
     """ Returns a sorted list of markers operations without duplicates
     """
     markersmeta = [dict(m[3]) for m in markers]
-    operations = set(meta.get('operation') for meta in markersmeta
-                     if meta.get('operation'))
+    operations = set(
+        meta.get('operation') for meta in markersmeta if meta.get('operation')
+    )
 
     return sorted(operations)
 
+
 def obsfateprinter(ui, repo, successors, markers, formatctx):
     """ Build a obsfate string for a single successorset using all obsfate
     related function defined in obsutil
@@ -900,10 +927,12 @@
     "diverged": _("hidden revision '%s' has diverged"),
     "superseded": _("hidden revision '%s' was rewritten as: %s"),
     "superseded_split": _("hidden revision '%s' was split as: %s"),
-    "superseded_split_several": _("hidden revision '%s' was split as: %s and "
-                                  "%d more"),
+    "superseded_split_several": _(
+        "hidden revision '%s' was split as: %s and " "%d more"
+    ),
 }
 
+
 def _getfilteredreason(repo, changeid, ctx):
     """return a human-friendly string on why a obsolete changeset is hidden
     """
@@ -934,6 +963,7 @@
             args = (changeid, firstsuccessors, remainingnumber)
             return filteredmsgtable['superseded_split_several'] % args
 
+
 def divergentsets(repo, ctx):
     """Compute sets of commits divergent with a given one"""
     cache = {}
@@ -951,8 +981,11 @@
                 # we already know the latest base for this divergency
                 continue
             base[tuple(nsuccset)] = n
-    return [{'divergentnodes': divset, 'commonpredecessor': b}
-            for divset, b in base.iteritems()]
+    return [
+        {'divergentnodes': divset, 'commonpredecessor': b}
+        for divset, b in base.iteritems()
+    ]
+
 
 def whyunstable(repo, ctx):
     result = []
@@ -964,24 +997,38 @@
             elif parent.obsolete():
                 kind = 'obsolete'
             if kind is not None:
-                result.append({'instability': 'orphan',
-                               'reason': '%s parent' % kind,
-                               'node': parent.hex()})
+                result.append(
+                    {
+                        'instability': 'orphan',
+                        'reason': '%s parent' % kind,
+                        'node': parent.hex(),
+                    }
+                )
     if ctx.phasedivergent():
-        predecessors = allpredecessors(repo.obsstore, [ctx.node()],
-                                       ignoreflags=bumpedfix)
-        immutable = [repo[p] for p in predecessors
-                     if p in repo and not repo[p].mutable()]
+        predecessors = allpredecessors(
+            repo.obsstore, [ctx.node()], ignoreflags=bumpedfix
+        )
+        immutable = [
+            repo[p] for p in predecessors if p in repo and not repo[p].mutable()
+        ]
         for predecessor in immutable:
-            result.append({'instability': 'phase-divergent',
-                           'reason': 'immutable predecessor',
-                           'node': predecessor.hex()})
+            result.append(
+                {
+                    'instability': 'phase-divergent',
+                    'reason': 'immutable predecessor',
+                    'node': predecessor.hex(),
+                }
+            )
     if ctx.contentdivergent():
         dsets = divergentsets(repo, ctx)
         for dset in dsets:
             divnodes = [repo[n] for n in dset['divergentnodes']]
-            result.append({'instability': 'content-divergent',
-                           'divergentnodes': divnodes,
-                           'reason': 'predecessor',
-                           'node': nodemod.hex(dset['commonpredecessor'])})
+            result.append(
+                {
+                    'instability': 'content-divergent',
+                    'divergentnodes': divnodes,
+                    'reason': 'predecessor',
+                    'node': nodemod.hex(dset['commonpredecessor']),
+                }
+            )
     return result
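
obsutil.py shows two more mechanical changes: inline comments are padded to exactly two spaces before the '#', and a comprehension that overflows the limit is split with one clause per line. A neutral sketch (the data is made up):

    metadata = [{'user': 'alice'}, {}, {'user': 'bob'}]

    final = [3, 2, 1]
    final.reverse()  # exactly two spaces before an inline comment

    # an overflowing generator expression splits one clause per line:
    users = set(
        meta['user'].title()
        for meta in metadata
        if meta.get('user')
    )
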
--- a/mercurial/parser.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/parser.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,29 +24,33 @@
     pycompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 class parser(object):
     def __init__(self, elements, methods=None):
         self._elements = elements
         self._methods = methods
         self.current = None
+
     def _advance(self):
         'advance the tokenizer'
         t = self.current
         self.current = next(self._iter, None)
         return t
+
     def _hasnewterm(self):
         'True if next token may start new term'
         return any(self._elements[self.current[0]][1:3])
+
     def _match(self, m):
         'make sure the tokenizer matches an end condition'
         if self.current[0] != m:
-            raise error.ParseError(_("unexpected token: %s") % self.current[0],
-                                   self.current[2])
+            raise error.ParseError(
+                _("unexpected token: %s") % self.current[0], self.current[2]
+            )
         self._advance()
+
     def _parseoperand(self, bind, m=None):
         'gather right-hand-side operand until an end condition or binding is met'
         if m and self.current[0] == m:
@@ -56,6 +60,7 @@
         if m:
             self._match(m)
         return expr
+
     def _parse(self, bind=0):
         token, value, pos = self._advance()
         # handle prefix rules on current token, take as primary if unambiguous
@@ -78,6 +83,7 @@
             else:
                 raise error.ParseError(_("not an infix: %s") % token, pos)
         return expr
+
     def parse(self, tokeniter):
         'generate a parse tree from tokens'
         self._iter = tokeniter
@@ -85,11 +91,13 @@
         res = self._parse()
         token, value, pos = self.current
         return res, pos
+
     def eval(self, tree):
         'recursively evaluate a parse tree using node methods'
         if not isinstance(tree, tuple):
             return tree
         return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]])
+
     def __call__(self, tokeniter):
         'parse tokens into a parse tree and evaluate if methods given'
         t = self.parse(tokeniter)
@@ -97,6 +105,7 @@
             return self.eval(t)
         return t
 
+
 def splitargspec(spec):
     """Parse spec of function arguments into (poskeys, varkey, keys, optkey)
 
@@ -130,6 +139,7 @@
         return pres, posts[0], posts[1:], optkey
     return [], None, pres, optkey
 
+
 def buildargsdict(trees, funcname, argspec, keyvaluenode, keynode):
     """Build dict from list containing positional and keyword arguments
 
@@ -147,50 +157,59 @@
     arguments are rejected, but missing keyword arguments are just omitted.
     """
     poskeys, varkey, keys, optkey = argspec
-    kwstart = next((i for i, x in enumerate(trees)
-                    if x and x[0] == keyvaluenode),
-                   len(trees))
+    kwstart = next(
+        (i for i, x in enumerate(trees) if x and x[0] == keyvaluenode),
+        len(trees),
+    )
     if kwstart < len(poskeys):
-        raise error.ParseError(_("%(func)s takes at least %(nargs)d positional "
-                                 "arguments")
-                               % {'func': funcname, 'nargs': len(poskeys)})
+        raise error.ParseError(
+            _("%(func)s takes at least %(nargs)d positional " "arguments")
+            % {'func': funcname, 'nargs': len(poskeys)}
+        )
     if not varkey and kwstart > len(poskeys) + len(keys):
-        raise error.ParseError(_("%(func)s takes at most %(nargs)d positional "
-                                 "arguments")
-                               % {'func': funcname,
-                                  'nargs': len(poskeys) + len(keys)})
+        raise error.ParseError(
+            _("%(func)s takes at most %(nargs)d positional " "arguments")
+            % {'func': funcname, 'nargs': len(poskeys) + len(keys)}
+        )
     args = util.sortdict()
     # consume positional arguments
     for k, x in zip(poskeys, trees[:kwstart]):
         args[k] = x
     if varkey:
-        args[varkey] = trees[len(args):kwstart]
+        args[varkey] = trees[len(args) : kwstart]
     else:
-        for k, x in zip(keys, trees[len(args):kwstart]):
+        for k, x in zip(keys, trees[len(args) : kwstart]):
             args[k] = x
     # remainder should be keyword arguments
     if optkey:
         args[optkey] = util.sortdict()
     for x in trees[kwstart:]:
         if not x or x[0] != keyvaluenode or x[1][0] != keynode:
-            raise error.ParseError(_("%(func)s got an invalid argument")
-                                   % {'func': funcname})
+            raise error.ParseError(
+                _("%(func)s got an invalid argument") % {'func': funcname}
+            )
         k = x[1][1]
         if k in keys:
             d = args
         elif not optkey:
-            raise error.ParseError(_("%(func)s got an unexpected keyword "
-                                     "argument '%(key)s'")
-                                   % {'func': funcname, 'key': k})
+            raise error.ParseError(
+                _("%(func)s got an unexpected keyword " "argument '%(key)s'")
+                % {'func': funcname, 'key': k}
+            )
         else:
             d = args[optkey]
         if k in d:
-            raise error.ParseError(_("%(func)s got multiple values for keyword "
-                                     "argument '%(key)s'")
-                                   % {'func': funcname, 'key': k})
+            raise error.ParseError(
+                _(
+                    "%(func)s got multiple values for keyword "
+                    "argument '%(key)s'"
+                )
+                % {'func': funcname, 'key': k}
+            )
         d[k] = x[2]
     return args
 
+
 def unescapestr(s):
     try:
         return stringutil.unescapestr(s)
@@ -198,6 +217,7 @@
         # mangle Python's exception into our format
         raise error.ParseError(pycompat.bytestr(e).lower())
 
+
 def _prettyformat(tree, leafnodes, level, lines):
     if not isinstance(tree, tuple):
         lines.append((level, stringutil.pprint(tree)))
@@ -210,12 +230,14 @@
             _prettyformat(s, leafnodes, level + 1, lines)
         lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
 
+
 def prettyformat(tree, leafnodes):
     lines = []
     _prettyformat(tree, leafnodes, 0, lines)
     output = '\n'.join(('  ' * l + s) for l, s in lines)
     return output
 
+
 def simplifyinfixops(tree, targetnodes):
     """Flatten chained infix operations to reduce usage of Python stack
 
@@ -295,6 +317,7 @@
     simplified.append(op)
     return tuple(reversed(simplified))
 
+
 def _buildtree(template, placeholder, replstack):
     if template == placeholder:
         return replstack.pop()
@@ -302,6 +325,7 @@
         return template
     return tuple(_buildtree(x, placeholder, replstack) for x in template)
 
+
 def buildtree(template, placeholder, *repls):
     """Create new tree by substituting placeholders by replacements
 
@@ -322,6 +346,7 @@
         raise error.ProgrammingError('too many replacements')
     return r
 
+
 def _matchtree(pattern, tree, placeholder, incompletenodes, matches):
     if pattern == tree:
         return True
@@ -332,8 +357,11 @@
         return True
     if len(pattern) != len(tree):
         return False
-    return all(_matchtree(p, x, placeholder, incompletenodes, matches)
-               for p, x in zip(pattern, tree))
+    return all(
+        _matchtree(p, x, placeholder, incompletenodes, matches)
+        for p, x in zip(pattern, tree)
+    )
+
 
 def matchtree(pattern, tree, placeholder=None, incompletenodes=()):
     """If a tree matches the pattern, return a list of the tree and nodes
@@ -375,6 +403,7 @@
     if _matchtree(pattern, tree, placeholder, incompletenodes, matches):
         return matches
 
+
 def parseerrordetail(inst):
     """Compose error message from specified ParseError object
     """
@@ -383,6 +412,7 @@
     else:
         return inst.args[0]
 
+
 class alias(object):
     """Parsed result of alias"""
 
@@ -396,6 +426,7 @@
         # `expandaliases`.
         self.warned = False
 
+
 class basealiasrules(object):
     """Parsing and expansion rule set of aliases
 
@@ -408,6 +439,7 @@
         h = heads(default)
         b($1) = ancestors($1) - ancestors(default)
     """
+
     # typically a config section, which will be included in error messages
     _section = None
     # tag of symbol node
@@ -665,28 +697,32 @@
             return tree
         r = cls._getalias(aliases, tree)
         if r is None:
-            return tuple(cls._expand(aliases, t, expanding, cache)
-                         for t in tree)
+            return tuple(
+                cls._expand(aliases, t, expanding, cache) for t in tree
+            )
         a, l = r
         if a.error:
             raise error.Abort(a.error)
         if a in expanding:
-            raise error.ParseError(_('infinite expansion of %(section)s '
-                                     '"%(name)s" detected')
-                                   % {'section': cls._section, 'name': a.name})
+            raise error.ParseError(
+                _('infinite expansion of %(section)s ' '"%(name)s" detected')
+                % {'section': cls._section, 'name': a.name}
+            )
         # get cacheable replacement tree by expanding aliases recursively
         expanding.append(a)
         if a.name not in cache:
-            cache[a.name] = cls._expand(aliases, a.replacement, expanding,
-                                        cache)
+            cache[a.name] = cls._expand(
+                aliases, a.replacement, expanding, cache
+            )
         result = cache[a.name]
         expanding.pop()
         if a.args is None:
             return result
         # substitute function arguments in replacement tree
         if len(l) != len(a.args):
-            raise error.ParseError(_('invalid number of arguments: %d')
-                                   % len(l))
+            raise error.ParseError(
+                _('invalid number of arguments: %d') % len(l)
+            )
         l = [cls._expand(aliases, t, [], cache) for t in l]
         return cls._expandargs(result, dict(zip(a.args, l)))
 
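One artifact black leaves behind, visible in parser.py's error messages: implicitly concatenated string literals that used to span two source lines can end up adjacent on a single line, such as "...positional " "arguments". Black does not merge them, but the pair is semantically identical to the single literal:

    def _(s):  # placeholder for the i18n gettext wrapper
        return s

    msg = _("%(func)s takes at least %(nargs)d positional " "arguments")
    assert msg == _("%(func)s takes at least %(nargs)d positional arguments")
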
--- a/mercurial/patch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/patch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -49,15 +49,18 @@
 
 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
 tabsplitter = re.compile(br'(\t+|[^\t]+)')
-wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
-                          b'[^ \ta-zA-Z0-9_\x80-\xff])')
+wordsplitter = re.compile(
+    br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|' b'[^ \ta-zA-Z0-9_\x80-\xff])'
+)
 
 PatchError = error.PatchError
 
 # public functions
 
+
 def split(stream):
     '''return an iterator of individual patches from a stream'''
+
     def isheader(line, inheader):
         if inheader and line.startswith((' ', '\t')):
             # continuation
@@ -185,12 +188,15 @@
     # if we are here, we have a very plain patch
     return remainder(cur)
 
+
 ## Some facility for extensible patch parsing:
 # list of pairs ("header to match", "data key")
-patchheadermap = [('Date', 'date'),
-                  ('Branch', 'branch'),
-                  ('Node ID', 'nodeid'),
-                 ]
+patchheadermap = [
+    ('Date', 'date'),
+    ('Branch', 'branch'),
+    ('Node ID', 'nodeid'),
+]
+
 
 @contextlib.contextmanager
 def extract(ui, fileobj):
@@ -218,15 +224,18 @@
         tmpfp.close()
         os.unlink(tmpname)
 
+
 def _extract(ui, fileobj, tmpname, tmpfp):
 
     # attempt to detect the start of a patch
     # (this heuristic is borrowed from quilt)
-    diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
-                        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
-                        br'---[ \t].*?^\+\+\+[ \t]|'
-                        br'\*\*\*[ \t].*?^---[ \t])',
-                        re.MULTILINE | re.DOTALL)
+    diffre = re.compile(
+        br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
+        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
+        br'---[ \t].*?^\+\+\+[ \t]|'
+        br'\*\*\*[ \t].*?^---[ \t])',
+        re.MULTILINE | re.DOTALL,
+    )
 
     data = {}
 
@@ -236,8 +245,12 @@
     data['user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
     if not subject and not data['user']:
         # Not an email, restore parsed headers if any
-        subject = '\n'.join(': '.join(map(encoding.strtolocal, h))
-                            for h in msg.items()) + '\n'
+        subject = (
+            '\n'.join(
+                ': '.join(map(encoding.strtolocal, h)) for h in msg.items()
+            )
+            + '\n'
+        )
 
     # should try to parse msg['Date']
     parents = []
@@ -246,7 +259,7 @@
         if subject.startswith('[PATCH'):
             pend = subject.find(']')
             if pend >= 0:
-                subject = subject[pend + 1:].lstrip()
+                subject = subject[pend + 1 :].lstrip()
         subject = re.sub(br'\n[ \t]+', ' ', subject)
         ui.debug('Subject: %s\n' % subject)
     if data['user']:
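
The `subject[pend + 1 :]` change above follows PEP 8's slice rule, which black applies mechanically: when a slice bound is a compound expression, the colon is treated as a binary operator and gets a space on each side, while simple bounds keep the tight form. For example:

    subject = '[PATCH] fix the frobnicator'
    pend = subject.find(']')
    assert subject[pend + 1 :].lstrip() == 'fix the frobnicator'  # compound bound: spaced colon
    assert subject[:6] == '[PATCH'  # simple bound: no space
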
@@ -269,7 +282,7 @@
             ui.debug('found patch at byte %d\n' % m.start(0))
             diffs_seen += 1
             cfp = stringio()
-            for line in payload[:m.start(0)].splitlines():
+            for line in payload[: m.start(0)].splitlines():
                 if line.startswith('# HG changeset patch') and not hgpatch:
                     ui.debug('patch generated by hg export\n')
                     hgpatch = True
@@ -288,7 +301,7 @@
                         for header, key in patchheadermap:
                             prefix = '# %s ' % header
                             if line.startswith(prefix):
-                                data[key] = line[len(prefix):]
+                                data[key] = line[len(prefix) :]
                                 ui.debug('%s: %s\n' % (header, data[key]))
                     else:
                         hgpatchheader = False
@@ -319,6 +332,7 @@
 
     return data
 
+
 class patchmeta(object):
     """Patched file metadata
 
@@ -329,6 +343,7 @@
     'islink' is True if the file is a symlink and 'isexec' is True if
     the file is executable. Otherwise, 'mode' is None.
     """
+
     def __init__(self, path):
         self.path = path
         self.oldpath = None
@@ -365,6 +380,7 @@
     def __repr__(self):
         return r"<patchmeta %s %r>" % (self.op, self.path)
 
+
 def readgitpatch(lr):
     """extract git-style metadata about patches from <patchname>"""
 
@@ -409,6 +425,7 @@
 
     return gitpatches
 
+
 class linereader(object):
     # simple class to allow pushing lines back into the input stream
     def __init__(self, fp):
@@ -429,6 +446,7 @@
     def __iter__(self):
         return iter(self.readline, '')
 
+
 class abstractbackend(object):
     def __init__(self, ui):
         self.ui = ui
@@ -463,6 +481,7 @@
     def close(self):
         raise NotImplementedError
 
+
 class fsbackend(abstractbackend):
     def __init__(self, ui, basedir):
         super(fsbackend, self).__init__(ui)
@@ -504,8 +523,9 @@
     def writerej(self, fname, failed, total, lines):
         fname = fname + ".rej"
         self.ui.warn(
-            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
-            (failed, total, fname))
+            _("%d out of %d hunks FAILED -- saving rejects to file %s\n")
+            % (failed, total, fname)
+        )
         fp = self.opener(fname, 'w')
         fp.writelines(lines)
         fp.close()
@@ -513,6 +533,7 @@
     def exists(self, fname):
         return self.opener.lexists(fname)
 
+
 class workingbackend(fsbackend):
     def __init__(self, ui, repo, similarity):
         super(workingbackend, self).__init__(ui, repo.root)
@@ -557,6 +578,7 @@
             scmutil.marktouched(self.repo, changed, self.similarity)
         return sorted(self.changed)
 
+
 class filestore(object):
     def __init__(self, maxsize=None):
         self.opener = None
@@ -564,7 +586,7 @@
         self.created = 0
         self.maxsize = maxsize
         if self.maxsize is None:
-            self.maxsize = 4*(2**20)
+            self.maxsize = 4 * (2 ** 20)
         self.size = 0
         self.data = {}
 
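The `4*(2**20)` rewrite shows black's operator spacing: every binary operator, `**` included with the black release used for this series, gets one space per side. The value is unchanged:

    maxsize = 4 * (2 ** 20)  # black's spelling of 4*(2**20)
    assert maxsize == 4194304  # still 4 MiB
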
@@ -594,6 +616,7 @@
         if self.opener:
             shutil.rmtree(self.opener.base)
 
+
 class repobackend(abstractbackend):
     def __init__(self, ui, repo, ctx, store):
         super(repobackend, self).__init__(ui)
@@ -636,11 +659,13 @@
     def close(self):
         return self.changed | self.removed
 
+
 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
 unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
 contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
 eolmodes = ['strict', 'crlf', 'lf', 'auto']
 
+
 class patchfile(object):
     def __init__(self, ui, gp, backend, store, eolmode='strict'):
         self.fname = gp.path
@@ -686,8 +711,12 @@
                 self.mode = (False, False)
         if self.missing:
             self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
-            self.ui.warn(_("(use '--prefix' to apply patch relative to the "
-                           "current directory)\n"))
+            self.ui.warn(
+                _(
+                    "(use '--prefix' to apply patch relative to the "
+                    "current directory)\n"
+                )
+            )
 
         self.hash = {}
         self.dirty = 0
@@ -727,7 +756,6 @@
         else:
             self.ui.note(s)
 
-
     def findlines(self, l, linenum):
         # looks through the hash and finds candidate lines.  The
         # result is a list of line numbers sorted based on distance
@@ -757,9 +785,10 @@
 
     def apply(self, h):
         if not h.complete():
-            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
-                            (h.number, h.desc, len(h.a), h.lena, len(h.b),
-                            h.lenb))
+            raise PatchError(
+                _("bad hunk #%d %s (%d %d %d %d)")
+                % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)
+            )
 
         self.hunks += 1
 
@@ -769,8 +798,10 @@
 
         if self.exists and self.create:
             if self.copysource:
-                self.ui.warn(_("cannot create %s: destination already "
-                               "exists\n") % self.fname)
+                self.ui.warn(
+                    _("cannot create %s: destination already " "exists\n")
+                    % self.fname
+                )
             else:
                 self.ui.warn(_("file %s already exists\n") % self.fname)
             self.rej.append(h)
@@ -787,8 +818,11 @@
             return 0
 
         horig = h
-        if (self.eolmode in ('crlf', 'lf')
-            or self.eolmode == 'auto' and self.eol):
+        if (
+            self.eolmode in ('crlf', 'lf')
+            or self.eolmode == 'auto'
+            and self.eol
+        ):
             # If new eols are going to be normalized, then normalize
             # hunk data before patching. Otherwise, preserve input
             # line-endings.
@@ -805,7 +839,7 @@
             if self.remove:
                 self.backend.unlink(self.fname)
             else:
-                self.lines[oldstart:oldstart + len(old)] = new
+                self.lines[oldstart : oldstart + len(old)] = new
                 self.offset += len(new) - len(old)
                 self.dirty = True
             return 0
@@ -835,15 +869,20 @@
                         self.dirty = True
                         offset = l - orig_start - fuzzlen
                         if fuzzlen:
-                            msg = _("Hunk #%d succeeded at %d "
-                                    "with fuzz %d "
-                                    "(offset %d lines).\n")
+                            msg = _(
+                                "Hunk #%d succeeded at %d "
+                                "with fuzz %d "
+                                "(offset %d lines).\n"
+                            )
                             self.printfile(True)
-                            self.ui.warn(msg %
-                                (h.number, l + 1, fuzzlen, offset))
+                            self.ui.warn(
+                                msg % (h.number, l + 1, fuzzlen, offset)
+                            )
                         else:
-                            msg = _("Hunk #%d succeeded at %d "
-                                    "(offset %d lines).\n")
+                            msg = _(
+                                "Hunk #%d succeeded at %d "
+                                "(offset %d lines).\n"
+                            )
                             self.ui.note(msg % (h.number, l + 1, offset))
                         return fuzzlen
         self.printfile(True)
@@ -857,9 +896,11 @@
         self.write_rej()
         return len(self.rej)
 
+
 class header(object):
     """patch header
     """
+
     diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
     diff_re = re.compile('diff -r .* (.*)$')
     allhunks_re = re.compile('(?:index|deleted file) ')
@@ -885,9 +926,13 @@
                     fp.write(_('this is a binary file\n'))
                 break
             if h.startswith('---'):
-                fp.write(_('%d hunks, %d lines changed\n') %
-                         (len(self.hunks),
-                          sum([max(h.added, h.removed) for h in self.hunks])))
+                fp.write(
+                    _('%d hunks, %d lines changed\n')
+                    % (
+                        len(self.hunks),
+                        sum([max(h.added, h.removed) for h in self.hunks]),
+                    )
+                )
                 break
             fp.write(h)
 
@@ -926,8 +971,10 @@
         # if they have some content as we want to be able to change it
         nocontent = len(self.header) == 2
         emptynewfile = self.isnewfile() and nocontent
-        return (emptynewfile
-                or any(self.special_re.match(h) for h in self.header))
+        return emptynewfile or any(
+            self.special_re.match(h) for h in self.header
+        )
+
 
 class recordhunk(object):
     """patch hunk
@@ -935,8 +982,17 @@
     XXX shouldn't we merge this with the other hunk class?
     """
 
-    def __init__(self, header, fromline, toline, proc, before, hunk, after,
-                 maxcontext=None):
+    def __init__(
+        self,
+        header,
+        fromline,
+        toline,
+        proc,
+        before,
+        hunk,
+        after,
+        maxcontext=None,
+    ):
         def trimcontext(lines, reverse=False):
             if maxcontext is not None:
                 delta = len(lines) - maxcontext
@@ -960,16 +1016,22 @@
         if not isinstance(v, recordhunk):
             return False
 
-        return ((v.hunk == self.hunk) and
-                (v.proc == self.proc) and
-                (self.fromline == v.fromline) and
-                (self.header.files() == v.header.files()))
+        return (
+            (v.hunk == self.hunk)
+            and (v.proc == self.proc)
+            and (self.fromline == v.fromline)
+            and (self.header.files() == v.header.files())
+        )
 
     def __hash__(self):
-        return hash((tuple(self.hunk),
-            tuple(self.header.files()),
-            self.fromline,
-            self.proc))
+        return hash(
+            (
+                tuple(self.hunk),
+                tuple(self.header.files()),
+                self.fromline,
+                self.proc,
+            )
+        )
 
     def countchanges(self, hunk):
         """hunk -> (n+,n-)"""
@@ -986,8 +1048,15 @@
         """
         m = {'+': '-', '-': '+', '\\': '\\'}
         hunk = ['%s%s' % (m[l[0:1]], l[1:]) for l in self.hunk]
-        return recordhunk(self.header, self.toline, self.fromline, self.proc,
-                          self.before, hunk, self.after)
+        return recordhunk(
+            self.header,
+            self.toline,
+            self.fromline,
+            self.proc,
+            self.before,
+            hunk,
+            self.after,
+        )
 
     def write(self, fp):
         delta = len(self.before) + len(self.after)
@@ -995,9 +1064,16 @@
             delta -= 1
         fromlen = delta + self.removed
         tolen = delta + self.added
-        fp.write('@@ -%d,%d +%d,%d @@%s\n' %
-                 (self.fromline, fromlen, self.toline, tolen,
-                  self.proc and (' ' + self.proc)))
+        fp.write(
+            '@@ -%d,%d +%d,%d @@%s\n'
+            % (
+                self.fromline,
+                fromlen,
+                self.toline,
+                tolen,
+                self.proc and (' ' + self.proc),
+            )
+        )
         fp.write(''.join(self.before + self.hunk + self.after))
 
     pretty = write
@@ -1008,6 +1084,7 @@
     def __repr__(self):
         return '<hunk %r@%d>' % (self.filename(), self.fromline)
 
+
 def getmessages():
     return {
         'multiple': {
@@ -1023,49 +1100,58 @@
             'record': _("record this change to '%s'?"),
         },
         'help': {
-            'apply': _('[Ynesfdaq?]'
-                         '$$ &Yes, apply this change'
-                         '$$ &No, skip this change'
-                         '$$ &Edit this change manually'
-                         '$$ &Skip remaining changes to this file'
-                         '$$ Apply remaining changes to this &file'
-                         '$$ &Done, skip remaining changes and files'
-                         '$$ Apply &all changes to all remaining files'
-                         '$$ &Quit, applying no changes'
-                         '$$ &? (display help)'),
-            'discard': _('[Ynesfdaq?]'
-                         '$$ &Yes, discard this change'
-                         '$$ &No, skip this change'
-                         '$$ &Edit this change manually'
-                         '$$ &Skip remaining changes to this file'
-                         '$$ Discard remaining changes to this &file'
-                         '$$ &Done, skip remaining changes and files'
-                         '$$ Discard &all changes to all remaining files'
-                         '$$ &Quit, discarding no changes'
-                         '$$ &? (display help)'),
-            'keep': _('[Ynesfdaq?]'
-                         '$$ &Yes, keep this change'
-                         '$$ &No, skip this change'
-                         '$$ &Edit this change manually'
-                         '$$ &Skip remaining changes to this file'
-                         '$$ Keep remaining changes to this &file'
-                         '$$ &Done, skip remaining changes and files'
-                         '$$ Keep &all changes to all remaining files'
-                         '$$ &Quit, keeping all changes'
-                         '$$ &? (display help)'),
-            'record': _('[Ynesfdaq?]'
-                        '$$ &Yes, record this change'
-                        '$$ &No, skip this change'
-                        '$$ &Edit this change manually'
-                        '$$ &Skip remaining changes to this file'
-                        '$$ Record remaining changes to this &file'
-                        '$$ &Done, skip remaining changes and files'
-                        '$$ Record &all changes to all remaining files'
-                        '$$ &Quit, recording no changes'
-                        '$$ &? (display help)'),
-        }
+            'apply': _(
+                '[Ynesfdaq?]'
+                '$$ &Yes, apply this change'
+                '$$ &No, skip this change'
+                '$$ &Edit this change manually'
+                '$$ &Skip remaining changes to this file'
+                '$$ Apply remaining changes to this &file'
+                '$$ &Done, skip remaining changes and files'
+                '$$ Apply &all changes to all remaining files'
+                '$$ &Quit, applying no changes'
+                '$$ &? (display help)'
+            ),
+            'discard': _(
+                '[Ynesfdaq?]'
+                '$$ &Yes, discard this change'
+                '$$ &No, skip this change'
+                '$$ &Edit this change manually'
+                '$$ &Skip remaining changes to this file'
+                '$$ Discard remaining changes to this &file'
+                '$$ &Done, skip remaining changes and files'
+                '$$ Discard &all changes to all remaining files'
+                '$$ &Quit, discarding no changes'
+                '$$ &? (display help)'
+            ),
+            'keep': _(
+                '[Ynesfdaq?]'
+                '$$ &Yes, keep this change'
+                '$$ &No, skip this change'
+                '$$ &Edit this change manually'
+                '$$ &Skip remaining changes to this file'
+                '$$ Keep remaining changes to this &file'
+                '$$ &Done, skip remaining changes and files'
+                '$$ Keep &all changes to all remaining files'
+                '$$ &Quit, keeping all changes'
+                '$$ &? (display help)'
+            ),
+            'record': _(
+                '[Ynesfdaq?]'
+                '$$ &Yes, record this change'
+                '$$ &No, skip this change'
+                '$$ &Edit this change manually'
+                '$$ &Skip remaining changes to this file'
+                '$$ Record remaining changes to this &file'
+                '$$ &Done, skip remaining changes and files'
+                '$$ Record &all changes to all remaining files'
+                '$$ &Quit, recording no changes'
+                '$$ &? (display help)'
+            ),
+        },
     }
 
+
 def filterpatch(ui, headers, match, operation=None):
     """Interactively filter patch chunks into applied-only chunks"""
     messages = getmessages()
@@ -1094,15 +1180,15 @@
             # chars is a good target) because of issue6158.
             r = ui.promptchoice("%s\n(enter ? for help) %s" % (query, resps))
             ui.write("\n")
-            if r == 8: # ?
+            if r == 8:  # ?
                 for c, t in ui.extractchoices(resps)[1]:
                     ui.write('%s - %s\n' % (c, encoding.lower(t)))
                 continue
-            elif r == 0: # yes
+            elif r == 0:  # yes
                 ret = True
-            elif r == 1: # no
+            elif r == 1:  # no
                 ret = False
-            elif r == 2: # Edit patch
+            elif r == 2:  # Edit patch
                 if chunk is None:
                     ui.write(_('cannot edit patch for whole file'))
                     ui.write("\n")
@@ -1113,7 +1199,8 @@
                     continue
                 # Patch comment based on the Git one (based on comment at end of
                 # https://mercurial-scm.org/wiki/RecordExtension)
-                phelp = '---' + _("""
+                phelp = '---' + _(
+                    """
 To remove '-' lines, make them ' ' lines (context).
 To remove '+' lines, delete them.
 Lines starting with # will be removed from the patch.
@@ -1123,23 +1210,28 @@
 file will be generated: you can use that when you try again. If
 all lines of the hunk are removed, then the edit is aborted and
 the hunk is left unchanged.
-""")
-                (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
-                                                      suffix=".diff")
+"""
+                )
+                (patchfd, patchfn) = pycompat.mkstemp(
+                    prefix="hg-editor-", suffix=".diff"
+                )
                 ncpatchfp = None
                 try:
                     # Write the initial patch
                     f = util.nativeeolwriter(os.fdopen(patchfd, r'wb'))
                     chunk.header.write(f)
                     chunk.write(f)
-                    f.write(''.join(['# ' + i + '\n'
-                                     for i in phelp.splitlines()]))
+                    f.write(
+                        ''.join(['# ' + i + '\n' for i in phelp.splitlines()])
+                    )
                     f.close()
                     # Start the editor and wait for it to complete
                     editor = ui.geteditor()
-                    ret = ui.system("%s \"%s\"" % (editor, patchfn),
-                                    environ={'HGUSER': ui.username()},
-                                    blockedtag='filterpatch')
+                    ret = ui.system(
+                        "%s \"%s\"" % (editor, patchfn),
+                        environ={'HGUSER': ui.username()},
+                        blockedtag='filterpatch',
+                    )
                     if ret != 0:
                         ui.warn(_("editor exited with exit code %d\n") % ret)
                         continue
@@ -1159,20 +1251,20 @@
                 # Signal that the chunk shouldn't be applied as-is, but
                 # provide the new patch to be used instead.
                 ret = False
-            elif r == 3: # Skip
+            elif r == 3:  # Skip
                 ret = skipfile = False
-            elif r == 4: # file (Record remaining)
+            elif r == 4:  # file (Record remaining)
                 ret = skipfile = True
-            elif r == 5: # done, skip remaining
+            elif r == 5:  # done, skip remaining
                 ret = skipall = False
-            elif r == 6: # all
+            elif r == 6:  # all
                 ret = skipall = True
-            elif r == 7: # quit
+            elif r == 7:  # quit
                 raise error.Abort(_('user quit'))
             return ret, skipfile, skipall, newpatches
 
     seen = set()
-    applied = {}        # 'filename' -> [] of chunks
+    applied = {}  # 'filename' -> [] of chunks
     skipfile, skipall = None, None
     pos, total = 1, sum(len(h.hunks) for h in headers)
     for h in headers:
@@ -1186,8 +1278,9 @@
         if skipall is None:
             h.pretty(ui)
         files = h.files()
-        msg = (_('examine changes to %s?') %
-               _(' and ').join("'%s'" % f for f in files))
+        msg = _('examine changes to %s?') % _(' and ').join(
+            "'%s'" % f for f in files
+        )
         if all(match.exact(f) for f in files):
             r, skipall, np = True, None, None
         else:
@@ -1205,10 +1298,14 @@
                 msg = messages['single'][operation] % chunk.filename()
             else:
                 idx = pos - len(h.hunks) + i
-                msg = messages['multiple'][operation] % (idx, total,
-                                                         chunk.filename())
-            r, skipfile, skipall, newpatches = prompt(skipfile,
-                    skipall, msg, chunk)
+                msg = messages['multiple'][operation] % (
+                    idx,
+                    total,
+                    chunk.filename(),
+                )
+            r, skipfile, skipall, newpatches = prompt(
+                skipfile, skipall, msg, chunk
+            )
             if r:
                 if fixoffset:
                     chunk = copy.copy(chunk)
@@ -1222,8 +1319,15 @@
                         applied[newhunk.filename()].append(newhunk)
             else:
                 fixoffset += chunk.removed - chunk.added
-    return (sum([h for h in applied.itervalues()
-               if h[0].special() or len(h) > 1], []), {})
+    return (
+        sum(
+            [h for h in applied.itervalues() if h[0].special() or len(h) > 1],
+            [],
+        ),
+        {},
+    )
+
+
 class hunk(object):
     def __init__(self, desc, num, lr, context):
         self.number = num
@@ -1279,8 +1383,9 @@
         self.starta = int(self.starta)
         self.startb = int(self.startb)
         try:
-            diffhelper.addlines(lr, self.hunk, self.lena, self.lenb,
-                                self.a, self.b)
+            diffhelper.addlines(
+                lr, self.hunk, self.lena, self.lenb, self.a, self.b
+            )
         except error.ParseError as e:
             raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
         # if we hit eof before finishing out the hunk, the last line will
@@ -1317,8 +1422,9 @@
             elif l.startswith('  '):
                 u = ' ' + s
             else:
-                raise PatchError(_("bad hunk #%d old text line %d") %
-                                 (self.number, x))
+                raise PatchError(
+                    _("bad hunk #%d old text line %d") % (self.number, x)
+                )
             self.a.append(u)
             self.hunk.append(u)
 
@@ -1363,8 +1469,9 @@
                 lr.push(l)
                 break
             else:
-                raise PatchError(_("bad hunk #%d old text line %d") %
-                                 (self.number, x))
+                raise PatchError(
+                    _("bad hunk #%d old text line %d") % (self.number, x)
+                )
             self.b.append(s)
             while True:
                 if hunki >= len(self.hunk):
@@ -1391,8 +1498,12 @@
                 if x.startswith('+') or x.startswith(' '):
                     self.b.append(x[1:])
         # @@ -start,len +start,len @@
-        self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
-                                             self.startb, self.lenb)
+        self.desc = "@@ -%d,%d +%d,%d @@\n" % (
+            self.starta,
+            self.lena,
+            self.startb,
+            self.lenb,
+        )
         self.hunk[0] = self.desc
         self._fixnewline(lr)
 
@@ -1430,7 +1541,7 @@
 
             bot = min(fuzz, bot)
             top = min(fuzz, top)
-            return old[top:len(old) - bot], new[top:len(new) - bot], top
+            return old[top : len(old) - bot], new[top : len(new) - bot], top
         return old, new, 0
 
     def fuzzit(self, fuzz, toponly):
@@ -1444,8 +1555,10 @@
             newstart -= 1
         return old, oldstart, new, newstart
 
+
 class binhunk(object):
     'A binary patch file.'
+
     def __init__(self, lr, fname):
         self.text = None
         self.delta = False
@@ -1470,8 +1583,9 @@
         while True:
             line = getline(lr, self.hunk)
             if not line:
-                raise PatchError(_('could not extract "%s" binary data')
-                                 % self._fname)
+                raise PatchError(
+                    _('could not extract "%s" binary data') % self._fname
+                )
             if line.startswith('literal '):
                 size = int(line[8:].rstrip())
                 break
@@ -1490,15 +1604,20 @@
             try:
                 dec.append(util.b85decode(line[1:])[:l])
             except ValueError as e:
-                raise PatchError(_('could not decode "%s" binary patch: %s')
-                                 % (self._fname, stringutil.forcebytestr(e)))
+                raise PatchError(
+                    _('could not decode "%s" binary patch: %s')
+                    % (self._fname, stringutil.forcebytestr(e))
+                )
             line = getline(lr, self.hunk)
         text = zlib.decompress(''.join(dec))
         if len(text) != size:
-            raise PatchError(_('"%s" length is %d bytes, should be %d')
-                             % (self._fname, len(text), size))
+            raise PatchError(
+                _('"%s" length is %d bytes, should be %d')
+                % (self._fname, len(text), size)
+            )
         self.text = text
 
+
 def parsefilename(str):
     # --- filename \t|space stuff
     s = str[4:].rstrip('\r\n')
@@ -1509,6 +1628,7 @@
             return s
     return s[:i]
 
+
 def reversehunks(hunks):
     '''reverse the signs in the hunks given as argument
 
@@ -1572,6 +1692,7 @@
         newhunks.append(c)
     return newhunks
 
+
 def parsepatch(originalchunks, maxcontext=None):
     """patch -> [] of headers -> [] of hunks
 
@@ -1615,8 +1736,10 @@
      8
     +9
     """
+
     class parser(object):
         """patch parsing state machine"""
+
         def __init__(self):
             self.fromline = 0
             self.toline = 0
@@ -1636,8 +1759,16 @@
 
         def addcontext(self, context):
             if self.hunk:
-                h = recordhunk(self.header, self.fromline, self.toline,
-                        self.proc, self.before, self.hunk, context, maxcontext)
+                h = recordhunk(
+                    self.header,
+                    self.fromline,
+                    self.toline,
+                    self.proc,
+                    self.before,
+                    self.hunk,
+                    context,
+                    maxcontext,
+                )
                 self.header.hunks.append(h)
                 self.fromline += len(self.before) + h.removed
                 self.toline += len(self.before) + h.added
@@ -1660,28 +1791,29 @@
             self.header = h
 
         def addother(self, line):
-            pass # 'other' lines are ignored
+            pass  # 'other' lines are ignored
 
         def finished(self):
             self.addcontext([])
             return self.headers
 
         transitions = {
-            'file': {'context': addcontext,
-                     'file': newfile,
-                     'hunk': addhunk,
-                     'range': addrange},
-            'context': {'file': newfile,
-                        'hunk': addhunk,
-                        'range': addrange,
-                        'other': addother},
-            'hunk': {'context': addcontext,
-                     'file': newfile,
-                     'range': addrange},
-            'range': {'context': addcontext,
-                      'hunk': addhunk},
+            'file': {
+                'context': addcontext,
+                'file': newfile,
+                'hunk': addhunk,
+                'range': addrange,
+            },
+            'context': {
+                'file': newfile,
+                'hunk': addhunk,
+                'range': addrange,
+                'other': addother,
+            },
+            'hunk': {'context': addcontext, 'file': newfile, 'range': addrange},
+            'range': {'context': addcontext, 'hunk': addhunk},
             'other': {'other': addother},
-            }
+        }
 
     p = parser()
     fp = stringio()
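
The `transitions` table above shows both sides of black's magic trailing comma. A collection written with a trailing comma stays one element per line; one without a trailing comma is collapsed onto a single line when it fits, as happened to the 'hunk' and 'range' rows. A sketch:

    # trailing comma present: black keeps the literal exploded
    exploded = {
        'context': 'addcontext',
        'file': 'newfile',
    }
    # no trailing comma and it fits: black joins it onto one line
    compact = {'context': 'addcontext', 'file': 'newfile'}
    assert exploded == compact
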
@@ -1693,12 +1825,14 @@
         try:
             p.transitions[state][newstate](p, data)
         except KeyError:
-            raise PatchError('unhandled transition: %s -> %s' %
-                                   (state, newstate))
+            raise PatchError(
+                'unhandled transition: %s -> %s' % (state, newstate)
+            )
         state = newstate
     del fp
     return p.finished()
 
+
 def pathtransform(path, strip, prefix):
     '''turn a path from a patch into a path suitable for the repository
 
@@ -1728,15 +1862,18 @@
     while count > 0:
         i = path.find('/', i)
         if i == -1:
-            raise PatchError(_("unable to strip away %d of %d dirs from %s") %
-                             (count, strip, path))
+            raise PatchError(
+                _("unable to strip away %d of %d dirs from %s")
+                % (count, strip, path)
+            )
         i += 1
         # consume '//' in the path
-        while i < pathlen - 1 and path[i:i + 1] == '/':
+        while i < pathlen - 1 and path[i : i + 1] == '/':
             i += 1
         count -= 1
     return path[:i].lstrip(), prefix + path[i:].rstrip()
 
+
 def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
     nulla = afile_orig == "/dev/null"
     nullb = bfile_orig == "/dev/null"
@@ -1753,17 +1890,22 @@
 
     # some diff programs apparently produce patches where the afile is
     # not /dev/null, but afile starts with bfile
-    abasedir = afile[:afile.rfind('/') + 1]
-    bbasedir = bfile[:bfile.rfind('/') + 1]
-    if (missing and abasedir == bbasedir and afile.startswith(bfile)
-        and hunk.starta == 0 and hunk.lena == 0):
+    abasedir = afile[: afile.rfind('/') + 1]
+    bbasedir = bfile[: bfile.rfind('/') + 1]
+    if (
+        missing
+        and abasedir == bbasedir
+        and afile.startswith(bfile)
+        and hunk.starta == 0
+        and hunk.lena == 0
+    ):
         create = True
         missing = False
 
     # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
     # diff is between a file and its backup. In this case, the original
     # file should be patched (see original mpatch code).
-    isbackup = (abase == bbase and bfile.startswith(afile))
+    isbackup = abase == bbase and bfile.startswith(afile)
     fname = None
     if not missing:
         if gooda and goodb:
@@ -1792,6 +1934,7 @@
         gp.op = 'DELETE'
     return gp
 
+
 def scanpatch(fp):
     """like patch.iterhunks, but yield different events
 
@@ -1816,9 +1959,11 @@
 
     for line in iter(lr.readline, ''):
         if line.startswith('diff --git a/') or line.startswith('diff -r '):
+
             def notheader(line):
                 s = line.split(None, 1)
                 return not s or s[0] not in ('---', 'diff')
+
             header = scanwhile(line, notheader)
             fromfile = lr.readline()
             if fromfile.startswith('---'):
@@ -1840,6 +1985,7 @@
             else:
                 yield 'other', line
 
+
 def scangitpatch(lr, firstline):
     """
     Git patches can emit:
@@ -1866,6 +2012,7 @@
     fp.seek(pos)
     return gitpatches
 
+
 def iterhunks(fp):
     """Read a patch and yield the following events:
     - ("file", afile, bfile, firsthunk): select a new target file.
@@ -1890,10 +2037,10 @@
         if state == BFILE and (
             (not context and x.startswith('@'))
             or (context is not False and x.startswith('***************'))
-            or x.startswith('GIT binary patch')):
+            or x.startswith('GIT binary patch')
+        ):
             gp = None
-            if (gitpatches and
-                gitpatches[-1].ispatching(afile, bfile)):
+            if gitpatches and gitpatches[-1].ispatching(afile, bfile):
                 gp = gitpatches.pop()
             if x.startswith('GIT binary patch'):
                 h = binhunk(lr, gp.path)
@@ -1913,8 +2060,9 @@
             if gitpatches is None:
                 # scan whole input for git metadata
                 gitpatches = scangitpatch(lr, x)
-                yield 'git', [g.copy() for g in gitpatches
-                              if g.op in ('COPY', 'RENAME')]
+                yield 'git', [
+                    g.copy() for g in gitpatches if g.op in ('COPY', 'RENAME')
+                ]
                 gitpatches.reverse()
             afile = 'a/' + m.group(1)
             bfile = 'b/' + m.group(2)
@@ -1922,8 +2070,9 @@
                 gp = gitpatches.pop()
                 yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
             if not gitpatches:
-                raise PatchError(_('failed to synchronize metadata for "%s"')
-                                 % afile[2:])
+                raise PatchError(
+                    _('failed to synchronize metadata for "%s"') % afile[2:]
+                )
             newfile = True
         elif x.startswith('---'):
             # check for a unified diff
@@ -1961,10 +2110,12 @@
         gp = gitpatches.pop()
         yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
 
+
 def applybindelta(binchunk, data):
     """Apply a binary delta hunk
     The algorithm used is the algorithm from git's patch-delta.c
     """
+
     def deltahead(binchunk):
         i = 0
         for c in pycompat.bytestr(binchunk):
@@ -1972,6 +2123,7 @@
             if not (ord(c) & 0x80):
                 return i
         return i
+
     out = ""
     s = deltahead(binchunk)
     binchunk = binchunk[s:]
@@ -1979,31 +2131,31 @@
     binchunk = binchunk[s:]
     i = 0
     while i < len(binchunk):
-        cmd = ord(binchunk[i:i + 1])
+        cmd = ord(binchunk[i : i + 1])
         i += 1
-        if (cmd & 0x80):
+        if cmd & 0x80:
             offset = 0
             size = 0
-            if (cmd & 0x01):
-                offset = ord(binchunk[i:i + 1])
+            if cmd & 0x01:
+                offset = ord(binchunk[i : i + 1])
                 i += 1
-            if (cmd & 0x02):
-                offset |= ord(binchunk[i:i + 1]) << 8
+            if cmd & 0x02:
+                offset |= ord(binchunk[i : i + 1]) << 8
                 i += 1
-            if (cmd & 0x04):
-                offset |= ord(binchunk[i:i + 1]) << 16
+            if cmd & 0x04:
+                offset |= ord(binchunk[i : i + 1]) << 16
                 i += 1
-            if (cmd & 0x08):
-                offset |= ord(binchunk[i:i + 1]) << 24
+            if cmd & 0x08:
+                offset |= ord(binchunk[i : i + 1]) << 24
                 i += 1
-            if (cmd & 0x10):
-                size = ord(binchunk[i:i + 1])
+            if cmd & 0x10:
+                size = ord(binchunk[i : i + 1])
                 i += 1
-            if (cmd & 0x20):
-                size |= ord(binchunk[i:i + 1]) << 8
+            if cmd & 0x20:
+                size |= ord(binchunk[i : i + 1]) << 8
                 i += 1
-            if (cmd & 0x40):
-                size |= ord(binchunk[i:i + 1]) << 16
+            if cmd & 0x40:
+                size |= ord(binchunk[i : i + 1]) << 16
                 i += 1
             if size == 0:
                 size = 0x10000
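
The loop above decodes git's pack delta opcodes: a command byte with the high bit set is a copy instruction, and each low bit pulls one more byte of the source offset or copy size. A worked example decoded by hand (hypothetical input, using Python 3 byte indexing where the code above slices for Python 2 compatibility):

    source = b'abcdefghijklmnopqrst'
    binchunk = b'\x91\x05\x0a'  # a single copy command
    cmd = binchunk[0]  # 0x91 = 0b10010001
    assert cmd & 0x80  # high bit set: copy from source
    offset = binchunk[1]  # bit 0x01 set: read one offset byte -> 5
    size = binchunk[2]  # bit 0x10 set: read one size byte -> 10
    assert source[offset : offset + size] == b'fghijklmno'
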
@@ -2017,6 +2169,7 @@
             raise PatchError(_('unexpected delta opcode 0'))
     return out
 
+
 def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
     """Reads a patch from fp and tries to apply it.
 
@@ -2027,8 +2180,17 @@
     read in binary mode. Otherwise, line endings are ignored when
     patching then normalized according to 'eolmode'.
     """
-    return _applydiff(ui, fp, patchfile, backend, store, strip=strip,
-                      prefix=prefix, eolmode=eolmode)
+    return _applydiff(
+        ui,
+        fp,
+        patchfile,
+        backend,
+        store,
+        strip=strip,
+        prefix=prefix,
+        eolmode=eolmode,
+    )
+
 
 def _canonprefix(repo, prefix):
     if prefix:
@@ -2037,9 +2199,12 @@
             prefix += '/'
     return prefix
 
-def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
-               eolmode='strict'):
+
+def _applydiff(
+    ui, fp, patcher, backend, store, strip=1, prefix='', eolmode='strict'
+):
     prefix = _canonprefix(backend.repo, prefix)
+
     def pstrip(p):
         return pathtransform(p, strip - 1, prefix)[1]
 
@@ -2064,8 +2229,9 @@
                 if gp.oldpath:
                     gp.oldpath = pstrip(gp.oldpath)
             else:
-                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
-                                   prefix)
+                gp = makepatchmeta(
+                    backend, afile, bfile, first_hunk, strip, prefix
+                )
             if gp.op == 'RENAME':
                 backend.unlink(gp.oldpath)
             if not first_hunk:
@@ -2077,8 +2243,9 @@
                     data, mode = store.getfile(gp.oldpath)[:2]
                     if data is None:
                         # This means that the old path does not exist
-                        raise PatchError(_("source file '%s' does not exist")
-                                           % gp.oldpath)
+                        raise PatchError(
+                            _("source file '%s' does not exist") % gp.oldpath
+                        )
                 if gp.mode:
                     mode = gp.mode
                     if gp.op == 'ADD':
@@ -2086,15 +2253,17 @@
                         # must be created
                         data = ''
                 if data or mode:
-                    if (gp.op in ('ADD', 'RENAME', 'COPY')
-                        and backend.exists(gp.path)):
-                        raise PatchError(_("cannot create %s: destination "
-                                           "already exists") % gp.path)
+                    if gp.op in ('ADD', 'RENAME', 'COPY') and backend.exists(
+                        gp.path
+                    ):
+                        raise PatchError(
+                            _("cannot create %s: destination " "already exists")
+                            % gp.path
+                        )
                     backend.setfile(gp.path, data, mode, gp.oldpath)
                 continue
             try:
-                current_file = patcher(ui, gp, backend, store,
-                                       eolmode=eolmode)
+                current_file = patcher(ui, gp, backend, store, eolmode=eolmode)
             except PatchError as inst:
                 ui.warn(str(inst) + '\n')
                 current_file = None
@@ -2122,8 +2291,8 @@
         return -1
     return err
 
-def _externalpatch(ui, repo, patcher, patchname, strip, files,
-                   similarity):
+
+def _externalpatch(ui, repo, patcher, patchname, strip, files, similarity):
     """use <patcher> to apply <patchname> to the working directory.
     returns whether patch was applied with fuzz factor."""
 
@@ -2132,8 +2301,12 @@
     cwd = repo.root
     if cwd:
         args.append('-d %s' % procutil.shellquote(cwd))
-    cmd = ('%s %s -p%d < %s'
-           % (patcher, ' '.join(args), strip, procutil.shellquote(patchname)))
+    cmd = '%s %s -p%d < %s' % (
+        patcher,
+        ' '.join(args),
+        strip,
+        procutil.shellquote(patchname),
+    )
     ui.debug('Using external patch tool: %s\n' % cmd)
     fp = procutil.popen(cmd, 'rb')
     try:
@@ -2162,12 +2335,15 @@
             scmutil.marktouched(repo, files, similarity)
     code = fp.close()
     if code:
-        raise PatchError(_("patch command failed: %s") %
-                         procutil.explainexit(code))
+        raise PatchError(
+            _("patch command failed: %s") % procutil.explainexit(code)
+        )
     return fuzz
 
-def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
-                 eolmode='strict'):
+
+def patchbackend(
+    ui, backend, patchobj, strip, prefix, files=None, eolmode='strict'
+):
     if files is None:
         files = set()
     if eolmode is None:
@@ -2182,8 +2358,9 @@
     except TypeError:
         fp = patchobj
     try:
-        ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
-                        eolmode=eolmode)
+        ret = applydiff(
+            ui, fp, backend, store, strip=strip, prefix=prefix, eolmode=eolmode
+        )
     finally:
         if fp != patchobj:
             fp.close()
@@ -2193,20 +2370,40 @@
         raise PatchError(_('patch failed to apply'))
     return ret > 0
 
-def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
-                  eolmode='strict', similarity=0):
+
+def internalpatch(
+    ui,
+    repo,
+    patchobj,
+    strip,
+    prefix='',
+    files=None,
+    eolmode='strict',
+    similarity=0,
+):
     """use builtin patch to apply <patchobj> to the working directory.
     returns whether patch was applied with fuzz factor."""
     backend = workingbackend(ui, repo, similarity)
     return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
 
-def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
-              eolmode='strict'):
+
+def patchrepo(
+    ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode='strict'
+):
     backend = repobackend(ui, repo, ctx, store)
     return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
 
-def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
-          similarity=0):
+
+def patch(
+    ui,
+    repo,
+    patchname,
+    strip=1,
+    prefix='',
+    files=None,
+    eolmode='strict',
+    similarity=0,
+):
     """Apply <patchname> to the working directory.
 
     'eolmode' specifies how end of lines should be handled. It can be:
@@ -2222,10 +2419,13 @@
     if files is None:
         files = set()
     if patcher:
-        return _externalpatch(ui, repo, patcher, patchname, strip,
-                              files, similarity)
-    return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
-                         similarity)
+        return _externalpatch(
+            ui, repo, patcher, patchname, strip, files, similarity
+        )
+    return internalpatch(
+        ui, repo, patchname, strip, prefix, files, eolmode, similarity
+    )
+
 
 def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
     backend = fsbackend(ui, repo.root)
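
The signature rewrites in this hunk illustrate black's two layouts for def headers: if every parameter fits within the line length inside the parentheses, they are joined on one line (as with `_applydiff` above); otherwise each parameter gets its own line plus a trailing comma (as with `patch`). Schematically, with hypothetical names:

    # fits: parameters stay on one line
    def internalpatch_sketch(ui, repo, patchobj, strip, prefix=''):
        pass


    # overflows: one parameter per line, trailing comma added
    def patch_sketch(
        ui,
        repo,
        patchname,
        strip=1,
        prefix='',
        files=None,
        eolmode='strict',
        similarity=0,
    ):
        pass
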
@@ -2238,11 +2438,13 @@
                 if gp:
                     gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
                     if gp.oldpath:
-                        gp.oldpath = pathtransform(gp.oldpath, strip - 1,
-                                                   prefix)[1]
+                        gp.oldpath = pathtransform(
+                            gp.oldpath, strip - 1, prefix
+                        )[1]
                 else:
-                    gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
-                                       prefix)
+                    gp = makepatchmeta(
+                        backend, afile, bfile, first_hunk, strip, prefix
+                    )
                 changed.add(gp.path)
                 if gp.op == 'RENAME':
                     changed.add(gp.oldpath)
@@ -2250,16 +2452,29 @@
                 raise error.Abort(_('unsupported parser state: %s') % state)
         return changed
 
+
 class GitDiffRequired(Exception):
     pass
 
+
 diffopts = diffutil.diffallopts
 diffallopts = diffutil.diffallopts
 difffeatureopts = diffutil.difffeatureopts
 
-def diff(repo, node1=None, node2=None, match=None, changes=None,
-         opts=None, losedatafn=None, pathfn=None, copy=None,
-         copysourcematch=None, hunksfilterfn=None):
+
+def diff(
+    repo,
+    node1=None,
+    node2=None,
+    match=None,
+    changes=None,
+    opts=None,
+    losedatafn=None,
+    pathfn=None,
+    copy=None,
+    copysourcematch=None,
+    hunksfilterfn=None,
+):
     '''yields diff of changes to files between two nodes, or node and
     working directory.
 
@@ -2296,15 +2511,24 @@
     ctx2 = repo[node2]
 
     for fctx1, fctx2, hdr, hunks in diffhunks(
-            repo, ctx1=ctx1, ctx2=ctx2, match=match, changes=changes, opts=opts,
-            losedatafn=losedatafn, pathfn=pathfn, copy=copy,
-            copysourcematch=copysourcematch):
+        repo,
+        ctx1=ctx1,
+        ctx2=ctx2,
+        match=match,
+        changes=changes,
+        opts=opts,
+        losedatafn=losedatafn,
+        pathfn=pathfn,
+        copy=copy,
+        copysourcematch=copysourcematch,
+    ):
         if hunksfilterfn is not None:
             # If the file has been removed, fctx2 is None; but this should
             # not occur here since we catch removed files early in
             # logcmdutil.getlinerangerevs() for 'hg log -L'.
-            assert fctx2 is not None, (
-                'fctx2 unexpectly None in diff hunks filtering')
+            assert (
+                fctx2 is not None
+            ), 'fctx2 unexpectedly None in diff hunks filtering'
             hunks = hunksfilterfn(fctx2, hunks)
         text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
         if hdr and (text or len(hdr) > 1):
@@ -2312,8 +2536,19 @@
         if text:
             yield text
 
-def diffhunks(repo, ctx1, ctx2, match=None, changes=None, opts=None,
-              losedatafn=None, pathfn=None, copy=None, copysourcematch=None):
+
+def diffhunks(
+    repo,
+    ctx1,
+    ctx2,
+    match=None,
+    changes=None,
+    opts=None,
+    losedatafn=None,
+    pathfn=None,
+    copy=None,
+    copysourcematch=None,
+):
     """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
     where `header` is a list of diff headers and `hunks` is an iterable of
     (`hunkrange`, `hunklines`) tuples.
@@ -2327,6 +2562,7 @@
     def lrugetfilectx():
         cache = {}
         order = collections.deque()
+
         def getfilectx(f, ctx):
             fctx = ctx.filectx(f, filelog=cache.get(f))
             if f not in cache:
@@ -2337,7 +2573,9 @@
                 order.remove(f)
             order.append(f)
             return fctx
+
         return getfilectx
+
     getfilectx = lrugetfilectx()
 
     if not changes:
@@ -2361,8 +2599,9 @@
     if copysourcematch:
         # filter out copies where source side isn't inside the matcher
         # (copies.pathcopies() already filtered out the destination)
-        copy = {dst: src for dst, src in copy.iteritems()
-                if copysourcematch(src)}
+        copy = {
+            dst: src for dst, src in copy.iteritems() if copysourcematch(src)
+        }
 
     modifiedset = set(modified)
     addedset = set(added)
@@ -2388,17 +2627,33 @@
             del copy[dst]
 
     prefetchmatch = scmutil.matchfiles(
-        repo, list(modifiedset | addedset | removedset))
+        repo, list(modifiedset | addedset | removedset)
+    )
     scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)
 
     def difffn(opts, losedata):
-        return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
-                       copy, getfilectx, opts, losedata, pathfn)
+        return trydiff(
+            repo,
+            revs,
+            ctx1,
+            ctx2,
+            modified,
+            added,
+            removed,
+            copy,
+            getfilectx,
+            opts,
+            losedata,
+            pathfn,
+        )
+
     if opts.upgrade and not opts.git:
         try:
+
             def losedata(fn):
                 if not losedatafn or not losedatafn(fn=fn):
                     raise GitDiffRequired
+
             # Buffer the whole output until we are sure it can be generated
             return list(difffn(opts.copy(git=False), losedata))
         except GitDiffRequired:
@@ -2406,6 +2661,7 @@
     else:
         return difffn(opts, None)
 
+
 def diffsinglehunk(hunklines):
     """yield tokens for a list of lines in a single hunk"""
     for line in hunklines:
@@ -2426,9 +2682,10 @@
                 yield (token, label)
 
         if chompline != stripline:
-            yield (chompline[len(stripline):], 'diff.trailingwhitespace')
+            yield (chompline[len(stripline) :], 'diff.trailingwhitespace')
         if chompline != line:
-            yield (line[len(chompline):], '')
+            yield (line[len(chompline) :], '')
+
 
 def diffsinglehunkinline(hunklines):
     """yield tokens for a list of lines in a single hunk, with inline colors"""
@@ -2467,8 +2724,10 @@
             btokens.append((changed, token))
 
     # yield deleted tokens, then inserted ones
-    for prefix, label, tokens in [('-', 'diff.deleted', atokens),
-                                  ('+', 'diff.inserted', btokens)]:
+    for prefix, label, tokens in [
+        ('-', 'diff.deleted', atokens),
+        ('+', 'diff.inserted', btokens),
+    ]:
         nextisnewline = True
         for changed, token in tokens:
             if nextisnewline:
@@ -2477,12 +2736,12 @@
             # special handling line end
             isendofline = token.endswith('\n')
             if isendofline:
-                chomp = token[:-1] # chomp
+                chomp = token[:-1]  # chomp
                 if chomp.endswith('\r'):
                     chomp = chomp[:-1]
-                endofline = token[len(chomp):]
-                token = chomp.rstrip() # detect spaces at the end
-                endspaces = chomp[len(token):]
+                endofline = token[len(chomp) :]
+                token = chomp.rstrip()  # detect spaces at the end
+                endspaces = chomp[len(token) :]
             # scan tabs
             for maybetab in tabsplitter.findall(token):
                 if b'\t' == maybetab[0:1]:
@@ -2499,29 +2758,34 @@
                 yield (endofline, '')
                 nextisnewline = True
 
+
 def difflabel(func, *args, **kw):
     '''yields 2-tuples of (output, label) based on the output of func()'''
     if kw.get(r'opts') and kw[r'opts'].worddiff:
         dodiffhunk = diffsinglehunkinline
     else:
         dodiffhunk = diffsinglehunk
-    headprefixes = [('diff', 'diff.diffline'),
-                    ('copy', 'diff.extended'),
-                    ('rename', 'diff.extended'),
-                    ('old', 'diff.extended'),
-                    ('new', 'diff.extended'),
-                    ('deleted', 'diff.extended'),
-                    ('index', 'diff.extended'),
-                    ('similarity', 'diff.extended'),
-                    ('---', 'diff.file_a'),
-                    ('+++', 'diff.file_b')]
-    textprefixes = [('@', 'diff.hunk'),
-                    # - and + are handled by diffsinglehunk
-                   ]
+    headprefixes = [
+        ('diff', 'diff.diffline'),
+        ('copy', 'diff.extended'),
+        ('rename', 'diff.extended'),
+        ('old', 'diff.extended'),
+        ('new', 'diff.extended'),
+        ('deleted', 'diff.extended'),
+        ('index', 'diff.extended'),
+        ('similarity', 'diff.extended'),
+        ('---', 'diff.file_a'),
+        ('+++', 'diff.file_b'),
+    ]
+    textprefixes = [
+        ('@', 'diff.hunk'),
+        # - and + are handled by diffsinglehunk
+    ]
     head = False
 
     # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
     hunkbuffer = []
+
     def consumehunkbuffer():
         if hunkbuffer:
             for token in dodiffhunk(hunkbuffer):
@@ -2560,8 +2824,10 @@
                     if stripline.startswith(prefix):
                         yield (stripline, label)
                         if line != stripline:
-                            yield (line[len(stripline):],
-                                   'diff.trailingwhitespace')
+                            yield (
+                                line[len(stripline) :],
+                                'diff.trailingwhitespace',
+                            )
                         break
                 else:
                     yield (line, '')
@@ -2570,10 +2836,12 @@
         for token in consumehunkbuffer():
             yield token
 
+
 def diffui(*args, **kw):
     '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
     return difflabel(diff, *args, **kw)
 
+
 def _filepairs(modified, added, removed, copy, opts):
     '''generates tuples (f1, f2, copyop), where f1 is the name of the file
    before and f2 is the name after. For added files, f1 will be None,
@@ -2602,13 +2870,29 @@
             f2 = None
             if opts.git:
                 # have we already reported a copy above?
-                if (f in copyto and copyto[f] in addedset
-                    and copy[copyto[f]] == f):
+                if (
+                    f in copyto
+                    and copyto[f] in addedset
+                    and copy[copyto[f]] == f
+                ):
                     continue
         yield f1, f2, copyop
 
-def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
-            copy, getfilectx, opts, losedatafn, pathfn):
+
+def trydiff(
+    repo,
+    revs,
+    ctx1,
+    ctx2,
+    modified,
+    added,
+    removed,
+    copy,
+    getfilectx,
+    opts,
+    losedatafn,
+    pathfn,
+):
     '''given input data, generate a diff and yield it in blocks
 
     If generating a diff would lose data like flags or binary data and
@@ -2668,28 +2952,36 @@
             binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)
 
         if losedatafn and not opts.git:
-            if (binary or
+            if (
+                binary
+                or
                 # copy/rename
-                f2 in copy or
+                f2 in copy
+                or
                 # empty file creation
-                (not f1 and isempty(fctx2)) or
+                (not f1 and isempty(fctx2))
+                or
                 # empty file deletion
-                (isempty(fctx1) and not f2) or
+                (isempty(fctx1) and not f2)
+                or
                 # create with flags
-                (not f1 and flag2) or
+                (not f1 and flag2)
+                or
                 # change flags
-                (f1 and f2 and flag1 != flag2)):
+                (f1 and f2 and flag1 != flag2)
+            ):
                 losedatafn(f2 or f1)
 
         path1 = pathfn(f1 or f2)
         path2 = pathfn(f2 or f1)
         header = []
         if opts.git:
-            header.append('diff --git %s%s %s%s' %
-                          (aprefix, path1, bprefix, path2))
-            if not f1: # added
+            header.append(
+                'diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)
+            )
+            if not f1:  # added
                 header.append('new file mode %s' % gitmode[flag2])
-            elif not f2: # removed
+            elif not f2:  # removed
                 header.append('deleted file mode %s' % gitmode[flag1])
             else:  # modified/copied/renamed
                 mode1, mode2 = gitmode[flag1], gitmode[flag2]
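
The `losedatafn` condition above shows how black handles comments inside a multi-line boolean expression: each commented operand keeps its comment on its own line, which leaves the `or` operators dangling alone. The shape, reduced to two operands:

    binary = False
    f1, f2 = None, 'x'
    lose = (
        binary
        or
        # copy/rename
        (not f1 and f2)
    )
    assert lose == 'x'
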
@@ -2716,8 +3008,9 @@
         #  yes      | yes  *        *   *     | text diff | yes
         #  no       | *    *        *   *     | text diff | yes
        # [1]: hash(fctx.data()) is output, so fctx.data() cannot be faked
-        if binary and (not opts.git or (opts.git and opts.nobinary and not
-                                        opts.index)):
+        if binary and (
+            not opts.git or (opts.git and opts.nobinary and not opts.index)
+        ):
             # fast path: no binary content will be displayed, content1 and
             # content2 are only used for equivalent test. cmp() could have a
             # fast path.
@@ -2725,7 +3018,7 @@
                 content1 = b'\0'
             if fctx2 is not None:
                 if fctx1 is not None and not fctx1.cmp(fctx2):
-                    content2 = b'\0' # not different
+                    content2 = b'\0'  # not different
                 else:
                     content2 = b'\0\0'
         else:
@@ -2738,26 +3031,38 @@
         if binary and opts.git and not opts.nobinary:
             text = mdiff.b85diff(content1, content2)
             if text:
-                header.append('index %s..%s' %
-                              (gitindex(content1), gitindex(content2)))
-            hunks = (None, [text]),
+                header.append(
+                    'index %s..%s' % (gitindex(content1), gitindex(content2))
+                )
+            hunks = ((None, [text]),)
         else:
             if opts.git and opts.index > 0:
                 flag = flag1
                 if flag is None:
                     flag = flag2
-                header.append('index %s..%s %s' %
-                              (gitindex(content1)[0:opts.index],
-                               gitindex(content2)[0:opts.index],
-                               gitmode[flag]))
-
-            uheaders, hunks = mdiff.unidiff(content1, date1,
-                                            content2, date2,
-                                            path1, path2,
-                                            binary=binary, opts=opts)
+                header.append(
+                    'index %s..%s %s'
+                    % (
+                        gitindex(content1)[0 : opts.index],
+                        gitindex(content2)[0 : opts.index],
+                        gitmode[flag],
+                    )
+                )
+
+            uheaders, hunks = mdiff.unidiff(
+                content1,
+                date1,
+                content2,
+                date2,
+                path1,
+                path2,
+                binary=binary,
+                opts=opts,
+            )
             header.extend(uheaders)
         yield fctx1, fctx2, header, hunks
 
+
 def diffstatsum(stats):
     maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
     for f, a, r, b in stats:
@@ -2769,6 +3074,7 @@
 
     return maxfile, maxtotal, addtotal, removetotal, binary
 
+
 def diffstatdata(lines):
     diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')
 
@@ -2802,8 +3108,9 @@
             adds += 1
         elif line.startswith('-') and not inheader:
             removes += 1
-        elif (line.startswith('GIT binary patch') or
-              line.startswith('Binary file')):
+        elif line.startswith('GIT binary patch') or line.startswith(
+            'Binary file'
+        ):
             isbinary = True
         elif line.startswith('rename from'):
             filename = line[12:]
@@ -2812,6 +3119,7 @@
     addresult()
     return results
 
+
 def diffstat(lines, width=80):
     output = []
     stats = diffstatdata(lines)
@@ -2839,17 +3147,27 @@
             count = '%d' % (adds + removes)
         pluses = '+' * scale(adds)
         minuses = '-' * scale(removes)
-        output.append(' %s%s |  %*s %s%s\n' %
-                      (filename, ' ' * (maxname - encoding.colwidth(filename)),
-                       countwidth, count, pluses, minuses))
+        output.append(
+            ' %s%s |  %*s %s%s\n'
+            % (
+                filename,
+                ' ' * (maxname - encoding.colwidth(filename)),
+                countwidth,
+                count,
+                pluses,
+                minuses,
+            )
+        )
 
     if stats:
-        output.append(_(' %d files changed, %d insertions(+), '
-                        '%d deletions(-)\n')
-                      % (len(stats), totaladds, totalremoves))
+        output.append(
+            _(' %d files changed, %d insertions(+), ' '%d deletions(-)\n')
+            % (len(stats), totaladds, totalremoves)
+        )
 
     return ''.join(output)
 
+
 def diffstatui(*args, **kw):
     '''like diffstat(), but yields 2-tuples of (output, label) for
     ui.write()
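
The patch.py hunks above are purely mechanical re-wraps of the diffstat
helpers; behavior is unchanged. A minimal sketch of how those helpers
compose (not part of the patch; the sample diff lines and the import are
illustrative):

    from mercurial import patch

    # a tiny hand-written unified diff, as a list of byte strings
    lines = [
        b'diff -r 0123456789ab a.txt',
        b'--- a/a.txt',
        b'+++ b/a.txt',
        b'@@ -0,0 +1,2 @@',
        b'+one',
        b'+two',
    ]
    stats = patch.diffstatdata(lines)  # [(filename, adds, removes, isbinary)]
    maxfile, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
    print(patch.diffstat(lines))       # the ' a.txt |  2 ++' style summary
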
--- a/mercurial/pathutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pathutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,11 @@
     util,
 )
 
+
 def _lowerclean(s):
     return encoding.hfsignoreclean(s.lower())
 
+
 class pathauditor(object):
     '''ensure that a filesystem path contains no banned components.
     the following properties of a path are checked:
@@ -63,25 +65,30 @@
         if util.endswithsep(path):
             raise error.Abort(_("path ends in directory separator: %s") % path)
         parts = util.splitpath(path)
-        if (os.path.splitdrive(path)[0]
+        if (
+            os.path.splitdrive(path)[0]
             or _lowerclean(parts[0]) in ('.hg', '.hg.', '')
-            or pycompat.ospardir in parts):
+            or pycompat.ospardir in parts
+        ):
             raise error.Abort(_("path contains illegal component: %s") % path)
         # Windows shortname aliases
         for p in parts:
             if "~" in p:
                 first, last = p.split("~", 1)
                 if last.isdigit() and first.upper() in ["HG", "HG8B6C"]:
-                    raise error.Abort(_("path contains illegal component: %s")
-                                     % path)
+                    raise error.Abort(
+                        _("path contains illegal component: %s") % path
+                    )
         if '.hg' in _lowerclean(path):
             lparts = [_lowerclean(p.lower()) for p in parts]
             for p in '.hg', '.hg.':
                 if p in lparts[1:]:
                     pos = lparts.index(p)
                     base = os.path.join(*parts[:pos])
-                    raise error.Abort(_("path '%s' is inside nested repo %r")
-                                      % (path, pycompat.bytestr(base)))
+                    raise error.Abort(
+                        _("path '%s' is inside nested repo %r")
+                        % (path, pycompat.bytestr(base))
+                    )
 
         normparts = util.splitpath(normpath)
         assert len(parts) == len(normparts)
@@ -93,8 +100,8 @@
         # This means we won't accidentally traverse a symlink into some other
         # filesystem (which is potentially expensive to access).
         for i in range(len(parts)):
-            prefix = pycompat.ossep.join(parts[:i + 1])
-            normprefix = pycompat.ossep.join(normparts[:i + 1])
+            prefix = pycompat.ossep.join(parts[: i + 1])
+            normprefix = pycompat.ossep.join(normparts[: i + 1])
             if normprefix in self.auditeddir:
                 continue
             if self._realfs:
@@ -119,11 +126,14 @@
                 raise
         else:
             if stat.S_ISLNK(st.st_mode):
-                msg = (_('path %r traverses symbolic link %r')
-                       % (pycompat.bytestr(path), pycompat.bytestr(prefix)))
+                msg = _('path %r traverses symbolic link %r') % (
+                    pycompat.bytestr(path),
+                    pycompat.bytestr(prefix),
+                )
                 raise error.Abort(msg)
-            elif (stat.S_ISDIR(st.st_mode) and
-                  os.path.isdir(os.path.join(curpath, '.hg'))):
+            elif stat.S_ISDIR(st.st_mode) and os.path.isdir(
+                os.path.join(curpath, '.hg')
+            ):
                 if not self.callback or not self.callback(curpath):
                     msg = _("path '%s' is inside nested repo %r")
                     raise error.Abort(msg % (path, pycompat.bytestr(prefix)))
@@ -135,6 +145,7 @@
         except (OSError, error.Abort):
             return False
 
+
 def canonpath(root, cwd, myname, auditor=None):
     '''return the canonical path of myname, given cwd and root
 
@@ -188,7 +199,7 @@
     if auditor is None:
         auditor = pathauditor(root)
     if name != rootsep and name.startswith(rootsep):
-        name = name[len(rootsep):]
+        name = name[len(rootsep) :]
         auditor(name)
         return util.pconvert(name)
     elif name == root:
@@ -228,12 +239,14 @@
                 relpath = util.pathto(root, cwd, '')
                 if relpath.endswith(pycompat.ossep):
                     relpath = relpath[:-1]
-                hint = (_("consider using '--cwd %s'") % relpath)
+                hint = _("consider using '--cwd %s'") % relpath
         except error.Abort:
             pass
 
-        raise error.Abort(_("%s not under root '%s'") % (myname, root),
-                         hint=hint)
+        raise error.Abort(
+            _("%s not under root '%s'") % (myname, root), hint=hint
+        )
+
 
 def normasprefix(path):
     '''normalize the specified path as path prefix
@@ -257,6 +270,7 @@
     else:
         return path
 
+
 # forward two methods from posixpath that do what we need, but we'd
 # rather not let our internals know that we're thinking in posix terms
 # - instead we'll let them be oblivious.
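
The pathutil.py changes are likewise formatting-only. For context, a short
sketch of the audited-path flow these functions implement (the repository
root and paths are illustrative):

    from mercurial import pathutil

    auditor = pathutil.pathauditor(b'/repo')  # audit paths under this root
    auditor(b'src/module.py')   # ok: returns silently
    auditor(b'.hg/hgrc')        # raises error.Abort: illegal component
    # canonpath() resolves a name against root/cwd, then runs the auditor
    pathutil.canonpath(b'/repo', b'/repo/src', b'module.py', auditor=auditor)
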
--- a/mercurial/phases.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/phases.py	Sun Oct 06 09:45:02 2019 -0400
@@ -123,8 +123,8 @@
 
 _fphasesentry = struct.Struct('>i20s')
 
-INTERNAL_FLAG = 64 # Phases for mercurial internal usage only
-HIDEABLE_FLAG = 32 # Phases that are hideable
+INTERNAL_FLAG = 64  # Phases for mercurial internal usage only
+HIDEABLE_FLAG = 32  # Phases that are hideable
 
 # record phase index
 public, draft, secret = range(3)
@@ -135,7 +135,7 @@
 # record phase names
 cmdphasenames = ['public', 'draft', 'secret']  # known to `hg phase` command
 phasenames = [None] * len(allphases)
-phasenames[:len(cmdphasenames)] = cmdphasenames
+phasenames[: len(cmdphasenames)] = cmdphasenames
 phasenames[archived] = 'archived'
 phasenames[internal] = 'internal'
 # record phase property
@@ -143,10 +143,12 @@
 remotehiddenphases = tuple(allphases[2:])
 localhiddenphases = tuple(p for p in allphases if p & HIDEABLE_FLAG)
 
+
 def supportinternal(repo):
     """True if the internal phase can be used on a repository"""
     return 'internal-phase' in repo.requirements
 
+
 def _readroots(repo, phasedefaults=None):
     """Read phase roots from disk
 
@@ -178,6 +180,7 @@
         dirty = True
     return roots, dirty
 
+
 def binaryencode(phasemapping):
     """encode a 'phase -> nodes' mapping into a binary stream
 
@@ -190,6 +193,7 @@
             binarydata.append(_fphasesentry.pack(phase, head))
     return ''.join(binarydata)
 
+
 def binarydecode(stream):
     """decode a binary stream into a 'phase -> nodes' mapping
 
@@ -206,6 +210,7 @@
         headsbyphase[phase].append(node)
     return headsbyphase
 
+
 def _trackphasechange(data, rev, old, new):
     """add a phase move the <data> dictionnary
 
@@ -218,6 +223,7 @@
         old = existing[0]
     data[rev] = (old, new)
 
+
 class phasecache(object):
     def __init__(self, repo, phasedefaults, _load=True):
         if _load:
@@ -230,7 +236,7 @@
 
     def getrevset(self, repo, phases, subset=None):
         """return a smartset for the given phases"""
-        self.loadphaserevs(repo) # ensure phase's sets are loaded
+        self.loadphaserevs(repo)  # ensure phase's sets are loaded
         phases = set(phases)
         if public not in phases:
             # fast path: _phasesets contains the interesting sets,
@@ -274,16 +280,22 @@
 
     def replace(self, phcache):
         """replace all values in 'self' with content of phcache"""
-        for a in ('phaseroots', 'dirty', 'opener', '_loadedrevslen',
-                  '_phasesets'):
+        for a in (
+            'phaseroots',
+            'dirty',
+            'opener',
+            '_loadedrevslen',
+            '_phasesets',
+        ):
             setattr(self, a, getattr(phcache, a))
 
     def _getphaserevsnative(self, repo):
         repo = repo.unfiltered()
         nativeroots = []
         for phase in trackedphases:
-            nativeroots.append(pycompat.maplist(repo.changelog.rev,
-                                                self.phaseroots[phase]))
+            nativeroots.append(
+                pycompat.maplist(repo.changelog.rev, self.phaseroots[phase])
+            )
         return repo.changelog.computephases(nativeroots)
 
     def _computephaserevspure(self, repo):
@@ -387,14 +399,15 @@
 
         repo = repo.unfiltered()
 
-        changes = set() # set of revisions to be changed
-        delroots = [] # set of root deleted by this path
+        changes = set()  # set of revisions to be changed
+        delroots = []  # set of roots deleted by this path
         for phase in pycompat.xrange(targetphase + 1, len(allphases)):
             # filter nodes that are not in a compatible phase already
-            nodes = [n for n in nodes
-                     if self.phase(repo, repo[n].rev()) >= phase]
+            nodes = [
+                n for n in nodes if self.phase(repo, repo[n].rev()) >= phase
+            ]
             if not nodes:
-                break # no roots to move anymore
+                break  # no roots to move anymore
 
             olds = self.phaseroots[phase]
 
@@ -403,11 +416,14 @@
             if dryrun:
                 continue
             for r in affected:
-                _trackphasechange(phasetracking, r, self.phase(repo, r),
-                                  targetphase)
+                _trackphasechange(
+                    phasetracking, r, self.phase(repo, r), targetphase
+                )
 
-            roots = set(ctx.node() for ctx in repo.set(
-                    'roots((%ln::) - %ld)', olds, affected))
+            roots = set(
+                ctx.node()
+                for ctx in repo.set('roots((%ln::) - %ld)', olds, affected)
+            )
             if olds != roots:
                 self._updateroots(phase, roots, tr)
                 # some roots may need to be declared for lower phases
@@ -420,14 +436,16 @@
         return changes
 
     def retractboundary(self, repo, tr, targetphase, nodes):
-        oldroots = self.phaseroots[:targetphase + 1]
+        oldroots = self.phaseroots[: targetphase + 1]
         if tr is None:
             phasetracking = None
         else:
             phasetracking = tr.changes.get('phases')
         repo = repo.unfiltered()
-        if (self._retractboundary(repo, tr, targetphase, nodes)
-            and phasetracking is not None):
+        if (
+            self._retractboundary(repo, tr, targetphase, nodes)
+            and phasetracking is not None
+        ):
 
             # find the affected revisions
             new = self.phaseroots[targetphase]
@@ -440,7 +458,7 @@
                     roots = oldroots[phase]
                     revs = set(repo.revs('%ln::%ld', roots, affected))
                     affected -= revs
-                else: # public phase
+                else:  # public phase
                     revs = affected
                 for r in revs:
                     _trackphasechange(phasetracking, r, phase, targetphase)
@@ -457,8 +475,9 @@
         repo = repo.unfiltered()
         currentroots = self.phaseroots[targetphase]
         finalroots = oldroots = set(currentroots)
-        newroots = [n for n in nodes
-                    if self.phase(repo, repo[n].rev()) < targetphase]
+        newroots = [
+            n for n in nodes if self.phase(repo, repo[n].rev()) < targetphase
+        ]
         if newroots:
 
             if nullid in newroots:
@@ -469,12 +488,14 @@
             # Only compute new roots for revs above the roots that are being
             # retracted.
             minnewroot = min(repo[n].rev() for n in newroots)
-            aboveroots = [n for n in currentroots
-                          if repo[n].rev() >= minnewroot]
+            aboveroots = [
+                n for n in currentroots if repo[n].rev() >= minnewroot
+            ]
             updatedroots = repo.set('roots(%ln::)', aboveroots)
 
-            finalroots = set(n for n in currentroots if repo[n].rev() <
-                             minnewroot)
+            finalroots = set(
+                n for n in currentroots if repo[n].rev() < minnewroot
+            )
             finalroots.update(ctx.node() for ctx in updatedroots)
         if finalroots != oldroots:
             self._updateroots(targetphase, finalroots, tr)
@@ -487,14 +508,15 @@
         Nothing is lost as unknown nodes only hold data for their descendants.
         """
         filtered = False
-        nodemap = repo.changelog.nodemap # to filter unknown nodes
+        nodemap = repo.changelog.nodemap  # to filter unknown nodes
         for phase, nodes in enumerate(self.phaseroots):
             missing = sorted(node for node in nodes if node not in nodemap)
             if missing:
                 for mnode in missing:
                     repo.ui.debug(
                         'removing unknown node %s from %i-phase boundary\n'
-                        % (short(mnode), phase))
+                        % (short(mnode), phase)
+                    )
                 nodes.symmetric_difference_update(missing)
                 filtered = True
         if filtered:
@@ -509,6 +531,7 @@
         # (see branchmap one)
         self.invalidate()
 
+
 def advanceboundary(repo, tr, targetphase, nodes, dryrun=None):
     """Add nodes to a phase changing other nodes phases if necessary.
 
@@ -522,12 +545,14 @@
     Returns a set of revs whose phase is changed or should be changed
     """
     phcache = repo._phasecache.copy()
-    changes = phcache.advanceboundary(repo, tr, targetphase, nodes,
-                                      dryrun=dryrun)
+    changes = phcache.advanceboundary(
+        repo, tr, targetphase, nodes, dryrun=dryrun
+    )
     if not dryrun:
         repo._phasecache.replace(phcache)
     return changes
 
+
 def retractboundary(repo, tr, targetphase, nodes):
     """Set nodes back to a phase changing other nodes phases if
     necessary.
@@ -540,6 +565,7 @@
     phcache.retractboundary(repo, tr, targetphase, nodes)
     repo._phasecache.replace(phcache)
 
+
 def registernew(repo, tr, targetphase, nodes):
     """register a new revision and its phase
 
@@ -550,6 +576,7 @@
     phcache.registernew(repo, tr, targetphase, nodes)
     repo._phasecache.replace(phcache)
 
+
 def listphases(repo):
     """List phases root for serialization over pushkey"""
     # Use ordered dictionary so behavior is deterministic.
@@ -580,13 +607,14 @@
         keys['publishing'] = 'True'
     return keys
 
+
 def pushphase(repo, nhex, oldphasestr, newphasestr):
     """List phases root for serialization over pushkey"""
     repo = repo.unfiltered()
     with repo.lock():
         currentphase = repo[nhex].phase()
-        newphase = abs(int(newphasestr)) # let's avoid negative index surprise
-        oldphase = abs(int(oldphasestr)) # let's avoid negative index surprise
+        newphase = abs(int(newphasestr))  # let's avoid negative index surprise
+        oldphase = abs(int(oldphasestr))  # let's avoid negative index surprise
         if currentphase == oldphase and newphase < oldphase:
             with repo.transaction('pushkey-phase') as tr:
                 advanceboundary(repo, tr, newphase, [bin(nhex)])
@@ -597,6 +625,7 @@
         else:
             return False
 
+
 def subsetphaseheads(repo, subset):
     """Finds the phase heads for a subset of a history
 
@@ -613,6 +642,7 @@
         headsbyphase[phase] = [cl.node(r) for r in repo.revs(revset, subset)]
     return headsbyphase
 
+
 def updatephases(repo, trgetter, headsbyphase):
     """Updates the repo with the given phase heads"""
     # Now advance phase boundaries of all but secret phase
@@ -626,6 +656,7 @@
         if heads:
             advanceboundary(repo, trgetter(), phase, heads)
 
+
 def analyzeremotephases(repo, subset, roots):
     """Compute phases heads and root in a subset of node from root dict
 
@@ -637,26 +668,31 @@
     repo = repo.unfiltered()
     # build list from dictionary
     draftroots = []
-    nodemap = repo.changelog.nodemap # to filter unknown nodes
+    nodemap = repo.changelog.nodemap  # to filter unknown nodes
     for nhex, phase in roots.iteritems():
-        if nhex == 'publishing': # ignore data related to publish option
+        if nhex == 'publishing':  # ignore data related to publish option
             continue
         node = bin(nhex)
         phase = int(phase)
         if phase == public:
             if node != nullid:
-                repo.ui.warn(_('ignoring inconsistent public root'
-                               ' from remote: %s\n') % nhex)
+                repo.ui.warn(
+                    _('ignoring inconsistent public root' ' from remote: %s\n')
+                    % nhex
+                )
         elif phase == draft:
             if node in nodemap:
                 draftroots.append(node)
         else:
-            repo.ui.warn(_('ignoring unexpected root from remote: %i %s\n')
-                         % (phase, nhex))
+            repo.ui.warn(
+                _('ignoring unexpected root from remote: %i %s\n')
+                % (phase, nhex)
+            )
     # compute heads
     publicheads = newheads(repo, subset, draftroots)
     return publicheads, draftroots
 
+
 class remotephasessummary(object):
     """summarize phase information on the remote side
 
@@ -678,6 +714,7 @@
         dheads = unfi.set('heads(%ln::%ln)', self.draftroots, remotesubset)
         self.draftheads = [c.node() for c in dheads]
 
+
 def newheads(repo, heads, roots):
     """compute new head of a subset minus another
 
@@ -703,8 +740,9 @@
     new_heads.difference_update(affected_zone)
     # revisions in the area have children outside of it,
     # They might be new heads
-    candidates = repo.revs("parents(%ld + (%ld and merge())) and not null",
-                           roots, affected_zone)
+    candidates = repo.revs(
+        "parents(%ld + (%ld and merge())) and not null", roots, affected_zone
+    )
     candidates -= affected_zone
     if new_heads or candidates:
         # remove candidate that are ancestors of other heads
@@ -715,6 +753,7 @@
 
     return pycompat.maplist(cl.node, sorted(new_heads))
 
+
 def newcommitphase(ui):
     """helper to get the target phase of new commit
 
@@ -731,15 +770,15 @@
             msg = _("phases.new-commit: not a valid phase name ('%s')")
             raise error.ConfigError(msg % v)
 
+
 def hassecret(repo):
     """utility function that check if a repo have any secret changeset."""
     return bool(repo._phasecache.phaseroots[2])
 
+
 def preparehookargs(node, old, new):
     if old is None:
         old = ''
     else:
         old = phasenames[old]
-    return {'node': node,
-            'oldphase': old,
-            'phase': phasenames[new]}
+    return {'node': node, 'oldphase': old, 'phase': phasenames[new]}
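
None of the phases.py hunks change behavior either; preparehookargs(),
re-flowed just above, still maps phase indexes through phasenames exactly
as before (the node value here is illustrative):

    from mercurial import phases

    phases.preparehookargs(b'abc123', None, phases.public)
    # -> {'node': b'abc123', 'oldphase': '', 'phase': 'public'}
    phases.preparehookargs(b'abc123', phases.draft, phases.secret)
    # -> {'node': b'abc123', 'oldphase': 'draft', 'phase': 'secret'}
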
--- a/mercurial/posix.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/posix.py	Sun Oct 06 09:45:02 2019 -0400
@@ -40,8 +40,11 @@
     # poor souls, just say we tried and that it failed so we fall back
     # to copies.
     def oslink(src, dst):
-        raise OSError(errno.EINVAL,
-                      'hardlinks not supported: %s to %s' % (src, dst))
+        raise OSError(
+            errno.EINVAL, 'hardlinks not supported: %s to %s' % (src, dst)
+        )
+
+
 readlink = os.readlink
 unlink = os.unlink
 rename = os.rename
@@ -52,6 +55,7 @@
 os.umask(umask)
 
 if not pycompat.ispy3:
+
     def posixfile(name, mode=r'r', buffering=-1):
         fp = open(name, mode=mode, buffering=buffering)
         # The position when opening in append mode is implementation defined, so
@@ -59,11 +63,14 @@
         if r'a' in mode:
             fp.seek(0, os.SEEK_END)
         return fp
+
+
 else:
     # The underlying file object seeks as required in Python 3:
     # https://github.com/python/cpython/blob/v3.7.3/Modules/_io/fileio.c#L474
     posixfile = open
 
+
 def split(p):
     '''Same as posixpath.split, but faster
 
@@ -86,39 +93,46 @@
         return nh, ht[1]
     return ht[0] + '/', ht[1]
 
+
 def openhardlinks():
     '''return true if it is safe to hold open file handles to hardlinks'''
     return True
 
+
 def nlinks(name):
     '''return number of hardlinks for the given file'''
     return os.lstat(name).st_nlink
 
+
 def parsepatchoutput(output_line):
     """parses the output produced by patch and returns the filename"""
     pf = output_line[14:]
     if pycompat.sysplatform == 'OpenVMS':
         if pf[0] == '`':
-            pf = pf[1:-1] # Remove the quotes
+            pf = pf[1:-1]  # Remove the quotes
     else:
         if pf.startswith("'") and pf.endswith("'") and " " in pf:
-            pf = pf[1:-1] # Remove the quotes
+            pf = pf[1:-1]  # Remove the quotes
     return pf
 
+
 def sshargs(sshcmd, host, user, port):
     '''Build argument list for ssh'''
     args = user and ("%s@%s" % (user, host)) or host
     if '-' in args[:1]:
         raise error.Abort(
-            _('illegal ssh hostname or username starting with -: %s') % args)
+            _('illegal ssh hostname or username starting with -: %s') % args
+        )
     args = shellquote(args)
     if port:
         args = '-p %s %s' % (shellquote(port), args)
     return args
 
+
 def isexec(f):
     """check whether a file is executable"""
-    return (os.lstat(f).st_mode & 0o100 != 0)
+    return os.lstat(f).st_mode & 0o100 != 0
+
 
 def setflags(f, l, x):
     st = os.lstat(f)
@@ -146,7 +160,7 @@
         fp = open(f, "wb")
         fp.write(data)
         fp.close()
-        s = 0o666 & ~umask # avoid restatting for chmod
+        s = 0o666 & ~umask  # avoid restatting for chmod
 
     sx = s & 0o100
     if st.st_nlink > 1 and bool(x) != bool(sx):
@@ -165,6 +179,7 @@
         # Turn off all +x bits
         os.chmod(f, s & 0o666)
 
+
 def copymode(src, dst, mode=None, enforcewritable=False):
     '''Copy the file mode from the file at path src to dst.
     If src doesn't exist, we're using mode instead. If mode is None, we're
@@ -186,6 +201,7 @@
 
     os.chmod(dst, new_mode)
 
+
 def checkexec(path):
     """
     Check whether the given path is on a filesystem with UNIX-like exec flags
@@ -234,7 +250,7 @@
                     except OSError as e:
                         if e.errno != errno.ENOENT:
                             raise
-                        open(checknoexec, 'w').close() # might fail
+                        open(checknoexec, 'w').close()  # might fail
                         m = os.stat(checknoexec).st_mode
                     if m & EXECFLAGS == 0:
                         # check-exec is exec and check-no-exec is not exec
@@ -268,6 +284,7 @@
         # we don't care, the user probably won't be able to commit anyway
         return False
 
+
 def checklink(path):
     """check whether the given path is on a symlink-capable filesystem"""
     # mktemp is not racy because symlink creation will fail if the
@@ -283,14 +300,16 @@
         else:
             checkdir = path
             cachedir = None
-        name = tempfile.mktemp(dir=pycompat.fsdecode(checkdir),
-                               prefix=r'checklink-')
+        name = tempfile.mktemp(
+            dir=pycompat.fsdecode(checkdir), prefix=r'checklink-'
+        )
         name = pycompat.fsencode(name)
         try:
             fd = None
             if cachedir is None:
-                fd = pycompat.namedtempfile(dir=checkdir,
-                                            prefix='hg-checklink-')
+                fd = pycompat.namedtempfile(
+                    dir=checkdir, prefix='hg-checklink-'
+                )
                 target = os.path.basename(fd.name)
             else:
                 # create a fixed file to link to; doesn't matter if it
@@ -334,10 +353,12 @@
                 unlink(name)
             return False
 
+
 def checkosfilename(path):
     '''Check that the base-relative path is a valid filename on this platform.
     Returns None if the path is ok, or a UI string describing the problem.'''
-    return None # on posix platforms, every path is ok
+    return None  # on posix platforms, every path is ok
+
 
 def getfsmountpoint(dirpath):
     '''Get the filesystem mount point from a directory (best-effort)
@@ -346,6 +367,7 @@
     '''
     return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
 
+
 def getfstype(dirpath):
     '''Get the filesystem type name from a directory (best-effort)
 
@@ -353,20 +375,25 @@
     '''
     return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
 
+
 def setbinary(fd):
     pass
 
+
 def pconvert(path):
     return path
 
+
 def localpath(path):
     return path
 
+
 def samefile(fpath1, fpath2):
     """Returns whether path1 and path2 refer to the same file. This is only
     guaranteed to work for files, not directories."""
     return os.path.samefile(fpath1, fpath2)
 
+
 def samedevice(fpath1, fpath2):
     """Returns whether fpath1 and fpath2 are on the same device. This is only
     guaranteed to work for files, not directories."""
@@ -374,10 +401,12 @@
     st2 = os.lstat(fpath2)
     return st1.st_dev == st2.st_dev
 
+
 # os.path.normcase is a no-op, which doesn't help us on non-native filesystems
 def normcase(path):
     return path.lower()
 
+
 # what normcase does to ASCII strings
 normcasespec = encoding.normcasespecs.lower
 # fallback normcase function for non-ASCII strings
@@ -423,7 +452,7 @@
                     c = encoding.getutf8char(path, pos)
                     pos += len(c)
                 except ValueError:
-                    c = '%%%02X' % ord(path[pos:pos + 1])
+                    c = '%%%02X' % ord(path[pos : pos + 1])
                     pos += 1
                 s += c
 
@@ -434,17 +463,16 @@
         # drop HFS+ ignored characters
         return encoding.hfsignoreclean(enc)
 
+
 if pycompat.sysplatform == 'cygwin':
     # workaround for cygwin, in which mount point part of path is
     # treated as case sensitive, even though underlying NTFS is case
     # insensitive.
 
     # default mount points
-    cygwinmountpoints = sorted([
-            "/usr/bin",
-            "/usr/lib",
-            "/cygdrive",
-            ], reverse=True)
+    cygwinmountpoints = sorted(
+        ["/usr/bin", "/usr/lib", "/cygdrive",], reverse=True
+    )
 
     # use upper-ing as normcase as same as NTFS workaround
     def normcase(path):
@@ -459,7 +487,7 @@
                 continue
 
             mplen = len(mp)
-            if mplen == pathlen: # mount point itself
+            if mplen == pathlen:  # mount point itself
                 return mp
             if path[mplen] == pycompat.ossep:
                 return mp + encoding.upper(path[mplen:])
@@ -482,7 +510,10 @@
     def checklink(path):
         return False
 
+
 _needsshellquote = None
+
+
 def shellquote(s):
     if pycompat.sysplatform == 'OpenVMS':
         return '"%s"' % s
@@ -495,13 +526,16 @@
     else:
         return "'%s'" % s.replace("'", "'\\''")
 
+
 def shellsplit(s):
     """Parse a command string in POSIX shell way (best-effort)"""
     return pycompat.shlexsplit(s, posix=True)
 
+
 def quotecommand(cmd):
     return cmd
 
+
 def testpid(pid):
     '''return False if pid dead, True if running or not sure'''
     if pycompat.sysplatform == 'OpenVMS':
@@ -512,10 +546,12 @@
     except OSError as inst:
         return inst.errno != errno.ESRCH
 
+
 def isowner(st):
     """Return True if the stat object st is from the current user."""
     return st.st_uid == os.getuid()
 
+
 def findexe(command):
     '''Find executable for command, searching like which does.
     If command is a basename then PATH is searched for command.
@@ -542,11 +578,14 @@
             return executable
     return None
 
+
 def setsignalhandler():
     pass
 
+
 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
 
+
 def statfiles(files):
     '''Stat each file in files. Yield each stat, or None if a file does not
     exist or has a type we don't care about.'''
@@ -563,10 +602,12 @@
             st = None
         yield st
 
+
 def getuser():
     '''return name of current user'''
     return pycompat.fsencode(getpass.getuser())
 
+
 def username(uid=None):
     """Return the name of the user with the given uid.
 
@@ -579,6 +620,7 @@
     except KeyError:
         return b'%d' % uid
 
+
 def groupname(gid=None):
     """Return the name of the group with the given gid.
 
@@ -591,6 +633,7 @@
     except KeyError:
         return pycompat.bytestr(gid)
 
+
 def groupmembers(name):
     """Return the list of members of the group with the given
     name, raising KeyError if the group does not exist.
@@ -598,19 +641,23 @@
     name = pycompat.fsdecode(name)
     return pycompat.rapply(pycompat.fsencode, list(grp.getgrnam(name).gr_mem))
 
+
 def spawndetached(args):
-    return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
-                      args[0], args)
+    return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args)
+
 
 def gethgcmd():
     return sys.argv[:1]
 
+
 def makedir(path, notindexed):
     os.mkdir(path)
 
+
 def lookupreg(key, name=None, scope=None):
     return None
 
+
 def hidewindow():
     """Hide current shell window.
 
@@ -619,6 +666,7 @@
     """
     pass
 
+
 class cachestat(object):
     def __init__(self, path):
         self.stat = os.stat(path)
@@ -635,29 +683,34 @@
             # rest. However, one of the other fields changing indicates
             # something fishy going on, so return False if anything but atime
             # changes.
-            return (self.stat.st_mode == other.stat.st_mode and
-                    self.stat.st_ino == other.stat.st_ino and
-                    self.stat.st_dev == other.stat.st_dev and
-                    self.stat.st_nlink == other.stat.st_nlink and
-                    self.stat.st_uid == other.stat.st_uid and
-                    self.stat.st_gid == other.stat.st_gid and
-                    self.stat.st_size == other.stat.st_size and
-                    self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME] and
-                    self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME])
+            return (
+                self.stat.st_mode == other.stat.st_mode
+                and self.stat.st_ino == other.stat.st_ino
+                and self.stat.st_dev == other.stat.st_dev
+                and self.stat.st_nlink == other.stat.st_nlink
+                and self.stat.st_uid == other.stat.st_uid
+                and self.stat.st_gid == other.stat.st_gid
+                and self.stat.st_size == other.stat.st_size
+                and self.stat[stat.ST_MTIME] == other.stat[stat.ST_MTIME]
+                and self.stat[stat.ST_CTIME] == other.stat[stat.ST_CTIME]
+            )
         except AttributeError:
             return False
 
     def __ne__(self, other):
         return not self == other
 
+
 def statislink(st):
     '''check whether a stat result is a symlink'''
     return st and stat.S_ISLNK(st.st_mode)
 
+
 def statisexec(st):
     '''check whether a stat result is an executable file'''
     return st and (st.st_mode & 0o100 != 0)
 
+
 def poll(fds):
     """block until something happens on any file descriptor
 
@@ -674,10 +727,11 @@
                 if inst.args[0] == errno.EINTR:
                     continue
                 raise
-    except ValueError: # out of range file descriptor
+    except ValueError:  # out of range file descriptor
         raise NotImplementedError()
     return sorted(list(set(sum(res, []))))
 
+
 def readpipe(pipe):
     """Read all available data from a pipe."""
     # We can't fstat() a pipe because Linux will always report 0.
@@ -702,6 +756,7 @@
     finally:
         fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags)
 
+
 def bindunixsocket(sock, path):
     """Bind the UNIX domain socket to the specified path"""
     # use relative path instead of full path at bind() if possible, since
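
posix.py gets the same treatment: blank lines between top-level definitions
and re-wrapped long calls, with no semantic change. Two of the small helpers
above, exercised directly (inputs are illustrative):

    from mercurial import posix

    posix.parsepatchoutput(b"patching file 'a name.txt'")  # -> b'a name.txt'
    posix.parsepatchoutput(b'patching file plain.txt')     # -> b'plain.txt'
    posix.isexec(b'/bin/sh')  # True wherever /bin/sh carries an exec bit
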
--- a/mercurial/profiling.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/profiling.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
     util,
 )
 
+
 def _loadprofiler(ui, profiler):
     """load profiler extension. return profile method, or None on failure"""
     extname = profiler
@@ -29,6 +30,7 @@
     else:
         return getattr(mod, 'profile', None)
 
+
 @contextlib.contextmanager
 def lsprofile(ui, fp):
     format = ui.config('profiling', 'format')
@@ -37,16 +39,18 @@
     climit = ui.configint('profiling', 'nested')
 
     if format not in ['text', 'kcachegrind']:
-        ui.warn(_("unrecognized profiling format '%s'"
-                    " - Ignored\n") % format)
+        ui.warn(_("unrecognized profiling format '%s'" " - Ignored\n") % format)
         format = 'text'
 
     try:
         from . import lsprof
     except ImportError:
-        raise error.Abort(_(
-            'lsprof not available - install from '
-            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
+        raise error.Abort(
+            _(
+                'lsprof not available - install from '
+                'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'
+            )
+        )
     p = lsprof.Profiler()
     p.enable(subcalls=True)
     try:
@@ -56,6 +60,7 @@
 
         if format == 'kcachegrind':
             from . import lsprofcalltree
+
             calltree = lsprofcalltree.KCacheGrind(p)
             calltree.output(fp)
         else:
@@ -64,20 +69,25 @@
             stats.sort(pycompat.sysstr(field))
             stats.pprint(limit=limit, file=fp, climit=climit)
 
+
 @contextlib.contextmanager
 def flameprofile(ui, fp):
     try:
         from flamegraph import flamegraph
     except ImportError:
-        raise error.Abort(_(
-            'flamegraph not available - install from '
-            'https://github.com/evanhempel/python-flamegraph'))
+        raise error.Abort(
+            _(
+                'flamegraph not available - install from '
+                'https://github.com/evanhempel/python-flamegraph'
+            )
+        )
     # developer config: profiling.freq
     freq = ui.configint('profiling', 'freq')
     filter_ = None
     collapse_recursion = True
-    thread = flamegraph.ProfileThread(fp, 1.0 / freq,
-                                      filter_, collapse_recursion)
+    thread = flamegraph.ProfileThread(
+        fp, 1.0 / freq, filter_, collapse_recursion
+    )
     start_time = util.timer()
     try:
         thread.start()
@@ -85,9 +95,15 @@
     finally:
         thread.stop()
         thread.join()
-        print('Collected %d stack frames (%d unique) in %2.2f seconds.' % (
-            util.timer() - start_time, thread.num_frames(),
-            thread.num_frames(unique=True)))
+        print(
+            'Collected %d stack frames (%d unique) in %2.2f seconds.'
+            % (
+                util.timer() - start_time,
+                thread.num_frames(),
+                thread.num_frames(unique=True),
+            )
+        )
+
 
 @contextlib.contextmanager
 def statprofile(ui, fp):
@@ -101,8 +117,9 @@
     else:
         ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
 
-    track = ui.config('profiling', 'time-track',
-                      pycompat.iswindows and 'cpu' or 'real')
+    track = ui.config(
+        'profiling', 'time-track', pycompat.iswindows and 'cpu' or 'real'
+    )
     statprof.start(mechanism='thread', track=track)
 
     try:
@@ -152,12 +169,14 @@
 
         statprof.display(fp, data=data, format=displayformat, **kwargs)
 
+
 class profile(object):
     """Start profiling.
 
     Profiling is active when the context manager is active. When the context
     manager exits, profiling results will be written to the configured output.
     """
+
     def __init__(self, ui, enabled=True):
         self._ui = ui
         self._output = None
@@ -193,8 +212,9 @@
             # try load profiler from extension with the same name
             proffn = _loadprofiler(self._ui, profiler)
             if proffn is None:
-                self._ui.warn(_("unrecognized profiler '%s' - ignored\n")
-                              % profiler)
+                self._ui.warn(
+                    _("unrecognized profiler '%s' - ignored\n") % profiler
+                )
                 profiler = 'stat'
 
         self._output = self._ui.config('profiling', 'output')
@@ -210,10 +230,13 @@
                 class uifp(object):
                     def __init__(self, ui):
                         self._ui = ui
+
                     def write(self, data):
                         self._ui.write_err(data)
+
                     def flush(self):
                         self._ui.flush()
+
                 self._fpdoclose = False
                 self._fp = uifp(self._ui)
             else:
@@ -231,15 +254,16 @@
 
             self._profiler = proffn(self._ui, self._fp)
             self._profiler.__enter__()
-        except: # re-raises
+        except:  # re-raises
             self._closefp()
             raise
 
     def __exit__(self, exception_type, exception_value, traceback):
         propagate = None
         if self._profiler is not None:
-            propagate = self._profiler.__exit__(exception_type, exception_value,
-                                                traceback)
+            propagate = self._profiler.__exit__(
+                exception_type, exception_value, traceback
+            )
             if self._output == 'blackbox':
                 val = 'Profile:\n%s' % self._fp.getvalue()
                 # ui.log treats the input as a format string,
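
profiling.profile keeps its context-manager shape through the re-wrapping.
Typical use, as a sketch (the ui object and the workload are assumed, not
defined here):

    from mercurial import profiling

    with profiling.profile(ui, enabled=True):
        run_expensive_operation()  # hypothetical workload being profiled
    # on exit, results go to the destination configured in profiling.output
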
--- a/mercurial/progress.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/progress.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,12 +14,16 @@
 from .i18n import _
 from . import encoding
 
+
 def spacejoin(*args):
     return ' '.join(s for s in args if s)
 
+
 def shouldprint(ui):
     return not (ui.quiet or ui.plain('progress')) and (
-        ui._isatty(ui.ferr) or ui.configbool('progress', 'assume-tty'))
+        ui._isatty(ui.ferr) or ui.configbool('progress', 'assume-tty')
+    )
+
 
 def fmtremaining(seconds):
     """format a number of remaining seconds in human readable way
@@ -27,7 +31,7 @@
     This will properly display seconds, minutes, hours, and days as needed"""
     if seconds < 60:
         # i18n: format XX seconds as "XXs"
-        return _("%02ds") % (seconds)
+        return _("%02ds") % seconds
     minutes = seconds // 60
     if minutes < 60:
         seconds -= minutes * 60
@@ -61,6 +65,7 @@
     # i18n: format X years and YY weeks as "XyYYw"
     return _("%dy%02dw") % (years, weeks)
 
+
 # file_write() and file_flush() of Python 2 do not restart on EINTR if
 # the file is attached to a "slow" device (e.g. a terminal) and raise
 # IOError. We cannot know how many bytes would be written by file_write(),
@@ -79,6 +84,7 @@
                 continue
             raise
 
+
 class progbar(object):
     def __init__(self, ui):
         self.ui = ui
@@ -91,19 +97,20 @@
         self.starttimes = {}
         self.startvals = {}
         self.printed = False
-        self.lastprint = time.time() + float(self.ui.config(
-            'progress', 'delay'))
+        self.lastprint = time.time() + float(
+            self.ui.config('progress', 'delay')
+        )
         self.curtopic = None
         self.lasttopic = None
         self.indetcount = 0
-        self.refresh = float(self.ui.config(
-            'progress', 'refresh'))
-        self.changedelay = max(3 * self.refresh,
-                               float(self.ui.config(
-                                   'progress', 'changedelay')))
+        self.refresh = float(self.ui.config('progress', 'refresh'))
+        self.changedelay = max(
+            3 * self.refresh, float(self.ui.config('progress', 'changedelay'))
+        )
         self.order = self.ui.configlist('progress', 'format')
         self.estimateinterval = self.ui.configwith(
-            float, 'progress', 'estimateinterval')
+            float, 'progress', 'estimateinterval'
+        )
 
     def show(self, now, topic, pos, item, unit, total):
         if not shouldprint(self.ui):
@@ -169,8 +176,11 @@
                 # cursor bounce between the right and left sides
                 amt = self.indetcount % (2 * progwidth)
                 amt -= progwidth
-                bar = (' ' * int(progwidth - abs(amt)) + '<=>' +
-                       ' ' * int(abs(amt)))
+                bar = (
+                    ' ' * int(progwidth - abs(amt))
+                    + '<=>'
+                    + ' ' * int(abs(amt))
+                )
             prog = ''.join(('[', bar, ']'))
             out = spacejoin(head, prog, tail)
         else:
@@ -228,11 +238,13 @@
 
     def _oktoprint(self, now):
         '''Check if conditions are met to print - e.g. changedelay elapsed'''
-        if (self.lasttopic is None # first time we printed
+        if (
+            self.lasttopic is None  # first time we printed
             # not a topic change
             or self.curtopic == self.lasttopic
             # it's been long enough we should print anyway
-            or now - self.lastprint >= self.changedelay):
+            or now - self.lastprint >= self.changedelay
+        ):
             return True
         else:
             return False
@@ -293,7 +305,7 @@
             # truncate the list of topics assuming all topics within
             # this one are also closed
             if topic in self.topics:
-                self.topics = self.topics[:self.topics.index(topic)]
+                self.topics = self.topics[: self.topics.index(topic)]
                 # reset the last topic to the one we just unwound to,
                 # so that higher-level topics will be stickier than
                 # lower-level topics
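
The progress.py hunks only re-flow long calls and conditions. fmtremaining(),
touched above, still formats as its i18n comments describe; the minute-level
format string sits in a branch elided by the hunk context, so the second
value below is expected output rather than shown code:

    from mercurial import progress

    progress.fmtremaining(30)  # -> '30s'
    progress.fmtremaining(90)  # -> '1m30s'
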
--- a/mercurial/pure/base85.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/base85.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,15 +11,19 @@
 
 from .. import pycompat
 
-_b85chars = pycompat.bytestr("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef"
-                             "ghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~")
+_b85chars = pycompat.bytestr(
+    "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdef"
+    "ghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"
+)
 _b85chars2 = [(a + b) for a in _b85chars for b in _b85chars]
 _b85dec = {}
 
+
 def _mkb85dec():
     for i, c in enumerate(_b85chars):
         _b85dec[c] = i
 
+
 def b85encode(text, pad=False):
     """encode text in base85 format"""
     l = len(text)
@@ -27,12 +31,14 @@
     if r:
         text += '\0' * (4 - r)
     longs = len(text) >> 2
-    words = struct.unpack('>%dL' % (longs), text)
+    words = struct.unpack('>%dL' % longs, text)
 
-    out = ''.join(_b85chars[(word // 52200625) % 85] +
-                  _b85chars2[(word // 7225) % 7225] +
-                  _b85chars2[word % 7225]
-                  for word in words)
+    out = ''.join(
+        _b85chars[(word // 52200625) % 85]
+        + _b85chars2[(word // 7225) % 7225]
+        + _b85chars2[word % 7225]
+        for word in words
+    )
 
     if pad:
         return out
@@ -44,6 +50,7 @@
     olen += l // 4 * 5
     return out[:olen]
 
+
 def b85decode(text):
     """decode base85-encoded text"""
     if not _b85dec:
@@ -52,15 +59,16 @@
     l = len(text)
     out = []
     for i in range(0, len(text), 5):
-        chunk = text[i:i + 5]
+        chunk = text[i : i + 5]
         chunk = pycompat.bytestr(chunk)
         acc = 0
         for j, c in enumerate(chunk):
             try:
                 acc = acc * 85 + _b85dec[c]
             except KeyError:
-                raise ValueError('bad base85 character at position %d'
-                                 % (i + j))
+                raise ValueError(
+                    'bad base85 character at position %d' % (i + j)
+                )
         if acc > 4294967295:
             raise ValueError('Base85 overflow in hunk starting at byte %d' % i)
         out.append(acc)
@@ -70,11 +78,11 @@
     if cl:
         acc *= 85 ** (5 - cl)
         if cl > 1:
-            acc += 0xffffff >> (cl - 2) * 8
+            acc += 0xFFFFFF >> (cl - 2) * 8
         out[-1] = acc
 
     out = struct.pack('>%dL' % (len(out)), *out)
     if cl:
-        out = out[:-(5 - cl)]
+        out = out[: -(5 - cl)]
 
     return out
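
pure/base85.py is behavior-preserving as well; encoding and decoding still
round-trip (a quick sketch using the two functions reformatted above):

    from mercurial.pure import base85

    enc = base85.b85encode(b'hello world')  # uses the 85-char alphabet above
    assert base85.b85decode(enc) == b'hello world'
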
--- a/mercurial/pure/bdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/bdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,6 +11,7 @@
 import re
 import struct
 
+
 def splitnewlines(text):
     '''like str.splitlines, but only split on newlines.'''
     lines = [l + '\n' for l in text.split('\n')]
@@ -21,6 +22,7 @@
             lines[-1] = lines[-1][:-1]
     return lines
 
+
 def _normalizeblocks(a, b, blocks):
     prev = None
     r = []
@@ -38,18 +40,21 @@
         a2end = a2 + l2
         b2end = b2 + l2
         if a1end == a2:
-            while (a1end + shift < a2end and
-                   a[a1end + shift] == b[b1end + shift]):
+            while (
+                a1end + shift < a2end and a[a1end + shift] == b[b1end + shift]
+            ):
                 shift += 1
         elif b1end == b2:
-            while (b1end + shift < b2end and
-                   a[a1end + shift] == b[b1end + shift]):
+            while (
+                b1end + shift < b2end and a[a1end + shift] == b[b1end + shift]
+            ):
                 shift += 1
         r.append((a1, b1, l1 + shift))
         prev = a2 + shift, b2 + shift, l2 - shift
     r.append(prev)
     return r
 
+
 def bdiff(a, b):
     a = bytes(a).splitlines(True)
     b = bytes(b).splitlines(True)
@@ -76,6 +81,7 @@
 
     return "".join(bin)
 
+
 def blocks(a, b):
     an = splitnewlines(a)
     bn = splitnewlines(b)
@@ -83,6 +89,7 @@
     d = _normalizeblocks(an, bn, d)
     return [(i, i + n, j, j + n) for (i, j, n) in d]
 
+
 def fixws(text, allws):
     if allws:
         text = re.sub('[ \t\r]+', '', text)
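
Likewise for pure/bdiff.py: splitnewlines() and blocks() are only re-wrapped.
Expected results, per the visible code (the blocks() internals are partly
elided, so its output is described rather than spelled out):

    from mercurial.pure import bdiff

    bdiff.splitnewlines(b'a\nb\nc')  # -> [b'a\n', b'b\n', b'c']
    bdiff.splitnewlines(b'a\nb\n')   # -> [b'a\n', b'b\n']
    bdiff.blocks(b'a\nb\n', b'a\nc\n')  # matching blocks as (a1, a2, b1, b2)
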
--- a/mercurial/pure/charencode.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/charencode.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,9 +9,8 @@
 
 import array
 
-from .. import (
-    pycompat,
-)
+from .. import pycompat
+
 
 def isasciistr(s):
     try:
@@ -20,6 +19,7 @@
     except UnicodeDecodeError:
         return False
 
+
 def asciilower(s):
     '''convert a string to lowercase if ASCII
 
@@ -27,6 +27,7 @@
     s.decode('ascii')
     return s.lower()
 
+
 def asciiupper(s):
     '''convert a string to uppercase if ASCII
 
@@ -34,22 +35,24 @@
     s.decode('ascii')
     return s.upper()
 
+
 _jsonmap = []
 _jsonmap.extend("\\u%04x" % x for x in range(32))
 _jsonmap.extend(pycompat.bytechr(x) for x in range(32, 127))
 _jsonmap.append('\\u007f')
 _jsonmap[0x09] = '\\t'
-_jsonmap[0x0a] = '\\n'
+_jsonmap[0x0A] = '\\n'
 _jsonmap[0x22] = '\\"'
-_jsonmap[0x5c] = '\\\\'
+_jsonmap[0x5C] = '\\\\'
 _jsonmap[0x08] = '\\b'
-_jsonmap[0x0c] = '\\f'
-_jsonmap[0x0d] = '\\r'
+_jsonmap[0x0C] = '\\f'
+_jsonmap[0x0D] = '\\r'
 _paranoidjsonmap = _jsonmap[:]
-_paranoidjsonmap[0x3c] = '\\u003c'  # '<' (e.g. escape "</script>")
-_paranoidjsonmap[0x3e] = '\\u003e'  # '>'
+_paranoidjsonmap[0x3C] = '\\u003c'  # '<' (e.g. escape "</script>")
+_paranoidjsonmap[0x3E] = '\\u003e'  # '>'
 _jsonmap.extend(pycompat.bytechr(x) for x in range(128, 256))
 
+
 def jsonescapeu8fast(u8chars, paranoid):
     """Convert a UTF-8 byte string to JSON-escaped form (fast path)
 
@@ -64,11 +67,13 @@
     except IndexError:
         raise ValueError
 
+
 if pycompat.ispy3:
     _utf8strict = r'surrogatepass'
 else:
     _utf8strict = r'strict'
 
+
 def jsonescapeu8fallback(u8chars, paranoid):
     """Convert a UTF-8 byte string to JSON-escaped form (slow path)
 
--- a/mercurial/pure/mpatch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/mpatch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,12 +10,15 @@
 import struct
 
 from .. import pycompat
+
 stringio = pycompat.bytesio
 
+
 class mpatchError(Exception):
     """error raised when a delta cannot be decoded
     """
 
+
 # This attempts to apply a series of patches in time proportional to
 # the total size of the patches, rather than patches * len(text). This
 # means rather than shuffling strings around, we shuffle around
@@ -26,16 +29,18 @@
 # mmap and simply use memmove. This avoids creating a bunch of large
 # temporary string buffers.
 
-def _pull(dst, src, l): # pull l bytes from src
+
+def _pull(dst, src, l):  # pull l bytes from src
     while l:
         f = src.pop()
-        if f[0] > l: # do we need to split?
+        if f[0] > l:  # do we need to split?
             src.append((f[0] - l, f[1] + l))
             dst.append((l, f[1]))
             return
         dst.append(f)
         l -= f[0]
 
+
 def _move(m, dest, src, count):
     """move count bytes from src to dest
 
@@ -46,6 +51,7 @@
     m.seek(dest)
     m.write(buf)
 
+
 def _collect(m, buf, list):
     start = buf
     for l, p in reversed(list):
@@ -53,6 +59,7 @@
         buf += l
     return (buf - start, start)
 
+
 def patches(a, bins):
     if not bins:
         return a
@@ -60,7 +67,7 @@
     plens = [len(x) for x in bins]
     pl = sum(plens)
     bl = len(a) + pl
-    tl = bl + bl + pl # enough for the patches and two working texts
+    tl = bl + bl + pl  # enough for the patches and two working texts
     b1, b2 = 0, bl
 
     if not tl:
@@ -93,25 +100,26 @@
                 p1, p2, l = struct.unpack(">lll", m.read(12))
             except struct.error:
                 raise mpatchError("patch cannot be decoded")
-            _pull(new, frags, p1 - last) # what didn't change
-            _pull([], frags, p2 - p1)    # what got deleted
-            new.append((l, pos + 12))   # what got added
+            _pull(new, frags, p1 - last)  # what didn't change
+            _pull([], frags, p2 - p1)  # what got deleted
+            new.append((l, pos + 12))  # what got added
             pos += l + 12
             last = p2
-        frags.extend(reversed(new))     # what was left at the end
+        frags.extend(reversed(new))  # what was left at the end
 
     t = _collect(m, b2, frags)
 
     m.seek(t[1])
     return m.read(t[0])
 
+
 def patchedsize(orig, delta):
     outlen, last, bin = 0, 0, 0
     binend = len(delta)
     data = 12
 
     while data <= binend:
-        decode = delta[bin:bin + 12]
+        decode = delta[bin : bin + 12]
         start, end, length = struct.unpack(">lll", decode)
         if start > end:
             break
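
In pure/mpatch.py the delta-application logic is untouched: a delta remains a
series of '>lll' (start, end, length) headers, each followed by replacement
bytes. A minimal round trip (the delta is built by hand for illustration):

    import struct
    from mercurial.pure import mpatch

    delta = struct.pack('>lll', 0, 5, 3) + b'abc'  # replace a[0:5] with b'abc'
    mpatch.patches(b'hello world', [delta])        # -> b'abc world'
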
--- a/mercurial/pure/osutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/osutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
     pycompat,
 )
 
+
 def _mode_to_kind(mode):
     if statmod.S_ISREG(mode):
         return statmod.S_IFREG
@@ -35,6 +36,7 @@
         return statmod.S_IFSOCK
     return mode
 
+
 def listdir(path, stat=False, skip=None):
     '''listdir(path, stat=False) -> list_of_tuples
 
@@ -65,6 +67,7 @@
             result.append((fn, _mode_to_kind(st.st_mode)))
     return result
 
+
 if not pycompat.iswindows:
     posixfile = open
 
@@ -111,8 +114,11 @@
     _recvmsg = getattr(_libc, 'recvmsg', None)
     if _recvmsg:
         _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long)
-        _recvmsg.argtypes = (ctypes.c_int, ctypes.POINTER(_msghdr),
-                             ctypes.c_int)
+        _recvmsg.argtypes = (
+            ctypes.c_int,
+            ctypes.POINTER(_msghdr),
+            ctypes.c_int,
+        )
     else:
         # recvmsg isn't always provided by libc; such systems are unsupported
         def _recvmsg(sockfd, msg, flags):
@@ -132,10 +138,15 @@
         dummy = (ctypes.c_ubyte * 1)()
         iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy))
         cbuf = ctypes.create_string_buffer(256)
-        msgh = _msghdr(None, 0,
-                       ctypes.pointer(iov), 1,
-                       ctypes.cast(cbuf, ctypes.c_void_p), ctypes.sizeof(cbuf),
-                       0)
+        msgh = _msghdr(
+            None,
+            0,
+            ctypes.pointer(iov),
+            1,
+            ctypes.cast(cbuf, ctypes.c_void_p),
+            ctypes.sizeof(cbuf),
+            0,
+        )
         r = _recvmsg(sockfd, ctypes.byref(msgh), 0)
         if r < 0:
             e = ctypes.get_errno()
@@ -145,14 +156,18 @@
         cmsg = _CMSG_FIRSTHDR(msgh)
         if not cmsg:
             return []
-        if (cmsg.cmsg_level != socket.SOL_SOCKET or
-            cmsg.cmsg_type != _SCM_RIGHTS):
+        if (
+            cmsg.cmsg_level != socket.SOL_SOCKET
+            or cmsg.cmsg_type != _SCM_RIGHTS
+        ):
             return []
         rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int))
-        rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) //
-                     ctypes.sizeof(ctypes.c_int))
+        rfdscount = (
+            cmsg.cmsg_len - _cmsghdr.cmsg_data.offset
+        ) // ctypes.sizeof(ctypes.c_int)
         return [rfds[i] for i in pycompat.xrange(rfdscount)]
 
+
 else:
     import msvcrt
 
@@ -188,14 +203,22 @@
 
     # types of parameters of C functions used (required by pypy)
 
-    _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
-        _DWORD, _DWORD, _HANDLE]
+    _kernel32.CreateFileA.argtypes = [
+        _LPCSTR,
+        _DWORD,
+        _DWORD,
+        ctypes.c_void_p,
+        _DWORD,
+        _DWORD,
+        _HANDLE,
+    ]
     _kernel32.CreateFileA.restype = _HANDLE
 
     def _raiseioerror(name):
         err = ctypes.WinError()
-        raise IOError(err.errno, r'%s: %s' % (encoding.strfromlocal(name),
-                                              err.strerror))
+        raise IOError(
+            err.errno, r'%s: %s' % (encoding.strfromlocal(name), err.strerror)
+        )
 
     class posixfile(object):
         '''a file object aiming for POSIX-like semantics
@@ -235,9 +258,15 @@
             else:
                 raise ValueError(r"invalid mode: %s" % pycompat.sysstr(mode))
 
-            fh = _kernel32.CreateFileA(name, access,
-                    _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
-                    None, creation, _FILE_ATTRIBUTE_NORMAL, None)
+            fh = _kernel32.CreateFileA(
+                name,
+                access,
+                _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
+                None,
+                creation,
+                _FILE_ATTRIBUTE_NORMAL,
+                None,
+            )
             if fh == _INVALID_HANDLE_VALUE:
                 _raiseioerror(name)
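
The osutil.py hunks show how black explodes a call or signature that exceeds
the line limit: one argument per line, each followed by a comma, with the
closing parenthesis dedented to the statement's indent. A small sketch under
the same rule; create_file and its flag values are hypothetical stand-ins for
the _kernel32.CreateFileA call above:

def create_file(name, access, share, security, creation, flags, template):
    # hypothetical stand-in for the ctypes call; just echoes its arguments
    return (name, access, share, security, creation, flags, template)

handle = create_file(
    "example.txt",
    0x80000000,  # illustrative access flag
    0x00000007,  # illustrative share mode
    None,
    3,
    0x80,
    None,
)
print(handle[0])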
 
--- a/mercurial/pure/parsers.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pure/parsers.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,6 +12,7 @@
 
 from ..node import nullid
 from .. import pycompat
+
 stringio = pycompat.bytesio
 
 
@@ -26,17 +27,21 @@
     # x is a tuple
     return x
 
+
 indexformatng = ">Qiiiiii20s12x"
 indexfirst = struct.calcsize('Q')
 sizeint = struct.calcsize('i')
 indexsize = struct.calcsize(indexformatng)
 
+
 def gettype(q):
     return int(q & 0xFFFF)
 
+
 def offset_type(offset, type):
     return int(int(offset) << 16 | type)
 
+
 class BaseIndexObject(object):
     def __len__(self):
         return self._lgt + len(self._extra)
@@ -57,7 +62,7 @@
         if i >= self._lgt:
             return self._extra[i - self._lgt]
         index = self._calculate_index(i)
-        r = struct.unpack(indexformatng, self._data[index:index + indexsize])
+        r = struct.unpack(indexformatng, self._data[index : index + indexsize])
         if i == 0:
             e = list(r)
             type = gettype(e[0])
@@ -65,6 +70,7 @@
             return tuple(e)
         return r
 
+
 class IndexObject(BaseIndexObject):
     def __init__(self, data):
         assert len(data) % indexsize == 0
@@ -81,11 +87,12 @@
         i = i.start
         self._check_index(i)
         if i < self._lgt:
-            self._data = self._data[:i * indexsize]
+            self._data = self._data[: i * indexsize]
             self._lgt = i
             self._extra = []
         else:
-            self._extra = self._extra[:i - self._lgt]
+            self._extra = self._extra[: i - self._lgt]
+
 
 class InlinedIndexObject(BaseIndexObject):
     def __init__(self, data, inline=0):
@@ -100,8 +107,9 @@
             self._offsets = [0] * lgt
         count = 0
         while off <= len(self._data) - indexsize:
-            s, = struct.unpack('>i',
-                self._data[off + indexfirst:off + sizeint + indexfirst])
+            (s,) = struct.unpack(
+                '>i', self._data[off + indexfirst : off + sizeint + indexfirst]
+            )
             if lgt is not None:
                 self._offsets[count] = off
             count += 1
@@ -120,18 +128,20 @@
             self._lgt = i
             self._extra = []
         else:
-            self._extra = self._extra[:i - self._lgt]
+            self._extra = self._extra[: i - self._lgt]
 
     def _calculate_index(self, i):
         return self._offsets[i]
 
+
 def parse_index2(data, inline):
     if not inline:
         return IndexObject(data), None
     return InlinedIndexObject(data, inline), (0, data)
 
+
 def parse_dirstate(dmap, copymap, st):
-    parents = [st[:20], st[20: 40]]
+    parents = [st[:20], st[20:40]]
     # dereference fields so they will be local in loop
     format = ">cllll"
     e_size = struct.calcsize(format)
@@ -141,7 +151,7 @@
     # the inner loop
     while pos1 < l:
         pos2 = pos1 + e_size
-        e = _unpack(">cllll", st[pos1:pos2]) # a literal here is faster
+        e = _unpack(">cllll", st[pos1:pos2])  # a literal here is faster
         pos1 = pos2 + e[4]
         f = st[pos2:pos1]
         if '\0' in f:
@@ -150,6 +160,7 @@
         dmap[f] = e[:4]
     return parents
 
+
 def pack_dirstate(dmap, copymap, pl, now):
     now = int(now)
     cs = stringio()
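
One InlinedIndexObject hunk above rewrites the single-value unpacking
"s, = struct.unpack(...)" as "(s,) = struct.unpack(...)": black parenthesizes
a one-element tuple target so the trailing comma cannot be overlooked. Sketch:

import struct

buf = struct.pack('>i', 42)
# before black: s, = struct.unpack('>i', buf)
(s,) = struct.unpack('>i', buf)  # explicit one-element tuple target
print(s)  # -> 42
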
--- a/mercurial/pvec.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pvec.py	Sun Oct 06 09:45:02 2019 -0400
@@ -56,13 +56,14 @@
     util,
 )
 
-_size = 448 # 70 chars b85-encoded
+_size = 448  # 70 chars b85-encoded
 _bytes = _size / 8
 _depthbits = 24
 _depthbytes = _depthbits / 8
 _vecbytes = _bytes - _depthbytes
 _vecbits = _vecbytes * 8
-_radius = (_vecbits - 30) / 2 # high probability vectors are related
+_radius = (_vecbits - 30) / 2  # high probability vectors are related
+
 
 def _bin(bs):
     '''convert a bytestring to a long'''
@@ -71,6 +72,7 @@
         v = v * 256 + ord(b)
     return v
 
+
 def _str(v, l):
     bs = ""
     for p in pycompat.xrange(l):
@@ -78,13 +80,16 @@
         v >>= 8
     return bs
 
+
 def _split(b):
     '''depth and bitvec'''
     return _bin(b[:_depthbytes]), _bin(b[_depthbytes:])
 
+
 def _join(depth, bitvec):
     return _str(depth, _depthbytes) + _str(bitvec, _vecbytes)
 
+
 def _hweight(x):
     c = 0
     while x:
@@ -92,17 +97,21 @@
             c += 1
         x >>= 1
     return c
+
+
 _htab = [_hweight(x) for x in pycompat.xrange(256)]
 
+
 def _hamming(a, b):
     '''find the hamming distance between two longs'''
     d = a ^ b
     c = 0
     while d:
-        c += _htab[d & 0xff]
+        c += _htab[d & 0xFF]
         d >>= 8
     return c
 
+
 def _mergevec(x, y, c):
     # Ideally, this function would be x ^ y ^ ancestor, but finding
     # ancestors is a nuisance. So instead we find the minimal number
@@ -116,7 +125,7 @@
     hdist = _hamming(v1, v2)
     ddist = d1 - d2
     v = v1
-    m = v1 ^ v2 # mask of different bits
+    m = v1 ^ v2  # mask of different bits
     i = 1
 
     if hdist > ddist:
@@ -140,10 +149,12 @@
 
     return depth, v
 
+
 def _flipbit(v, node):
     # converting bit strings to longs is slow
-    bit = (hash(node) & 0xffffffff) % _vecbits
-    return v ^ (1<<bit)
+    bit = (hash(node) & 0xFFFFFFFF) % _vecbits
+    return v ^ (1 << bit)
+
 
 def ctxpvec(ctx):
     '''construct a pvec for ctx while filling in the cache'''
@@ -168,6 +179,7 @@
     bs = _join(*pvc[ctx.rev()])
     return pvec(util.b85encode(bs))
 
+
 class pvec(object):
     def __init__(self, hashorctx):
         if isinstance(hashorctx, str):
@@ -185,7 +197,7 @@
     def __lt__(self, b):
         delta = b._depth - self._depth
         if delta < 0:
-            return False # always correct
+            return False  # always correct
         if _hamming(self._vec, b._vec) > delta:
             return False
         return True
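
The pvec.py hunks show black normalizing numeric literals: hex digits are
uppercased (0xff becomes 0xFF) and shift operators gain spaces (1<<bit becomes
1 << bit). A sketch reusing the popcount-table idea of _htab/_hweight above,
written post-black; the names are illustrative:

_table = [bin(x).count('1') for x in range(256)]  # bits set in each byte value

def hamming(a, b):
    d = a ^ b
    c = 0
    while d:
        c += _table[d & 0xFF]  # black writes 0xFF, never 0xff
        d >>= 8
    return c

print(hamming(0b1010, 0b0110))  # -> 2
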
--- a/mercurial/pycompat.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/pycompat.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,8 +17,8 @@
 import sys
 import tempfile
 
-ispy3 = (sys.version_info[0] >= 3)
-ispypy = (r'__pypy__' in sys.builtin_module_names)
+ispy3 = sys.version_info[0] >= 3
+ispypy = r'__pypy__' in sys.builtin_module_names
 
 if not ispy3:
     import cookielib
@@ -32,6 +32,8 @@
 
     def future_set_exception_info(f, exc_info):
         f.set_exception_info(*exc_info)
+
+
 else:
     import concurrent.futures as futures
     import http.cookiejar as cookielib
@@ -44,9 +46,11 @@
     def future_set_exception_info(f, exc_info):
         f.set_exception(exc_info[0])
 
+
 def identity(a):
     return a
 
+
 def _rapply(f, xs):
     if xs is None:
         # assume None means non-value of optional data
@@ -57,6 +61,7 @@
         return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
     return f(xs)
 
+
 def rapply(f, xs):
     """Apply function recursively to every item preserving the data structure
 
@@ -80,6 +85,7 @@
         return xs
     return _rapply(f, xs)
 
+
 if ispy3:
     import builtins
     import functools
@@ -195,8 +201,11 @@
         def __new__(cls, s=b''):
             if isinstance(s, bytestr):
                 return s
-            if (not isinstance(s, (bytes, bytearray))
-                and not hasattr(s, u'__bytes__')):  # hasattr-py3-only
+            if not isinstance(
+                s, (bytes, bytearray)
+            ) and not hasattr(  # hasattr-py3-only
+                s, u'__bytes__'
+            ):
                 s = str(s).encode(u'ascii')
             return bytes.__new__(cls, s)
 
@@ -270,6 +279,7 @@
         @functools.wraps(f)
         def w(object, name, *args):
             return f(object, sysstr(name), *args)
+
         return w
 
     # these wrappers are automagically imported by hgloader
@@ -296,8 +306,7 @@
         shortlist = shortlist.decode('latin-1')
         namelist = [a.decode('latin-1') for a in namelist]
         opts, args = orig(args, shortlist, namelist)
-        opts = [(a[0].encode('latin-1'), a[1].encode('latin-1'))
-                for a in opts]
+        opts = [(a[0].encode('latin-1'), a[1].encode('latin-1')) for a in opts]
         args = [a.encode('latin-1') for a in args]
         return opts, args
 
@@ -347,8 +356,7 @@
     bytesurl = identity
 
     # this can't be parsed on Python 3
-    exec('def raisewithtb(exc, tb):\n'
-         '    raise exc, None, tb\n')
+    exec('def raisewithtb(exc, tb):\n' '    raise exc, None, tb\n')
 
     def fsencode(filename):
         """
@@ -359,8 +367,7 @@
         if isinstance(filename, str):
             return filename
         else:
-            raise TypeError(
-                r"expect str, not %s" % type(filename).__name__)
+            raise TypeError(r"expect str, not %s" % type(filename).__name__)
 
     # In Python 2, fsdecode() is very likely to receive bytes, so it's
     # better not to touch the Python 2 part as it's already working fine.
@@ -412,23 +419,30 @@
 isposix = osname == b'posix'
 iswindows = osname == b'nt'
 
+
 def getoptb(args, shortlist, namelist):
     return _getoptbwrapper(getopt.getopt, args, shortlist, namelist)
 
+
 def gnugetoptb(args, shortlist, namelist):
     return _getoptbwrapper(getopt.gnu_getopt, args, shortlist, namelist)
 
+
 def mkdtemp(suffix=b'', prefix=b'tmp', dir=None):
     return tempfile.mkdtemp(suffix, prefix, dir)
 
+
 # text=True is not supported; use util.from/tonativeeol() instead
 def mkstemp(suffix=b'', prefix=b'tmp', dir=None):
     return tempfile.mkstemp(suffix, prefix, dir)
 
+
 # mode must include 'b'ytes as encoding= is not supported
-def namedtempfile(mode=b'w+b', bufsize=-1, suffix=b'', prefix=b'tmp', dir=None,
-                  delete=True):
+def namedtempfile(
+    mode=b'w+b', bufsize=-1, suffix=b'', prefix=b'tmp', dir=None, delete=True
+):
     mode = sysstr(mode)
     assert r'b' in mode
-    return tempfile.NamedTemporaryFile(mode, bufsize, suffix=suffix,
-                                       prefix=prefix, dir=dir, delete=delete)
+    return tempfile.NamedTemporaryFile(
+        mode, bufsize, suffix=suffix, prefix=prefix, dir=dir, delete=delete
+    )
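
In pycompat.py black also drops redundant parentheses around simple
right-hand sides, as in the ispy3/ispypy assignments at the top of the file.
A sketch of that rule:

import sys

# before black: is_py3 = (sys.version_info[0] >= 3)
is_py3 = sys.version_info[0] >= 3
# before black: is_pypy = ('__pypy__' in sys.builtin_module_names)
is_pypy = '__pypy__' in sys.builtin_module_names
print(is_py3, is_pypy)
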
--- a/mercurial/registrar.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/registrar.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,6 +21,7 @@
 # the other items extensions might want to register.
 configitem = configitems.getitemregister
 
+
 class _funcregistrarbase(object):
     """Base of decorator to register a function for specific purpose
 
@@ -47,6 +48,7 @@
     - 'barfunc' is stored as 'bar' in '_table' of an instance 'keyword' above
     - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword"
     """
+
     def __init__(self, table=None):
         if table is None:
             self._table = {}
@@ -122,6 +124,7 @@
         """Execute exra setup for registered function, if needed
         """
 
+
 class command(_funcregistrarbase):
     """Decorator to register a command function to table
 
@@ -198,7 +201,7 @@
     CATEGORY_CHANGE_MANAGEMENT = 'management'
     CATEGORY_CHANGE_ORGANIZATION = 'organization'
     CATEGORY_FILE_CONTENTS = 'files'
-    CATEGORY_CHANGE_NAVIGATION  = 'navigation'
+    CATEGORY_CHANGE_NAVIGATION = 'navigation'
     CATEGORY_WORKING_DIRECTORY = 'wdir'
     CATEGORY_IMPORT_EXPORT = 'import'
     CATEGORY_MAINTENANCE = 'maintenance'
@@ -206,9 +209,19 @@
     CATEGORY_MISC = 'misc'
     CATEGORY_NONE = 'none'
 
-    def _doregister(self, func, name, options=(), synopsis=None,
-                    norepo=False, optionalrepo=False, inferrepo=False,
-                    intents=None, helpcategory=None, helpbasic=False):
+    def _doregister(
+        self,
+        func,
+        name,
+        options=(),
+        synopsis=None,
+        norepo=False,
+        optionalrepo=False,
+        inferrepo=False,
+        intents=None,
+        helpcategory=None,
+        helpbasic=False,
+    ):
         func.norepo = norepo
         func.optionalrepo = optionalrepo
         func.inferrepo = inferrepo
@@ -221,8 +234,10 @@
             self._table[name] = func, list(options)
         return func
 
+
 INTENT_READONLY = b'readonly'
 
+
 class revsetpredicate(_funcregistrarbase):
     """Decorator to register revset predicate
 
@@ -263,6 +278,7 @@
 
     Otherwise, explicit 'revset.loadpredicate()' is needed.
     """
+
     _getname = _funcregistrarbase._parsefuncdecl
     _docformat = "``%s``\n    %s"
 
@@ -271,6 +287,7 @@
         func._takeorder = takeorder
         func._weight = weight
 
+
 class filesetpredicate(_funcregistrarbase):
     """Decorator to register fileset predicate
 
@@ -312,6 +329,7 @@
 
     Otherwise, explicit 'fileset.loadpredicate()' is needed.
     """
+
     _getname = _funcregistrarbase._parsefuncdecl
     _docformat = "``%s``\n    %s"
 
@@ -319,11 +337,14 @@
         func._callstatus = callstatus
         func._weight = weight
 
+
 class _templateregistrarbase(_funcregistrarbase):
     """Base of decorator to register functions as template specific one
     """
+
     _docformat = ":%s: %s"
 
+
 class templatekeyword(_templateregistrarbase):
     """Decorator to register template keyword
 
@@ -356,6 +377,7 @@
     def _extrasetup(self, name, func, requires=()):
         func._requires = requires
 
+
 class templatefilter(_templateregistrarbase):
     """Decorator to register template filer
 
@@ -387,6 +409,7 @@
     def _extrasetup(self, name, func, intype=None):
         func._intype = intype
 
+
 class templatefunc(_templateregistrarbase):
     """Decorator to register template function
 
@@ -419,12 +442,14 @@
 
     Otherwise, explicit 'templatefuncs.loadfunction()' is needed.
     """
+
     _getname = _funcregistrarbase._parsefuncdecl
 
     def _extrasetup(self, name, func, argspec=None, requires=()):
         func._argspec = argspec
         func._requires = requires
 
+
 class internalmerge(_funcregistrarbase):
     """Decorator to register in-process merge tool
 
@@ -480,6 +505,7 @@
 
     Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
     """
+
     _docformat = "``:%s``\n    %s"
 
     # merge type definitions:
@@ -487,9 +513,16 @@
     mergeonly = 'mergeonly'  # just the full merge, no premerge
     fullmerge = 'fullmerge'  # both premerge and merge
 
-    def _extrasetup(self, name, func, mergetype,
-                    onfailure=None, precheck=None,
-                    binary=False, symlink=False):
+    def _extrasetup(
+        self,
+        name,
+        func,
+        mergetype,
+        onfailure=None,
+        precheck=None,
+        binary=False,
+        symlink=False,
+    ):
         func.mergetype = mergetype
         func.onfailure = onfailure
         func.precheck = precheck
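
The registrar.py hunks are almost entirely vertical whitespace: black puts one
blank line between a class docstring and the first statement of the body, and
two blank lines between top-level definitions. A sketch with hypothetical
names:

class Registrar(object):
    """Collect functions under registered names.

    black keeps exactly one blank line after this docstring.
    """

    def __init__(self):
        self._table = {}

    def register(self, name, func):
        self._table[name] = func
        return func


def make_registrar():  # the two blank lines above are black's top-level gap
    return Registrar()

print(make_registrar().register('len', len))
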
--- a/mercurial/repair.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/repair.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,12 +28,12 @@
     pycompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
-def backupbundle(repo, bases, heads, node, suffix, compress=True,
-                 obsolescence=True):
+def backupbundle(
+    repo, bases, heads, node, suffix, compress=True, obsolescence=True
+):
     """create a bundle with the specified revisions as a backup"""
 
     backupdir = "strip-backup"
@@ -45,8 +45,12 @@
     allcommits = repo.set('%ln::%ln', bases, heads)
     allhashes = sorted(c.hex() for c in allcommits)
     totalhash = hashlib.sha1(''.join(allhashes)).digest()
-    name = "%s/%s-%s-%s.hg" % (backupdir, short(node),
-                               hex(totalhash[:4]), suffix)
+    name = "%s/%s-%s-%s.hg" % (
+        backupdir,
+        short(node),
+        hex(totalhash[:4]),
+        suffix,
+    )
 
     cgversion = changegroup.localversion(repo)
     comp = None
@@ -65,8 +69,18 @@
         'obsolescence': obsolescence,
         'phases': True,
     }
-    return bundle2.writenewbundle(repo.ui, repo, 'strip', name, bundletype,
-                                  outgoing, contentopts, vfs, compression=comp)
+    return bundle2.writenewbundle(
+        repo.ui,
+        repo,
+        'strip',
+        name,
+        bundletype,
+        outgoing,
+        contentopts,
+        vfs,
+        compression=comp,
+    )
+
 
 def _collectfiles(repo, striprev):
     """find out the filelogs affected by the strip"""
@@ -77,10 +91,12 @@
 
     return sorted(files)
 
+
 def _collectrevlog(revlog, striprev):
     _, brokenset = revlog.getstrippoint(striprev)
     return [revlog.linkrev(r) for r in brokenset]
 
+
 def _collectbrokencsets(repo, files, striprev):
     """return the changesets which will be broken by the truncation"""
     s = set()
@@ -92,6 +108,7 @@
 
     return s
 
+
 def strip(ui, repo, nodelist, backup=True, topic='backup'):
     # This function requires the caller to lock the repo, but it operates
     # within a transaction of its own, and thus requires there to be no current
@@ -151,8 +168,9 @@
     if repo.ui.configbool('devel', 'strip-obsmarkers'):
         obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
     if obsmarkers:
-        stripobsidx = [i for i, m in enumerate(repo.obsstore)
-                       if m in obsmarkers]
+        stripobsidx = [
+            i for i, m in enumerate(repo.obsstore) if m in obsmarkers
+        ]
 
     newbmtarget, updatebm = _bookmarkmovements(repo, tostrip)
 
@@ -169,8 +187,15 @@
         # we are trying to strip.  This is harmless since the stripped markers
         # are already backed up and we did not touch the markers for the
         # saved changesets.
-        tmpbundlefile = backupbundle(repo, savebases, saveheads, node, 'temp',
-                                     compress=False, obsolescence=False)
+        tmpbundlefile = backupbundle(
+            repo,
+            savebases,
+            saveheads,
+            node,
+            'temp',
+            compress=False,
+            obsolescence=False,
+        )
 
     with ui.uninterruptible():
         try:
@@ -213,8 +238,9 @@
                 if not isinstance(gen, bundle2.unbundle20):
                     txnname = "strip\n%s" % util.hidepassword(tmpbundleurl)
                 with repo.transaction(txnname) as tr:
-                    bundle2.applybundle(repo, gen, tr, source='strip',
-                                        url=tmpbundleurl)
+                    bundle2.applybundle(
+                        repo, gen, tr, source='strip', url=tmpbundleurl
+                    )
                 if not repo.ui.verbose:
                     repo.ui.popbuffer()
                 f.close()
@@ -229,19 +255,32 @@
                     undovfs.unlink(undofile)
                 except OSError as e:
                     if e.errno != errno.ENOENT:
-                        ui.warn(_('error removing %s: %s\n') %
-                                (undovfs.join(undofile),
-                                 stringutil.forcebytestr(e)))
+                        ui.warn(
+                            _('error removing %s: %s\n')
+                            % (
+                                undovfs.join(undofile),
+                                stringutil.forcebytestr(e),
+                            )
+                        )
 
-        except: # re-raises
+        except:  # re-raises
             if backupfile:
-                ui.warn(_("strip failed, backup bundle stored in '%s'\n")
-                        % vfs.join(backupfile))
+                ui.warn(
+                    _("strip failed, backup bundle stored in '%s'\n")
+                    % vfs.join(backupfile)
+                )
             if tmpbundlefile:
-                ui.warn(_("strip failed, unrecovered changes stored in '%s'\n")
-                        % vfs.join(tmpbundlefile))
-                ui.warn(_("(fix the problem, then recover the changesets with "
-                          "\"hg unbundle '%s'\")\n") % vfs.join(tmpbundlefile))
+                ui.warn(
+                    _("strip failed, unrecovered changes stored in '%s'\n")
+                    % vfs.join(tmpbundlefile)
+                )
+                ui.warn(
+                    _(
+                        "(fix the problem, then recover the changesets with "
+                        "\"hg unbundle '%s'\")\n"
+                    )
+                    % vfs.join(tmpbundlefile)
+                )
             raise
         else:
             if tmpbundlefile:
@@ -253,6 +292,7 @@
     # extensions can use it
     return backupfile
 
+
 def softstrip(ui, repo, nodelist, backup=True, topic='backup'):
     """perform a "soft" strip using the archived phase"""
     tostrip = [c.node() for c in repo.set('sort(%ln::)', nodelist)]
@@ -292,17 +332,19 @@
             newbmtarget = '.'
     return newbmtarget, updatebm
 
+
 def _createstripbackup(repo, stripbases, node, topic):
     # backup the changeset we are about to strip
     vfs = repo.vfs
     cl = repo.changelog
     backupfile = backupbundle(repo, stripbases, cl.heads(), node, topic)
-    repo.ui.status(_("saved backup bundle to %s\n") %
-                   vfs.join(backupfile))
-    repo.ui.log("backupbundle", "saved backup bundle to %s\n",
-                vfs.join(backupfile))
+    repo.ui.status(_("saved backup bundle to %s\n") % vfs.join(backupfile))
+    repo.ui.log(
+        "backupbundle", "saved backup bundle to %s\n", vfs.join(backupfile)
+    )
     return backupfile
 
+
 def safestriproots(ui, repo, nodes):
     """return list of roots of nodes where descendants are covered by nodes"""
     torev = repo.unfiltered().changelog.rev
@@ -316,10 +358,13 @@
     notstrip = revs - tostrip
     if notstrip:
         nodestr = ', '.join(sorted(short(repo[n].node()) for n in notstrip))
-        ui.warn(_('warning: orphaned descendants detected, '
-                  'not stripping %s\n') % nodestr)
+        ui.warn(
+            _('warning: orphaned descendants detected, ' 'not stripping %s\n')
+            % nodestr
+        )
     return [c.node() for c in repo.set('roots(%ld)', tostrip)]
 
+
 class stripcallback(object):
     """used as a transaction postclose callback"""
 
@@ -338,6 +383,7 @@
         if roots:
             strip(self.ui, self.repo, roots, self.backup, self.topic)
 
+
 def delayedstrip(ui, repo, nodelist, topic=None, backup=True):
     """like strip, but works inside transaction and won't strip irreverent revs
 
@@ -361,21 +407,25 @@
         callback.topic = topic
     callback.addnodes(nodelist)
 
+
 def stripmanifest(repo, striprev, tr, files):
     for revlog in manifestrevlogs(repo):
         revlog.strip(striprev, tr)
 
+
 def manifestrevlogs(repo):
     yield repo.manifestlog.getstorage(b'')
     if 'treemanifest' in repo.requirements:
         # This logic is safe if treemanifest isn't enabled, but also
         # pointless, so we skip it if treemanifest isn't enabled.
         for unencoded, encoded, size in repo.store.datafiles():
-            if (unencoded.startswith('meta/') and
-                unencoded.endswith('00manifest.i')):
+            if unencoded.startswith('meta/') and unencoded.endswith(
+                '00manifest.i'
+            ):
                 dir = unencoded[5:-12]
                 yield repo.manifestlog.getstorage(dir)
 
+
 def rebuildfncache(ui, repo):
     """Rebuilds the fncache file from repo history.
 
@@ -384,8 +434,12 @@
     repo = repo.unfiltered()
 
     if 'fncache' not in repo.requirements:
-        ui.warn(_('(not rebuilding fncache because repository does not '
-                  'support fncache)\n'))
+        ui.warn(
+            _(
+                '(not rebuilding fncache because repository does not '
+                'support fncache)\n'
+            )
+        )
         return
 
     with repo.lock():
@@ -396,8 +450,9 @@
         newentries = set()
         seenfiles = set()
 
-        progress = ui.makeprogress(_('rebuilding'), unit=_('changesets'),
-                                   total=len(repo))
+        progress = ui.makeprogress(
+            _('rebuilding'), unit=_('changesets'), total=len(repo)
+        )
         for rev in repo:
             progress.update(rev)
 
@@ -438,8 +493,10 @@
             ui.write(_('adding %s\n') % p)
 
         if addcount or removecount:
-            ui.write(_('%d items added, %d removed from fncache\n') %
-                     (addcount, removecount))
+            ui.write(
+                _('%d items added, %d removed from fncache\n')
+                % (addcount, removecount)
+            )
             fnc.entries = newentries
             fnc._dirty = True
 
@@ -448,6 +505,7 @@
         else:
             ui.write(_('fncache already up to date\n'))
 
+
 def deleteobsmarkers(obsstore, indices):
     """Delete some obsmarkers from obsstore and return how many were deleted
 
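
The repair.py hunks show black's treatment of binary operators in wrapped
expressions: when a %-formatted message overflows, the break comes before the
operator, so % leads the continuation line. A sketch; warn, path and reason
are illustrative:

def warn(msg):
    print("warning: " + msg)

path = "/tmp/backup.hg"
reason = "disk full"
warn(
    "strip failed, backup bundle stored in '%s' (%s)"
    % (path, reason)  # the % operator starts the continuation line
)
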
--- a/mercurial/repocache.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/repocache.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
     util,
 )
 
+
 class repoloader(object):
     """Load repositories in background thread
 
@@ -68,8 +69,9 @@
         loader thread.
         """
         if self._thread and self._thread.is_alive():
-            raise error.ProgrammingError(b'cannot obtain cached repo while '
-                                         b'loader is active')
+            raise error.ProgrammingError(
+                b'cannot obtain cached repo while ' b'loader is active'
+            )
         return self._cache.peek(path, None)
 
     def _mainloop(self):
@@ -99,10 +101,15 @@
         except KeyError:
             repo = hg.repository(self._ui, path).unfiltered()
         _warmupcache(repo)
-        repo.ui.log(b'repocache', b'loaded repo into cache: %s (in %.3fs)\n',
-                    path, util.timer() - start)
+        repo.ui.log(
+            b'repocache',
+            b'loaded repo into cache: %s (in %.3fs)\n',
+            path,
+            util.timer() - start,
+        )
         self._cache.insert(path, repo)
 
+
 # TODO: think about proper API of preloading cache
 def _warmupcache(repo):
     repo.invalidateall()
@@ -115,6 +122,7 @@
         obsolete.getrevs(repo, name)
     repo._phasecache.loadphaserevs(repo)
 
+
 # TODO: think about proper API of attaching preloaded attributes
 def copycache(srcrepo, destrepo):
     """Copy cached attributes from srcrepo to destrepo"""
--- a/mercurial/repoview.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/repoview.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,9 +19,8 @@
     tags as tagsmod,
     util,
 )
-from .utils import (
-    repoviewutil,
-)
+from .utils import repoviewutil
+
 
 def hideablerevs(repo):
     """Revision candidates to be hidden
@@ -37,6 +36,7 @@
     internals = frozenset(internals)
     return obsoletes | internals
 
+
 def pinnedrevs(repo):
     """revisions blocking hidden changesets from being filtered
     """
@@ -72,6 +72,7 @@
                 hidden.remove(p)
                 stack.append(p)
 
+
 def computehidden(repo, visibilityexceptions=None):
     """compute the set of hidden revision to filter
 
@@ -90,6 +91,7 @@
         _revealancestors(pfunc, hidden, visible)
     return frozenset(hidden)
 
+
 def computesecret(repo, visibilityexceptions=None):
     """compute the set of revision that can never be exposed through hgweb
 
@@ -98,6 +100,7 @@
     secrets = repo._phasecache.getrevset(repo, phases.remotehiddenphases)
     return frozenset(secrets)
 
+
 def computeunserved(repo, visibilityexceptions=None):
     """compute the set of revision that should be filtered when used a server
 
@@ -111,6 +114,7 @@
     else:
         return hiddens
 
+
 def computemutable(repo, visibilityexceptions=None):
     assert not repo.changelog.filteredrevs
     # fast check to avoid revset call on huge repo
@@ -120,6 +124,7 @@
         return frozenset(r for r in maymutable if getphase(repo, r))
     return frozenset()
 
+
 def computeimpactable(repo, visibilityexceptions=None):
     """Everything impactable by mutable revision
 
@@ -145,21 +150,25 @@
     firstmutable = max(0, firstmutable)
     return frozenset(pycompat.xrange(firstmutable, len(cl)))
 
+
 # function to compute filtered set
 #
 # When adding a new filter you MUST update the table at:
 #     mercurial.utils.repoviewutil.subsettable
 # Otherwise your filter will have to recompute all its branches cache
 # from scratch (very slow).
-filtertable = {'visible': computehidden,
-               'visible-hidden': computehidden,
-               'served.hidden': computesecret,
-               'served': computeunserved,
-               'immutable':  computemutable,
-               'base':  computeimpactable}
+filtertable = {
+    'visible': computehidden,
+    'visible-hidden': computehidden,
+    'served.hidden': computesecret,
+    'served': computeunserved,
+    'immutable': computemutable,
+    'base': computeimpactable,
+}
 
 _basefiltername = list(filtertable)
 
+
 def extrafilter(ui):
     """initialize extra filter and return its id
 
@@ -178,15 +187,18 @@
 
     if combine('base') not in filtertable:
         for name in _basefiltername:
+
             def extrafilteredrevs(repo, *args, **kwargs):
                 baserevs = filtertable[name](repo, *args, **kwargs)
                 extrarevs = frozenset(repo.revs(frevs))
                 return baserevs | extrarevs
+
             filtertable[combine(name)] = extrafilteredrevs
             if name in subsettable:
                 subsettable[combine(name)] = combine(subsettable[name])
     return fid
 
+
 def filterrevs(repo, filtername, visibilityexceptions=None):
     """returns set of filtered revision for this filter name
 
@@ -200,6 +212,7 @@
         repo.filteredrevcache[filtername] = func(repo.unfiltered())
     return repo.filteredrevcache[filtername]
 
+
 class repoview(object):
     """Provide a read/write view of a repo through a filtered changelog
 
@@ -241,8 +254,7 @@
         object.__setattr__(self, r'_clcachekey', None)
         object.__setattr__(self, r'_clcache', None)
         # revs which are exceptions and must not be hidden
-        object.__setattr__(self, r'_visibilityexceptions',
-                           visibilityexceptions)
+        object.__setattr__(self, r'_visibilityexceptions', visibilityexceptions)
 
     # not a propertycache on purpose; we shall implement a proper cache later
     @property
@@ -263,8 +275,9 @@
         newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed)
         # if cl.index is not unfiindex, unfi.changelog would be
         # recreated, and our clcache refers to garbage object
-        if (cl is not None and
-            (cl.index is not unfiindex or newkey != self._clcachekey)):
+        if cl is not None and (
+            cl.index is not unfiindex or newkey != self._clcachekey
+        ):
             cl = None
         # could have been made None by the previous if
         if cl is None:
@@ -285,9 +298,11 @@
         return self.unfiltered().filtered(name, visibilityexceptions)
 
     def __repr__(self):
-        return r'<%s:%s %r>' % (self.__class__.__name__,
-                                pycompat.sysstr(self.filtername),
-                                self.unfiltered())
+        return r'<%s:%s %r>' % (
+            self.__class__.__name__,
+            pycompat.sysstr(self.filtername),
+            self.unfiltered(),
+        )
 
     # every attribute access is forwarded to the proxied repo
     def __getattr__(self, attr):
@@ -299,15 +314,19 @@
     def __delattr__(self, attr):
         return delattr(self._unfilteredrepo, attr)
 
+
 # Python <3.4 easily leaks types via __mro__. See
 # https://bugs.python.org/issue17950. We cache dynamically created types
 # so they won't be leaked on every invocation of repo.filtered().
 _filteredrepotypes = weakref.WeakKeyDictionary()
 
+
 def newtype(base):
     """Create a new type with the repoview mixin and the given base class"""
     if base not in _filteredrepotypes:
+
         class filteredrepo(repoview, base):
             pass
+
         _filteredrepotypes[base] = filteredrepo
     return _filteredrepotypes[base]
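
The filtertable rewrite above is black's standard shape for an over-long dict
literal: one key per line plus a trailing comma, so adding an entry later
touches only one line of diff. Sketch with stand-in filter functions (the
names are illustrative):

def compute_visible(repo):
    return frozenset()

def compute_served(repo):
    return frozenset()

filter_table = {
    'visible': compute_visible,
    'served': compute_served,
}  # trailing comma keeps the exploded form stable on re-formats

print(sorted(filter_table))
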
--- a/mercurial/revlog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revlog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -56,9 +56,7 @@
     REVIDX_RAWTEXT_CHANGING_FLAGS,
     REVIDX_SIDEDATA,
 )
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from . import (
     ancestor,
     dagop,
@@ -118,29 +116,36 @@
 def ellipsisreadprocessor(rl, text):
     return text, False, {}
 
+
 def ellipsiswriteprocessor(rl, text, sidedata):
     return text, False
 
+
 def ellipsisrawprocessor(rl, text):
     return False
 
+
 ellipsisprocessor = (
     ellipsisreadprocessor,
     ellipsiswriteprocessor,
     ellipsisrawprocessor,
 )
 
+
 def getoffset(q):
     return int(q >> 16)
 
+
 def gettype(q):
     return int(q & 0xFFFF)
 
+
 def offset_type(offset, type):
     if (type & ~flagutil.REVIDX_KNOWN_FLAGS) != 0:
         raise ValueError('unknown revlog index flags')
     return int(int(offset) << 16 | type)
 
+
 @attr.s(slots=True, frozen=True)
 class _revisioninfo(object):
     """Information about a revision that allows building its fulltext
@@ -152,6 +157,7 @@
 
     One of btext[0] or cachedelta must be set.
     """
+
     node = attr.ib()
     p1 = attr.ib()
     p2 = attr.ib()
@@ -160,6 +166,7 @@
     cachedelta = attr.ib()
     flags = attr.ib()
 
+
 @interfaceutil.implementer(repository.irevisiondelta)
 @attr.s(slots=True)
 class revlogrevisiondelta(object):
@@ -173,6 +180,7 @@
     delta = attr.ib()
     linknode = attr.ib(default=None)
 
+
 @interfaceutil.implementer(repository.iverifyproblem)
 @attr.s(frozen=True)
 class revlogproblem(object):
@@ -180,6 +188,7 @@
     error = attr.ib(default=None)
     node = attr.ib(default=None)
 
+
 # index v0:
 #  4 bytes: offset
 #  4 bytes: compressed length
@@ -192,12 +201,14 @@
 indexformatv0_pack = indexformatv0.pack
 indexformatv0_unpack = indexformatv0.unpack
 
+
 class revlogoldindex(list):
     def __getitem__(self, i):
         if i == -1:
             return (0, 0, 0, -1, -1, -1, -1, nullid)
         return list.__getitem__(self, i)
 
+
 class revlogoldio(object):
     def __init__(self):
         self.size = indexformatv0.size
@@ -209,12 +220,20 @@
         n = off = 0
         l = len(data)
         while off + s <= l:
-            cur = data[off:off + s]
+            cur = data[off : off + s]
             off += s
             e = indexformatv0_unpack(cur)
             # transform to revlogv1 format
-            e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3],
-                  nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6])
+            e2 = (
+                offset_type(e[0], 0),
+                e[1],
+                -1,
+                e[2],
+                e[3],
+                nodemap.get(e[4], nullrev),
+                nodemap.get(e[5], nullrev),
+                e[6],
+            )
             index.append(e2)
             nodemap[e[6]] = n
             n += 1
@@ -223,12 +242,21 @@
 
     def packentry(self, entry, node, version, rev):
         if gettype(entry[0]):
-            raise error.RevlogError(_('index entry flags need revlog '
-                                      'version 1'))
-        e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4],
-              node(entry[5]), node(entry[6]), entry[7])
+            raise error.RevlogError(
+                _('index entry flags need revlog ' 'version 1')
+            )
+        e2 = (
+            getoffset(entry[0]),
+            entry[1],
+            entry[3],
+            entry[4],
+            node(entry[5]),
+            node(entry[6]),
+            entry[7],
+        )
         return indexformatv0_pack(*e2)
 
+
 # index ng:
 #  6 bytes: offset
 #  2 bytes: flags
@@ -247,7 +275,8 @@
 
 # corresponds to uncompressed length of indexformatng (2 gigs, 4-byte
 # signed integer)
-_maxentrysize = 0x7fffffff
+_maxentrysize = 0x7FFFFFFF
+
 
 class revlogio(object):
     def __init__(self):
@@ -264,6 +293,7 @@
             p = versionformat_pack(version) + p[4:]
         return p
 
+
 class revlog(object):
     """
     the underlying revision storage object
@@ -304,9 +334,16 @@
 
     _flagserrorclass = error.RevlogError
 
-    def __init__(self, opener, indexfile, datafile=None, checkambig=False,
-                 mmaplargeindex=False, censorable=False,
-                 upperboundcomp=None):
+    def __init__(
+        self,
+        opener,
+        indexfile,
+        datafile=None,
+        checkambig=False,
+        mmaplargeindex=False,
+        censorable=False,
+        upperboundcomp=None,
+    ):
         """
         create a revlog object
 
@@ -410,18 +447,24 @@
             flagutil.insertflagprocessor(flag, processor, self._flagprocessors)
 
         if self._chunkcachesize <= 0:
-            raise error.RevlogError(_('revlog chunk cache size %r is not '
-                                      'greater than 0') % self._chunkcachesize)
+            raise error.RevlogError(
+                _('revlog chunk cache size %r is not ' 'greater than 0')
+                % self._chunkcachesize
+            )
         elif self._chunkcachesize & (self._chunkcachesize - 1):
-            raise error.RevlogError(_('revlog chunk cache size %r is not a '
-                                      'power of 2') % self._chunkcachesize)
+            raise error.RevlogError(
+                _('revlog chunk cache size %r is not a ' 'power of 2')
+                % self._chunkcachesize
+            )
 
         indexdata = ''
         self._initempty = True
         try:
             with self._indexfp() as f:
-                if (mmapindexthreshold is not None and
-                    self.opener.fstat(f).st_size >= mmapindexthreshold):
+                if (
+                    mmapindexthreshold is not None
+                    and self.opener.fstat(f).st_size >= mmapindexthreshold
+                ):
                     # TODO: should .close() to release resources without
                     # relying on Python GC
                     indexdata = util.buffer(util.mmapread(f))
@@ -445,35 +488,39 @@
 
         if fmt == REVLOGV0:
             if flags:
-                raise error.RevlogError(_('unknown flags (%#04x) in version %d '
-                                          'revlog %s') %
-                                        (flags >> 16, fmt, self.indexfile))
+                raise error.RevlogError(
+                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    % (flags >> 16, fmt, self.indexfile)
+                )
 
             self._inline = False
             self._generaldelta = False
 
         elif fmt == REVLOGV1:
             if flags & ~REVLOGV1_FLAGS:
-                raise error.RevlogError(_('unknown flags (%#04x) in version %d '
-                                          'revlog %s') %
-                                        (flags >> 16, fmt, self.indexfile))
+                raise error.RevlogError(
+                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    % (flags >> 16, fmt, self.indexfile)
+                )
 
             self._inline = versionflags & FLAG_INLINE_DATA
             self._generaldelta = versionflags & FLAG_GENERALDELTA
 
         elif fmt == REVLOGV2:
             if flags & ~REVLOGV2_FLAGS:
-                raise error.RevlogError(_('unknown flags (%#04x) in version %d '
-                                          'revlog %s') %
-                                        (flags >> 16, fmt, self.indexfile))
+                raise error.RevlogError(
+                    _('unknown flags (%#04x) in version %d ' 'revlog %s')
+                    % (flags >> 16, fmt, self.indexfile)
+                )
 
             self._inline = versionflags & FLAG_INLINE_DATA
             # generaldelta implied by version 2 revlogs.
             self._generaldelta = True
 
         else:
-            raise error.RevlogError(_('unknown version (%d) in revlog %s') %
-                                    (fmt, self.indexfile))
+            raise error.RevlogError(
+                _('unknown version (%d) in revlog %s') % (fmt, self.indexfile)
+            )
         # sparse-revlog can't be on without general-delta (issue6056)
         if not self._generaldelta:
             self._sparserevlog = False
@@ -486,8 +533,7 @@
         try:
             d = self._io.parseindex(indexdata, self._inline)
         except (ValueError, IndexError):
-            raise error.RevlogError(_("index %s is corrupted") %
-                                    self.indexfile)
+            raise error.RevlogError(_("index %s is corrupted") % self.indexfile)
         self.index, nodemap, self._chunkcache = d
         if nodemap is not None:
             self.nodemap = self._nodecache = nodemap
@@ -544,12 +590,16 @@
 
     def tip(self):
         return self.node(len(self.index) - 1)
+
     def __contains__(self, rev):
         return 0 <= rev < len(self)
+
     def __len__(self):
         return len(self.index)
+
     def __iter__(self):
         return iter(pycompat.xrange(len(self)))
+
     def revs(self, start=0, stop=None):
         """iterate over all rev in this revlog (from start to stop)"""
         return storageutil.iterrevs(len(self), start=start, stop=stop)
@@ -576,8 +626,9 @@
         # the rawtext content that the delta will be based on, and two clients
         # could have the same revlog node with different flags (i.e. different
         # rawtext contents) and the delta could be incompatible.
-        if ((self.flags(baserev) & REVIDX_RAWTEXT_CHANGING_FLAGS)
-            or (self.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS)):
+        if (self.flags(baserev) & REVIDX_RAWTEXT_CHANGING_FLAGS) or (
+            self.flags(rev) & REVIDX_RAWTEXT_CHANGING_FLAGS
+        ):
             return False
         return True
 
@@ -704,7 +755,7 @@
     def parents(self, node):
         i = self.index
         d = i[self.rev(node)]
-        return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline
+        return i[d[5]][7], i[d[6]][7]  # map revisions to nodes inline
 
     def chainlen(self, rev):
         return self._chaininfo(rev)[0]
@@ -966,7 +1017,7 @@
                 return nonodes
             lowestrev = min([self.rev(n) for n in roots])
         else:
-            roots = [nullid] # Everybody's a descendant of nullid
+            roots = [nullid]  # Everybody's a descendant of nullid
             lowestrev = nullrev
         if (lowestrev == nullrev) and (heads is None):
             # We want _all_ the nodes!
@@ -1005,11 +1056,12 @@
                     if n not in ancestors:
                         # If we are possibly a descendant of one of the roots
                         # and we haven't already been marked as an ancestor
-                        ancestors.add(n) # Mark as ancestor
+                        ancestors.add(n)  # Mark as ancestor
                         # Add non-nullid parents to list of nodes to tag.
-                        nodestotag.update([p for p in self.parents(n) if
-                                           p != nullid])
-                    elif n in heads: # We've seen it before, is it a fake head?
+                        nodestotag.update(
+                            [p for p in self.parents(n) if p != nullid]
+                        )
+                    elif n in heads:  # We've seen it before, is it a fake head?
                         # So it is, real heads should not be the ancestors of
                         # any other heads.
                         heads.pop(n)
@@ -1139,8 +1191,9 @@
 
         stoprevs = set(self.rev(n) for n in stop or [])
 
-        revs = dagop.headrevssubset(self.revs, self.parentrevs, startrev=start,
-                                    stoprevs=stoprevs)
+        revs = dagop.headrevssubset(
+            self.revs, self.parentrevs, startrev=start, stoprevs=stoprevs
+        )
 
         return [self.node(rev) for rev in revs]
 
@@ -1168,7 +1221,7 @@
         """calculate all the heads of the common ancestors of revs"""
         try:
             ancs = self.index.commonancestorsheads(*revs)
-        except (AttributeError, OverflowError): # C implementation failed
+        except (AttributeError, OverflowError):  # C implementation failed
             ancs = ancestor.commonancestorsheads(self.parentrevs, *revs)
         return ancs
 
@@ -1199,11 +1252,13 @@
 
         If includepath is True, return (<roots>::<heads>)."""
         try:
-            return self.index.reachableroots2(minroot, heads, roots,
-                                              includepath)
+            return self.index.reachableroots2(
+                minroot, heads, roots, includepath
+            )
         except AttributeError:
-            return dagop._reachablerootspure(self.parentrevs,
-                                             minroot, roots, heads, includepath)
+            return dagop._reachablerootspure(
+                self.parentrevs, minroot, roots, heads, includepath
+            )
 
     def ancestor(self, a, b):
         """calculate the "best" common ancestor of nodes a and b"""
@@ -1227,10 +1282,10 @@
             # odds of a binary node being all hex in ASCII are 1 in 10**25
             try:
                 node = id
-                self.rev(node) # quick search the index
+                self.rev(node)  # quick search the index
                 return node
             except error.LookupError:
-                pass # may be partial hex id
+                pass  # may be partial hex id
         try:
             # str(rev)
             rev = int(id)
@@ -1271,7 +1326,8 @@
             # fast path: for unfiltered changelog, radix tree is accurate
             if not getattr(self, 'filteredrevs', None):
                 raise error.AmbiguousPrefixLookupError(
-                    id, self.indexfile, _('ambiguous identifier'))
+                    id, self.indexfile, _('ambiguous identifier')
+                )
             # fall through to slow path that filters hidden revisions
         except (AttributeError, ValueError):
             # we are pure python, or key was too short to search radix tree
@@ -1284,10 +1340,11 @@
             try:
                 # hex(node)[:...]
                 l = len(id) // 2  # grab an even number of digits
-                prefix = bin(id[:l * 2])
+                prefix = bin(id[: l * 2])
                 nl = [e[7] for e in self.index if e[7].startswith(prefix)]
-                nl = [n for n in nl if hex(n).startswith(id) and
-                      self.hasnode(n)]
+                nl = [
+                    n for n in nl if hex(n).startswith(id) and self.hasnode(n)
+                ]
                 if nullhex.startswith(id):
                     nl.append(nullid)
                 if len(nl) > 0:
@@ -1295,7 +1352,8 @@
                         self._pcache[id] = nl[0]
                         return nl[0]
                     raise error.AmbiguousPrefixLookupError(
-                        id, self.indexfile, _('ambiguous identifier'))
+                        id, self.indexfile, _('ambiguous identifier')
+                    )
                 if maybewdir:
                     raise error.WdirUnsupported
                 return None
@@ -1318,6 +1376,7 @@
 
     def shortest(self, node, minlength=1):
         """Find the shortest unambiguous prefix that matches node."""
+
         def isvalid(prefix):
             try:
                 matchednode = self._partialmatch(prefix)
@@ -1402,8 +1461,9 @@
         # involving reading the revlog backwards.
         cachesize = self._chunkcachesize
         realoffset = offset & ~(cachesize - 1)
-        reallength = (((offset + length + cachesize) & ~(cachesize - 1))
-                      - realoffset)
+        reallength = (
+            (offset + length + cachesize) & ~(cachesize - 1)
+        ) - realoffset
         with self._datareadfp(df) as df:
             df.seek(realoffset)
             d = df.read(reallength)
@@ -1413,19 +1473,33 @@
             startoffset = offset - realoffset
             if len(d) - startoffset < length:
                 raise error.RevlogError(
-                    _('partial read of revlog %s; expected %d bytes from '
-                      'offset %d, got %d') %
-                    (self.indexfile if self._inline else self.datafile,
-                     length, realoffset, len(d) - startoffset))
+                    _(
+                        'partial read of revlog %s; expected %d bytes from '
+                        'offset %d, got %d'
+                    )
+                    % (
+                        self.indexfile if self._inline else self.datafile,
+                        length,
+                        realoffset,
+                        len(d) - startoffset,
+                    )
+                )
 
             return util.buffer(d, startoffset, length)
 
         if len(d) < length:
             raise error.RevlogError(
-                _('partial read of revlog %s; expected %d bytes from offset '
-                  '%d, got %d') %
-                (self.indexfile if self._inline else self.datafile,
-                 length, offset, len(d)))
+                _(
+                    'partial read of revlog %s; expected %d bytes from offset '
+                    '%d, got %d'
+                )
+                % (
+                    self.indexfile if self._inline else self.datafile,
+                    length,
+                    offset,
+                    len(d),
+                )
+            )
 
         return d
 
@@ -1448,7 +1522,7 @@
         cacheend = cachestart + length
         if cachestart >= 0 and cacheend <= l:
             if cachestart == 0 and cacheend == l:
-                return d # avoid a copy
+                return d  # avoid a copy
             return util.buffer(d, cachestart, cacheend - cachestart)
 
         return self._readsegment(offset, length, df=df)
@@ -1525,8 +1599,9 @@
         if not self._withsparseread:
             slicedchunks = (revs,)
         else:
-            slicedchunks = deltautil.slicechunk(self, revs,
-                                                targetsize=targetsize)
+            slicedchunks = deltautil.slicechunk(
+                self, revs, targetsize=targetsize
+            )
 
         for revschunk in slicedchunks:
             firstrev = revschunk[0]
@@ -1604,18 +1679,17 @@
         if rev1 != nullrev and self.deltaparent(rev2) == rev1:
             return bytes(self._chunk(rev2))
 
-        return mdiff.textdiff(self.rawdata(rev1),
-                              self.rawdata(rev2))
+        return mdiff.textdiff(self.rawdata(rev1), self.rawdata(rev2))
 
     def _processflags(self, text, flags, operation, raw=False):
         """deprecated entry point to access flag processors"""
-        msg = ('_processflag(...) use the specialized variant')
+        msg = '_processflag(...) use the specialized variant'
         util.nouideprecwarn(msg, '5.2', stacklevel=2)
         if raw:
             return text, flagutil.processflagsraw(self, text, flags)
         elif operation == 'read':
             return flagutil.processflagsread(self, text, flags)
-        else: # write operation
+        else:  # write operation
             return flagutil.processflagswrite(self, text, flags)
 
     def revision(self, nodeorrev, _df=None, raw=False):
@@ -1628,8 +1702,10 @@
         to True when generating changegroups or in debug commands.
         """
         if raw:
-            msg = ('revlog.revision(..., raw=True) is deprecated, '
-                   'use revlog.rawdata(...)')
+            msg = (
+                'revlog.revision(..., raw=True) is deprecated, '
+                'use revlog.rawdata(...)'
+            )
             util.nouideprecwarn(msg, '5.2', stacklevel=2)
         return self._revisiondata(nodeorrev, _df, raw=raw)[0]
 
@@ -1684,8 +1760,7 @@
                 r = flagutil.processflagsread(self, rawtext, flags)
             except error.SidedataHashError as exc:
                 msg = _("integrity check failed on %s:%s sidedata key %d")
-                msg %= (self.indexfile, pycompat.bytestr(rev),
-                        exc.sidedatakey)
+                msg %= (self.indexfile, pycompat.bytestr(rev), exc.sidedatakey)
                 raise error.RevlogError(msg)
             text, validatehash, sidedata = r
         if validatehash:
@@ -1735,7 +1810,7 @@
             bins = bins[1:]
 
         rawtext = mdiff.patches(basetext, bins)
-        del basetext # let us have a chance to free memory early
+        del basetext  # let us have a chance to free memory early
         return (rev, rawtext, False)
 
     def rawdata(self, nodeorrev, _df=None):
@@ -1775,8 +1850,10 @@
                 revornode = rev
                 if revornode is None:
                     revornode = templatefilters.short(hex(node))
-                raise error.RevlogError(_("integrity check failed on %s:%s")
-                    % (self.indexfile, pycompat.bytestr(revornode)))
+                raise error.RevlogError(
+                    _("integrity check failed on %s:%s")
+                    % (self.indexfile, pycompat.bytestr(revornode))
+                )
         except error.RevlogError:
             if self._censorable and storageutil.iscensoredtext(text):
                 raise error.CensoredNodeError(self.indexfile, node, text)
@@ -1790,14 +1867,17 @@
         to use multiple index and data files.
         """
         tiprev = len(self) - 1
-        if (not self._inline or
-            (self.start(tiprev) + self.length(tiprev)) < _maxinline):
+        if (
+            not self._inline
+            or (self.start(tiprev) + self.length(tiprev)) < _maxinline
+        ):
             return
 
         trinfo = tr.find(self.indexfile)
         if trinfo is None:
-            raise error.RevlogError(_("%s not found in the transaction")
-                                    % self.indexfile)
+            raise error.RevlogError(
+                _("%s not found in the transaction") % self.indexfile
+            )
 
         trindex = trinfo[2]
         if trindex is not None:
@@ -1838,9 +1918,19 @@
         """called when trying to add a node already stored.
         """
 
-    def addrevision(self, text, transaction, link, p1, p2, cachedelta=None,
-                    node=None, flags=REVIDX_DEFAULT_FLAGS, deltacomputer=None,
-                    sidedata=None):
+    def addrevision(
+        self,
+        text,
+        transaction,
+        link,
+        p1,
+        p2,
+        cachedelta=None,
+        node=None,
+        flags=REVIDX_DEFAULT_FLAGS,
+        deltacomputer=None,
+        sidedata=None,
+    ):
         """add a revision to the log
 
         text - the revision data to add
@@ -1856,8 +1946,9 @@
             multiple calls
         """
         if link == nullrev:
-            raise error.RevlogError(_("attempted to add linkrev -1 to %s")
-                                    % self.indexfile)
+            raise error.RevlogError(
+                _("attempted to add linkrev -1 to %s") % self.indexfile
+            )
 
         if sidedata is None:
             sidedata = {}
@@ -1865,15 +1956,16 @@
         elif not self.hassidedata:
             raise error.ProgrammingError(
                 _("trying to add sidedata to a revlog who don't support them")
-                )
+            )
         else:
             flags |= REVIDX_SIDEDATA
 
         if flags:
             node = node or self.hash(text, p1, p2)
 
-        rawtext, validatehash = flagutil.processflagswrite(self, text, flags,
-                                                           sidedata=sidedata)
+        rawtext, validatehash = flagutil.processflagswrite(
+            self, text, flags, sidedata=sidedata
+        )
 
         # If the flag processor modifies the revision data, ignore any provided
         # cachedelta.
@@ -1883,7 +1975,8 @@
         if len(rawtext) > _maxentrysize:
             raise error.RevlogError(
                 _("%s: size of %d bytes exceeds maximum revlog storage of 2GiB")
-                % (self.indexfile, len(rawtext)))
+                % (self.indexfile, len(rawtext))
+            )
 
         node = node or self.hash(rawtext, p1, p2)
         if node in self.nodemap:
@@ -1892,12 +1985,30 @@
         if validatehash:
             self.checkhash(rawtext, node, p1=p1, p2=p2)
 
-        return self.addrawrevision(rawtext, transaction, link, p1, p2, node,
-                                   flags, cachedelta=cachedelta,
-                                   deltacomputer=deltacomputer)
-
-    def addrawrevision(self, rawtext, transaction, link, p1, p2, node, flags,
-                       cachedelta=None, deltacomputer=None):
+        return self.addrawrevision(
+            rawtext,
+            transaction,
+            link,
+            p1,
+            p2,
+            node,
+            flags,
+            cachedelta=cachedelta,
+            deltacomputer=deltacomputer,
+        )
+
+    def addrawrevision(
+        self,
+        rawtext,
+        transaction,
+        link,
+        p1,
+        p2,
+        node,
+        flags,
+        cachedelta=None,
+        deltacomputer=None,
+    ):
         """add a raw revision with known flags, node and parents
         useful when reusing a revision not stored in this revlog (ex: received
         over wire, or read from an external bundle).
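
`addrevision` and `addrawrevision` pick up the layout black produces once a signature cannot fit on one line: every parameter on its own line, each followed by a comma, including the last. A minimal stand-in (all names invented):

    def add_entry(
        store,
        transaction,
        link,
        p1,
        p2,
        cachedelta=None,
        node=None,
        flags=0,
    ):
        # The trailing comma after the final parameter means adding a new
        # one later shows up as a one-line diff.
        return (store, link, node, flags)

    print(add_entry({}, None, 0, b'p1', b'p2'))

One practical consequence: the one-parameter-per-line form keeps future signature changes out of unrelated lines, which reduces blame and merge noise in a file this size.
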
@@ -1907,9 +2018,19 @@
             dfh = self._datafp("a+")
         ifh = self._indexfp("a+")
         try:
-            return self._addrevision(node, rawtext, transaction, link, p1, p2,
-                                     flags, cachedelta, ifh, dfh,
-                                     deltacomputer=deltacomputer)
+            return self._addrevision(
+                node,
+                rawtext,
+                transaction,
+                link,
+                p1,
+                p2,
+                flags,
+                cachedelta,
+                ifh,
+                dfh,
+                deltacomputer=deltacomputer,
+            )
         finally:
             if dfh:
                 dfh.close()
@@ -1966,8 +2087,10 @@
             try:
                 return _zlibdecompress(data)
             except zlib.error as e:
-                raise error.RevlogError(_('revlog decompress error: %s') %
-                                        stringutil.forcebytestr(e))
+                raise error.RevlogError(
+                    _('revlog decompress error: %s')
+                    % stringutil.forcebytestr(e)
+                )
         # '\0' is more common than 'u' so it goes first.
         elif t == '\0':
             return data
@@ -1986,9 +2109,21 @@
 
         return compressor.decompress(data)
 
-    def _addrevision(self, node, rawtext, transaction, link, p1, p2, flags,
-                     cachedelta, ifh, dfh, alwayscache=False,
-                     deltacomputer=None):
+    def _addrevision(
+        self,
+        node,
+        rawtext,
+        transaction,
+        link,
+        p1,
+        p2,
+        flags,
+        cachedelta,
+        ifh,
+        dfh,
+        alwayscache=False,
+        deltacomputer=None,
+    ):
         """internal function to add revisions to the log
 
         see addrevision for argument descriptions.
@@ -2003,11 +2138,13 @@
           if both are set, they must correspond to each other.
         """
         if node == nullid:
-            raise error.RevlogError(_("%s: attempt to add null revision") %
-                                    self.indexfile)
+            raise error.RevlogError(
+                _("%s: attempt to add null revision") % self.indexfile
+            )
         if node == wdirid or node in wdirfilenodeids:
-            raise error.RevlogError(_("%s: attempt to add wdir revision") %
-                                    self.indexfile)
+            raise error.RevlogError(
+                _("%s: attempt to add wdir revision") % self.indexfile
+            )
 
         if self._inline:
             fh = ifh
@@ -2027,8 +2164,9 @@
             # need the rawtext size before it was changed by flag processors,
             # which is the non-raw size. use revlog explicitly to avoid
             # filelog's extra logic that might remove metadata size.
-            textlen = mdiff.patchedsize(revlog.size(self, cachedelta[0]),
-                                        cachedelta[1])
+            textlen = mdiff.patchedsize(
+                revlog.size(self, cachedelta[0]), cachedelta[1]
+            )
         else:
             textlen = len(rawtext)
 
@@ -2039,8 +2177,16 @@
 
         deltainfo = deltacomputer.finddeltainfo(revinfo, fh)
 
-        e = (offset_type(offset, flags), deltainfo.deltalen, textlen,
-             deltainfo.base, link, p1r, p2r, node)
+        e = (
+            offset_type(offset, flags),
+            deltainfo.deltalen,
+            textlen,
+            deltainfo.base,
+            link,
+            p1r,
+            p2r,
+            node,
+        )
         self.index.append(e)
         self.nodemap[node] = curr
 
@@ -2050,15 +2196,16 @@
             self._nodepos = curr
 
         entry = self._io.packentry(e, self.node, self.version, curr)
-        self._writeentry(transaction, ifh, dfh, entry, deltainfo.data,
-                         link, offset)
+        self._writeentry(
+            transaction, ifh, dfh, entry, deltainfo.data, link, offset
+        )
 
         rawtext = btext[0]
 
         if alwayscache and rawtext is None:
             rawtext = deltacomputer.buildtext(revinfo, fh)
 
-        if type(rawtext) == bytes: # only accept immutable objects
+        if type(rawtext) == bytes:  # only accept immutable objects
             self._revisioncache = (node, curr, rawtext)
         self._chainbasecache[curr] = deltainfo.chainbase
         return node
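
The index entry `e` above gets the same treatment as long calls: a tuple literal that overflows is rewritten with one element per line plus a trailing comma. A self-contained sketch; the comments follow the field order visible in the hunk, while the values themselves are invented:

    entry = (
        0,  # offset/flags word, as packed by offset_type()
        42,  # length of the stored delta
        128,  # length of the full text
        -1,  # delta base revision
        7,  # linkrev
        5,  # first parent revision
        -1,  # second parent revision
        b'\x00' * 20,  # node hash
    )
    assert len(entry) == 8
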
@@ -2126,6 +2273,7 @@
             transaction.add(self.indexfile, isize, r)
             transaction.add(self.datafile, end)
             dfh = self._datafp("a+")
+
         def flush():
             if dfh:
                 dfh.flush()
@@ -2150,12 +2298,14 @@
 
                 for p in (p1, p2):
                     if p not in self.nodemap:
-                        raise error.LookupError(p, self.indexfile,
-                                                _('unknown parent'))
+                        raise error.LookupError(
+                            p, self.indexfile, _('unknown parent')
+                        )
 
                 if deltabase not in self.nodemap:
-                    raise error.LookupError(deltabase, self.indexfile,
-                                            _('unknown delta base'))
+                    raise error.LookupError(
+                        deltabase, self.indexfile, _('unknown delta base')
+                    )
 
                 baserev = self.rev(deltabase)
 
@@ -2166,8 +2316,9 @@
                     oldlen = self.rawsize(baserev)
                     newlen = len(delta) - hlen
                     if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen):
-                        raise error.CensoredBaseError(self.indexfile,
-                                                      self.node(baserev))
+                        raise error.CensoredBaseError(
+                            self.indexfile, self.node(baserev)
+                        )
 
                 if not flags and self._peek_iscensored(baserev, delta, flush):
                     flags |= REVIDX_ISCENSORED
@@ -2179,11 +2330,20 @@
                 # We're only using addgroup() in the context of changegroup
                 # generation so the revision data can always be handled as raw
                 # by the flagprocessor.
-                self._addrevision(node, None, transaction, link,
-                                  p1, p2, flags, (baserev, delta),
-                                  ifh, dfh,
-                                  alwayscache=bool(addrevisioncb),
-                                  deltacomputer=deltacomputer)
+                self._addrevision(
+                    node,
+                    None,
+                    transaction,
+                    link,
+                    p1,
+                    p2,
+                    flags,
+                    (baserev, delta),
+                    ifh,
+                    dfh,
+                    alwayscache=bool(addrevisioncb),
+                    deltacomputer=deltacomputer,
+                )
 
                 if addrevisioncb:
                     addrevisioncb(self, node)
@@ -2224,9 +2384,13 @@
         Returns a tuple containing the minimum rev and a set of all revs that
         have linkrevs that will be broken by this strip.
         """
-        return storageutil.resolvestripinfo(minlink, len(self) - 1,
-                                            self.headrevs(),
-                                            self.linkrev, self.parentrevs)
+        return storageutil.resolvestripinfo(
+            minlink,
+            len(self) - 1,
+            self.headrevs(),
+            self.linkrev,
+            self.parentrevs,
+        )
 
     def strip(self, minlink, transaction):
         """truncate the revlog on the first revision with a linkrev >= minlink
@@ -2319,22 +2483,33 @@
             res.append(self.datafile)
         return res
 
-    def emitrevisions(self, nodes, nodesorder=None, revisiondata=False,
-                      assumehaveparentrevisions=False,
-                      deltamode=repository.CG_DELTAMODE_STD):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=repository.CG_DELTAMODE_STD,
+    ):
         if nodesorder not in ('nodes', 'storage', 'linear', None):
-            raise error.ProgrammingError('unhandled value for nodesorder: %s' %
-                                         nodesorder)
+            raise error.ProgrammingError(
+                'unhandled value for nodesorder: %s' % nodesorder
+            )
 
         if nodesorder is None and not self._generaldelta:
             nodesorder = 'storage'
 
-        if (not self._storedeltachains and
-                deltamode != repository.CG_DELTAMODE_PREV):
+        if (
+            not self._storedeltachains
+            and deltamode != repository.CG_DELTAMODE_PREV
+        ):
             deltamode = repository.CG_DELTAMODE_FULL
 
         return storageutil.emitrevisions(
-            self, nodes, nodesorder, revlogrevisiondelta,
+            self,
+            nodes,
+            nodesorder,
+            revlogrevisiondelta,
             deltaparentfn=self.deltaparent,
             candeltafn=self.candelta,
             rawsizefn=self.rawsize,
@@ -2342,7 +2517,8 @@
             flagsfn=self.flags,
             deltamode=deltamode,
             revisiondata=revisiondata,
-            assumehaveparentrevisions=assumehaveparentrevisions)
+            assumehaveparentrevisions=assumehaveparentrevisions,
+        )
 
     DELTAREUSEALWAYS = 'always'
     DELTAREUSESAMEREVS = 'samerevs'
@@ -2352,8 +2528,14 @@
 
     DELTAREUSEALL = {'always', 'samerevs', 'never', 'fulladd'}
 
-    def clone(self, tr, destrevlog, addrevisioncb=None,
-            deltareuse=DELTAREUSESAMEREVS, forcedeltabothparents=None):
+    def clone(
+        self,
+        tr,
+        destrevlog,
+        addrevisioncb=None,
+        deltareuse=DELTAREUSESAMEREVS,
+        forcedeltabothparents=None,
+    ):
         """Copy this revlog to another, possibly with format changes.
 
         The destination revlog will contain the same revisions and nodes.
@@ -2425,16 +2607,18 @@
 
             destrevlog._deltabothparents = forcedeltabothparents or oldamd
 
-            self._clone(tr, destrevlog, addrevisioncb, deltareuse,
-                        forcedeltabothparents)
+            self._clone(
+                tr, destrevlog, addrevisioncb, deltareuse, forcedeltabothparents
+            )
 
         finally:
             destrevlog._lazydelta = oldlazydelta
             destrevlog._lazydeltabase = oldlazydeltabase
             destrevlog._deltabothparents = oldamd
 
-    def _clone(self, tr, destrevlog, addrevisioncb, deltareuse,
-               forcedeltabothparents):
+    def _clone(
+        self, tr, destrevlog, addrevisioncb, deltareuse, forcedeltabothparents
+    ):
         """perform the core duty of `revlog.clone` after parameter processing"""
         deltacomputer = deltautil.deltacomputer(destrevlog)
         index = self.index
@@ -2443,7 +2627,7 @@
 
             # Some classes override linkrev to take filtered revs into
             # account. Use raw entry from index.
-            flags = entry[0] & 0xffff
+            flags = entry[0] & 0xFFFF
             linkrev = entry[4]
             p1 = index[entry[5]][7]
             p2 = index[entry[6]][7]
@@ -2455,10 +2639,17 @@
             rawtext = None
             if deltareuse == self.DELTAREUSEFULLADD:
                 text = self.revision(rev)
-                destrevlog.addrevision(text, tr, linkrev, p1, p2,
-                                       cachedelta=cachedelta,
-                                       node=node, flags=flags,
-                                       deltacomputer=deltacomputer)
+                destrevlog.addrevision(
+                    text,
+                    tr,
+                    linkrev,
+                    p1,
+                    p2,
+                    cachedelta=cachedelta,
+                    node=node,
+                    flags=flags,
+                    deltacomputer=deltacomputer,
+                )
             else:
                 if destrevlog._lazydelta:
                     dp = self.deltaparent(rev)
@@ -2468,15 +2659,26 @@
                 if not cachedelta:
                     rawtext = self.rawdata(rev)
 
-                ifh = destrevlog.opener(destrevlog.indexfile, 'a+',
-                                        checkambig=False)
+                ifh = destrevlog.opener(
+                    destrevlog.indexfile, 'a+', checkambig=False
+                )
                 dfh = None
                 if not destrevlog._inline:
                     dfh = destrevlog.opener(destrevlog.datafile, 'a+')
                 try:
-                    destrevlog._addrevision(node, rawtext, tr, linkrev, p1,
-                                            p2, flags, cachedelta, ifh, dfh,
-                                            deltacomputer=deltacomputer)
+                    destrevlog._addrevision(
+                        node,
+                        rawtext,
+                        tr,
+                        linkrev,
+                        p1,
+                        p2,
+                        flags,
+                        cachedelta,
+                        ifh,
+                        dfh,
+                        deltacomputer=deltacomputer,
+                    )
                 finally:
                     if dfh:
                         dfh.close()
@@ -2487,15 +2689,17 @@
 
     def censorrevision(self, tr, censornode, tombstone=b''):
         if (self.version & 0xFFFF) == REVLOGV0:
-            raise error.RevlogError(_('cannot censor with version %d revlogs') %
-                                    self.version)
+            raise error.RevlogError(
+                _('cannot censor with version %d revlogs') % self.version
+            )
 
         censorrev = self.rev(censornode)
         tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
 
         if len(tombstone) > self.rawsize(censorrev):
-            raise error.Abort(_('censor tombstone must be no longer than '
-                                'censored data'))
+            raise error.Abort(
+                _('censor tombstone must be no longer than ' 'censored data')
+            )
 
         # Rewriting the revlog in place is hard. Our strategy for censoring is
         # to create a new revlog, copy all revisions to it, then replace the
@@ -2505,8 +2709,7 @@
         newdatafile = self.datafile + b'.tmpcensored'
 
         # This is a bit dangerous. We could easily have a mismatch of state.
-        newrl = revlog(self.opener, newindexfile, newdatafile,
-                       censorable=True)
+        newrl = revlog(self.opener, newindexfile, newdatafile, censorable=True)
         newrl.version = self.version
         newrl._generaldelta = self._generaldelta
         newrl._io = self._io
@@ -2516,27 +2719,45 @@
             p1, p2 = self.parents(node)
 
             if rev == censorrev:
-                newrl.addrawrevision(tombstone, tr, self.linkrev(censorrev),
-                                     p1, p2, censornode, REVIDX_ISCENSORED)
+                newrl.addrawrevision(
+                    tombstone,
+                    tr,
+                    self.linkrev(censorrev),
+                    p1,
+                    p2,
+                    censornode,
+                    REVIDX_ISCENSORED,
+                )
 
                 if newrl.deltaparent(rev) != nullrev:
-                    raise error.Abort(_('censored revision stored as delta; '
-                                        'cannot censor'),
-                                      hint=_('censoring of revlogs is not '
-                                             'fully implemented; please report '
-                                             'this bug'))
+                    raise error.Abort(
+                        _(
+                            'censored revision stored as delta; '
+                            'cannot censor'
+                        ),
+                        hint=_(
+                            'censoring of revlogs is not '
+                            'fully implemented; please report '
+                            'this bug'
+                        ),
+                    )
                 continue
 
             if self.iscensored(rev):
                 if self.deltaparent(rev) != nullrev:
-                    raise error.Abort(_('cannot censor due to censored '
-                                        'revision having delta stored'))
+                    raise error.Abort(
+                        _(
+                            'cannot censor due to censored '
+                            'revision having delta stored'
+                        )
+                    )
                 rawtext = self._chunk(rev)
             else:
                 rawtext = self.rawdata(rev)
 
-            newrl.addrawrevision(rawtext, tr, self.linkrev(rev), p1, p2, node,
-                                 self.flags(rev))
+            newrl.addrawrevision(
+                rawtext, tr, self.linkrev(rev), p1, p2, node, self.flags(rev)
+            )
 
         tr.addbackup(self.indexfile, location='store')
         if not self._inline:
@@ -2566,8 +2787,9 @@
         # The verifier tells us what version revlog we should be.
         if version != state['expectedversion']:
             yield revlogproblem(
-                warning=_("warning: '%s' uses revlog format %d; expected %d") %
-                        (self.indexfile, version, state['expectedversion']))
+                warning=_("warning: '%s' uses revlog format %d; expected %d")
+                % (self.indexfile, version, state['expectedversion'])
+            )
 
         state['skipread'] = set()
 
@@ -2639,23 +2861,31 @@
                 if l1 != l2:
                     yield revlogproblem(
                         error=_('unpacked size is %d, %d expected') % (l2, l1),
-                        node=node)
+                        node=node,
+                    )
 
             except error.CensoredNodeError:
                 if state['erroroncensored']:
-                    yield revlogproblem(error=_('censored file data'),
-                                        node=node)
+                    yield revlogproblem(
+                        error=_('censored file data'), node=node
+                    )
                     state['skipread'].add(node)
             except Exception as e:
                 yield revlogproblem(
-                    error=_('unpacking %s: %s') % (short(node),
-                                                   stringutil.forcebytestr(e)),
-                    node=node)
+                    error=_('unpacking %s: %s')
+                    % (short(node), stringutil.forcebytestr(e)),
+                    node=node,
+                )
                 state['skipread'].add(node)
 
-    def storageinfo(self, exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        self,
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         d = {}
 
         if exclusivefiles:
@@ -2673,7 +2903,8 @@
             d['trackedsize'] = sum(map(self.rawsize, iter(self)))
 
         if storedsize:
-            d['storedsize'] = sum(self.opener.stat(path).st_size
-                                  for path in self.files())
+            d['storedsize'] = sum(
+                self.opener.stat(path).st_size for path in self.files()
+            )
 
         return d
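
Two smaller rules close out this file: inline comments get at least two spaces before the `#` and one space after it (see `# only accept immutable objects` above), and a call whose only argument is a long generator expression is wrapped inside the call parentheses, as in the `storedsize` sum. A sketch with invented names, shown the way black renders it once the joined form would be too long:

    sizes = {'index': 10, 'data': 990}  # hypothetical file sizes

    stored = sum(
        size for name, size in sorted(sizes.items()) if not name.startswith('.')
    )
    assert stored == 1000
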
--- a/mercurial/revlogutils/constants.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revlogutils/constants.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,9 +9,7 @@
 
 from __future__ import absolute_import
 
-from ..interfaces import (
-    repository,
-)
+from ..interfaces import repository
 
 # revlog header flags
 REVLOGV0 = 0
@@ -20,9 +18,9 @@
 # Reminder: change the bounds check in revlog.__init__ when this is changed.
 REVLOGV2 = 0xDEAD
 # Shared across v1 and v2.
-FLAG_INLINE_DATA = (1 << 16)
+FLAG_INLINE_DATA = 1 << 16
 # Only used by v1, implied by v2.
-FLAG_GENERALDELTA = (1 << 17)
+FLAG_GENERALDELTA = 1 << 17
 REVLOG_DEFAULT_FLAGS = FLAG_INLINE_DATA
 REVLOG_DEFAULT_FORMAT = REVLOGV1
 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
@@ -53,10 +51,7 @@
 
 # bitmark for flags that could cause rawdata content change
 REVIDX_RAWTEXT_CHANGING_FLAGS = (
-    REVIDX_ISCENSORED
-    | REVIDX_EXTSTORED
-    | REVIDX_SIDEDATA
+    REVIDX_ISCENSORED | REVIDX_EXTSTORED | REVIDX_SIDEDATA
 )
 
 SPARSE_REVLOG_MAX_CHAIN_LENGTH = 1000
-
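
Beyond wrapping, constants.py collects three normalizations: a parenthesized import of a single name collapses onto one line, redundant parentheses around a simple value (`(1 << 16)`) are dropped, and a multi-line OR of flags that fits is joined. Elsewhere in the change hex digits are also uppercased (`0xffff` becomes `0xFFFF`). A sketch with invented flag names:

    FLAG_A = 1 << 16
    FLAG_B = 1 << 17
    COMBINED = FLAG_A | FLAG_B
    LOW_MASK = 0xFFFF
    assert COMBINED & LOW_MASK == 0
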
--- a/mercurial/revlogutils/deltas.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revlogutils/deltas.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,7 @@
 import struct
 
 # import stuff from node for others to import from revlog
-from ..node import (
-    nullrev,
-)
+from ..node import nullrev
 from ..i18n import _
 
 from .constants import (
@@ -23,9 +21,7 @@
     REVIDX_RAWTEXT_CHANGING_FLAGS,
 )
 
-from ..thirdparty import (
-    attr,
-)
+from ..thirdparty import attr
 
 from .. import (
     error,
@@ -33,13 +29,12 @@
     util,
 )
 
-from . import (
-    flagutil,
-)
+from . import flagutil
 
 # maximum <delta-chain-data>/<revision-text-length> ratio
 LIMIT_DELTA2TEXT = 2
 
+
 class _testrevlog(object):
     """minimalist fake revlog to use in doctests"""
 
@@ -74,6 +69,7 @@
             return True
         return rev in self._snapshot
 
+
 def slicechunk(revlog, revs, targetsize=None):
     """slice revs to reduce the amount of unrelated data to be read from disk.
 
@@ -141,12 +137,13 @@
     densityslicing = getattr(revlog.index, 'slicechunktodensity', None)
     if densityslicing is None:
         densityslicing = lambda x, y, z: _slicechunktodensity(revlog, x, y, z)
-    for chunk in densityslicing(revs,
-                                revlog._srdensitythreshold,
-                                revlog._srmingapsize):
+    for chunk in densityslicing(
+        revs, revlog._srdensitythreshold, revlog._srmingapsize
+    ):
         for subchunk in _slicechunktosize(revlog, chunk, targetsize):
             yield subchunk
 
+
 def _slicechunktosize(revlog, revs, targetsize=None):
     """slice revs to match the target size
 
@@ -257,7 +254,7 @@
     startrevidx = 0
     endrevidx = 1
     iterrevs = enumerate(revs)
-    next(iterrevs) # skip first rev.
+    next(iterrevs)  # skip first rev.
     # first step: get snapshots out of the way
     for idx, r in iterrevs:
         span = revlog.end(r) - startdata
@@ -282,12 +279,12 @@
     while (enddata - startdata) > targetsize:
         endrevidx = nbitem
         if nbitem - startrevidx <= 1:
-            break # protect against individual chunk larger than limit
+            break  # protect against individual chunk larger than limit
         localenddata = revlog.end(revs[endrevidx - 1])
         span = localenddata - startdata
         while span > targetsize:
             if endrevidx - startrevidx <= 1:
-                break # protect against individual chunk larger than limit
+                break  # protect against individual chunk larger than limit
             endrevidx -= (endrevidx - startrevidx) // 2
             localenddata = revlog.end(revs[endrevidx - 1])
             span = localenddata - startdata
@@ -301,8 +298,8 @@
     if chunk:
         yield chunk
 
-def _slicechunktodensity(revlog, revs, targetdensity=0.5,
-                         mingapsize=0):
+
+def _slicechunktodensity(revlog, revs, targetdensity=0.5, mingapsize=0):
     """slice revs to reduce the amount of unrelated data to be read from disk.
 
     ``revs`` is sliced into groups that should be read in one time.
@@ -427,6 +424,7 @@
     if chunk:
         yield chunk
 
+
 def _trimchunk(revlog, revs, startidx, endidx=None):
     """returns revs[startidx:endidx] without empty trailing revs
 
@@ -473,13 +471,14 @@
     # If we have a non-empty delta candidate, there is nothing to trim
     if revs[endidx - 1] < len(revlog):
         # Trim empty revs at the end, except the very first revision of a chain
-        while (endidx > 1
-                and endidx > startidx
-                and length(revs[endidx - 1]) == 0):
+        while (
+            endidx > 1 and endidx > startidx and length(revs[endidx - 1]) == 0
+        ):
             endidx -= 1
 
     return revs[startidx:endidx]
 
+
 def segmentspan(revlog, revs):
     """Get the byte span of a segment of revisions
 
@@ -509,14 +508,16 @@
     end = revlog.end(revs[-1])
     return end - revlog.start(revs[0])
 
+
 def _textfromdelta(fh, revlog, baserev, delta, p1, p2, flags, expectednode):
     """build full text from a (base, delta) pair and other metadata"""
     # special case deltas which replace entire base; no need to decode
     # base revision. this neatly avoids censored bases, which throw when
     # they're decoded.
     hlen = struct.calcsize(">lll")
-    if delta[:hlen] == mdiff.replacediffheader(revlog.rawsize(baserev),
-                                               len(delta) - hlen):
+    if delta[:hlen] == mdiff.replacediffheader(
+        revlog.rawsize(baserev), len(delta) - hlen
+    ):
         fulltext = delta[hlen:]
     else:
         # deltabase is rawtext before changed by flag processors, which is
@@ -529,14 +530,16 @@
         if validatehash:
             revlog.checkhash(fulltext, expectednode, p1=p1, p2=p2)
         if flags & REVIDX_ISCENSORED:
-            raise error.StorageError(_('node %s is not censored') %
-                                     expectednode)
+            raise error.StorageError(
+                _('node %s is not censored') % expectednode
+            )
     except error.CensoredNodeError:
         # must pass the censored index flag to add censored revisions
         if not flags & REVIDX_ISCENSORED:
             raise
     return fulltext
 
+
 @attr.s(slots=True, frozen=True)
 class _deltainfo(object):
     distance = attr.ib()
@@ -548,6 +551,7 @@
     compresseddeltalen = attr.ib()
     snapshotdepth = attr.ib()
 
+
 def isgooddeltainfo(revlog, deltainfo, revinfo):
     """Returns True if the given delta is good. Good means that it is within
     the disk span, disk size, and chain length bounds that we know to be
@@ -565,7 +569,7 @@
     defaultmax = textlen * 4
     maxdist = revlog._maxdeltachainspan
     if not maxdist:
-        maxdist = deltainfo.distance # ensure the conditional passes
+        maxdist = deltainfo.distance  # ensure the conditional passes
     maxdist = max(maxdist, defaultmax)
 
     # Bad delta from read span:
@@ -596,8 +600,7 @@
     # Bad delta from chain length:
     #
     #   If the number of delta in the chain gets too high.
-    if (revlog._maxchainlen
-            and revlog._maxchainlen < deltainfo.chainlen):
+    if revlog._maxchainlen and revlog._maxchainlen < deltainfo.chainlen:
         return False
 
     # bad delta from intermediate snapshot size limit
@@ -605,23 +608,29 @@
     #   If an intermediate snapshot size is higher than the limit.  The
     #   limit exists to prevent an endless chain of intermediate deltas
     #   from being created.
-    if (deltainfo.snapshotdepth is not None and
-            (textlen >> deltainfo.snapshotdepth) < deltainfo.deltalen):
+    if (
+        deltainfo.snapshotdepth is not None
+        and (textlen >> deltainfo.snapshotdepth) < deltainfo.deltalen
+    ):
         return False
 
     # bad delta if new intermediate snapshot is larger than the previous
     # snapshot
-    if (deltainfo.snapshotdepth
-            and revlog.length(deltainfo.base) < deltainfo.deltalen):
+    if (
+        deltainfo.snapshotdepth
+        and revlog.length(deltainfo.base) < deltainfo.deltalen
+    ):
         return False
 
     return True
 
+
 # If a revision's full text is that much bigger than a base candidate full
 # text's, it is very unlikely that it will produce a valid delta. We no longer
 # consider these candidates.
 LIMIT_BASE2TEXT = 500
 
+
 def _candidategroups(revlog, textlen, p1, p2, cachedelta):
     """Provides group of revision to be tested as delta base
 
@@ -649,10 +658,9 @@
         group = []
         for rev in temptative:
            # skip over empty deltas (no need to include them in a chain)
-            while (revlog._generaldelta
-                   and not (rev == nullrev
-                            or rev in tested
-                            or deltalength(rev))):
+            while revlog._generaldelta and not (
+                rev == nullrev or rev in tested or deltalength(rev)
+            ):
                 tested.add(rev)
                 rev = deltaparent(rev)
             # no need to try a delta against nullrev, this will be done as a
@@ -715,6 +723,7 @@
             good = yield tuple(group)
     yield None
 
+
 def _findsnapshots(revlog, cache, start_rev):
     """find snapshot from start_rev to tip"""
     if util.safehasattr(revlog.index, 'findsnapshots'):
@@ -726,6 +735,7 @@
             if issnapshot(rev):
                 cache[deltaparent(rev)].append(rev)
 
+
 def _refinedgroups(revlog, p1, p2, cachedelta):
     good = None
    # First we try to reuse the delta contained in the bundle.
@@ -774,6 +784,7 @@
     # we have found nothing
     yield None
 
+
 def _rawgroups(revlog, p1, p2, cachedelta, snapshots=None):
     """Provides group of revision to be tested as delta base
 
@@ -894,6 +905,7 @@
         # fulltext.
         yield (prev,)
 
+
 class deltacomputer(object):
     def __init__(self, revlog):
         self.revlog = revlog
@@ -914,9 +926,16 @@
         baserev = cachedelta[0]
         delta = cachedelta[1]
 
-        fulltext = btext[0] = _textfromdelta(fh, revlog, baserev, delta,
-                                             revinfo.p1, revinfo.p2,
-                                             revinfo.flags, revinfo.node)
+        fulltext = btext[0] = _textfromdelta(
+            fh,
+            revlog,
+            baserev,
+            delta,
+            revinfo.p1,
+            revinfo.p2,
+            revinfo.flags,
+            revinfo.node,
+        )
         return fulltext
 
     def _builddeltadiff(self, base, revinfo, fh):
@@ -953,11 +972,13 @@
         delta = None
         if revinfo.cachedelta:
             cachebase, cachediff = revinfo.cachedelta
-            #check if the diff still applies
+            # check if the diff still applies
             currentbase = cachebase
-            while (currentbase != nullrev
-                    and currentbase != base
-                    and self.revlog.length(currentbase) == 0):
+            while (
+                currentbase != nullrev
+                and currentbase != base
+                and self.revlog.length(currentbase) == 0
+            ):
                 currentbase = self.revlog.deltaparent(currentbase)
             if self.revlog._lazydelta and currentbase == base:
                 delta = revinfo.cachedelta[1]
@@ -979,9 +1000,16 @@
         chainlen += 1
         compresseddeltalen += deltalen
 
-        return _deltainfo(dist, deltalen, (header, data), deltabase,
-                          chainbase, chainlen, compresseddeltalen,
-                          snapshotdepth)
+        return _deltainfo(
+            dist,
+            deltalen,
+            (header, data),
+            deltabase,
+            chainbase,
+            chainlen,
+            compresseddeltalen,
+            snapshotdepth,
+        )
 
     def _fullsnapshotinfo(self, fh, revinfo):
         curr = len(self.revlog)
@@ -992,9 +1020,16 @@
         snapshotdepth = 0
         chainlen = 1
 
-        return _deltainfo(dist, deltalen, data, deltabase,
-                          chainbase, chainlen, compresseddeltalen,
-                          snapshotdepth)
+        return _deltainfo(
+            dist,
+            deltalen,
+            data,
+            deltabase,
+            chainbase,
+            chainlen,
+            compresseddeltalen,
+            snapshotdepth,
+        )
 
     def finddeltainfo(self, revinfo, fh):
         """Find an acceptable delta against a candidate revision
@@ -1025,8 +1060,9 @@
 
         deltainfo = None
         p1r, p2r = revlog.rev(p1), revlog.rev(p2)
-        groups = _candidategroups(self.revlog, revinfo.textlen,
-                                             p1r, p2r, cachedelta)
+        groups = _candidategroups(
+            self.revlog, revinfo.textlen, p1r, p2r, cachedelta
+        )
         candidaterevs = next(groups)
         while candidaterevs is not None:
             nominateddeltas = []
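
Most of the bare `+` lines in deltas.py are spacing: black enforces exactly two blank lines between top-level definitions, so nearly every function and class in the file gains a one-line hunk. A stand-in module laid out the same way:

    LIMIT_RATIO = 2


    class Candidate(object):
        def __init__(self, length):
            self.length = length


    def isgood(candidate, textlen):
        return candidate.length <= LIMIT_RATIO * textlen


    print(isgood(Candidate(3), 2))
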
--- a/mercurial/revlogutils/flagutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revlogutils/flagutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,10 +20,7 @@
     REVIDX_SIDEDATA,
 )
 
-from .. import (
-    error,
-    util
-)
+from .. import error, util
 
 # blanked usage of all the name to prevent pyflakes constraints
 # We need these name available in the module for extensions.
@@ -42,6 +39,7 @@
     REVIDX_ISCENSORED: None,
 }
 
+
 def addflagprocessor(flag, processor):
     """Register a flag processor on a revision data flag.
 
@@ -69,18 +67,20 @@
     """
     insertflagprocessor(flag, processor, flagprocessors)
 
+
 def insertflagprocessor(flag, processor, flagprocessors):
     if not flag & REVIDX_KNOWN_FLAGS:
-        msg = _("cannot register processor on unknown flag '%#x'.") % (flag)
+        msg = _("cannot register processor on unknown flag '%#x'.") % flag
         raise error.ProgrammingError(msg)
     if flag not in REVIDX_FLAGS_ORDER:
-        msg = _("flag '%#x' undefined in REVIDX_FLAGS_ORDER.") % (flag)
+        msg = _("flag '%#x' undefined in REVIDX_FLAGS_ORDER.") % flag
         raise error.ProgrammingError(msg)
     if flag in flagprocessors:
-        msg = _("cannot register multiple processors on flag '%#x'.") % (flag)
+        msg = _("cannot register multiple processors on flag '%#x'.") % flag
         raise error.Abort(msg)
     flagprocessors[flag] = processor
 
+
 def processflagswrite(revlog, text, flags, sidedata):
     """Inspect revision data flags and applies write transformations defined
     by registered flag processors.
@@ -97,8 +97,10 @@
     processed text and ``validatehash`` is a bool indicating whether the
     returned text should be checked for hash integrity.
     """
-    return _processflagsfunc(revlog, text, flags, 'write',
-                             sidedata=sidedata)[:2]
+    return _processflagsfunc(revlog, text, flags, 'write', sidedata=sidedata)[
+        :2
+    ]
+
 
 def processflagsread(revlog, text, flags):
     """Inspect revision data flags and applies read transformations defined
@@ -120,6 +122,7 @@
     """
     return _processflagsfunc(revlog, text, flags, 'read')
 
+
 def processflagsraw(revlog, text, flags):
     """Inspect revision data flags to check is the content hash should be
     validated.
@@ -138,6 +141,7 @@
     """
     return _processflagsfunc(revlog, text, flags, 'raw')[1]
 
+
 def _processflagsfunc(revlog, text, flags, operation, sidedata=None):
     """internal function to process flag on a revlog
 
@@ -147,12 +151,13 @@
     if flags == 0:
         return text, True, {}
     if operation not in ('read', 'write', 'raw'):
-        raise error.ProgrammingError(_("invalid '%s' operation") %
-                                     operation)
+        raise error.ProgrammingError(_("invalid '%s' operation") % operation)
     # Check all flags are known.
     if flags & ~REVIDX_KNOWN_FLAGS:
-        raise revlog._flagserrorclass(_("incompatible revision flag '%#x'") %
-                                      (flags & ~REVIDX_KNOWN_FLAGS))
+        raise revlog._flagserrorclass(
+            _("incompatible revision flag '%#x'")
+            % (flags & ~REVIDX_KNOWN_FLAGS)
+        )
     validatehash = True
     # Depending on the operation (read or write), the order might be
     # reversed due to non-commutative transforms.
@@ -168,7 +173,7 @@
             vhash = True
 
             if flag not in revlog._flagprocessors:
-                message = _("missing processor for flag '%#x'") % (flag)
+                message = _("missing processor for flag '%#x'") % flag
                 raise revlog._flagserrorclass(message)
 
             processor = revlog._flagprocessors[flag]
@@ -180,7 +185,7 @@
                 elif operation == 'read':
                     text, vhash, s = readtransform(revlog, text)
                     outsidedata.update(s)
-                else: # write operation
+                else:  # write operation
                     text, vhash = writetransform(revlog, text, sidedata)
             validatehash = validatehash and vhash
 
--- a/mercurial/revlogutils/sidedata.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revlogutils/sidedata.py	Sun Oct 06 09:45:02 2019 -0400
@@ -52,6 +52,7 @@
 SIDEDATA_HEADER = struct.Struct(r'>H')
 SIDEDATA_ENTRY = struct.Struct(r'>HL20s')
 
+
 def sidedatawriteprocessor(rl, text, sidedata):
     sidedata = list(sidedata.items())
     sidedata.sort()
@@ -64,10 +65,11 @@
     rawtext.append(bytes(text))
     return ''.join(rawtext), False
 
+
 def sidedatareadprocessor(rl, text):
     sidedata = {}
     offset = 0
-    nbentry, = SIDEDATA_HEADER.unpack(text[:SIDEDATA_HEADER.size])
+    (nbentry,) = SIDEDATA_HEADER.unpack(text[: SIDEDATA_HEADER.size])
     offset += SIDEDATA_HEADER.size
     dataoffset = SIDEDATA_HEADER.size + (SIDEDATA_ENTRY.size * nbentry)
     for i in range(nbentry):
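
Two Python-3-friendly touches appear in the read processor: single-name tuple unpacking gains explicit parentheses (`(nbentry,) = ...`), and a slice whose bound is an attribute expression gets symmetric spacing around the colon (`text[: SIDEDATA_HEADER.size]`). Both on stand-in data:

    import struct

    HEADER = struct.Struct(r'>H')
    blob = HEADER.pack(3) + b'payload'
    (count,) = HEADER.unpack(blob[: HEADER.size])
    assert count == 3
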
@@ -85,10 +87,12 @@
     text = text[dataoffset:]
     return text, True, sidedata
 
+
 def sidedatarawprocessor(rl, text):
    # side data modifies rawtext and prevents rawtext hash validation
     return False
 
+
 processors = (
     sidedatareadprocessor,
     sidedatawriteprocessor,
--- a/mercurial/revset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -93,17 +93,19 @@
 #
 # There are a few revsets that always redefine the order if 'define' is
 # specified: 'sort(X)', 'reverse(X)', 'x:y'.
-anyorder = 'any'        # don't care about the order, could even be random-shuffled
+anyorder = 'any'  # don't care about the order, could even be random-shuffled
 defineorder = 'define'  # ALWAYS redefine, or ALWAYS follow the current order
 followorder = 'follow'  # MUST follow the current order
 
 # helpers
 
+
 def getset(repo, subset, x, order=defineorder):
     if not x:
         raise error.ParseError(_("missing argument"))
     return methods[x[0]](repo, subset, *x[1:], order=order)
 
+
 def _getrevsource(repo, r):
     extra = repo[r].extra()
     for label in ('source', 'transplant_source', 'rebase_source'):
@@ -114,11 +116,14 @@
                 pass
     return None
 
+
 def _sortedb(xs):
     return sorted(pycompat.rapply(pycompat.maybebytestr, xs))
 
+
 # operator methods
 
+
 def stringset(repo, subset, x, order):
     if not x:
         raise error.ParseError(_("empty string is not a valid revision"))
@@ -127,6 +132,7 @@
         return baseset([x])
     return baseset()
 
+
 def rawsmartset(repo, subset, x, order):
     """argument is already a smartset, use that directly"""
     if order == followorder:
@@ -134,6 +140,7 @@
     else:
         return x & subset
 
+
 def rangeset(repo, subset, x, y, order):
     m = getset(repo, fullreposet(repo), x)
     n = getset(repo, fullreposet(repo), y)
@@ -142,10 +149,12 @@
         return baseset()
     return _makerangeset(repo, subset, m.first(), n.last(), order)
 
+
 def rangeall(repo, subset, x, order):
     assert x is None
     return _makerangeset(repo, subset, 0, repo.changelog.tiprev(), order)
 
+
 def rangepre(repo, subset, y, order):
     # ':y' can't be rewritten to '0:y' since '0' may be hidden
     n = getset(repo, fullreposet(repo), y)
@@ -153,12 +162,15 @@
         return baseset()
     return _makerangeset(repo, subset, 0, n.last(), order)
 
+
 def rangepost(repo, subset, x, order):
     m = getset(repo, fullreposet(repo), x)
     if not m:
         return baseset()
-    return _makerangeset(repo, subset, m.first(), repo.changelog.tiprev(),
-                         order)
+    return _makerangeset(
+        repo, subset, m.first(), repo.changelog.tiprev(), order
+    )
+
 
 def _makerangeset(repo, subset, m, n, order):
     if m == n:
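
`rangepost` shows black's intermediate shape for wrapped calls: before exploding to one argument per line it first tries all arguments on a single indented continuation line, and keeps that form whenever it fits. A sketch with invented names, shown wrapped as black would render it once the one-line form is too long:

    def make_range(repo, subset, first, last, order):
        return (first, last, order)

    result = make_range(
        'repo', 'subset', 0, 99, 'define'
    )
    assert result == (0, 99, 'define')
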
@@ -178,12 +190,15 @@
         # carrying the sorting over when possible would be more efficient
         return subset & r
 
+
 def dagrange(repo, subset, x, y, order):
     r = fullreposet(repo)
-    xs = dagop.reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
-                              includepath=True)
+    xs = dagop.reachableroots(
+        repo, getset(repo, r, x), getset(repo, r, y), includepath=True
+    )
     return subset & xs
 
+
 def andset(repo, subset, x, y, order):
     if order == anyorder:
         yorder = anyorder
@@ -191,6 +206,7 @@
         yorder = followorder
     return getset(repo, getset(repo, subset, x, order), y, yorder)
 
+
 def andsmallyset(repo, subset, x, y, order):
     # 'andsmally(x, y)' is equivalent to 'and(x, y)', but faster when y is small
     if order == anyorder:
@@ -199,9 +215,11 @@
         yorder = followorder
     return getset(repo, getset(repo, subset, y, yorder), x, order)
 
+
 def differenceset(repo, subset, x, y, order):
     return getset(repo, subset, x, order) - getset(repo, subset, y, anyorder)
 
+
 def _orsetlist(repo, subset, xs, order):
     assert xs
     if len(xs) == 1:
@@ -211,6 +229,7 @@
     b = _orsetlist(repo, subset, xs[p:], order)
     return a + b
 
+
 def orset(repo, subset, x, order):
     xs = getlist(x)
     if not xs:
@@ -221,12 +240,15 @@
     else:
         return _orsetlist(repo, subset, xs, order)
 
+
 def notset(repo, subset, x, order):
     return subset - getset(repo, subset, x, anyorder)
 
+
 def relationset(repo, subset, x, y, order):
     raise error.ParseError(_("can't use a relation in this context"))
 
+
 def _splitrange(a, b):
     """Split range with bounds a and b into two ranges at 0 and return two
     tuples of numbers for use as startdepth and stopdepth arguments of
@@ -257,14 +279,17 @@
         descdepths = (max(a, 0), b + 1)
     return ancdepths, descdepths
 
+
 def generationsrel(repo, subset, x, rel, z, order):
     # TODO: rewrite tests, and drop startdepth argument from ancestors() and
     # descendants() predicates
-    a, b = getintrange(z,
-                       _('relation subscript must be an integer or a range'),
-                       _('relation subscript bounds must be integers'),
-                       deffirst=-(dagop.maxlogdepth - 1),
-                       deflast=+(dagop.maxlogdepth - 1))
+    a, b = getintrange(
+        z,
+        _('relation subscript must be an integer or a range'),
+        _('relation subscript bounds must be integers'),
+        deffirst=-(dagop.maxlogdepth - 1),
+        deflast=+(dagop.maxlogdepth - 1),
+    )
     (ancstart, ancstop), (descstart, descstop) = _splitrange(a, b)
 
     if ancstart is None and descstart is None:
@@ -284,6 +309,7 @@
 
     return subset & s
 
+
 def relsubscriptset(repo, subset, x, y, z, order):
    # this is a pretty basic implementation of the 'x#y[z]' operator, still
     # experimental so undocumented. see the wiki for further ideas.
@@ -295,16 +321,22 @@
     relnames = [r for r in subscriptrelations.keys() if len(r) > 1]
     raise error.UnknownIdentifier(rel, relnames)
 
+
 def subscriptset(repo, subset, x, y, order):
     raise error.ParseError(_("can't use a subscript in this context"))
 
+
 def listset(repo, subset, *xs, **opts):
-    raise error.ParseError(_("can't use a list in this context"),
-                           hint=_('see \'hg help "revsets.x or y"\''))
+    raise error.ParseError(
+        _("can't use a list in this context"),
+        hint=_('see \'hg help "revsets.x or y"\''),
+    )
+
 
 def keyvaluepair(repo, subset, k, v, order):
     raise error.ParseError(_("can't use a key-value pair in this context"))
 
+
 def func(repo, subset, a, b, order):
     f = getsymbol(a)
     if f in symbols:
@@ -318,6 +350,7 @@
     syms = [s for (s, fn) in symbols.items() if keep(fn)]
     raise error.UnknownIdentifier(f, syms)
 
+
 # functions
 
 # symbols are callables like:
@@ -335,12 +368,15 @@
 
 predicate = registrar.revsetpredicate()
 
+
 @predicate('_destupdate')
 def _destupdate(repo, subset, x):
     # experimental revset for update destination
     args = getargsdict(x, 'limit', 'clean')
-    return subset & baseset([destutil.destupdate(repo,
-                            **pycompat.strkwargs(args))[0]])
+    return subset & baseset(
+        [destutil.destupdate(repo, **pycompat.strkwargs(args))[0]]
+    )
+
 
 @predicate('_destmerge')
 def _destmerge(repo, subset, x):
@@ -350,6 +386,7 @@
         sourceset = getset(repo, fullreposet(repo), x)
     return subset & baseset([destutil.destmerge(repo, sourceset=sourceset)])
 
+
 @predicate('adds(pattern)', safe=True, weight=30)
 def adds(repo, subset, x):
     """Changesets that add a file matching pattern.
@@ -362,6 +399,7 @@
     pat = getstring(x, _("adds requires a pattern"))
     return checkstatus(repo, subset, pat, 1)
 
+
 @predicate('ancestor(*changeset)', safe=True, weight=0.5)
 def ancestor(repo, subset, x):
     """A greatest common ancestor of the changesets.
@@ -383,14 +421,17 @@
         return baseset([r])
     return baseset()
 
-def _ancestors(repo, subset, x, followfirst=False, startdepth=None,
-               stopdepth=None):
+
+def _ancestors(
+    repo, subset, x, followfirst=False, startdepth=None, stopdepth=None
+):
     heads = getset(repo, fullreposet(repo), x)
     if not heads:
         return baseset()
     s = dagop.revancestors(repo, heads, followfirst, startdepth, stopdepth)
     return subset & s
 
+
 @predicate('ancestors(set[, depth])', safe=True)
 def ancestors(repo, subset, x):
     """Changesets that are ancestors of changesets in set, including the
@@ -406,8 +447,9 @@
         raise error.ParseError(_('ancestors takes at least 1 argument'))
     startdepth = stopdepth = None
     if 'startdepth' in args:
-        n = getinteger(args['startdepth'],
-                       "ancestors expects an integer startdepth")
+        n = getinteger(
+            args['startdepth'], "ancestors expects an integer startdepth"
+        )
         if n < 0:
             raise error.ParseError("negative startdepth")
         startdepth = n
@@ -417,8 +459,10 @@
         if n < 0:
             raise error.ParseError(_("negative depth"))
         stopdepth = n + 1
-    return _ancestors(repo, subset, args['set'],
-                      startdepth=startdepth, stopdepth=stopdepth)
+    return _ancestors(
+        repo, subset, args['set'], startdepth=startdepth, stopdepth=stopdepth
+    )
+
 
 @predicate('_firstancestors', safe=True)
 def _firstancestors(repo, subset, x):
@@ -426,6 +470,7 @@
     # Like ``ancestors(set)`` but follows only the first parents.
     return _ancestors(repo, subset, x, followfirst=True)
 
+
 def _childrenspec(repo, subset, x, n, order):
     """Changesets that are the Nth child of a changeset
     in set.
@@ -438,12 +483,14 @@
                 break
             if len(c) > 1:
                 raise error.RepoLookupError(
-                    _("revision in set has more than one child"))
+                    _("revision in set has more than one child")
+                )
             r = c[0].rev()
         else:
             cs.add(r)
     return subset & cs
 
+
 def ancestorspec(repo, subset, x, n, order):
     """``set~n``
     Changesets that are the Nth ancestor (first parents only) of a changeset
@@ -464,6 +511,7 @@
         ps.add(r)
     return subset & ps
 
+
 @predicate('author(string)', safe=True, weight=10)
 def author(repo, subset, x):
     """Alias for ``user(string)``.
@@ -471,8 +519,10 @@
     # i18n: "author" is a keyword
     n = getstring(x, _("author requires a string"))
     kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
-    return subset.filter(lambda x: matcher(repo[x].user()),
-                         condrepr=('<user %r>', n))
+    return subset.filter(
+        lambda x: matcher(repo[x].user()), condrepr=('<user %r>', n)
+    )
+
 
 @predicate('bisect(string)', safe=True)
 def bisect(repo, subset, x):
@@ -491,12 +541,14 @@
     state = set(hbisect.get(repo, status))
     return subset & state
 
+
 # Backward-compatibility
 # - no help entry so that we do not advertise it any more
 @predicate('bisected', safe=True)
 def bisected(repo, subset, x):
     return bisect(repo, subset, x)
 
+
 @predicate('bookmark([name])', safe=True)
 def bookmark(repo, subset, x):
     """The named bookmark or all bookmarks.
@@ -506,9 +558,11 @@
     # i18n: "bookmark" is a keyword
     args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
     if args:
-        bm = getstring(args[0],
-                       # i18n: "bookmark" is a keyword
-                       _('the argument to bookmark must be a string'))
+        bm = getstring(
+            args[0],
+            # i18n: "bookmark" is a keyword
+            _('the argument to bookmark must be a string'),
+        )
         kind, pattern, matcher = stringutil.stringmatcher(bm)
         bms = set()
         if kind == 'literal':
@@ -516,8 +570,9 @@
                 pattern = repo._bookmarks.expandname(pattern)
             bmrev = repo._bookmarks.get(pattern, None)
             if not bmrev:
-                raise error.RepoLookupError(_("bookmark '%s' does not exist")
-                                            % pattern)
+                raise error.RepoLookupError(
+                    _("bookmark '%s' does not exist") % pattern
+                )
             bms.add(repo[bmrev].rev())
         else:
             matchrevs = set()
@@ -531,6 +586,7 @@
     bms -= {node.nullrev}
     return subset & bms
 
+
 @predicate('branch(string or set)', safe=True, weight=10)
 def branch(repo, subset, x):
     """
@@ -541,6 +597,7 @@
     :hg:`help revisions.patterns`.
     """
     getbi = repo.revbranchcache().branchinfo
+
     def getbranch(r):
         try:
             return getbi(r)[0]
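
Inside `branch`, the blank line added before `getbranch` is another fixed rule: a nested def that directly follows other statements is preceded by one blank line. A stand-in:

    def filter_small(values, limit):
        threshold = limit

        def keep(v):
            return v < threshold

        return [v for v in values if keep(v)]

    print(filter_small([3, 12, 7], 10))
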
@@ -558,22 +615,28 @@
             # note: falls through to the revspec case if no branch with
             # this name exists and pattern kind is not specified explicitly
             if repo.branchmap().hasbranch(pattern):
-                return subset.filter(lambda r: matcher(getbranch(r)),
-                                     condrepr=('<branch %r>', b))
+                return subset.filter(
+                    lambda r: matcher(getbranch(r)), condrepr=('<branch %r>', b)
+                )
             if b.startswith('literal:'):
-                raise error.RepoLookupError(_("branch '%s' does not exist")
-                                            % pattern)
+                raise error.RepoLookupError(
+                    _("branch '%s' does not exist") % pattern
+                )
         else:
-            return subset.filter(lambda r: matcher(getbranch(r)),
-                                 condrepr=('<branch %r>', b))
+            return subset.filter(
+                lambda r: matcher(getbranch(r)), condrepr=('<branch %r>', b)
+            )
 
     s = getset(repo, fullreposet(repo), x)
     b = set()
     for r in s:
         b.add(getbranch(r))
     c = s.__contains__
-    return subset.filter(lambda r: c(r) or getbranch(r) in b,
-                         condrepr=lambda: '<branch %r>' % _sortedb(b))
+    return subset.filter(
+        lambda r: c(r) or getbranch(r) in b,
+        condrepr=lambda: '<branch %r>' % _sortedb(b),
+    )
+
 
 @predicate('phasedivergent()', safe=True)
 def phasedivergent(repo, subset, x):
@@ -587,6 +650,7 @@
     phasedivergent = obsmod.getrevs(repo, 'phasedivergent')
     return subset & phasedivergent
 
+
 @predicate('bundle()', safe=True)
 def bundle(repo, subset, x):
     """Changesets in the bundle.
@@ -599,6 +663,7 @@
         raise error.Abort(_("no bundle provided - specify with -R"))
     return subset & bundlerevs
 
+
 def checkstatus(repo, subset, pat, field):
     """Helper for status-related revsets (adds, removes, modifies).
     The field parameter says which kind is desired:
@@ -609,6 +674,7 @@
     hasset = matchmod.patkind(pat) == 'set'
 
     mcache = [None]
+
     def matches(x):
         c = repo[x]
         if not mcache[0] or hasset:
@@ -637,6 +703,7 @@
 
     return subset.filter(matches, condrepr=('<status[%r] %r>', field, pat))
 
+
 def _children(repo, subset, parentset):
     if not parentset:
         return baseset()
@@ -654,6 +721,7 @@
             cs.add(r)
     return baseset(cs)
 
+
 @predicate('children(set)', safe=True)
 def children(repo, subset, x):
     """Child changesets of changesets in set.
@@ -662,14 +730,17 @@
     cs = _children(repo, subset, s)
     return subset & cs
 
+
 @predicate('closed()', safe=True, weight=10)
 def closed(repo, subset, x):
     """Changeset is closed.
     """
     # i18n: "closed" is a keyword
     getargs(x, 0, 0, _("closed takes no arguments"))
-    return subset.filter(lambda r: repo[r].closesbranch(),
-                         condrepr='<branch closed>')
+    return subset.filter(
+        lambda r: repo[r].closesbranch(), condrepr='<branch closed>'
+    )
+
 
 # for internal use
 @predicate('_commonancestorheads(set)', safe=True)
@@ -684,6 +755,7 @@
     ancs = repo.changelog._commonancestorsheads(*list(startrevs))
     return subset & baseset(ancs)
 
+
 @predicate('commonancestors(set)', safe=True)
 def commonancestors(repo, subset, x):
     """Changesets that are ancestors of every changeset in set.
@@ -695,6 +767,7 @@
         subset &= dagop.revancestors(repo, baseset([r]))
     return subset
 
+
 @predicate('contains(pattern)', weight=100)
 def contains(repo, subset, x):
     """The revision's manifest contains a file matching pattern (but might not
@@ -722,6 +795,7 @@
 
     return subset.filter(matches, condrepr=('<contains %r>', pat))
 
+
 @predicate('converted([id])', safe=True)
 def converted(repo, subset, x):
     """Changesets converted from the given identifier in the old repository if
@@ -742,8 +816,10 @@
         source = repo[r].extra().get('convert_revision', None)
         return source is not None and (rev is None or source.startswith(rev))
 
-    return subset.filter(lambda r: _matchvalue(r),
-                         condrepr=('<converted %r>', rev))
+    return subset.filter(
+        lambda r: _matchvalue(r), condrepr=('<converted %r>', rev)
+    )
+
 
 @predicate('date(interval)', safe=True, weight=10)
 def date(repo, subset, x):
@@ -752,8 +828,10 @@
     # i18n: "date" is a keyword
     ds = getstring(x, _("date requires a string"))
     dm = dateutil.matchdate(ds)
-    return subset.filter(lambda x: dm(repo[x].date()[0]),
-                         condrepr=('<date %r>', ds))
+    return subset.filter(
+        lambda x: dm(repo[x].date()[0]), condrepr=('<date %r>', ds)
+    )
+
 
 @predicate('desc(string)', safe=True, weight=10)
 def desc(repo, subset, x):
@@ -767,17 +845,21 @@
 
     kind, pattern, matcher = _substringmatcher(ds, casesensitive=False)
 
-    return subset.filter(lambda r: matcher(repo[r].description()),
-                         condrepr=('<desc %r>', ds))
-
-def _descendants(repo, subset, x, followfirst=False, startdepth=None,
-                 stopdepth=None):
+    return subset.filter(
+        lambda r: matcher(repo[r].description()), condrepr=('<desc %r>', ds)
+    )
+
+
+def _descendants(
+    repo, subset, x, followfirst=False, startdepth=None, stopdepth=None
+):
     roots = getset(repo, fullreposet(repo), x)
     if not roots:
         return baseset()
     s = dagop.revdescendants(repo, roots, followfirst, startdepth, stopdepth)
     return subset & s
 
+
 @predicate('descendants(set[, depth])', safe=True)
 def descendants(repo, subset, x):
     """Changesets which are descendants of changesets in set, including the
@@ -793,8 +875,9 @@
         raise error.ParseError(_('descendants takes at least 1 argument'))
     startdepth = stopdepth = None
     if 'startdepth' in args:
-        n = getinteger(args['startdepth'],
-                       "descendants expects an integer startdepth")
+        n = getinteger(
+            args['startdepth'], "descendants expects an integer startdepth"
+        )
         if n < 0:
             raise error.ParseError("negative startdepth")
         startdepth = n
@@ -804,8 +887,10 @@
         if n < 0:
             raise error.ParseError(_("negative depth"))
         stopdepth = n + 1
-    return _descendants(repo, subset, args['set'],
-                        startdepth=startdepth, stopdepth=stopdepth)
+    return _descendants(
+        repo, subset, args['set'], startdepth=startdepth, stopdepth=stopdepth
+    )
+
 
 @predicate('_firstdescendants', safe=True)
 def _firstdescendants(repo, subset, x):
@@ -813,6 +898,7 @@
     # Like ``descendants(set)`` but follows only the first parents.
     return _descendants(repo, subset, x, followfirst=True)
 
+
 @predicate('destination([set])', safe=True, weight=10)
 def destination(repo, subset, x):
     """Changesets that were created by a graft, transplant or rebase operation,
@@ -855,8 +941,11 @@
             r = src
             src = _getrevsource(repo, r)
 
-    return subset.filter(dests.__contains__,
-                         condrepr=lambda: '<destination %r>' % _sortedb(dests))
+    return subset.filter(
+        dests.__contains__,
+        condrepr=lambda: '<destination %r>' % _sortedb(dests),
+    )
+
 
 @predicate('contentdivergent()', safe=True)
 def contentdivergent(repo, subset, x):
@@ -869,6 +958,7 @@
     contentdivergent = obsmod.getrevs(repo, 'contentdivergent')
     return subset & contentdivergent
 
+
 @predicate('expectsize(set[, size])', safe=True, takeorder=True)
 def expectsize(repo, subset, x, order):
     """Return the given revset if size matches the revset size.
@@ -884,21 +974,25 @@
     err = ''
     if 'size' not in args or 'set' not in args:
         raise error.ParseError(_('invalid set of arguments'))
-    minsize, maxsize = getintrange(args['size'],
-                                   _('expectsize requires a size range'
-                                     ' or a positive integer'),
-                                   _('size range bounds must be integers'),
-                                   minsize, maxsize)
+    minsize, maxsize = getintrange(
+        args['size'],
+        _('expectsize requires a size range' ' or a positive integer'),
+        _('size range bounds must be integers'),
+        minsize,
+        maxsize,
+    )
     if minsize < 0 or maxsize < 0:
         raise error.ParseError(_('negative size'))
     rev = getset(repo, fullreposet(repo), args['set'], order=order)
     if minsize != maxsize and (len(rev) < minsize or len(rev) > maxsize):
-        err = _('revset size mismatch.'
-                ' expected between %d and %d, got %d') % (minsize, maxsize,
-                                                          len(rev))
+        err = _(
+            'revset size mismatch.' ' expected between %d and %d, got %d'
+        ) % (minsize, maxsize, len(rev))
     elif minsize == maxsize and len(rev) != minsize:
-        err = _('revset size mismatch.'
-                ' expected %d, got %d') % (minsize, len(rev))
+        err = _('revset size mismatch.' ' expected %d, got %d') % (
+            minsize,
+            len(rev),
+        )
     if err:
         raise error.RepoLookupError(err)
     if order == followorder:
@@ -906,17 +1000,21 @@
     else:
         return rev & subset
 
+
 @predicate('extdata(source)', safe=False, weight=100)
 def extdata(repo, subset, x):
     """Changesets in the specified extdata source. (EXPERIMENTAL)"""
     # i18n: "extdata" is a keyword
     args = getargsdict(x, 'extdata', 'source')
-    source = getstring(args.get('source'),
-                       # i18n: "extdata" is a keyword
-                       _('extdata takes at least 1 string argument'))
+    source = getstring(
+        args.get('source'),
+        # i18n: "extdata" is a keyword
+        _('extdata takes at least 1 string argument'),
+    )
     data = scmutil.extdatasource(repo, source)
     return subset & baseset(data)
 
+
 @predicate('extinct()', safe=True)
 def extinct(repo, subset, x):
     """Obsolete changesets with obsolete descendants only.
@@ -926,6 +1024,7 @@
     extincts = obsmod.getrevs(repo, 'extinct')
     return subset & extincts
 
+
 @predicate('extra(label, [value])', safe=True)
 def extra(repo, subset, x):
     """Changesets with the given label in the extra metadata, with the given
@@ -939,22 +1038,26 @@
         # i18n: "extra" is a keyword
         raise error.ParseError(_('extra takes at least 1 argument'))
     # i18n: "extra" is a keyword
-    label = getstring(args['label'], _('first argument to extra must be '
-                                       'a string'))
+    label = getstring(
+        args['label'], _('first argument to extra must be ' 'a string')
+    )
     value = None
 
     if 'value' in args:
         # i18n: "extra" is a keyword
-        value = getstring(args['value'], _('second argument to extra must be '
-                                           'a string'))
+        value = getstring(
+            args['value'], _('second argument to extra must be ' 'a string')
+        )
         kind, value, matcher = stringutil.stringmatcher(value)
 
     def _matchvalue(r):
         extra = repo[r].extra()
         return label in extra and (value is None or matcher(extra[label]))
 
-    return subset.filter(lambda r: _matchvalue(r),
-                         condrepr=('<extra[%r] %r>', label, value))
+    return subset.filter(
+        lambda r: _matchvalue(r), condrepr=('<extra[%r] %r>', label, value)
+    )
+
 
 @predicate('filelog(pattern)', safe=True)
 def filelog(repo, subset, x):
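
Note what the expectsize and extra hunks above do not change: a message
that was split across source lines stays two adjacent string literals
(implicit concatenation) after rewrapping, which is why fragments like
'revset size mismatch.' ' expected %d, got %d' keep their visible seam.
The literals fuse at compile time, so the runtime string is unaffected:

    # Adjacent literals concatenate before % formatting ever runs.
    msg = 'revset size mismatch.' ' expected %d, got %d'
    assert msg % (1, 2) == 'revset size mismatch. expected 1, got 2'
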
@@ -1019,12 +1122,14 @@
 
     return subset & s
 
+
 @predicate('first(set, [n])', safe=True, takeorder=True, weight=0)
 def first(repo, subset, x, order):
     """An alias for limit().
     """
     return limit(repo, subset, x, order)
 
+
 def _follow(repo, subset, x, name, followfirst=False):
     args = getargsdict(x, name, 'file startrev')
     revs = None
@@ -1039,8 +1144,9 @@
             ctx = mctx = repo[r]
             if r is None:
                 ctx = repo['.']
-            m = matchmod.match(repo.root, repo.getcwd(), [x],
-                               ctx=mctx, default='path')
+            m = matchmod.match(
+                repo.root, repo.getcwd(), [x], ctx=mctx, default='path'
+            )
             fctxs.extend(ctx[f].introfilectx() for f in ctx.manifest().walk(m))
         s = dagop.filerevancestors(fctxs, followfirst)
     else:
@@ -1050,6 +1156,7 @@
 
     return subset & s
 
+
 @predicate('follow([file[, startrev]])', safe=True)
 def follow(repo, subset, x):
     """
@@ -1059,6 +1166,7 @@
     """
     return _follow(repo, subset, x, 'follow')
 
+
 @predicate('_followfirst', safe=True)
 def _followfirst(repo, subset, x):
     # ``followfirst([file[, startrev]])``
@@ -1066,8 +1174,10 @@
     # of every revisions or files revisions.
     return _follow(repo, subset, x, '_followfirst', followfirst=True)
 
-@predicate('followlines(file, fromline:toline[, startrev=., descend=False])',
-           safe=True)
+
+@predicate(
+    'followlines(file, fromline:toline[, startrev=., descend=False])', safe=True
+)
 def followlines(repo, subset, x):
     """Changesets modifying `file` in line range ('fromline', 'toline').
 
@@ -1089,7 +1199,8 @@
         if len(revs) != 1:
             raise error.ParseError(
                 # i18n: "followlines" is a keyword
-                _("followlines expects exactly one revision"))
+                _("followlines expects exactly one revision")
+            )
         rev = revs.last()
 
     pat = getstring(args['file'], _("followlines requires a pattern"))
@@ -1097,29 +1208,45 @@
     msg = _("followlines expects exactly one file")
     fname = scmutil.parsefollowlinespattern(repo, rev, pat, msg)
     fromline, toline = util.processlinerange(
-        *getintrange(args['lines'][0],
-                     # i18n: "followlines" is a keyword
-                     _("followlines expects a line number or a range"),
-                     _("line range bounds must be integers")))
+        *getintrange(
+            args['lines'][0],
+            # i18n: "followlines" is a keyword
+            _("followlines expects a line number or a range"),
+            _("line range bounds must be integers"),
+        )
+    )
 
     fctx = repo[rev].filectx(fname)
     descend = False
     if 'descend' in args:
-        descend = getboolean(args['descend'],
-                             # i18n: "descend" is a keyword
-                             _("descend argument must be a boolean"))
+        descend = getboolean(
+            args['descend'],
+            # i18n: "descend" is a keyword
+            _("descend argument must be a boolean"),
+        )
     if descend:
         rs = generatorset(
-            (c.rev() for c, _linerange
-             in dagop.blockdescendants(fctx, fromline, toline)),
-            iterasc=True)
+            (
+                c.rev()
+                for c, _linerange in dagop.blockdescendants(
+                    fctx, fromline, toline
+                )
+            ),
+            iterasc=True,
+        )
     else:
         rs = generatorset(
-            (c.rev() for c, _linerange
-             in dagop.blockancestors(fctx, fromline, toline)),
-            iterasc=False)
+            (
+                c.rev()
+                for c, _linerange in dagop.blockancestors(
+                    fctx, fromline, toline
+                )
+            ),
+            iterasc=False,
+        )
     return subset & rs
 
+
 @predicate('all()', safe=True)
 def getall(repo, subset, x):
     """All changesets, the same as ``0:tip``.
@@ -1128,6 +1255,7 @@
     getargs(x, 0, 0, _("all takes no arguments"))
     return subset & spanset(repo)  # drop "null" if any
 
+
 @predicate('grep(regex)', weight=10)
 def grep(repo, subset, x):
     """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
@@ -1139,7 +1267,8 @@
         gr = re.compile(getstring(x, _("grep requires a string")))
     except re.error as e:
         raise error.ParseError(
-            _('invalid match pattern: %s') % stringutil.forcebytestr(e))
+            _('invalid match pattern: %s') % stringutil.forcebytestr(e)
+        )
 
     def matches(x):
         c = repo[x]
@@ -1150,6 +1279,7 @@
 
     return subset.filter(matches, condrepr=('<grep %r>', gr.pattern))
 
+
 @predicate('_matchfiles', safe=True)
 def _matchfiles(repo, subset, x):
     # _matchfiles takes a revset list of prefixed arguments:
@@ -1178,16 +1308,18 @@
             exc.append(value)
         elif prefix == 'r:':
             if rev is not None:
-                raise error.ParseError('_matchfiles expected at most one '
-                                       'revision')
-            if value == '': # empty means working directory
+                raise error.ParseError(
+                    '_matchfiles expected at most one ' 'revision'
+                )
+            if value == '':  # empty means working directory
                 rev = node.wdirrev
             else:
                 rev = value
         elif prefix == 'd:':
             if default is not None:
-                raise error.ParseError('_matchfiles expected at most one '
-                                       'default mode')
+                raise error.ParseError(
+                    '_matchfiles expected at most one ' 'default mode'
+                )
             default = value
         else:
             raise error.ParseError('invalid _matchfiles prefix: %s' % prefix)
@@ -1201,6 +1333,7 @@
     # revisions is quite expensive.
     getfiles = repo.changelog.readfiles
     wdirrev = node.wdirrev
+
     def matches(x):
         if x == wdirrev:
             files = repo[x].files()
@@ -1209,9 +1342,15 @@
 
         if not mcache[0] or (hasset and rev is None):
             r = x if rev is None else rev
-            mcache[0] = matchmod.match(repo.root, repo.getcwd(), pats,
-                                       include=inc, exclude=exc, ctx=repo[r],
-                                       default=default)
+            mcache[0] = matchmod.match(
+                repo.root,
+                repo.getcwd(),
+                pats,
+                include=inc,
+                exclude=exc,
+                ctx=repo[r],
+                default=default,
+            )
         m = mcache[0]
 
         for f in files:
@@ -1219,10 +1358,19 @@
                 return True
         return False
 
-    return subset.filter(matches,
-                         condrepr=('<matchfiles patterns=%r, include=%r '
-                                   'exclude=%r, default=%r, rev=%r>',
-                                   pats, inc, exc, default, rev))
+    return subset.filter(
+        matches,
+        condrepr=(
+            '<matchfiles patterns=%r, include=%r '
+            'exclude=%r, default=%r, rev=%r>',
+            pats,
+            inc,
+            exc,
+            default,
+            rev,
+        ),
+    )
+
 
 @predicate('file(pattern)', safe=True, weight=10)
 def hasfile(repo, subset, x):
@@ -1237,6 +1385,7 @@
     pat = getstring(x, _("file requires a pattern"))
     return _matchfiles(repo, subset, ('string', 'p:' + pat))
 
+
 @predicate('head()', safe=True)
 def head(repo, subset, x):
     """Changeset is a named branch head.
@@ -1249,6 +1398,7 @@
         hs.update(cl.rev(h) for h in ls)
     return subset & baseset(hs)
 
+
 @predicate('heads(set)', safe=True, takeorder=True)
 def heads(repo, subset, x, order):
     """Members of set with no children in set.
@@ -1270,6 +1420,7 @@
     heads = baseset(heads)
     return subset & heads
 
+
 @predicate('hidden()', safe=True)
 def hidden(repo, subset, x):
     """Hidden changesets.
@@ -1279,6 +1430,7 @@
     hiddenrevs = repoview.filterrevs(repo, 'visible')
     return subset & hiddenrevs
 
+
 @predicate('keyword(string)', safe=True, weight=10)
 def keyword(repo, subset, x):
     """Search commit message, user name, and names of changed files for
@@ -1292,11 +1444,14 @@
 
     def matches(r):
         c = repo[r]
-        return any(kw in encoding.lower(t)
-                   for t in c.files() + [c.user(), c.description()])
+        return any(
+            kw in encoding.lower(t)
+            for t in c.files() + [c.user(), c.description()]
+        )
 
     return subset.filter(matches, condrepr=('<keyword %r>', kw))
 
+
 @predicate('limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
 def limit(repo, subset, x, order):
     """First n members of set, defaulting to 1, starting from offset.
@@ -1319,6 +1474,7 @@
         return subset & ls
     return ls & subset
 
+
 @predicate('last(set, [n])', safe=True, takeorder=True)
 def last(repo, subset, x, order):
     """Last n members of set, defaulting to 1.
@@ -1339,6 +1495,7 @@
     ls.reverse()
     return ls & subset
 
+
 @predicate('max(set)', safe=True)
 def maxrev(repo, subset, x):
     """Changeset with highest revision number in set.
@@ -1354,6 +1511,7 @@
         pass
     return baseset(datarepr=('<max %r, %r>', subset, os))
 
+
 @predicate('merge()', safe=True)
 def merge(repo, subset, x):
     """Changeset is a merge changeset.
@@ -1362,13 +1520,16 @@
     getargs(x, 0, 0, _("merge takes no arguments"))
     cl = repo.changelog
     nullrev = node.nullrev
+
     def ismerge(r):
         try:
             return cl.parentrevs(r)[1] != nullrev
         except error.WdirUnsupported:
             return bool(repo[r].p2())
+
     return subset.filter(ismerge, condrepr='<merge>')
 
+
 @predicate('branchpoint()', safe=True)
 def branchpoint(repo, subset, x):
     """Changesets with more than one child.
@@ -1381,13 +1542,15 @@
     # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
     # (and if it is not, it should.)
     baserev = min(subset)
-    parentscount = [0]*(len(repo) - baserev)
+    parentscount = [0] * (len(repo) - baserev)
     for r in cl.revs(start=baserev + 1):
         for p in cl.parentrevs(r):
             if p >= baserev:
                 parentscount[p - baserev] += 1
-    return subset.filter(lambda r: parentscount[r - baserev] > 1,
-                         condrepr='<branchpoint>')
+    return subset.filter(
+        lambda r: parentscount[r - baserev] > 1, condrepr='<branchpoint>'
+    )
+
 
 @predicate('min(set)', safe=True)
 def minrev(repo, subset, x):
@@ -1404,6 +1567,7 @@
         pass
     return baseset(datarepr=('<min %r, %r>', subset, os))
 
+
 @predicate('modifies(pattern)', safe=True, weight=30)
 def modifies(repo, subset, x):
     """Changesets modifying files matched by pattern.
@@ -1416,6 +1580,7 @@
     pat = getstring(x, _("modifies requires a pattern"))
     return checkstatus(repo, subset, pat, 0)
 
+
 @predicate('named(namespace)')
 def named(repo, subset, x):
     """The changesets in a given namespace.
@@ -1426,15 +1591,16 @@
     # i18n: "named" is a keyword
     args = getargs(x, 1, 1, _('named requires a namespace argument'))
 
-    ns = getstring(args[0],
-                   # i18n: "named" is a keyword
-                   _('the argument to named must be a string'))
+    ns = getstring(
+        args[0],
+        # i18n: "named" is a keyword
+        _('the argument to named must be a string'),
+    )
     kind, pattern, matcher = stringutil.stringmatcher(ns)
     namespaces = set()
     if kind == 'literal':
         if pattern not in repo.names:
-            raise error.RepoLookupError(_("namespace '%s' does not exist")
-                                        % ns)
+            raise error.RepoLookupError(_("namespace '%s' does not exist") % ns)
         namespaces.add(repo.names[pattern])
     else:
         for name, ns in repo.names.iteritems():
@@ -1450,6 +1616,7 @@
     names -= {node.nullrev}
     return subset & names
 
+
 @predicate('id(string)', safe=True)
 def node_(repo, subset, x):
     """Revision non-ambiguously specified by the given hex string prefix.
@@ -1481,6 +1648,7 @@
     result = baseset([rn])
     return result & subset
 
+
 @predicate('none()', safe=True)
 def none(repo, subset, x):
     """No changesets.
@@ -1489,6 +1657,7 @@
     getargs(x, 0, 0, _("none takes no arguments"))
     return baseset()
 
+
 @predicate('obsolete()', safe=True)
 def obsolete(repo, subset, x):
     """Mutable changeset with a newer version."""
@@ -1497,6 +1666,7 @@
     obsoletes = obsmod.getrevs(repo, 'obsolete')
     return subset & obsoletes
 
+
 @predicate('only(set, [set])', safe=True)
 def only(repo, subset, x):
     """Changesets that are ancestors of the first set that are not ancestors
@@ -1513,8 +1683,11 @@
             return baseset()
 
         descendants = set(dagop.revdescendants(repo, include, False))
-        exclude = [rev for rev in cl.headrevs()
-            if not rev in descendants and not rev in include]
+        exclude = [
+            rev
+            for rev in cl.headrevs()
+            if not rev in descendants and not rev in include
+        ]
     else:
         exclude = getset(repo, fullreposet(repo), args[1])
 
@@ -1523,6 +1696,7 @@
     # some optimizations from the fact this is a baseset.
     return subset & results
 
+
 @predicate('origin([set])', safe=True)
 def origin(repo, subset, x):
     """
@@ -1555,6 +1729,7 @@
     # some optimizations from the fact this is a baseset.
     return subset & o
 
+
 @predicate('outgoing([path])', safe=False, weight=10)
 def outgoing(repo, subset, x):
     """Changesets not found in the specified destination repository, or the
@@ -1565,6 +1740,7 @@
         discovery,
         hg,
     )
+
     # i18n: "outgoing" is a keyword
     l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
     # i18n: "outgoing" is a keyword
@@ -1574,8 +1750,10 @@
         dest = None
     path = repo.ui.paths.getpath(dest, default=('default-push', 'default'))
     if not path:
-        raise error.Abort(_('default repository not configured!'),
-                hint=_("see 'hg help config.paths'"))
+        raise error.Abort(
+            _('default repository not configured!'),
+            hint=_("see 'hg help config.paths'"),
+        )
     dest = path.pushloc or path.loc
     branches = path.branch, []
 
@@ -1590,6 +1768,7 @@
     o = {cl.rev(r) for r in outgoing.missing}
     return subset & o
 
+
 @predicate('p1([set])', safe=True)
 def p1(repo, subset, x):
     """First parent of changesets in set, or the working directory.
@@ -1612,6 +1791,7 @@
     # some optimizations from the fact this is a baseset.
     return subset & ps
 
+
 @predicate('p2([set])', safe=True)
 def p2(repo, subset, x):
     """Second parent of changesets in set, or the working directory.
@@ -1640,9 +1820,11 @@
     # some optimizations from the fact this is a baseset.
     return subset & ps
 
+
 def parentpost(repo, subset, x, order):
     return p1(repo, subset, x)
 
+
 @predicate('parents([set])', safe=True)
 def parents(repo, subset, x):
     """
@@ -1663,16 +1845,19 @@
     ps -= {node.nullrev}
     return subset & ps
 
+
 def _phase(repo, subset, *targets):
     """helper to select all rev in <targets> phases"""
     return repo._phasecache.getrevset(repo, targets, subset)
 
+
 @predicate('_phase(idx)', safe=True)
 def phase(repo, subset, x):
-    l = getargs(x, 1, 1, ("_phase requires one argument"))
-    target = getinteger(l[0], ("_phase expects a number"))
+    l = getargs(x, 1, 1, "_phase requires one argument")
+    target = getinteger(l[0], "_phase expects a number")
     return _phase(repo, subset, target)
 
+
 @predicate('draft()', safe=True)
 def draft(repo, subset, x):
     """Changeset in draft phase."""
@@ -1681,6 +1866,7 @@
     target = phases.draft
     return _phase(repo, subset, target)
 
+
 @predicate('secret()', safe=True)
 def secret(repo, subset, x):
     """Changeset in secret phase."""
@@ -1689,6 +1875,7 @@
     target = phases.secret
     return _phase(repo, subset, target)
 
+
 @predicate('stack([revs])', safe=True)
 def stack(repo, subset, x):
     """Experimental revset for the stack of changesets or working directory
@@ -1704,6 +1891,7 @@
 
     return subset & stacks
 
+
 def parentspec(repo, subset, x, n, order):
     """``set^0``
     The set.
@@ -1737,6 +1925,7 @@
                     ps.add(parents[1].rev())
     return subset & ps
 
+
 @predicate('present(set)', safe=True, takeorder=True)
 def present(repo, subset, x, order):
     """An empty set, if any revision in set isn't found; otherwise,
@@ -1751,12 +1940,14 @@
     except error.RepoLookupError:
         return baseset()
 
+
 # for internal use
 @predicate('_notpublic', safe=True)
 def _notpublic(repo, subset, x):
     getargs(x, 0, 0, "_notpublic takes no arguments")
     return _phase(repo, subset, phases.draft, phases.secret)
 
+
 # for internal use
 @predicate('_phaseandancestors(phasename, set)', safe=True)
 def _phaseandancestors(repo, subset, x):
@@ -1770,7 +1961,7 @@
     secret = phases.secret
     phasenamemap = {
         '_notpublic': draft,
-        'draft': draft, # follow secret's ancestors
+        'draft': draft,  # follow secret's ancestors
         'secret': secret,
     }
     if phasename not in phasenamemap:
@@ -1784,10 +1975,11 @@
 
     revs = dagop.revancestors(repo, s, cutfunc=cutfunc)
 
-    if phasename == 'draft': # need to remove secret changesets
+    if phasename == 'draft':  # need to remove secret changesets
         revs = revs.filter(lambda r: getphase(repo, r) == draft)
     return subset & revs
 
+
 @predicate('public()', safe=True)
 def public(repo, subset, x):
     """Changeset in public phase."""
@@ -1795,6 +1987,7 @@
     getargs(x, 0, 0, _("public takes no arguments"))
     return _phase(repo, subset, phases.public)
 
+
 @predicate('remote([id [,path]])', safe=False)
 def remote(repo, subset, x):
     """Local revision that corresponds to the given identifier in a
@@ -1802,13 +1995,14 @@
     synonym for the current local branch.
     """
 
-    from . import hg # avoid start-up nasties
+    from . import hg  # avoid start-up nasties
+
     # i18n: "remote" is a keyword
     l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments"))
 
     q = '.'
     if len(l) > 0:
-    # i18n: "remote" is a keyword
+        # i18n: "remote" is a keyword
         q = getstring(l[0], _("remote requires a string id"))
     if q == '.':
         q = repo['.'].branch()
@@ -1830,6 +2024,7 @@
             return baseset([r])
     return baseset()
 
+
 @predicate('removes(pattern)', safe=True, weight=30)
 def removes(repo, subset, x):
     """Changesets which remove files matching pattern.
@@ -1842,6 +2037,7 @@
     pat = getstring(x, _("removes requires a pattern"))
     return checkstatus(repo, subset, pat, 2)
 
+
 @predicate('rev(number)', safe=True)
 def rev(repo, subset, x):
     """Revision with the given numeric identifier.
@@ -1858,6 +2054,7 @@
         return baseset()
     return subset & baseset([l])
 
+
 @predicate('_rev(number)', safe=True)
 def _rev(repo, subset, x):
     # internal version of "rev(x)" that raise error if "x" is invalid
@@ -1869,9 +2066,10 @@
     except (TypeError, ValueError):
         # i18n: "rev" is a keyword
         raise error.ParseError(_("rev expects a number"))
-    repo.changelog.node(l) # check that the rev exists
+    repo.changelog.node(l)  # check that the rev exists
     return subset & baseset([l])
 
+
 @predicate('revset(set)', safe=True, takeorder=True)
 def revsetpredicate(repo, subset, x, order):
     """Strictly interpret the content as a revset.
@@ -1882,6 +2080,7 @@
     """
     return getset(repo, subset, x, order)
 
+
 @predicate('matching(revision [, field])', safe=True)
 def matching(repo, subset, x):
     """Changesets in which a given set of fields match the set of fields in the
@@ -1914,10 +2113,11 @@
 
     fieldlist = ['metadata']
     if len(l) > 1:
-            fieldlist = getstring(l[1],
-                # i18n: "matching" is a keyword
-                _("matching requires a string "
-                "as its second argument")).split()
+        fieldlist = getstring(
+            l[1],
+            # i18n: "matching" is a keyword
+            _("matching requires a string " "as its second argument"),
+        ).split()
 
     # Make sure that there are no repeated fields,
     # expand the 'special' 'metadata' field type
@@ -1943,14 +2143,26 @@
     # We may want to match more than one field
     # Not all fields take the same amount of time to be matched
     # Sort the selected fields in order of increasing matching cost
-    fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
-        'files', 'description', 'substate', 'diff']
+    fieldorder = [
+        'phase',
+        'parents',
+        'user',
+        'date',
+        'branch',
+        'summary',
+        'files',
+        'description',
+        'substate',
+        'diff',
+    ]
+
     def fieldkeyfunc(f):
         try:
             return fieldorder.index(f)
         except ValueError:
             # assume an unknown field is very costly
             return len(fieldorder)
+
     fields = list(fields)
     fields.sort(key=fieldkeyfunc)
 
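The fieldorder rewrite above is the collection-literal case: a literal
already laid out one element per line keeps its vertical shape and gains
a trailing comma, rather than being re-packed to fill the line width.
The payoff is diff hygiene, since adding or removing a sort key later
touches exactly one line:

    fieldorder = [
        'phase',
        'parents',
        'user',
        'date',  # appending a key after this line is a one-line diff
    ]
    assert fieldorder.index('user') == 2
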
@@ -1967,15 +2179,18 @@
         'phase': lambda r: repo[r].phase(),
         'substate': lambda r: repo[r].substate,
         'summary': lambda r: repo[r].description().splitlines()[0],
-        'diff': lambda r: list(repo[r].diff(
-            opts=diffutil.diffallopts(repo.ui, {'git': True}))),
+        'diff': lambda r: list(
+            repo[r].diff(opts=diffutil.diffallopts(repo.ui, {'git': True}))
+        ),
     }
     for info in fields:
         getfield = _funcs.get(info, None)
         if getfield is None:
             raise error.ParseError(
                 # i18n: "matching" is a keyword
-                _("unexpected field name passed to matching: %s") % info)
+                _("unexpected field name passed to matching: %s")
+                % info
+            )
         getfieldfuncs.append(getfield)
     # convert the getfield array of functions into a "getinfo" function
     # which returns an array of field values (or a single value if there
@@ -1995,6 +2210,7 @@
 
     return subset.filter(matches, condrepr=('<matching%r %r>', fields, revs))
 
+
 @predicate('reverse(set)', safe=True, takeorder=True, weight=0)
 def reverse(repo, subset, x, order):
     """Reverse order of set.
@@ -2004,19 +2220,23 @@
         l.reverse()
     return l
 
+
 @predicate('roots(set)', safe=True)
 def roots(repo, subset, x):
     """Changesets in set with no parent changeset in set.
     """
     s = getset(repo, fullreposet(repo), x)
     parents = repo.changelog.parentrevs
+
     def filter(r):
         for p in parents(r):
             if 0 <= p and p in s:
                 return False
         return True
+
     return subset & s.filter(filter, condrepr='<roots>')
 
+
 _sortkeyfuncs = {
     'rev': lambda c: c.rev(),
     'branch': lambda c: c.branch(),
@@ -2026,6 +2246,7 @@
     'date': lambda c: c.date()[0],
 }
 
+
 def _getsortargs(x):
     """Parse sort options into (set, [(key, reverse)], opts)"""
     args = getargsdict(x, 'sort', 'set keys topo.firstbranch')
@@ -2040,18 +2261,20 @@
     keyflags = []
     for k in keys.split():
         fk = k
-        reverse = (k.startswith('-'))
+        reverse = k.startswith('-')
         if reverse:
             k = k[1:]
         if k not in _sortkeyfuncs and k != 'topo':
             raise error.ParseError(
-                _("unknown sort key %r") % pycompat.bytestr(fk))
+                _("unknown sort key %r") % pycompat.bytestr(fk)
+            )
         keyflags.append((k, reverse))
 
     if len(keyflags) > 1 and any(k == 'topo' for k, reverse in keyflags):
         # i18n: "topo" is a keyword
-        raise error.ParseError(_('topo sort order cannot be combined '
-                                 'with other sort keys'))
+        raise error.ParseError(
+            _('topo sort order cannot be combined ' 'with other sort keys')
+        )
 
     opts = {}
     if 'topo.firstbranch' in args:
@@ -2059,13 +2282,19 @@
             opts['topo.firstbranch'] = args['topo.firstbranch']
         else:
             # i18n: "topo" and "topo.firstbranch" are keywords
-            raise error.ParseError(_('topo.firstbranch can only be used '
-                                     'when using the topo sort key'))
+            raise error.ParseError(
+                _(
+                    'topo.firstbranch can only be used '
+                    'when using the topo sort key'
+                )
+            )
 
     return args['set'], keyflags, opts
 
-@predicate('sort(set[, [-]key... [, ...]])', safe=True, takeorder=True,
-           weight=10)
+
+@predicate(
+    'sort(set[, [-]key... [, ...]])', safe=True, takeorder=True, weight=10
+)
 def sort(repo, subset, x, order):
     """Sort set by keys. The default sort order is ascending, specify a key
     as ``-key`` to sort in descending order.
@@ -2096,9 +2325,10 @@
         firstbranch = ()
         if 'topo.firstbranch' in opts:
             firstbranch = getset(repo, subset, opts['topo.firstbranch'])
-        revs = baseset(dagop.toposort(revs, repo.changelog.parentrevs,
-                                      firstbranch),
-                       istopo=True)
+        revs = baseset(
+            dagop.toposort(revs, repo.changelog.parentrevs, firstbranch),
+            istopo=True,
+        )
         if keyflags[0][1]:
             revs.reverse()
         return revs
@@ -2109,6 +2339,7 @@
         ctxs.sort(key=_sortkeyfuncs[k], reverse=reverse)
     return baseset([c.rev() for c in ctxs])
 
+
 @predicate('subrepo([pattern])')
 def subrepo(repo, subset, x):
     """Changesets that add, modify or remove the given subrepo.  If no subrepo
@@ -2153,6 +2384,7 @@
 
     return subset.filter(matches, condrepr=('<subrepo %r>', pat))
 
+
 def _mapbynodefunc(repo, s, f):
     """(repo, smartset, [node] -> [node]) -> smartset
 
@@ -2167,6 +2399,7 @@
     result = set(torev(n) for n in f(tonode(r) for r in s) if n in nodemap)
     return smartset.baseset(result - repo.changelog.filteredrevs)
 
+
 @predicate('successors(set)', safe=True)
 def successors(repo, subset, x):
     """All successors for set, including the given set themselves"""
@@ -2175,9 +2408,11 @@
     d = _mapbynodefunc(repo, s, f)
     return subset & d
 
+
 def _substringmatcher(pattern, casesensitive=True):
     kind, pattern, matcher = stringutil.stringmatcher(
-        pattern, casesensitive=casesensitive)
+        pattern, casesensitive=casesensitive
+    )
     if kind == 'literal':
         if not casesensitive:
             pattern = encoding.lower(pattern)
@@ -2186,6 +2421,7 @@
             matcher = lambda s: pattern in s
     return kind, pattern, matcher
 
+
 @predicate('tag([name])', safe=True)
 def tag(repo, subset, x):
     """The specified tag by name, or all tagged revisions if no name is given.
@@ -2197,16 +2433,19 @@
     args = getargs(x, 0, 1, _("tag takes one or no arguments"))
     cl = repo.changelog
     if args:
-        pattern = getstring(args[0],
-                            # i18n: "tag" is a keyword
-                            _('the argument to tag must be a string'))
+        pattern = getstring(
+            args[0],
+            # i18n: "tag" is a keyword
+            _('the argument to tag must be a string'),
+        )
         kind, pattern, matcher = stringutil.stringmatcher(pattern)
         if kind == 'literal':
             # avoid resolving all tags
             tn = repo._tagscache.tags.get(pattern, None)
             if tn is None:
-                raise error.RepoLookupError(_("tag '%s' does not exist")
-                                            % pattern)
+                raise error.RepoLookupError(
+                    _("tag '%s' does not exist") % pattern
+                )
             s = {repo[tn].rev()}
         else:
             s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
@@ -2214,10 +2453,12 @@
         s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'}
     return subset & s
 
+
 @predicate('tagged', safe=True)
 def tagged(repo, subset, x):
     return tag(repo, subset, x)
 
+
 @predicate('orphan()', safe=True)
 def orphan(repo, subset, x):
     """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
@@ -2237,6 +2478,7 @@
     """
     return author(repo, subset, x)
 
+
 @predicate('wdir()', safe=True, weight=0)
 def wdir(repo, subset, x):
     """Working directory. (EXPERIMENTAL)"""
@@ -2246,6 +2488,7 @@
         return baseset([node.wdirrev])
     return baseset()
 
+
 def _orderedlist(repo, subset, x):
     s = getstring(x, "internal error")
     if not s:
@@ -2268,12 +2511,16 @@
         for r in revs:
             if r in seen:
                 continue
-            if (r in subset
-                or r in _virtualrevs and isinstance(subset, fullreposet)):
+            if (
+                r in subset
+                or r in _virtualrevs
+                and isinstance(subset, fullreposet)
+            ):
                 ls.append(r)
             seen.add(r)
     return baseset(ls)
 
+
 # for internal use
 @predicate('_list', safe=True, takeorder=True)
 def _list(repo, subset, x, order):
@@ -2283,6 +2530,7 @@
     else:
         return _orderedlist(repo, subset, x)
 
+
 def _orderedintlist(repo, subset, x):
     s = getstring(x, "internal error")
     if not s:
@@ -2291,6 +2539,7 @@
     s = subset
     return baseset([r for r in ls if r in s])
 
+
 # for internal use
 @predicate('_intlist', safe=True, takeorder=True, weight=0)
 def _intlist(repo, subset, x, order):
@@ -2300,6 +2549,7 @@
     else:
         return _orderedintlist(repo, subset, x)
 
+
 def _orderedhexlist(repo, subset, x):
     s = getstring(x, "internal error")
     if not s:
@@ -2309,6 +2559,7 @@
     s = subset
     return baseset([r for r in ls if r in s])
 
+
 # for internal use
 @predicate('_hexlist', safe=True, takeorder=True)
 def _hexlist(repo, subset, x, order):
@@ -2318,6 +2569,7 @@
     else:
         return _orderedhexlist(repo, subset, x)
 
+
 methods = {
     "range": rangeset,
     "rangeall": rangeall,
@@ -2348,13 +2600,16 @@
     "generations": generationsrel,
 }
 
+
 def lookupfn(repo):
     return lambda symbol: scmutil.isrevsymbol(repo, symbol)
 
+
 def match(ui, spec, lookup=None):
     """Create a matcher for a single revision spec"""
     return matchany(ui, [spec], lookup=lookup)
 
+
 def matchany(ui, specs, lookup=None, localalias=None):
     """Create a matcher that will include any revisions matching one of the
     given specs
@@ -2366,16 +2621,20 @@
     precedence over [revsetalias] config section.
     """
     if not specs:
+
         def mfunc(repo, subset=None):
             return baseset()
+
         return mfunc
     if not all(specs):
         raise error.ParseError(_("empty query"))
     if len(specs) == 1:
         tree = revsetlang.parse(specs[0], lookup)
     else:
-        tree = ('or',
-                ('list',) + tuple(revsetlang.parse(s, lookup) for s in specs))
+        tree = (
+            'or',
+            ('list',) + tuple(revsetlang.parse(s, lookup) for s in specs),
+        )
 
     aliases = []
     warn = None
@@ -2391,8 +2650,10 @@
     tree = revsetlang.optimize(tree)
     return makematcher(tree)
 
+
 def makematcher(tree):
     """Create a matcher from an evaluatable tree"""
+
     def mfunc(repo, subset=None, order=None):
         if order is None:
             if subset is None:
@@ -2402,8 +2663,10 @@
         if subset is None:
             subset = fullreposet(repo)
         return getset(repo, subset, tree, order)
+
     return mfunc
 
+
 def loadpredicate(ui, extname, registrarobj):
     """Load revset predicates from specified registrarobj
     """
@@ -2412,6 +2675,7 @@
         if func._safe:
             safesymbols.add(name)
 
+
 # load built-in predicates explicitly to setup safesymbols
 loadpredicate(None, None, predicate)
 
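Stepping back over the revset.py hunks: most of their added lines are
bare blank lines, because the formatter enforces the PEP 8 vertical
spacing this module used to compress, i.e. two blank lines between
top-level definitions and one blank line setting off a nested def from
surrounding code. In isolation:

    SENTINEL = object()


    def outer():  # two blank lines above every top-level definition
        cache = [SENTINEL]

        def inner():  # one blank line before a nested def
            return cache[0]

        return inner()
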
--- a/mercurial/revsetlang.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/revsetlang.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,9 +18,7 @@
     smartset,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 elements = {
     # token-type: binding-strength, primary, prefix, infix, suffix
@@ -31,10 +29,20 @@
     "~": (18, None, None, ("ancestor", 18), None),
     "^": (18, None, None, ("parent", 18), "parentpost"),
     "-": (5, None, ("negate", 19), ("minus", 5), None),
-    "::": (17, "dagrangeall", ("dagrangepre", 17), ("dagrange", 17),
-           "dagrangepost"),
-    "..": (17, "dagrangeall", ("dagrangepre", 17), ("dagrange", 17),
-           "dagrangepost"),
+    "::": (
+        17,
+        "dagrangeall",
+        ("dagrangepre", 17),
+        ("dagrange", 17),
+        "dagrangepost",
+    ),
+    "..": (
+        17,
+        "dagrangeall",
+        ("dagrangepre", 17),
+        ("dagrange", 17),
+        "dagrangepost",
+    ),
     ":": (15, "rangeall", ("rangepre", 15), ("range", 15), "rangepost"),
     "not": (10, None, ("not", 10), None, None),
     "!": (10, None, ("not", 10), None, None),
@@ -61,14 +69,18 @@
 _simpleopletters = set(pycompat.iterbytestr("()[]#:=,-|&+!~^%"))
 
 # default set of valid characters for the initial letter of symbols
-_syminitletters = set(pycompat.iterbytestr(
-    pycompat.sysbytes(string.ascii_letters) +
-    pycompat.sysbytes(string.digits) +
-    '._@')) | set(map(pycompat.bytechr, pycompat.xrange(128, 256)))
+_syminitletters = set(
+    pycompat.iterbytestr(
+        pycompat.sysbytes(string.ascii_letters)
+        + pycompat.sysbytes(string.digits)
+        + '._@'
+    )
+) | set(map(pycompat.bytechr, pycompat.xrange(128, 256)))
 
 # default set of valid characters for non-initial letters of symbols
 _symletters = _syminitletters | set(pycompat.iterbytestr('-/'))
 
+
 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
     '''
     Parse a revset statement into a stream of tokens
@@ -91,8 +103,9 @@
 
     '''
     if not isinstance(program, bytes):
-        raise error.ProgrammingError('revset statement must be bytes, got %r'
-                                     % program)
+        raise error.ProgrammingError(
+            'revset statement must be bytes, got %r' % program
+        )
     program = pycompat.bytestr(program)
     if syminitletters is None:
         syminitletters = _syminitletters
@@ -117,21 +130,30 @@
     pos, l = 0, len(program)
     while pos < l:
         c = program[pos]
-        if c.isspace(): # skip inter-token whitespace
+        if c.isspace():  # skip inter-token whitespace
             pass
-        elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully
+        elif (
+            c == ':' and program[pos : pos + 2] == '::'
+        ):  # look ahead carefully
             yield ('::', None, pos)
-            pos += 1 # skip ahead
-        elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully
+            pos += 1  # skip ahead
+        elif (
+            c == '.' and program[pos : pos + 2] == '..'
+        ):  # look ahead carefully
             yield ('..', None, pos)
-            pos += 1 # skip ahead
-        elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully
+            pos += 1  # skip ahead
+        elif (
+            c == '#' and program[pos : pos + 2] == '##'
+        ):  # look ahead carefully
             yield ('##', None, pos)
-            pos += 1 # skip ahead
-        elif c in _simpleopletters: # handle simple operators
+            pos += 1  # skip ahead
+        elif c in _simpleopletters:  # handle simple operators
             yield (c, None, pos)
-        elif (c in _quoteletters or c == 'r' and
-              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
+        elif (
+            c in _quoteletters
+            or c == 'r'
+            and program[pos : pos + 2] in ("r'", 'r"')
+        ):  # handle quoted strings
             if c == 'r':
                 pos += 1
                 c = program[pos]
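
Two smaller normalizations run through the tokenize hunk above: a slice
bound that is a compound expression gets symmetric spaces around the
colon (program[pos : pos + 2], versus the tight program[0:2] for simple
bounds), and inline comments are set off by two spaces before the '#'.
Both are PEP 8 rules, applied uniformly here:

    program = b'::tip'
    pos = 0
    window = program[pos : pos + 2]  # spaced: one bound is an expression
    assert window == b'::'
    assert program[0:2] == window  # literal bounds keep the tight form
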
@@ -140,9 +162,9 @@
                 decode = parser.unescapestr
             pos += 1
             s = pos
-            while pos < l: # find closing quote
+            while pos < l:  # find closing quote
                 d = program[pos]
-                if d == '\\': # skip over escaped characters
+                if d == '\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
@@ -155,16 +177,16 @@
         elif c in syminitletters:
             s = pos
             pos += 1
-            while pos < l: # find end of symbol
+            while pos < l:  # find end of symbol
                 d = program[pos]
                 if d not in symletters:
                     break
-                if d == '.' and program[pos - 1] == '.': # special case for ..
+                if d == '.' and program[pos - 1] == '.':  # special case for ..
                     pos -= 1
                     break
                 pos += 1
             sym = program[s:pos]
-            if sym in keywords: # operator keywords
+            if sym in keywords:  # operator keywords
                 yield (sym, None, s)
             elif '-' in sym:
                 # some jerk gave us foo-bar-baz, try to check if it's a symbol
@@ -175,36 +197,41 @@
                     # looks like an expression
                     parts = sym.split('-')
                     for p in parts[:-1]:
-                        if p: # possible consecutive -
+                        if p:  # possible consecutive -
                             yield ('symbol', p, s)
                         s += len(p)
                         yield ('-', None, s)
                         s += 1
-                    if parts[-1]: # possible trailing -
+                    if parts[-1]:  # possible trailing -
                         yield ('symbol', parts[-1], s)
             else:
                 yield ('symbol', sym, s)
             pos -= 1
         else:
-            raise error.ParseError(_("syntax error in revset '%s'") %
-                                   program, pos)
+            raise error.ParseError(
+                _("syntax error in revset '%s'") % program, pos
+            )
         pos += 1
     yield ('end', None, pos)
 
+
 # helpers
 
 _notset = object()
 
+
 def getsymbol(x):
     if x and x[0] == 'symbol':
         return x[1]
     raise error.ParseError(_('not a symbol'))
 
+
 def getstring(x, err):
     if x and (x[0] == 'string' or x[0] == 'symbol'):
         return x[1]
     raise error.ParseError(err)
 
+
 def getinteger(x, err, default=_notset):
     if not x and default is not _notset:
         return default
@@ -213,12 +240,14 @@
     except ValueError:
         raise error.ParseError(err)
 
+
 def getboolean(x, err):
     value = stringutil.parsebool(getsymbol(x))
     if value is not None:
         return value
     raise error.ParseError(err)
 
+
 def getlist(x):
     if not x:
         return []
@@ -226,6 +255,7 @@
         return list(x[1:])
     return [x]
 
+
 def getrange(x, err):
     if not x:
         raise error.ParseError(err)
@@ -240,6 +270,7 @@
         return None, None
     raise error.ParseError(err)
 
+
 def getintrange(x, err1, err2, deffirst=_notset, deflast=_notset):
     """Get [first, last] integer range (both inclusive) from a parsed tree
 
@@ -252,19 +283,28 @@
     a, b = getrange(x, err1)
     return getinteger(a, err2, deffirst), getinteger(b, err2, deflast)
 
+
 def getargs(x, min, max, err):
     l = getlist(x)
     if len(l) < min or (max >= 0 and len(l) > max):
         raise error.ParseError(err)
     return l
 
+
 def getargsdict(x, funcname, keys):
-    return parser.buildargsdict(getlist(x), funcname, parser.splitargspec(keys),
-                                keyvaluenode='keyvalue', keynode='symbol')
+    return parser.buildargsdict(
+        getlist(x),
+        funcname,
+        parser.splitargspec(keys),
+        keyvaluenode='keyvalue',
+        keynode='symbol',
+    )
+
 
 # cache of {spec: raw parsed tree} built internally
 _treecache = {}
 
+
 def _cachedtree(spec):
     # thread safe because parse() is reentrant and dict.__setitem__() is atomic
     tree = _treecache.get(spec)
@@ -272,6 +312,7 @@
         _treecache[spec] = tree = parse(spec)
     return tree
 
+
 def _build(tmplspec, *repls):
     """Create raw parsed tree from a template revset statement
 
@@ -281,6 +322,7 @@
     template = _cachedtree(tmplspec)
     return parser.buildtree(template, ('symbol', '_'), *repls)
 
+
 def _match(patspec, tree):
     """Test if a tree matches the given pattern statement; return the matches
 
@@ -290,12 +332,15 @@
     >>> _match(b'f(_)', parse(b'f(1, 2)'))
     """
     pattern = _cachedtree(patspec)
-    return parser.matchtree(pattern, tree, ('symbol', '_'),
-                            {'keyvalue', 'list'})
+    return parser.matchtree(
+        pattern, tree, ('symbol', '_'), {'keyvalue', 'list'}
+    )
+
 
 def _matchonly(revs, bases):
     return _match('ancestors(_) and not ancestors(_)', ('and', revs, bases))
 
+
 def _fixops(x):
     """Rewrite raw parsed tree to resolve ambiguous syntax which cannot be
     handled well by our simple top-down parser"""
@@ -325,6 +370,7 @@
 
     return (op,) + tuple(_fixops(y) for y in x[1:])
 
+
 def _analyze(x):
     if x is None:
         return x
@@ -353,8 +399,15 @@
         return (op, _analyze(x[1]))
     elif op == 'group':
         return _analyze(x[1])
-    elif op in {'and', 'dagrange', 'range', 'parent', 'ancestor', 'relation',
-                'subscript'}:
+    elif op in {
+        'and',
+        'dagrange',
+        'range',
+        'parent',
+        'ancestor',
+        'relation',
+        'subscript',
+    }:
         ta = _analyze(x[1])
         tb = _analyze(x[2])
         return (op, ta, tb)
@@ -371,6 +424,7 @@
         return (op, x[1], _analyze(x[2]))
     raise ValueError('invalid operator %r' % op)
 
+
 def analyze(x):
     """Transform raw parsed tree to evaluatable tree which can be fed to
     optimize() or getset()
@@ -380,13 +434,14 @@
     """
     return _analyze(x)
 
+
 def _optimize(x):
     if x is None:
         return 0, x
 
     op = x[0]
     if op in ('string', 'symbol', 'smartset'):
-        return 0.5, x # single revisions are small
+        return 0.5, x  # single revisions are small
     elif op == 'and':
         wa, ta = _optimize(x[1])
         wb, tb = _optimize(x[2])
@@ -412,6 +467,7 @@
         # fast path for machine-generated expression, that is likely to have
         # lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()'
         ws, ts, ss = [], [], []
+
         def flushss():
             if not ss:
                 return
@@ -424,6 +480,7 @@
             ws.append(w)
             ts.append(t)
             del ss[:]
+
         for y in getlist(x[1]):
             w, t = _optimize(y)
             if t is not None and (t[0] == 'string' or t[0] == 'symbol'):
@@ -434,7 +491,7 @@
             ts.append(t)
         flushss()
         if len(ts) == 1:
-            return ws[0], ts[0] # 'or' operation is fully optimized out
+            return ws[0], ts[0]  # 'or' operation is fully optimized out
         return max(ws), (op, ('list',) + tuple(ts))
     elif op == 'not':
         # Optimize not public() to _notpublic() because we have a fast version
@@ -478,6 +535,7 @@
         return w + wa, (op, x[1], ta)
     raise ValueError('invalid operator %r' % op)
 
+
 def optimize(tree):
     """Optimize evaluatable tree
 
@@ -486,10 +544,12 @@
     _weight, newtree = _optimize(tree)
     return newtree
 
+
 # the set of valid characters for the initial letter of symbols in
 # alias declarations and definitions
 _aliassyminitletters = _syminitletters | {'$'}
 
+
 def _parsewith(spec, lookup=None, syminitletters=None):
     """Generate a parse tree of given spec with given tokenizing options
 
@@ -507,14 +567,17 @@
     if lookup and spec.startswith('revset(') and spec.endswith(')'):
         lookup = None
     p = parser.parser(elements)
-    tree, pos = p.parse(tokenize(spec, lookup=lookup,
-                                 syminitletters=syminitletters))
+    tree, pos = p.parse(
+        tokenize(spec, lookup=lookup, syminitletters=syminitletters)
+    )
     if pos != len(spec):
         raise error.ParseError(_('invalid token'), pos)
     return _fixops(parser.simplifyinfixops(tree, ('list', 'or')))
 
+
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of revset aliases"""
+
     _section = _('revset alias')
 
     @staticmethod
@@ -532,6 +595,7 @@
         if tree[0] == 'func' and tree[1][0] == 'symbol':
             return tree[1][1], getlist(tree[2])
 
+
 def expandaliases(tree, aliases, warn=None):
     """Expand aliases in a tree, aliases is a list of (name, value) tuples"""
     aliases = _aliasrules.buildmap(aliases)
@@ -544,11 +608,15 @@
                 alias.warned = True
     return tree
 
+
 def foldconcat(tree):
     """Fold elements to be concatenated by `##`
     """
-    if (not isinstance(tree, tuple)
-        or tree[0] in ('string', 'symbol', 'smartset')):
+    if not isinstance(tree, tuple) or tree[0] in (
+        'string',
+        'symbol',
+        'smartset',
+    ):
         return tree
     if tree[0] == '_concat':
         pending = [tree]
@@ -566,6 +634,7 @@
     else:
         return tuple(foldconcat(t) for t in tree)
 
+
 def parse(spec, lookup=None):
     try:
         return _parsewith(spec, lookup=lookup)
@@ -581,6 +650,7 @@
             inst.hint = spec + '\n' + ' ' * (loc + 1) + '^ ' + _('here')
         raise
 
+
 def _quote(s):
     r"""Quote a value in order to make it safe for the revset engine.
 
@@ -595,6 +665,7 @@
     """
     return "'%s'" % stringutil.escapestr(pycompat.bytestr(s))
 
+
 def _formatargtype(c, arg):
     if c == 'd':
         return '_rev(%d)' % int(arg)
@@ -603,7 +674,7 @@
     elif c == 'r':
         if not isinstance(arg, bytes):
             raise TypeError
-        parse(arg) # make sure syntax errors are confined
+        parse(arg)  # make sure syntax errors are confined
         return '(%s)' % arg
     elif c == 'n':
         return _quote(node.hex(arg))
@@ -614,6 +685,7 @@
             raise TypeError
     raise error.ParseError(_('unexpected revspec format character %s') % c)
 
+
 def _formatlistexp(s, t):
     l = len(s)
     if l == 0:
@@ -635,6 +707,7 @@
     m = l // 2
     return '(%s or %s)' % (_formatlistexp(s[:m], t), _formatlistexp(s[m:], t))
 
+
 def _formatintlist(data):
     try:
         l = len(data)
@@ -646,14 +719,17 @@
     except (TypeError, ValueError):
         raise error.ParseError(_('invalid argument for revspec'))
 
+
 def _formatparamexp(args, t):
     return ', '.join(_formatargtype(t, a) for a in args)
 
+
 _formatlistfuncs = {
     'l': _formatlistexp,
     'p': _formatparamexp,
 }
 
+
 def formatspec(expr, *args):
     '''
     This is a convenience function for using revsets internally, and
@@ -704,6 +780,7 @@
             raise error.ProgrammingError("unknown revspec item type: %r" % t)
     return b''.join(ret)
 
+
 def spectree(expr, *args):
     """similar to formatspec but return a parsed and optimized tree"""
     parsed = _parseargs(expr, args)
@@ -726,6 +803,7 @@
     tree = optimize(tree)
     return tree
 
+
 def _parseargs(expr, args):
     """parse the expression and replace all inexpensive args
 
@@ -763,7 +841,7 @@
         if f:
             # a list of some type, might be expensive, do not replace
             pos += 1
-            islist = (d == 'l')
+            islist = d == 'l'
             try:
                 d = expr[pos]
             except IndexError:
@@ -794,15 +872,18 @@
         pass
     return ret
 
+
 def prettyformat(tree):
     return parser.prettyformat(tree, ('string', 'symbol'))
 
+
 def depth(tree):
     if isinstance(tree, tuple):
         return max(map(depth, tree)) + 1
     else:
         return 0
 
+
 def funcsused(tree):
     if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
         return set()
@@ -814,12 +895,15 @@
             funcs.add(tree[1][1])
         return funcs
 
+
 _hashre = util.re.compile('[0-9a-fA-F]{1,40}$')
 
+
 def _ishashlikesymbol(symbol):
     """returns true if the symbol looks like a hash"""
     return _hashre.match(symbol)
 
+
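# Editor's note: a self-contained illustration of the check above, using the
# stdlib re module in place of util.re. The pattern accepts 1 to 40 hex
# digits, i.e. anything that could be a prefix of a 40-character SHA-1 node.
import re

hashre = re.compile(r'[0-9a-fA-F]{1,40}$')

assert hashre.match('cafebabe')       # plausible hash prefix
assert not hashre.match('tip')        # contains non-hex characters
assert not hashre.match('a' * 41)     # longer than a full node id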
 def gethashlikesymbols(tree):
     """returns the list of symbols of the tree that look like hashes
 
--- a/mercurial/scmutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/scmutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -60,6 +60,7 @@
 
 termsize = scmplatform.termsize
 
+
 class status(tuple):
     '''Named tuple with a list of files per status. The 'deleted', 'unknown'
        and 'ignored' properties are only relevant to the working copy.
@@ -67,10 +68,12 @@
 
     __slots__ = ()
 
-    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
-                clean):
-        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
-                                   ignored, clean))
+    def __new__(
+        cls, modified, added, removed, deleted, unknown, ignored, clean
+    ):
+        return tuple.__new__(
+            cls, (modified, added, removed, deleted, unknown, ignored, clean)
+        )
 
     @property
     def modified(self):
@@ -110,9 +113,11 @@
         return self[6]
 
     def __repr__(self, *args, **kwargs):
-        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
-                 r'unknown=%s, ignored=%s, clean=%s>') %
-                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
+        return (
+            r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
+            r'unknown=%s, ignored=%s, clean=%s>'
+        ) % tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self)
+
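# Editor's note: a toy model (illustration only, not part of this patch) of
# the status class above: a tuple subclass with __slots__ = (), so instances
# carry no __dict__, plus read-only properties that simply index the tuple.
class ministatus(tuple):
    __slots__ = ()

    def __new__(cls, modified, added, removed):
        return tuple.__new__(cls, (modified, added, removed))

    @property
    def modified(self):
        return self[0]

    @property
    def added(self):
        return self[1]

    @property
    def removed(self):
        return self[2]

s = ministatus(['a.txt'], [], ['b.txt'])
assert s.modified == ['a.txt'] and s[2] == ['b.txt']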
 
 def itersubrepos(ctx1, ctx2):
     """find subrepos in ctx1 or ctx2"""
@@ -139,6 +144,7 @@
     for subpath in missing:
         yield subpath, ctx2.nullsub(subpath, ctx1)
 
+
 def nochangesfound(ui, repo, excluded=None):
     '''Report no changes for push/pull; excluded is None or a list of
     nodes excluded from the push/pull.
@@ -151,11 +157,14 @@
                 secretlist.append(n)
 
     if secretlist:
-        ui.status(_("no changes found (ignored %d secret changesets)\n")
-                  % len(secretlist))
+        ui.status(
+            _("no changes found (ignored %d secret changesets)\n")
+            % len(secretlist)
+        )
     else:
         ui.status(_("no changes found\n"))
 
+
 def callcatch(ui, func):
     """call func() with global exception handling
 
@@ -165,7 +174,7 @@
     try:
         try:
             return func()
-        except: # re-raises
+        except:  # re-raises
             ui.traceback()
             raise
     # Global exception handling, alphabetically
@@ -173,17 +182,24 @@
     except error.LockHeld as inst:
         if inst.errno == errno.ETIMEDOUT:
             reason = _('timed out waiting for lock held by %r') % (
-                pycompat.bytestr(inst.locker))
+                pycompat.bytestr(inst.locker)
+            )
         else:
             reason = _('lock held by %r') % inst.locker
-        ui.error(_("abort: %s: %s\n") % (
-            inst.desc or stringutil.forcebytestr(inst.filename), reason))
+        ui.error(
+            _("abort: %s: %s\n")
+            % (inst.desc or stringutil.forcebytestr(inst.filename), reason)
+        )
         if not inst.locker:
             ui.error(_("(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
-        ui.error(_("abort: could not lock %s: %s\n") %
-                 (inst.desc or stringutil.forcebytestr(inst.filename),
-                  encoding.strtolocal(inst.strerror)))
+        ui.error(
+            _("abort: could not lock %s: %s\n")
+            % (
+                inst.desc or stringutil.forcebytestr(inst.filename),
+                encoding.strtolocal(inst.strerror),
+            )
+        )
     except error.OutOfBandError as inst:
         if inst.args:
             msg = _("abort: remote error:\n")
@@ -234,10 +250,10 @@
         elif m in "zlib".split():
             ui.error(_("(is your Python install correct?)\n"))
     except (IOError, OSError) as inst:
-        if util.safehasattr(inst, "code"): # HTTPError
+        if util.safehasattr(inst, "code"):  # HTTPError
             ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
-        elif util.safehasattr(inst, "reason"): # URLError or SSLError
-            try: # usually it is in the form (errno, strerror)
+        elif util.safehasattr(inst, "reason"):  # URLError or SSLError
+            try:  # usually it is in the form (errno, strerror)
                 reason = inst.reason.args[1]
             except (AttributeError, IndexError):
                 # it might be anything, for example a string
@@ -246,17 +262,24 @@
                 # SSLError of Python 2.7.9 contains a unicode
                 reason = encoding.unitolocal(reason)
             ui.error(_("abort: error: %s\n") % reason)
-        elif (util.safehasattr(inst, "args")
-              and inst.args and inst.args[0] == errno.EPIPE):
+        elif (
+            util.safehasattr(inst, "args")
+            and inst.args
+            and inst.args[0] == errno.EPIPE
+        ):
             pass
-        elif getattr(inst, "strerror", None): # common IOError or OSError
+        elif getattr(inst, "strerror", None):  # common IOError or OSError
             if getattr(inst, "filename", None) is not None:
-                ui.error(_("abort: %s: '%s'\n") % (
-                    encoding.strtolocal(inst.strerror),
-                    stringutil.forcebytestr(inst.filename)))
+                ui.error(
+                    _("abort: %s: '%s'\n")
+                    % (
+                        encoding.strtolocal(inst.strerror),
+                        stringutil.forcebytestr(inst.filename),
+                    )
+                )
             else:
                 ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
-        else: # suspicious IOError
+        else:  # suspicious IOError
             raise
     except MemoryError:
         ui.error(_("abort: out of memory\n"))
@@ -267,6 +290,7 @@
 
     return -1
 
+
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
@@ -275,7 +299,8 @@
     for c in (':', '\0', '\n', '\r'):
         if c in lbl:
             raise error.Abort(
-                _("%r cannot be used in a name") % pycompat.bytestr(c))
+                _("%r cannot be used in a name") % pycompat.bytestr(c)
+            )
     try:
         int(lbl)
         raise error.Abort(_("cannot use an integer as a name"))
@@ -284,11 +309,15 @@
     if lbl.strip() != lbl:
         raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
 
+
 def checkfilename(f):
     '''Check that f is an acceptable filename for a tracked file'''
     if '\r' in f or '\n' in f:
-        raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
-                          % pycompat.bytestr(f))
+        raise error.Abort(
+            _("'\\n' and '\\r' disallowed in filenames: %r")
+            % pycompat.bytestr(f)
+        )
+
 
 def checkportable(ui, f):
     '''Check if filename f is portable and warn or abort depending on config'''
@@ -302,6 +331,7 @@
                 raise error.Abort(msg)
             ui.warn(_("warning: %s\n") % msg)
 
+
 def checkportabilityalert(ui):
     '''check if the user's config requests nothing, a warning, or abort for
     non-portable filenames'''
@@ -312,9 +342,11 @@
     warn = bval or lval == 'warn'
     if bval is None and not (warn or abort or lval == 'ignore'):
         raise error.ConfigError(
-            _("ui.portablefilenames value is invalid ('%s')") % val)
+            _("ui.portablefilenames value is invalid ('%s')") % val
+        )
     return abort, warn
 
+
 class casecollisionauditor(object):
     def __init__(self, ui, abort, dirstate):
         self._ui = ui
@@ -339,6 +371,7 @@
         self._loweredfiles.add(fl)
         self._newfiles.add(f)
 
+
 def filteredhash(repo, maxrev):
     """build hash of filtered revisions in the current repoview.
 
@@ -363,20 +396,25 @@
         key = s.digest()
     return key
 
+
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
     '''yield every hg repository under path, always recursively.
     The recurse flag will only control recursion into repo working dirs'''
+
     def errhandler(err):
         if err.filename == path:
             raise err
+
     samestat = getattr(os.path, 'samestat', None)
     if followsym and samestat is not None:
+
         def adddir(dirlst, dirname):
             dirstat = os.stat(dirname)
             match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
             if not match:
                 dirlst.append(dirstat)
             return not match
+
     else:
         followsym = False
 
@@ -386,15 +424,15 @@
     for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
         dirs.sort()
         if '.hg' in dirs:
-            yield root # found a repository
+            yield root  # found a repository
             qroot = os.path.join(root, '.hg', 'patches')
             if os.path.isdir(os.path.join(qroot, '.hg')):
-                yield qroot # we have a patch queue repo here
+                yield qroot  # we have a patch queue repo here
             if recurse:
                 # avoid recursing inside the .hg directory
                 dirs.remove('.hg')
             else:
-                dirs[:] = [] # don't descend further
+                dirs[:] = []  # don't descend further
         elif followsym:
             newdirs = []
             for d in dirs:
@@ -407,6 +445,7 @@
                         newdirs.append(d)
             dirs[:] = newdirs
 
+
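# Editor's note: a stripped-down sketch of the traversal in walkrepos above.
# os.walk with topdown=True lets the caller prune descent by mutating dirs in
# place, which is how the loop above stops below a found repository.
import os

def findrepos(path):
    for root, dirs, _files in os.walk(path, topdown=True):
        dirs.sort()
        if '.hg' in dirs:
            yield root    # found a repository
            dirs[:] = []  # don't descend any further

# list(findrepos('/some/tree')) yields every outermost repository under path.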
 def binnode(ctx):
     """Return binary node id for a given basectx"""
     node = ctx.node()
@@ -414,6 +453,7 @@
         return wdirid
     return node
 
+
 def intrev(ctx):
     """Return integer for a given basectx that can be used in comparison or
     arithmetic operation"""
@@ -422,12 +462,14 @@
         return wdirrev
     return rev
 
+
 def formatchangeid(ctx):
     """Format changectx as '{rev}:{node|formatnode}', which is the default
     template provided by logcmdutil.changesettemplater"""
     repo = ctx.repo()
     return formatrevnode(repo.ui, intrev(ctx), binnode(ctx))
 
+
 def formatrevnode(ui, rev, node):
     """Format given revision and node depending on the current verbosity"""
     if ui.debugflag:
@@ -436,9 +478,11 @@
         hexfunc = short
     return '%d:%s' % (rev, hexfunc(node))
 
+
 def resolvehexnodeidprefix(repo, prefix):
-    if (prefix.startswith('x') and
-        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
+    if prefix.startswith('x') and repo.ui.configbool(
+        'experimental', 'revisions.prefixhexnode'
+    ):
         prefix = prefix[1:]
     try:
         # Uses unfiltered repo because it's faster when prefix is ambiguous/
@@ -448,8 +492,9 @@
         revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
         if revset:
             # Clear config to avoid infinite recursion
-            configoverrides = {('experimental',
-                                'revisions.disambiguatewithin'): None}
+            configoverrides = {
+                ('experimental', 'revisions.disambiguatewithin'): None
+            }
             with repo.ui.configoverride(configoverrides):
                 revs = repo.anyrevs([revset], user=True)
                 matches = []
@@ -465,6 +510,7 @@
     repo.changelog.rev(node)  # make sure node isn't filtered
     return node
 
+
 def mayberevnum(repo, prefix):
     """Checks if the given prefix may be mistaken for a revision number"""
     try:
@@ -479,6 +525,7 @@
     except ValueError:
         return False
 
+
 def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
     """Find the shortest unambiguous prefix that matches hexnode.
 
@@ -489,7 +536,7 @@
     # which would be unacceptably slow. so we look for hash collision in
     # unfiltered space, which means some hashes may be slightly longer.
 
-    minlength=max(minlength, 1)
+    minlength = max(minlength, 1)
 
     def disambiguate(prefix):
         """Disambiguate against revnums."""
@@ -550,6 +597,7 @@
     except error.LookupError:
         raise error.RepoLookupError()
 
+
 def isrevsymbol(repo, symbol):
     """Checks if a symbol exists in the repo.
 
@@ -562,6 +610,7 @@
     except error.RepoLookupError:
         return False
 
+
 def revsymbol(repo, symbol):
     """Returns a context given a single revision symbol (as string).
 
@@ -570,8 +619,10 @@
     not "max(public())".
     """
     if not isinstance(symbol, bytes):
-        msg = ("symbol (%s of type %s) was not a string, did you mean "
-               "repo[symbol]?" % (symbol, type(symbol)))
+        msg = (
+            "symbol (%s of type %s) was not a string, did you mean "
+            "repo[symbol]?" % (symbol, type(symbol))
+        )
         raise error.ProgrammingError(msg)
     try:
         if symbol in ('.', 'tip', 'null'):
@@ -619,10 +670,14 @@
 
     except error.WdirUnsupported:
         return repo[None]
-    except (error.FilteredIndexError, error.FilteredLookupError,
-            error.FilteredRepoLookupError):
+    except (
+        error.FilteredIndexError,
+        error.FilteredLookupError,
+        error.FilteredRepoLookupError,
+    ):
         raise _filterederror(repo, symbol)
 
+
 def _filterederror(repo, changeid):
     """build an exception to be raised about a filtered changeid
 
@@ -648,6 +703,7 @@
     msg %= (changeid, repo.filtername)
     return error.FilteredRepoLookupError(msg)
 
+
 def revsingle(repo, revspec, default='.', localalias=None):
     if not revspec and revspec != 0:
         return repo[default]
@@ -657,10 +713,12 @@
         raise error.Abort(_('empty revision set'))
     return repo[l.last()]
 
+
 def _pairspec(revspec):
     tree = revsetlang.parse(revspec)
     return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
 
+
 def revpair(repo, revs):
     if not revs:
         return repo['.'], repo[None]
@@ -673,8 +731,11 @@
     first = l.first()
     second = l.last()
 
-    if (first == second and len(revs) >= 2
-        and not all(revrange(repo, [r]) for r in revs)):
+    if (
+        first == second
+        and len(revs) >= 2
+        and not all(revrange(repo, [r]) for r in revs)
+    ):
         raise error.Abort(_('empty revision on one side of range'))
 
     # if top-level is range expression, the result must always be a pair
@@ -683,6 +744,7 @@
 
     return repo[first], repo[second]
 
+
 def revrange(repo, specs, localalias=None):
     """Execute 1 to many revsets and return the union.
 
@@ -711,6 +773,7 @@
         allspecs.append(spec)
     return repo.anyrevs(allspecs, user=True, localalias=localalias)
 
+
 def meaningfulparents(repo, ctx):
     """Return list of meaningful (or all if debug) parentrevs for rev.
 
@@ -727,6 +790,7 @@
         return []
     return parents
 
+
 def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
     """Return a function that produced paths for presenting to the user.
 
@@ -751,7 +815,8 @@
             relative = stringutil.parsebool(config)
             if relative is None:
                 raise error.ConfigError(
-                    _("ui.relative-paths is not a boolean ('%s')") % config)
+                    _("ui.relative-paths is not a boolean ('%s')") % config
+                )
 
     if relative:
         cwd = repo.getcwd()
@@ -762,10 +827,12 @@
     else:
         return util.localpath
 
+
 def subdiruipathfn(subpath, uipathfn):
     '''Create a new uipathfn that treats the file as relative to subpath.'''
     return lambda f: uipathfn(posixpath.join(subpath, f))
 
+
 def anypats(pats, opts):
     '''Checks if any patterns, including --include and --exclude, were given.
 
@@ -774,6 +841,7 @@
     '''
     return bool(pats or opts.get('include') or opts.get('exclude'))
 
+
 def expandpats(pats):
     '''Expand bare globs when running on Windows.
     On POSIX we assume it has already been done by sh.'''
@@ -793,8 +861,10 @@
         ret.append(kindpat)
     return ret
 
-def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
-                 badfn=None):
+
+def matchandpats(
+    ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+):
     '''Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
     is provided.'''
@@ -804,32 +874,44 @@
         pats = expandpats(pats or [])
 
     uipathfn = getuipathfn(ctx.repo(), legacyrelativevalue=True)
+
     def bad(f, msg):
         ctx.repo().ui.warn("%s: %s\n" % (uipathfn(f), msg))
 
     if badfn is None:
         badfn = bad
 
-    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
-                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)
+    m = ctx.match(
+        pats,
+        opts.get('include'),
+        opts.get('exclude'),
+        default,
+        listsubrepos=opts.get('subrepos'),
+        badfn=badfn,
+    )
 
     if m.always():
         pats = []
     return m, pats
 
-def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
-          badfn=None):
+
+def match(
+    ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None
+):
     '''Return a matcher that will warn about bad matches.'''
     return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
 
+
 def matchall(repo):
     '''Return a matcher that will efficiently match everything.'''
     return matchmod.always()
 
+
 def matchfiles(repo, files, badfn=None):
     '''Return a matcher that will efficiently match exactly these files.'''
     return matchmod.exact(files, badfn=badfn)
 
+
 def parsefollowlinespattern(repo, rev, pat, msg):
     """Return a file name from `pat` pattern suitable for usage in followlines
     logic.
@@ -844,6 +926,7 @@
             raise error.ParseError(msg)
         return files[0]
 
+
 def getorigvfs(ui, repo):
     """return a vfs suitable to save 'orig' file
 
@@ -853,6 +936,7 @@
         return None
     return vfs.vfs(repo.wvfs.join(origbackuppath))
 
+
 def backuppath(ui, repo, filepath):
     '''customize where working copy backup files (.orig files) are created
 
@@ -874,20 +958,21 @@
         # Remove any files that conflict with the backup file's path
         for f in reversed(list(util.finddirs(filepath))):
             if origvfs.isfileorlink(f):
-                ui.note(_('removing conflicting file: %s\n')
-                        % origvfs.join(f))
+                ui.note(_('removing conflicting file: %s\n') % origvfs.join(f))
                 origvfs.unlink(f)
                 break
 
         origvfs.makedirs(origbackupdir)
 
     if origvfs.isdir(filepath) and not origvfs.islink(filepath):
-        ui.note(_('removing conflicting directory: %s\n')
-                % origvfs.join(filepath))
+        ui.note(
+            _('removing conflicting directory: %s\n') % origvfs.join(filepath)
+        )
         origvfs.rmtree(filepath, forcibly=True)
 
     return origvfs.join(filepath)
 
+
 class _containsnode(object):
     """proxy __contains__(node) to container.__contains__ which accepts revs"""
 
@@ -898,8 +983,17 @@
     def __contains__(self, node):
         return self._revcontains(self._torev(node))
 
-def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
-                 fixphase=False, targetphase=None, backup=True):
+
+def cleanupnodes(
+    repo,
+    replacements,
+    operation,
+    moves=None,
+    metadata=None,
+    fixphase=False,
+    targetphase=None,
+    backup=True,
+):
     """do common cleanups when old nodes are replaced by new nodes
 
     That includes writing obsmarkers or stripping nodes, and moving bookmarks.
@@ -949,8 +1043,9 @@
                     allreplaced = []
                     for rep in replacements:
                         allreplaced.extend(rep)
-                    roots = list(unfi.set('max((::%n) - %ln)', oldnode,
-                                          allreplaced))
+                    roots = list(
+                        unfi.set('max((::%n) - %ln)', oldnode, allreplaced)
+                    )
                     if roots:
                         newnode = roots[0].node()
                     else:
@@ -971,14 +1066,17 @@
 
         allnewnodes.sort(key=lambda n: unfi[n].rev())
         newphases = {}
+
         def phase(ctx):
             return newphases.get(ctx.node(), ctx.phase())
+
         for newnode in allnewnodes:
             ctx = unfi[newnode]
             parentphase = max(phase(p) for p in ctx.parents())
             if targetphase is None:
-                oldphase = max(unfi[oldnode].phase()
-                               for oldnode in precursors[newnode])
+                oldphase = max(
+                    unfi[oldnode].phase() for oldnode in precursors[newnode]
+                )
                 newphase = max(oldphase, parentphase)
             else:
                 newphase = max(targetphase, parentphase)
@@ -996,13 +1094,23 @@
             oldbmarks = repo.nodebookmarks(oldnode)
             if not oldbmarks:
                 continue
-            from . import bookmarks # avoid import cycle
-            repo.ui.debug('moving bookmarks %r from %s to %s\n' %
-                          (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
-                           hex(oldnode), hex(newnode)))
+            from . import bookmarks  # avoid import cycle
+
+            repo.ui.debug(
+                'moving bookmarks %r from %s to %s\n'
+                % (
+                    pycompat.rapply(pycompat.maybebytestr, oldbmarks),
+                    hex(oldnode),
+                    hex(newnode),
+                )
+            )
             # Delete divergent bookmarks being parents of related newnodes
-            deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
-                                   allnewnodes, newnode, oldnode)
+            deleterevs = repo.revs(
+                'parents(roots(%ln & (::%n))) - parents(%n)',
+                allnewnodes,
+                newnode,
+                oldnode,
+            )
             deletenodes = _containsnode(repo, deleterevs)
             for name in oldbmarks:
                 bmarkchanges.append((name, newnode))
@@ -1033,25 +1141,31 @@
                 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
                 rels.append(rel)
             if rels:
-                obsolete.createmarkers(repo, rels, operation=operation,
-                                       metadata=metadata)
+                obsolete.createmarkers(
+                    repo, rels, operation=operation, metadata=metadata
+                )
         elif phases.supportinternal(repo) and mayusearchived:
         # this assumes we do not have "unstable" nodes above the cleaned ones
             allreplaced = set()
             for ns in replacements.keys():
                 allreplaced.update(ns)
             if backup:
-                from . import repair # avoid import cycle
+                from . import repair  # avoid import cycle
+
                 node = min(allreplaced, key=repo.changelog.rev)
-                repair.backupbundle(repo, allreplaced, allreplaced, node,
-                                    operation)
+                repair.backupbundle(
+                    repo, allreplaced, allreplaced, node, operation
+                )
             phases.retractboundary(repo, tr, phases.archived, allreplaced)
         else:
-            from . import repair # avoid import cycle
+            from . import repair  # avoid import cycle
+
             tostrip = list(n for ns in replacements for n in ns)
             if tostrip:
-                repair.delayedstrip(repo.ui, repo, tostrip, operation,
-                                    backup=backup)
+                repair.delayedstrip(
+                    repo.ui, repo, tostrip, operation, backup=backup
+                )
+
 
 def addremove(repo, matcher, prefix, uipathfn, opts=None):
     if opts is None:
@@ -1079,18 +1193,22 @@
                 if sub.addremove(submatch, subprefix, subuipathfn, opts):
                     ret = 1
             except error.LookupError:
-                repo.ui.status(_("skipping missing subrepository: %s\n")
-                                 % uipathfn(subpath))
+                repo.ui.status(
+                    _("skipping missing subrepository: %s\n")
+                    % uipathfn(subpath)
+                )
 
     rejected = []
+
     def badfn(f, msg):
         if f in m.files():
             m.bad(f, msg)
         rejected.append(f)
 
     badmatch = matchmod.badmatch(m, badfn)
-    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
-                                                                    badmatch)
+    added, unknown, deleted, removed, forgotten = _interestingfiles(
+        repo, badmatch
+    )
 
     unknownset = set(unknown + forgotten)
     toprint = unknownset.copy()
@@ -1105,8 +1223,9 @@
                 label = 'ui.addremove.removed'
             repo.ui.status(status, label=label)
 
-    renames = _findrenames(repo, m, added + unknown, removed + deleted,
-                           similarity, uipathfn)
+    renames = _findrenames(
+        repo, m, added + unknown, removed + deleted, similarity, uipathfn
+    )
 
     if not dry_run:
         _markchanges(repo, unknown + forgotten, deleted, renames)
@@ -1116,6 +1235,7 @@
             return 1
     return ret
 
+
 def marktouched(repo, files, similarity=0.0):
     '''Assert that files have somehow been operated upon. Files are relative to
     the repo root.'''
@@ -1139,8 +1259,9 @@
     # the messages above too. legacyrelativevalue=True is consistent with how
     # it used to work.
     uipathfn = getuipathfn(repo, legacyrelativevalue=True)
-    renames = _findrenames(repo, m, added + unknown, removed + deleted,
-                           similarity, uipathfn)
+    renames = _findrenames(
+        repo, m, added + unknown, removed + deleted, similarity, uipathfn
+    )
 
     _markchanges(repo, unknown + forgotten, deleted, renames)
 
@@ -1149,6 +1270,7 @@
             return 1
     return 0
 
+
 def _interestingfiles(repo, matcher):
     '''Walk dirstate with matcher, looking for files that addremove would care
     about.
@@ -1161,8 +1283,13 @@
     ctx = repo[None]
     dirstate = repo.dirstate
     matcher = repo.narrowmatch(matcher, includeexact=True)
-    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
-                                unknown=True, ignored=False, full=False)
+    walkresults = dirstate.walk(
+        matcher,
+        subrepos=sorted(ctx.substate),
+        unknown=True,
+        ignored=False,
+        full=False,
+    )
     for abs, st in walkresults.iteritems():
         dstate = dirstate[abs]
         if dstate == '?' and audit_path.check(abs):
@@ -1179,21 +1306,30 @@
 
     return added, unknown, deleted, removed, forgotten
 
+
 def _findrenames(repo, matcher, added, removed, similarity, uipathfn):
     '''Find renames from removed files to added ones.'''
     renames = {}
     if similarity > 0:
-        for old, new, score in similar.findrenames(repo, added, removed,
-                                                   similarity):
-            if (repo.ui.verbose or not matcher.exact(old)
-                or not matcher.exact(new)):
-                repo.ui.status(_('recording removal of %s as rename to %s '
-                                 '(%d%% similar)\n') %
-                               (uipathfn(old), uipathfn(new),
-                                score * 100))
+        for old, new, score in similar.findrenames(
+            repo, added, removed, similarity
+        ):
+            if (
+                repo.ui.verbose
+                or not matcher.exact(old)
+                or not matcher.exact(new)
+            ):
+                repo.ui.status(
+                    _(
+                        'recording removal of %s as rename to %s '
+                        '(%d%% similar)\n'
+                    )
+                    % (uipathfn(old), uipathfn(new), score * 100)
+                )
             renames[new] = old
     return renames
 
+
 def _markchanges(repo, unknown, deleted, renames):
     '''Marks the files in unknown as added, the files in deleted as removed,
     and the files in renames as copied.'''
@@ -1204,8 +1340,10 @@
         for new, old in renames.iteritems():
             wctx.copy(old, new)
 
+
 def getrenamedfn(repo, endrev=None):
     if copiesmod.usechangesetcentricalgo(repo):
+
         def getrenamed(fn, rev):
             ctx = repo[rev]
             p1copies = ctx.p1copies()
@@ -1215,6 +1353,7 @@
             if fn in p2copies:
                 return p2copies[fn]
             return None
+
         return getrenamed
 
     rcache = {}
@@ -1247,8 +1386,10 @@
 
     return getrenamed
 
+
 def getcopiesfn(repo, endrev=None):
     if copiesmod.usechangesetcentricalgo(repo):
+
         def copiesfn(ctx):
             if ctx.p2copies():
                 allcopies = ctx.p1copies().copy()
@@ -1257,8 +1398,10 @@
                 return sorted(allcopies.items())
             else:
                 return sorted(ctx.p1copies().items())
+
     else:
         getrenamed = getrenamedfn(repo, endrev)
+
         def copiesfn(ctx):
             copies = []
             for fn in ctx.files():
@@ -1269,25 +1412,31 @@
 
     return copiesfn
 
+
 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
     """Update the dirstate to reflect the intent of copying src to dst. For
     different reasons it might not end with dst being marked as copied from src.
     """
     origsrc = repo.dirstate.copied(src) or src
-    if dst == origsrc: # copying back a copy?
+    if dst == origsrc:  # copying back a copy?
         if repo.dirstate[dst] not in 'mn' and not dryrun:
             repo.dirstate.normallookup(dst)
     else:
         if repo.dirstate[origsrc] == 'a' and origsrc == src:
             if not ui.quiet:
-                ui.warn(_("%s has not been committed yet, so no copy "
-                          "data will be stored for %s.\n")
-                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
+                ui.warn(
+                    _(
+                        "%s has not been committed yet, so no copy "
+                        "data will be stored for %s.\n"
+                    )
+                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))
+                )
             if repo.dirstate[dst] in '?r' and not dryrun:
                 wctx.add([dst])
         elif not dryrun:
             wctx.copy(origsrc, dst)
 
+
 def movedirstate(repo, newctx, match=None):
     """Move the dirstate to newctx and adjust it as necessary.
 
@@ -1323,19 +1472,22 @@
     # Merge old parent and old working dir copies
     oldcopies = copiesmod.pathcopies(newctx, oldctx, match)
     oldcopies.update(copies)
-    copies = dict((dst, oldcopies.get(src, src))
-                  for dst, src in oldcopies.iteritems())
+    copies = dict(
+        (dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems()
+    )
     # Adjust the dirstate copies
     for dst, src in copies.iteritems():
-        if (src not in newctx or dst in newctx or ds[dst] != 'a'):
+        if src not in newctx or dst in newctx or ds[dst] != 'a':
             src = None
         ds.copy(src, dst)
 
+
 def writerequires(opener, requirements):
     with opener('requires', 'w', atomictemp=True) as fp:
         for r in sorted(requirements):
             fp.write("%s\n" % r)
 
+
 class filecachesubentry(object):
     def __init__(self, path, stat):
         self.path = path
@@ -1391,6 +1543,7 @@
             if e.errno != errno.ENOENT:
                 raise
 
+
 class filecacheentry(object):
     def __init__(self, paths, stat=True):
         self._entries = []
@@ -1408,6 +1561,7 @@
         for entry in self._entries:
             entry.refresh()
 
+
 class filecache(object):
     """A property like decorator that tracks files under .hg/ for updates.
 
@@ -1490,8 +1644,9 @@
         else:
             ce = obj._filecache[self.name]
 
-        ce.obj = value # update cached copy
-        obj.__dict__[self.sname] = value # update copy returned by obj.x
+        ce.obj = value  # update cached copy
+        obj.__dict__[self.sname] = value  # update copy returned by obj.x
+
 
 def extdatasource(repo, source):
     """Gather a map of rev -> value dict from the specified source
@@ -1519,11 +1674,14 @@
         if spec.startswith("shell:"):
             # external commands should be run relative to the repo root
             cmd = spec[6:]
-            proc = subprocess.Popen(procutil.tonativestr(cmd),
-                                    shell=True, bufsize=-1,
-                                    close_fds=procutil.closefds,
-                                    stdout=subprocess.PIPE,
-                                    cwd=procutil.tonativestr(repo.root))
+            proc = subprocess.Popen(
+                procutil.tonativestr(cmd),
+                shell=True,
+                bufsize=-1,
+                close_fds=procutil.closefds,
+                stdout=subprocess.PIPE,
+                cwd=procutil.tonativestr(repo.root),
+            )
             src = proc.stdout
         else:
             # treat as a URL or file
@@ -1538,7 +1696,7 @@
             try:
                 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
             except (error.LookupError, error.RepoLookupError):
-                pass # we ignore data for nodes that don't exist locally
+                pass  # we ignore data for nodes that don't exist locally
     finally:
         if proc:
             try:
@@ -1550,29 +1708,36 @@
         if src:
             src.close()
     if proc and proc.returncode != 0:
-        raise error.Abort(_("extdata command '%s' failed: %s")
-                          % (cmd, procutil.explainexit(proc.returncode)))
+        raise error.Abort(
+            _("extdata command '%s' failed: %s")
+            % (cmd, procutil.explainexit(proc.returncode))
+        )
 
     return data
 
+
 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
     if lock is None:
         raise error.LockInheritanceContractViolation(
-            'lock can only be inherited while held')
+            'lock can only be inherited while held'
+        )
     if environ is None:
         environ = {}
     with lock.inherit() as locker:
         environ[envvar] = locker
         return repo.ui.system(cmd, environ=environ, *args, **kwargs)
 
+
 def wlocksub(repo, cmd, *args, **kwargs):
     """run cmd as a subprocess that allows inheriting repo's wlock
 
     This can only be called while the wlock is held. This takes all the
     arguments that ui.system does, and returns the exit code of the
     subprocess."""
-    return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args,
-                    **kwargs)
+    return _locksub(
+        repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs
+    )
+
 
 class progress(object):
     def __init__(self, ui, updatebar, topic, unit="", total=None):
@@ -1616,17 +1781,22 @@
 
         if self.total:
             pct = 100.0 * self.pos / self.total
-            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
-                       % (self.topic, item, self.pos, self.total, unit, pct))
+            self.ui.debug(
+                '%s:%s %d/%d%s (%4.2f%%)\n'
+                % (self.topic, item, self.pos, self.total, unit, pct)
+            )
         else:
             self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
 
+
 def gdinitconfig(ui):
     """helper function to know if a repo should be created as general delta
     """
     # experimental config: format.generaldelta
-    return (ui.configbool('format', 'generaldelta')
-            or ui.configbool('format', 'usegeneraldelta'))
+    return ui.configbool('format', 'generaldelta') or ui.configbool(
+        'format', 'usegeneraldelta'
+    )
+
 
 def gddeltaconfig(ui):
     """helper function to know if incoming delta should be optimised
@@ -1634,11 +1804,13 @@
     # experimental config: format.generaldelta
     return ui.configbool('format', 'generaldelta')
 
+
 class simplekeyvaluefile(object):
     """A simple file with key=value lines
 
     Keys must be alphanumeric and start with a letter; values must not
     contain '\n' characters"""
+
     firstlinekey = '__firstline'
 
     def __init__(self, vfs, path, keys=None):
@@ -1665,8 +1837,9 @@
             # the 'if line.strip()' part prevents us from failing on empty
             # lines which only contain '\n' therefore are not skipped
             # by 'if line'
-            updatedict = dict(line[:-1].split('=', 1) for line in lines
-                                                      if line.strip())
+            updatedict = dict(
+                line[:-1].split('=', 1) for line in lines if line.strip()
+            )
             if self.firstlinekey in updatedict:
                 e = _("%r can't be used as a key")
                 raise error.CorruptedState(e % self.firstlinekey)
@@ -1703,6 +1876,7 @@
         with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
             fp.write(''.join(lines))
 
+
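# Editor's note: the parsing core of simplekeyvaluefile.read above, lifted
# into a pure function for illustration. Each non-blank 'key=value\n' line is
# split on the first '=' only, so values may themselves contain '='.
def parsekeyvalues(lines):
    return dict(line[:-1].split('=', 1) for line in lines if line.strip())

assert parsekeyvalues(['version=2\n', 'name=a=b\n', '\n']) == {
    'version': '2',
    'name': 'a=b',
}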
 _reportobsoletedsource = [
     'debugobsolete',
     'pull',
@@ -1716,6 +1890,7 @@
     'unbundle',
 ]
 
+
 def prefetchfiles(repo, revs, match):
     """Invokes the registered file prefetch functions, allowing extensions to
     ensure the corresponding files are available locally, before the command
@@ -1729,15 +1904,18 @@
 
     fileprefetchhooks(repo, revs, match)
 
+
 # a list of (repo, revs, match) prefetch functions
 fileprefetchhooks = util.hooks()
 
 # A marker that tells the evolve extension to suppress its own reporting
 _reportstroubledchangesets = True
 
+
 def registersummarycallback(repo, otr, txnname=''):
     """register a callback to issue a summary after the transaction is closed
     """
+
     def txmatch(sources):
         return any(txnname.startswith(source) for source in sources)
 
@@ -1752,17 +1930,18 @@
         # repository through the weakref.
         filtername = repo.filtername
         reporef = weakref.ref(repo.unfiltered())
+
         def wrapped(tr):
             repo = reporef()
             if filtername:
                 repo = repo.filtered(filtername)
             func(repo, tr)
+
         newcat = '%02i-txnreport' % len(categories)
         otr.addpostclose(newcat, wrapped)
         categories.append(newcat)
         return wrapped
 
-
     @reportsummary
     def reportchangegroup(repo, tr):
         cgchangesets = tr.changes.get('changegroup-count-changesets', 0)
@@ -1777,6 +1956,7 @@
             repo.ui.status(msg % (cgchangesets, cgrevisions, cgfiles, htext))
 
     if txmatch(_reportobsoletedsource):
+
         @reportsummary
         def reportobsoleted(repo, tr):
             obsoleted = obsutil.getobsoleted(repo, tr)
@@ -1784,11 +1964,11 @@
             if newmarkers:
                 repo.ui.status(_('%i new obsolescence markers\n') % newmarkers)
             if obsoleted:
-                repo.ui.status(_('obsoleted %i changesets\n')
-                               % len(obsoleted))
+                repo.ui.status(_('obsoleted %i changesets\n') % len(obsoleted))
 
-    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
-        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
+    if obsolete.isenabled(
+        repo, obsolete.createmarkersopt
+    ) and repo.ui.configbool('experimental', 'evolution.report-instabilities'):
         instabilitytypes = [
             ('orphan', 'orphan'),
             ('phase-divergent', 'phasedivergent'),
@@ -1799,22 +1979,27 @@
             filtered = repo.changelog.filteredrevs
             counts = {}
             for instability, revset in instabilitytypes:
-                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
-                                          filtered)
+                counts[instability] = len(
+                    set(obsolete.getrevs(repo, revset)) - filtered
+                )
             return counts
 
         oldinstabilitycounts = getinstabilitycounts(repo)
+
         @reportsummary
         def reportnewinstabilities(repo, tr):
             newinstabilitycounts = getinstabilitycounts(repo)
             for instability, revset in instabilitytypes:
-                delta = (newinstabilitycounts[instability] -
-                         oldinstabilitycounts[instability])
+                delta = (
+                    newinstabilitycounts[instability]
+                    - oldinstabilitycounts[instability]
+                )
                 msg = getinstabilitymessage(delta, instability)
                 if msg:
                     repo.ui.warn(msg)
 
     if txmatch(_reportnewcssource):
+
         @reportsummary
         def reportnewcs(repo, tr):
             """Report the range of new revisions pulled/unbundled."""
@@ -1852,8 +2037,9 @@
 
             # search new changesets directly pulled as obsolete
             duplicates = tr.changes.get('revduplicates', ())
-            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
-                                 origrepolen, duplicates)
+            obsadded = unfi.revs(
+                '(%d: + %ld) and obsolete()', origrepolen, duplicates
+            )
             cl = repo.changelog
             extinctadded = [r for r in obsadded if r not in cl]
             if extinctadded:
@@ -1873,13 +2059,16 @@
             if not phasetracking:
                 return
             published = [
-                rev for rev, (old, new) in phasetracking.iteritems()
+                rev
+                for rev, (old, new) in phasetracking.iteritems()
                 if new == phases.public and rev < origrepolen
             ]
             if not published:
                 return
-            repo.ui.status(_('%d local changesets published\n')
-                           % len(published))
+            repo.ui.status(
+                _('%d local changesets published\n') % len(published)
+            )
+
 
 def getinstabilitymessage(delta, instability):
     """function to return the message to show warning about new instabilities
@@ -1889,12 +2078,14 @@
     if delta > 0:
         return _('%i new %s changesets\n') % (delta, instability)
 
+
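# Editor's note: the truncation logic of nodesummaries below, restated over
# plain strings so it runs without a repo or ui object. Note the original
# does not singularize "others"; neither does this sketch.
def summarize(items, maxitems=4):
    if len(items) <= maxitems:
        return ' '.join(items)
    shown = ' '.join(items[:maxitems])
    return '%s and %d others' % (shown, len(items) - maxitems)

assert summarize(['a', 'b']) == 'a b'
assert summarize(['a', 'b', 'c'], maxitems=2) == 'a b and 1 others'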
 def nodesummaries(repo, nodes, maxnumnodes=4):
     if len(nodes) <= maxnumnodes or repo.ui.verbose:
         return ' '.join(short(h) for h in nodes)
     first = ' '.join(short(h) for h in nodes[:maxnumnodes])
     return _("%s and %d others") % (first, len(nodes) - maxnumnodes)
 
+
 def enforcesinglehead(repo, tr, desc, accountclosed=False):
     """check that no named branch has multiple heads"""
     if desc in ('strip', 'repair'):
@@ -1912,12 +2103,14 @@
             hint %= (len(heads), nodesummaries(repo, heads))
             raise error.Abort(msg, hint=hint)
 
+
 def wrapconvertsink(sink):
     """Allow extensions to wrap the sink returned by convcmd.convertsink()
     before it is used, whether or not the convert extension was formally loaded.
     """
     return sink
 
+
 def unhidehashlikerevs(repo, specs, hiddentype):
     """parse the user specs and unhide changesets whose hash or revision number
     is passed.
@@ -1927,8 +2120,9 @@
 
     returns a repo object with the required changesets unhidden
     """
-    if not repo.filtername or not repo.ui.configbool('experimental',
-                                                     'directaccess'):
+    if not repo.filtername or not repo.ui.configbool(
+        'experimental', 'directaccess'
+    ):
         return repo
 
     if repo.filtername not in ('visible', 'visible-hidden'):
@@ -1938,7 +2132,7 @@
     for spec in specs:
         try:
             tree = revsetlang.parse(spec)
-        except error.ParseError: # will be reported by scmutil.revrange()
+        except error.ParseError:  # will be reported by scmutil.revrange()
             continue
 
         symbols.update(revsetlang.gethashlikesymbols(tree))
@@ -1954,13 +2148,19 @@
     if hiddentype == 'warn':
         unfi = repo.unfiltered()
         revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
-        repo.ui.warn(_("warning: accessing hidden changesets for write "
-                       "operation: %s\n") % revstr)
+        repo.ui.warn(
+            _(
+                "warning: accessing hidden changesets for write "
+                "operation: %s\n"
+            )
+            % revstr
+        )
 
     # we have to use a new filtername to separate branch/tags caches until we
     # can disable them when revisions are dynamically pinned.
     return repo.filtered('visible-hidden', revs)
 
+
 def _getrevsfromsymbols(repo, symbols):
     """parse the list of symbols and returns a set of revision numbers of hidden
     changesets present in symbols"""
@@ -1995,14 +2195,20 @@
 
     return revs
 
+
 def bookmarkrevs(repo, mark):
     """
     Select revisions reachable by a given bookmark
     """
-    return repo.revs("ancestors(bookmark(%s)) - "
-                     "ancestors(head() and not bookmark(%s)) - "
-                     "ancestors(bookmark() and not bookmark(%s))",
-                     mark, mark, mark)
+    return repo.revs(
+        "ancestors(bookmark(%s)) - "
+        "ancestors(head() and not bookmark(%s)) - "
+        "ancestors(bookmark() and not bookmark(%s))",
+        mark,
+        mark,
+        mark,
+    )
+
 
 def computechangesetfilesadded(ctx):
     """return the list of files added in a changeset
@@ -2013,6 +2219,7 @@
             added.append(f)
     return added
 
+
 def computechangesetfilesremoved(ctx):
     """return the list of files removed in a changeset
     """
--- a/mercurial/server.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/server.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,12 +21,18 @@
     util,
 )
 
-from .utils import (
-    procutil,
-)
+from .utils import procutil
+
 
-def runservice(opts, parentfn=None, initfn=None, runfn=None, logfile=None,
-               runargs=None, appendpid=False):
+def runservice(
+    opts,
+    parentfn=None,
+    initfn=None,
+    runfn=None,
+    logfile=None,
+    runargs=None,
+    appendpid=False,
+):
     '''Run a command as a service.'''
 
     postexecargs = {}
@@ -38,8 +44,9 @@
             elif inst.startswith('chdir:'):
                 postexecargs['chdir'] = inst[6:]
             elif inst != 'none':
-                raise error.Abort(_('invalid value for --daemon-postexec: %s')
-                                  % inst)
+                raise error.Abort(
+                    _('invalid value for --daemon-postexec: %s') % inst
+                )
 
     # When daemonized on Windows, redirect stdout/stderr to the lockfile (which
     # gets cleaned up after the child is up and running), so that the parent can
@@ -51,8 +58,9 @@
             procutil.stdout.flush()
             procutil.stderr.flush()
 
-            fd = os.open(postexecargs['unlink'],
-                         os.O_WRONLY | os.O_APPEND | os.O_BINARY)
+            fd = os.open(
+                postexecargs['unlink'], os.O_WRONLY | os.O_APPEND | os.O_BINARY
+            )
             try:
                 os.dup2(fd, procutil.stdout.fileno())
                 os.dup2(fd, procutil.stderr.fileno())
@@ -84,10 +92,12 @@
                     del runargs[i]
                     break
                 elif runargs[i].startswith('--cwd'):
-                    del runargs[i:i + 2]
+                    del runargs[i : i + 2]
                     break
+
             def condfn():
                 return not os.path.exists(lockpath)
+
             pid = procutil.rundetached(runargs, condfn)
             if pid < 0:
                 # If the daemonized process managed to write out an error msg,
@@ -126,13 +136,17 @@
         nullfd = os.open(os.devnull, os.O_RDWR)
         logfilefd = nullfd
         if logfile:
-            logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND,
-                                0o666)
+            logfilefd = os.open(
+                logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND, 0o666
+            )
         os.dup2(nullfd, procutil.stdin.fileno())
         os.dup2(logfilefd, procutil.stdout.fileno())
         os.dup2(logfilefd, procutil.stderr.fileno())
-        stdio = (procutil.stdin.fileno(), procutil.stdout.fileno(),
-                 procutil.stderr.fileno())
+        stdio = (
+            procutil.stdin.fileno(),
+            procutil.stdout.fileno(),
+            procutil.stderr.fileno(),
+        )
         if nullfd not in stdio:
             os.close(nullfd)
         if logfile and logfilefd not in stdio:
@@ -146,12 +160,14 @@
     if runfn:
         return runfn()
 
+
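# Editor's note: a condensed POSIX-only sketch of the stdio redirection a few
# lines up: os.dup2 points fd 0 at /dev/null and fds 1/2 at the log file (or
# at /dev/null too), and originals are closed unless they already sit on a
# standard descriptor, mirroring the guard above.
import os
import sys

def redirectstdio(logfile=None):
    nullfd = os.open(os.devnull, os.O_RDWR)
    logfd = nullfd
    if logfile:
        logfd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND, 0o666)
    os.dup2(nullfd, sys.stdin.fileno())
    os.dup2(logfd, sys.stdout.fileno())
    os.dup2(logfd, sys.stderr.fileno())
    stdio = (sys.stdin.fileno(), sys.stdout.fileno(), sys.stderr.fileno())
    if nullfd not in stdio:
        os.close(nullfd)
    if logfile and logfd not in stdio:
        os.close(logfd)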
 _cmdservicemap = {
     'chgunix': chgserver.chgunixservice,
     'pipe': commandserver.pipeservice,
     'unix': commandserver.unixforkingservice,
 }
 
+
 def _createcmdservice(ui, repo, opts):
     mode = opts['cmdserver']
     try:
@@ -161,6 +177,7 @@
     commandserver.setuplogging(ui, repo)
     return servicefn(ui, repo, opts)
 
+
 def _createhgwebservice(ui, repo, opts):
     # this way we can check if something was given in the command-line
     if opts.get('port'):
@@ -193,11 +210,13 @@
     else:
         servui = ui
 
-    optlist = ("name templates style address port prefix ipv6"
-               " accesslog errorlog certificate encoding")
+    optlist = (
+        "name templates style address port prefix ipv6"
+        " accesslog errorlog certificate encoding"
+    )
     for o in optlist.split():
         val = opts.get(o, '')
-        if val in (None, ''): # should check against default options instead
+        if val in (None, ''):  # should check against default options instead
             continue
         for u in alluis:
             u.setconfig("web", o, val, 'serve')
@@ -205,6 +224,7 @@
     app = hgweb.createapp(baseui, repo, webconf)
     return hgweb.httpservice(servui, app, opts)
 
+
 def createservice(ui, repo, opts):
     if opts["cmdserver"]:
         return _createcmdservice(ui, repo, opts)
--- a/mercurial/setdiscovery.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/setdiscovery.py	Sun Oct 06 09:45:02 2019 -0400
@@ -56,6 +56,7 @@
     util,
 )
 
+
 def _updatesample(revs, heads, sample, parentfn, quicksamplesize=0):
     """update an existing sample to match the expected size
 
@@ -93,6 +94,7 @@
                 dist.setdefault(p, d + 1)
                 visit.append(p)
 
+
 def _limitsample(sample, desiredlen, randomize=True):
     """return a random subset of sample of at most desiredlen item.
 
@@ -107,6 +109,7 @@
     sample.sort()
     return set(sample[:desiredlen])
 
+
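# Editor's note: a standalone approximation of _limitsample above. The
# randomize=False branch matches the sort-and-slice fallback visible in the
# hunk; the randomized branch is assumed to draw via random.sample.
import random

def limitsample(sample, desiredlen, randomize=True):
    if len(sample) <= desiredlen:
        return set(sample)
    if randomize:
        return set(random.sample(list(sample), desiredlen))
    return set(sorted(sample)[:desiredlen])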
 class partialdiscovery(object):
     """an object representing ongoing discovery
 
@@ -184,8 +187,10 @@
 
     def _parentsgetter(self):
         getrev = self._repo.changelog.index.__getitem__
+
         def getparents(r):
             return getrev(r)[5:7]
+
         return getparents
 
     def _childrengetter(self):
@@ -234,8 +239,9 @@
         if len(sample) >= size:
             return _limitsample(sample, size, randomize=self.randomize)
 
-        _updatesample(None, headrevs, sample, self._parentsgetter(),
-                      quicksamplesize=size)
+        _updatesample(
+            None, headrevs, sample, self._parentsgetter(), quicksamplesize=size
+        )
         return sample
 
     def takefullsample(self, headrevs, size):
@@ -270,16 +276,22 @@
                 sample.update(takefrom[:more])
         return sample
 
-partialdiscovery = policy.importrust(r'discovery',
-                                     member=r'PartialDiscovery',
-                                     default=partialdiscovery)
+
+partialdiscovery = policy.importrust(
+    r'discovery', member=r'PartialDiscovery', default=partialdiscovery
+)
+
 
-def findcommonheads(ui, local, remote,
-                    initialsamplesize=100,
-                    fullsamplesize=200,
-                    abortwhenunrelated=True,
-                    ancestorsof=None,
-                    samplegrowth=1.05):
+def findcommonheads(
+    ui,
+    local,
+    remote,
+    initialsamplesize=100,
+    fullsamplesize=200,
+    abortwhenunrelated=True,
+    ancestorsof=None,
+    samplegrowth=1.05,
+):
     '''Return a tuple (common, anyincoming, remoteheads) used to identify
     missing nodes from or in remote.
     '''
@@ -358,9 +370,7 @@
 
     with remote.commandexecutor() as e:
         fheads = e.callcommand('heads', {})
-        fknown = e.callcommand('known', {
-            'nodes': [clnode(r) for r in sample],
-        })
+        fknown = e.callcommand('known', {'nodes': [clnode(r) for r in sample],})
 
     srvheadhashes, yesno = fheads.result(), fknown.result()
 
@@ -396,8 +406,9 @@
     # full blown discovery
 
     randomize = ui.configbool('devel', 'discovery.randomize')
-    disco = partialdiscovery(local, ownheads, remote.limitedarguments,
-                             randomize=randomize)
+    disco = partialdiscovery(
+        local, ownheads, remote.limitedarguments, randomize=randomize
+    )
     # treat remote heads (and maybe own heads) as a first implicit sample
     # response
     disco.addcommons(knownsrvheads)
@@ -426,16 +437,18 @@
         roundtrips += 1
         progress.update(roundtrips)
         stats = disco.stats()
-        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
-                 % (roundtrips, stats['undecided'], len(sample)))
+        ui.debug(
+            "query %i; still undecided: %i, sample size is: %i\n"
+            % (roundtrips, stats['undecided'], len(sample))
+        )
 
         # indices between sample and externalized version must match
         sample = list(sample)
 
         with remote.commandexecutor() as e:
-            yesno = e.callcommand('known', {
-                'nodes': [clnode(r) for r in sample],
-            }).result()
+            yesno = e.callcommand(
+                'known', {'nodes': [clnode(r) for r in sample],}
+            ).result()
 
         full = True
 
@@ -445,19 +458,24 @@
     elapsed = util.timer() - start
     progress.complete()
     ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
-    msg = ('found %d common and %d unknown server heads,'
-           ' %d roundtrips in %.4fs\n')
+    msg = (
+        'found %d common and %d unknown server heads,'
+        ' %d roundtrips in %.4fs\n'
+    )
     missing = set(result) - set(knownsrvheads)
-    ui.log('discovery', msg, len(result), len(missing), roundtrips,
-           elapsed)
+    ui.log('discovery', msg, len(result), len(missing), roundtrips, elapsed)
 
     if not result and srvheadhashes != [nullid]:
         if abortwhenunrelated:
             raise error.Abort(_("repository is unrelated"))
         else:
             ui.warn(_("warning: repository is unrelated\n"))
-        return ({nullid}, True, srvheadhashes,)
+        return (
+            {nullid},
+            True,
+            srvheadhashes,
+        )
 
-    anyincoming = (srvheadhashes != [nullid])
+    anyincoming = srvheadhashes != [nullid]
     result = {clnode(r) for r in result}
     return result, anyincoming, srvheadhashes
--- a/mercurial/shelve.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/shelve.py	Sun Oct 06 09:45:02 2019 -0400
@@ -66,11 +66,13 @@
 # generic user for all shelve operations
 shelveuser = 'shelve@localhost'
 
+
 class shelvedfile(object):
     """Helper for the file storing a single shelve
 
     Handles common functions on shelve files (.hg/.patch) using
     the vfs layer"""
+
     def __init__(self, repo, name, filetype=None):
         self.repo = repo
         self.name = name
@@ -124,10 +126,14 @@
                 targetphase = phases.secret
             gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
             pretip = self.repo['tip']
-            bundle2.applybundle(self.repo, gen, tr,
-                                source='unshelve',
-                                url='bundle:' + self.vfs.join(self.fname),
-                                targetphase=targetphase)
+            bundle2.applybundle(
+                self.repo,
+                gen,
+                tr,
+                source='unshelve',
+                url='bundle:' + self.vfs.join(self.fname),
+                targetphase=targetphase,
+            )
             shelvectx = self.repo['tip']
             if pretip == shelvectx:
                 shelverev = tr.changes['revduplicates'][-1]
@@ -138,8 +144,9 @@
 
     def bundlerepo(self):
         path = self.vfs.join(self.fname)
-        return bundlerepo.instance(self.repo.baseui,
-                                   'bundle://%s+%s' % (self.repo.root, path))
+        return bundlerepo.instance(
+            self.repo.baseui, 'bundle://%s+%s' % (self.repo.root, path)
+        )
 
     def writebundle(self, bases, node):
         cgversion = changegroup.safeversion(self.repo)
@@ -152,12 +159,14 @@
 
         repo = self.repo.unfiltered()
 
-        outgoing = discovery.outgoing(repo, missingroots=bases,
-                                      missingheads=[node])
+        outgoing = discovery.outgoing(
+            repo, missingroots=bases, missingheads=[node]
+        )
         cg = changegroup.makechangegroup(repo, outgoing, cgversion, 'shelve')
 
-        bundle2.writebundle(self.ui, cg, self.fname, btype, self.vfs,
-                                compression=compression)
+        bundle2.writebundle(
+            self.ui, cg, self.fname, btype, self.vfs, compression=compression
+        )
 
     def writeinfo(self, info):
         scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
@@ -165,12 +174,14 @@
     def readinfo(self):
         return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
 
+
 class shelvedstate(object):
     """Handle persistence during unshelving operations.
 
     Handles saving and restoring a shelved state. Ensures that different
     versions of a shelved state are possible and handles them appropriately.
     """
+
     _version = 2
     _filename = 'shelvedstate'
     _keep = 'keep'
@@ -185,10 +196,10 @@
         try:
             d['originalwctx'] = nodemod.bin(d['originalwctx'])
             d['pendingctx'] = nodemod.bin(d['pendingctx'])
-            d['parents'] = [nodemod.bin(h)
-                            for h in d['parents'].split(' ')]
-            d['nodestoremove'] = [nodemod.bin(h)
-                                  for h in d['nodestoremove'].split(' ')]
+            d['parents'] = [nodemod.bin(h) for h in d['parents'].split(' ')]
+            d['nodestoremove'] = [
+                nodemod.bin(h) for h in d['nodestoremove'].split(' ')
+            ]
         except (ValueError, TypeError, KeyError) as err:
             raise error.CorruptedState(pycompat.bytestr(err))
 
@@ -210,8 +221,17 @@
         # Order is important, because old shelvestate file uses it
         # to determine values of fields (e.g. name is on the second line,
         # originalwctx is on the third and so forth). Please do not change.
-        keys = ['version', 'name', 'originalwctx', 'pendingctx', 'parents',
-                'nodestoremove', 'branchtorestore', 'keep', 'activebook']
+        keys = [
+            'version',
+            'name',
+            'originalwctx',
+            'pendingctx',
+            'parents',
+            'nodestoremove',
+            'branchtorestore',
+            'keep',
+            'activebook',
+        ]
         # this is executed only rarely, so it is not a big deal
         # that we open this file twice
         fp = repo.vfs(cls._filename)
@@ -229,11 +249,16 @@
         if version < cls._version:
             d = cls._readold(repo)
         elif version == cls._version:
-            d = scmutil.simplekeyvaluefile(
-                repo.vfs, cls._filename).read(firstlinenonkeyval=True)
+            d = scmutil.simplekeyvaluefile(repo.vfs, cls._filename).read(
+                firstlinenonkeyval=True
+            )
         else:
-            raise error.Abort(_('this version of shelve is incompatible '
-                                'with the version used in this repo'))
+            raise error.Abort(
+                _(
+                    'this version of shelve is incompatible '
+                    'with the version used in this repo'
+                )
+            )
 
         cls._verifyandtransform(d)
         try:
@@ -255,58 +280,71 @@
         return obj
 
     @classmethod
-    def save(cls, repo, name, originalwctx, pendingctx, nodestoremove,
-             branchtorestore, keep=False, activebook='', interactive=False):
+    def save(
+        cls,
+        repo,
+        name,
+        originalwctx,
+        pendingctx,
+        nodestoremove,
+        branchtorestore,
+        keep=False,
+        activebook='',
+        interactive=False,
+    ):
         info = {
             "name": name,
             "originalwctx": nodemod.hex(originalwctx.node()),
             "pendingctx": nodemod.hex(pendingctx.node()),
-            "parents": ' '.join([nodemod.hex(p)
-                                 for p in repo.dirstate.parents()]),
-            "nodestoremove": ' '.join([nodemod.hex(n)
-                                      for n in nodestoremove]),
+            "parents": ' '.join(
+                [nodemod.hex(p) for p in repo.dirstate.parents()]
+            ),
+            "nodestoremove": ' '.join([nodemod.hex(n) for n in nodestoremove]),
             "branchtorestore": branchtorestore,
             "keep": cls._keep if keep else cls._nokeep,
-            "activebook": activebook or cls._noactivebook
+            "activebook": activebook or cls._noactivebook,
         }
         if interactive:
             info['interactive'] = cls._interactive
-        scmutil.simplekeyvaluefile(
-            repo.vfs, cls._filename).write(info,
-                                           firstline=("%d" % cls._version))
+        scmutil.simplekeyvaluefile(repo.vfs, cls._filename).write(
+            info, firstline=("%d" % cls._version)
+        )
 
     @classmethod
     def clear(cls, repo):
         repo.vfs.unlinkpath(cls._filename, ignoremissing=True)
 
+
 def cleanupoldbackups(repo):
     vfs = vfsmod.vfs(repo.vfs.join(backupdir))
     maxbackups = repo.ui.configint('shelve', 'maxbackups')
-    hgfiles = [f for f in vfs.listdir()
-               if f.endswith('.' + patchextension)]
+    hgfiles = [f for f in vfs.listdir() if f.endswith('.' + patchextension)]
     hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
     if maxbackups > 0 and maxbackups < len(hgfiles):
         bordermtime = hgfiles[-maxbackups][0]
     else:
         bordermtime = None
-    for mtime, f in hgfiles[:len(hgfiles) - maxbackups]:
+    for mtime, f in hgfiles[: len(hgfiles) - maxbackups]:
         if mtime == bordermtime:
             # keep it, because timestamp can't decide exact order of backups
             continue
-        base = f[:-(1 + len(patchextension))]
+        base = f[: -(1 + len(patchextension))]
         for ext in shelvefileextensions:
             vfs.tryunlink(base + '.' + ext)
 
+
 def _backupactivebookmark(repo):
     activebookmark = repo._activebookmark
     if activebookmark:
         bookmarks.deactivate(repo)
     return activebookmark
 
+
 def _restoreactivebookmark(repo, mark):
     if mark:
         bookmarks.activate(repo, mark)
 
+
 def _aborttransaction(repo, tr):
     '''Abort current transaction for shelve/unshelve, but keep dirstate
     '''
@@ -315,12 +353,15 @@
     tr.abort()
     repo.dirstate.restorebackup(None, dirstatebackupname)
 
+
 def getshelvename(repo, parent, opts):
     """Decide on the name this shelve is going to have"""
+
     def gennames():
         yield label
         for i in itertools.count(1):
             yield '%s-%02d' % (label, i)
+
     name = opts.get('name')
     label = repo._activebookmark or parent.branch() or 'default'
     # slashes aren't allowed in filenames, therefore we rename it
@@ -349,6 +390,7 @@
 
     return name
 
+
 def mutableancestors(ctx):
     """return all mutable ancestors for ctx (included)
 
@@ -366,6 +408,7 @@
                 if parent.mutable():
                     visit.append(parent)
 
+
 def getcommitfunc(extra, interactive, editor=False):
     def commitfunc(ui, repo, message, match, opts):
         hasmq = util.safehasattr(repo, 'mq')
@@ -379,11 +422,18 @@
         try:
             editor_ = False
             if editor:
-                editor_ = cmdutil.getcommiteditor(editform='shelve.shelve',
-                                                  **pycompat.strkwargs(opts))
+                editor_ = cmdutil.getcommiteditor(
+                    editform='shelve.shelve', **pycompat.strkwargs(opts)
+                )
             with repo.ui.configoverride(overrides):
-                return repo.commit(message, shelveuser, opts.get('date'),
-                                   match, editor=editor_, extra=extra)
+                return repo.commit(
+                    message,
+                    shelveuser,
+                    opts.get('date'),
+                    match,
+                    editor=editor_,
+                    extra=extra,
+                )
         finally:
             if hasmq:
                 repo.mq.checkapplied = saved
@@ -396,42 +446,50 @@
 
     return interactivecommitfunc if interactive else commitfunc
 
+
 def _nothingtoshelvemessaging(ui, repo, pats, opts):
     stat = repo.status(match=scmutil.match(repo[None], pats, opts))
     if stat.deleted:
-        ui.status(_("nothing changed (%d missing files, see "
-                    "'hg status')\n") % len(stat.deleted))
+        ui.status(
+            _("nothing changed (%d missing files, see " "'hg status')\n")
+            % len(stat.deleted)
+        )
     else:
         ui.status(_("nothing changed\n"))
 
+
 def _shelvecreatedcommit(repo, node, name, match):
     info = {'node': nodemod.hex(node)}
     shelvedfile(repo, name, 'shelve').writeinfo(info)
     bases = list(mutableancestors(repo[node]))
     shelvedfile(repo, name, 'hg').writebundle(bases, node)
     with shelvedfile(repo, name, patchextension).opener('wb') as fp:
-        cmdutil.exportfile(repo, [node], fp, opts=mdiff.diffopts(git=True),
-                           match=match)
+        cmdutil.exportfile(
+            repo, [node], fp, opts=mdiff.diffopts(git=True), match=match
+        )
+
 
 def _includeunknownfiles(repo, pats, opts, extra):
-    s = repo.status(match=scmutil.match(repo[None], pats, opts),
-                    unknown=True)
+    s = repo.status(match=scmutil.match(repo[None], pats, opts), unknown=True)
     if s.unknown:
         extra['shelve_unknown'] = '\0'.join(s.unknown)
         repo[None].add(s.unknown)
 
+
 def _finishshelve(repo, tr):
     if phases.supportinternal(repo):
         tr.close()
     else:
         _aborttransaction(repo, tr)
 
+
 def createcmd(ui, repo, pats, opts):
     """subcommand that creates a new shelve"""
     with repo.wlock():
         cmdutil.checkunfinished(repo)
         return _docreatecmd(ui, repo, pats, opts)
 
+
 def _docreatecmd(ui, repo, pats, opts):
     wctx = repo[None]
     parents = wctx.parents()
@@ -455,8 +513,9 @@
         tr = repo.transaction('shelve', report=lambda x: None)
 
         interactive = opts.get('interactive', False)
-        includeunknown = (opts.get('unknown', False) and
-                          not opts.get('addremove', False))
+        includeunknown = opts.get('unknown', False) and not opts.get(
+            'addremove', False
+        )
 
         name = getshelvename(repo, parent, opts)
         activebookmark = _backupactivebookmark(repo)
@@ -473,9 +532,16 @@
         if not interactive:
             node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
         else:
-            node = cmdutil.dorecord(ui, repo, commitfunc, None,
-                                    False, cmdutil.recordfilter, *pats,
-                                    **pycompat.strkwargs(opts))
+            node = cmdutil.dorecord(
+                ui,
+                repo,
+                commitfunc,
+                None,
+                False,
+                cmdutil.recordfilter,
+                *pats,
+                **pycompat.strkwargs(opts)
+            )
         if not node:
             _nothingtoshelvemessaging(ui, repo, pats, opts)
             return 1
@@ -502,15 +568,20 @@
         _restoreactivebookmark(repo, activebookmark)
         lockmod.release(tr, lock)
 
+
 def _isbareshelve(pats, opts):
-    return (not pats
-            and not opts.get('interactive', False)
-            and not opts.get('include', False)
-            and not opts.get('exclude', False))
+    return (
+        not pats
+        and not opts.get('interactive', False)
+        and not opts.get('include', False)
+        and not opts.get('exclude', False)
+    )
+
 
 def _iswctxonnewbranch(repo):
     return repo[None].branch() != repo['.'].branch()
 
+
 def cleanupcmd(ui, repo):
     """subcommand that deletes all shelves"""
 
@@ -521,6 +592,7 @@
                 shelvedfile(repo, name).movetobackup()
             cleanupoldbackups(repo)
 
+
 def deletecmd(ui, repo, pats):
     """subcommand that deletes a specific shelve"""
     if not pats:
@@ -543,6 +615,7 @@
                 raise
             raise error.Abort(_("shelved change '%s' not found") % name)
 
+
 def listshelves(repo):
     """return all shelves in repo as list of (time, filename)"""
     try:
@@ -560,6 +633,7 @@
         info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
     return sorted(info, reverse=True)
 
+
 def listcmd(ui, repo, pats, opts):
     """subcommand that displays the list of shelves"""
     pats = set(pats)
@@ -606,6 +680,7 @@
                 for chunk, label in patch.diffstatui(difflines, width=width):
                     ui.write(chunk, label=label)
 
+
 def patchcmds(ui, repo, pats, opts):
     """subcommand that displays shelves"""
     if len(pats) == 0:
@@ -622,11 +697,14 @@
 
     listcmd(ui, repo, pats, opts)
 
+
 def checkparents(repo, state):
     """check parent while resuming an unshelve"""
     if state.parents != repo.dirstate.parents():
-        raise error.Abort(_('working directory parents do not match unshelve '
-                           'state'))
+        raise error.Abort(
+            _('working directory parents do not match unshelve ' 'state')
+        )
+
 
 def _loadshelvedstate(ui, repo, opts):
     try:
@@ -641,16 +719,22 @@
         ui.debug(pycompat.bytestr(err) + '\n')
         if opts.get('continue'):
             msg = _('corrupted shelved state file')
-            hint = _('please run hg unshelve --abort to abort unshelve '
-                     'operation')
+            hint = _(
+                'please run hg unshelve --abort to abort unshelve ' 'operation'
+            )
             raise error.Abort(msg, hint=hint)
         elif opts.get('abort'):
             shelvedstate.clear(repo)
-            raise error.Abort(_('could not read shelved state file, your '
-                                'working copy may be in an unexpected state\n'
-                                'please update to some commit\n'))
+            raise error.Abort(
+                _(
+                    'could not read shelved state file, your '
+                    'working copy may be in an unexpected state\n'
+                    'please update to some commit\n'
+                )
+            )
     return state
 
+
 def unshelveabort(ui, repo, state):
     """subcommand that abort an in-progress unshelve"""
     with repo.lock():
@@ -658,23 +742,25 @@
             checkparents(repo, state)
 
             merge.update(repo, state.pendingctx, branchmerge=False, force=True)
-            if (state.activebookmark
-                    and state.activebookmark in repo._bookmarks):
+            if state.activebookmark and state.activebookmark in repo._bookmarks:
                 bookmarks.activate(repo, state.activebookmark)
             mergefiles(ui, repo, state.wctx, state.pendingctx)
             if not phases.supportinternal(repo):
-                repair.strip(ui, repo, state.nodestoremove, backup=False,
-                             topic='shelve')
+                repair.strip(
+                    ui, repo, state.nodestoremove, backup=False, topic='shelve'
+                )
         finally:
             shelvedstate.clear(repo)
             ui.warn(_("unshelve of '%s' aborted\n") % state.name)
 
+
 def hgabortunshelve(ui, repo):
     """logic to  abort unshelve using 'hg abort"""
     with repo.wlock():
-        state = _loadshelvedstate(ui, repo, {'abort' : True})
+        state = _loadshelvedstate(ui, repo, {'abort': True})
         return unshelveabort(ui, repo, state)
 
+
 def mergefiles(ui, repo, wctx, shelvectx):
     """updates to wctx and merges the changes from shelvectx into the
     dirstate."""
@@ -684,11 +770,14 @@
         cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents())
         ui.popbuffer()
 
+
 def restorebranch(ui, repo, branchtorestore):
     if branchtorestore and branchtorestore != repo.dirstate.branch():
         repo.dirstate.setbranch(branchtorestore)
-        ui.status(_('marked working directory as branch %s\n')
-                  % branchtorestore)
+        ui.status(
+            _('marked working directory as branch %s\n') % branchtorestore
+        )
+
 
 def unshelvecleanup(ui, repo, name, opts):
     """remove related files after an unshelve"""
@@ -698,6 +787,8 @@
             if shfile.exists():
                 shfile.movetobackup()
         cleanupoldbackups(repo)
+
+
 def unshelvecontinue(ui, repo, state, opts):
     """subcommand to continue an in-progress unshelve"""
     # We're finishing off a merge. First parent is our original
@@ -710,7 +801,8 @@
         if list(ms.unresolved()):
             raise error.Abort(
                 _("unresolved conflicts, can't continue"),
-                hint=_("see 'hg resolve', then 'hg unshelve --continue'"))
+                hint=_("see 'hg resolve', then 'hg unshelve --continue'"),
+            )
 
         shelvectx = repo[state.parents[1]]
         pendingctx = state.pendingctx
@@ -726,8 +818,9 @@
         with repo.ui.configoverride(overrides, 'unshelve'):
             with repo.dirstate.parentchange():
                 repo.setparents(state.parents[0], nodemod.nullid)
-                newnode, ispartialunshelve = _createunshelvectx(ui,
-                        repo, shelvectx, basename, interactive, opts)
+                newnode, ispartialunshelve = _createunshelvectx(
+                    ui, repo, shelvectx, basename, interactive, opts
+                )
 
         if newnode is None:
             # If it ended up being a no-op commit, then the normal
@@ -735,8 +828,10 @@
             # here. Fix issue5494
             merge.mergestate.clean(repo)
             shelvectx = state.pendingctx
-            msg = _('note: unshelved changes already existed '
-                    'in the working copy\n')
+            msg = _(
+                'note: unshelved changes already existed '
+                'in the working copy\n'
+            )
             ui.status(msg)
         else:
             # only strip the shelvectx if we produced one
@@ -748,19 +843,22 @@
         restorebranch(ui, repo, state.branchtorestore)
 
         if not phases.supportinternal(repo):
-            repair.strip(ui, repo, state.nodestoremove, backup=False,
-                         topic='shelve')
+            repair.strip(
+                ui, repo, state.nodestoremove, backup=False, topic='shelve'
+            )
         shelvedstate.clear(repo)
         if not ispartialunshelve:
             unshelvecleanup(ui, repo, state.name, opts)
         _restoreactivebookmark(repo, state.activebookmark)
         ui.status(_("unshelve of '%s' complete\n") % state.name)
 
+
 def hgcontinueunshelve(ui, repo):
     """logic to resume unshelve using 'hg continue'"""
     with repo.wlock():
-        state = _loadshelvedstate(ui, repo, {'continue' : True})
-        return unshelvecontinue(ui, repo, state, {'keep' : state.keep})
+        state = _loadshelvedstate(ui, repo, {'continue': True})
+        return unshelvecontinue(ui, repo, state, {'keep': state.keep})
+
 
 def _commitworkingcopychanges(ui, repo, opts, tmpwctx):
     """Temporarily commit working copy changes before moving unshelve commit"""
@@ -770,11 +868,14 @@
     addedbefore = frozenset(s.added)
     if not (s.modified or s.added or s.removed):
         return tmpwctx, addedbefore
-    ui.status(_("temporarily committing pending changes "
-                "(restore with 'hg unshelve --abort')\n"))
+    ui.status(
+        _(
+            "temporarily committing pending changes "
+            "(restore with 'hg unshelve --abort')\n"
+        )
+    )
     extra = {'internal': 'shelve'}
-    commitfunc = getcommitfunc(extra=extra, interactive=False,
-                               editor=False)
+    commitfunc = getcommitfunc(extra=extra, interactive=False, editor=False)
     tempopts = {}
     tempopts['message'] = "pending changes temporary commit"
     tempopts['date'] = opts.get('date')
@@ -783,6 +884,7 @@
     tmpwctx = repo[node]
     return tmpwctx, addedbefore
 
+
 def _unshelverestorecommit(ui, repo, tr, basename):
     """Recreate commit in the repository during the unshelve"""
     repo = repo.unfiltered()
@@ -802,6 +904,7 @@
 
     return repo, shelvectx
 
+
 def _createunshelvectx(ui, repo, shelvectx, basename, interactive, opts):
     """Handles the creation of unshelve commit and updates the shelve if it
     was partially unshelved.
@@ -827,29 +930,50 @@
     opts['interactive-unshelve'] = True
     pats = []
     if not interactive:
-        newnode = repo.commit(text=shelvectx.description(),
-                              extra=shelvectx.extra(),
-                              user=shelvectx.user(),
-                              date=shelvectx.date())
+        newnode = repo.commit(
+            text=shelvectx.description(),
+            extra=shelvectx.extra(),
+            user=shelvectx.user(),
+            date=shelvectx.date(),
+        )
         return newnode, False
 
-    commitfunc = getcommitfunc(shelvectx.extra(), interactive=True,
-                               editor=True)
-    newnode = cmdutil.dorecord(ui, repo, commitfunc, None, False,
-                               cmdutil.recordfilter, *pats,
-                               **pycompat.strkwargs(opts))
-    snode = repo.commit(text=shelvectx.description(),
-                        extra=shelvectx.extra(),
-                        user=shelvectx.user())
+    commitfunc = getcommitfunc(shelvectx.extra(), interactive=True, editor=True)
+    newnode = cmdutil.dorecord(
+        ui,
+        repo,
+        commitfunc,
+        None,
+        False,
+        cmdutil.recordfilter,
+        *pats,
+        **pycompat.strkwargs(opts)
+    )
+    snode = repo.commit(
+        text=shelvectx.description(),
+        extra=shelvectx.extra(),
+        user=shelvectx.user(),
+    )
     if snode:
         m = scmutil.matchfiles(repo, repo[snode].files())
         _shelvecreatedcommit(repo, snode, basename, m)
 
     return newnode, bool(snode)
 
-def _rebaserestoredcommit(ui, repo, opts, tr, oldtiprev, basename, pctx,
-                          tmpwctx, shelvectx, branchtorestore,
-                          activebookmark):
+
+def _rebaserestoredcommit(
+    ui,
+    repo,
+    opts,
+    tr,
+    oldtiprev,
+    basename,
+    pctx,
+    tmpwctx,
+    shelvectx,
+    branchtorestore,
+    activebookmark,
+):
     """Rebase restored commit from its original location to a destination"""
     # If the shelve is not immediately on top of the commit
     # we'll be merging with, rebase it to be on top.
@@ -865,25 +989,43 @@
     }
     with repo.ui.configoverride(overrides, 'unshelve'):
         ui.status(_('rebasing shelved changes\n'))
-        stats = merge.graft(repo, shelvectx, shelvectx.p1(),
-                           labels=['shelve', 'working-copy'],
-                           keepconflictparent=True)
+        stats = merge.graft(
+            repo,
+            shelvectx,
+            shelvectx.p1(),
+            labels=['shelve', 'working-copy'],
+            keepconflictparent=True,
+        )
         if stats.unresolvedcount:
             tr.close()
 
-            nodestoremove = [repo.changelog.node(rev)
-                             for rev in pycompat.xrange(oldtiprev, len(repo))]
-            shelvedstate.save(repo, basename, pctx, tmpwctx, nodestoremove,
-                              branchtorestore, opts.get('keep'), activebookmark,
-                              interactive)
+            nodestoremove = [
+                repo.changelog.node(rev)
+                for rev in pycompat.xrange(oldtiprev, len(repo))
+            ]
+            shelvedstate.save(
+                repo,
+                basename,
+                pctx,
+                tmpwctx,
+                nodestoremove,
+                branchtorestore,
+                opts.get('keep'),
+                activebookmark,
+                interactive,
+            )
             raise error.InterventionRequired(
-                _("unresolved conflicts (see 'hg resolve', then "
-                  "'hg unshelve --continue')"))
+                _(
+                    "unresolved conflicts (see 'hg resolve', then "
+                    "'hg unshelve --continue')"
+                )
+            )
 
         with repo.dirstate.parentchange():
             repo.setparents(tmpwctx.node(), nodemod.nullid)
-            newnode, ispartialunshelve = _createunshelvectx(ui, repo,
-                                       shelvectx, basename, interactive, opts)
+            newnode, ispartialunshelve = _createunshelvectx(
+                ui, repo, shelvectx, basename, interactive, opts
+            )
 
         if newnode is None:
             # If it ended up being a no-op commit, then the normal
@@ -891,8 +1033,10 @@
             # here. Fix issue5494
             merge.mergestate.clean(repo)
             shelvectx = tmpwctx
-            msg = _('note: unshelved changes already existed '
-                    'in the working copy\n')
+            msg = _(
+                'note: unshelved changes already existed '
+                'in the working copy\n'
+            )
             ui.status(msg)
         else:
             shelvectx = repo[newnode]
@@ -900,6 +1044,7 @@
 
     return shelvectx, ispartialunshelve
 
+
 def _forgetunknownfiles(repo, shelvectx, addedbefore):
     # Forget any files that were unknown before the shelve, unknown before
     # unshelve started, but are now added.
@@ -911,6 +1056,7 @@
     toforget = (addedafter & shelveunknown) - addedbefore
     repo[None].forget(toforget)
 
+
 def _finishunshelve(repo, oldtiprev, tr, activebookmark):
     _restoreactivebookmark(repo, activebookmark)
     # The transaction aborting will strip all the commits for us,
@@ -920,6 +1066,7 @@
     repo.unfiltered().changelog.strip(oldtiprev, tr)
     _aborttransaction(repo, tr)
 
+
 def _checkunshelveuntrackedproblems(ui, repo, shelvectx):
     """Check potential problems which may result from working
     copy having untracked changes."""
@@ -931,6 +1078,7 @@
         hint = _("run hg status to see which files are missing")
         raise error.Abort(m, hint=hint)
 
+
 def dounshelve(ui, repo, *shelved, **opts):
     opts = pycompat.byteskwargs(opts)
     abortf = opts.get('abort')
@@ -948,8 +1096,12 @@
         if abortf and continuef:
             raise error.Abort(_('cannot use both abort and continue'))
         if shelved:
-            raise error.Abort(_('cannot combine abort/continue with '
-                               'naming a shelved change'))
+            raise error.Abort(
+                _(
+                    'cannot combine abort/continue with '
+                    'naming a shelved change'
+                )
+            )
         if abortf and opts.get('tool', False):
             ui.warn(_('tool option will be ignored\n'))
 
@@ -990,17 +1142,28 @@
         # to the original pctx.
 
         activebookmark = _backupactivebookmark(repo)
-        tmpwctx, addedbefore = _commitworkingcopychanges(ui, repo, opts,
-                                                         tmpwctx)
+        tmpwctx, addedbefore = _commitworkingcopychanges(
+            ui, repo, opts, tmpwctx
+        )
         repo, shelvectx = _unshelverestorecommit(ui, repo, tr, basename)
         _checkunshelveuntrackedproblems(ui, repo, shelvectx)
         branchtorestore = ''
         if shelvectx.branch() != shelvectx.p1().branch():
             branchtorestore = shelvectx.branch()
 
-        shelvectx, ispartialunshelve = _rebaserestoredcommit(ui, repo, opts,
-            tr, oldtiprev, basename, pctx, tmpwctx, shelvectx,
-            branchtorestore, activebookmark)
+        shelvectx, ispartialunshelve = _rebaserestoredcommit(
+            ui,
+            repo,
+            opts,
+            tr,
+            oldtiprev,
+            basename,
+            pctx,
+            tmpwctx,
+            shelvectx,
+            branchtorestore,
+            activebookmark,
+        )
         overrides = {('ui', 'forcemerge'): opts.get('tool', '')}
         with ui.configoverride(overrides, 'unshelve'):
             mergefiles(ui, repo, pctx, shelvectx)
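
One thing the shelve.py hunks above do not change is shelvedstate's on-disk format: the first line of the file is the format version, and the remainder is key=value pairs handled by scmutil.simplekeyvaluefile, which is why _load reads the version before parsing the body. A minimal stand-alone sketch of that round-trip with no Mercurial dependency (the file name and keys below are illustrative, not hg's exact set):

    import os
    import tempfile

    _VERSION = 2

    def save_state(path, info):
        # First line is the version; the rest are key=value pairs,
        # like simplekeyvaluefile.write(info, firstline='%d' % version).
        with open(path, 'w') as fp:
            fp.write('%d\n' % _VERSION)
            for k, v in sorted(info.items()):
                fp.write('%s=%s\n' % (k, v))

    def load_state(path):
        # Read the version first, then the key/value body; as the
        # comment in the hunk above notes, the file is opened twice.
        with open(path) as fp:
            version = int(fp.readline())
        if version != _VERSION:
            raise ValueError('incompatible shelvedstate version %d' % version)
        with open(path) as fp:
            fp.readline()  # skip the version line
            return dict(line.rstrip('\n').split('=', 1) for line in fp)

    path = os.path.join(tempfile.mkdtemp(), 'shelvedstate')
    save_state(path, {'name': 'default-01', 'keep': 'nokeep'})
    assert load_state(path) == {'name': 'default-01', 'keep': 'nokeep'}
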
--- a/mercurial/similar.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/similar.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,9 +8,8 @@
 from __future__ import absolute_import
 
 from .i18n import _
-from . import (
-    mdiff,
-)
+from . import mdiff
+
 
 def _findexactmatches(repo, added, removed):
     '''find renamed files that have no changes
@@ -21,9 +20,11 @@
     # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
     # We use hash() to discard fctx.data() from memory.
     hashes = {}
-    progress = repo.ui.makeprogress(_('searching for exact renames'),
-                                    total=(len(added) + len(removed)),
-                                    unit=_('files'))
+    progress = repo.ui.makeprogress(
+        _('searching for exact renames'),
+        total=(len(added) + len(removed)),
+        unit=_('files'),
+    )
     for fctx in removed:
         progress.increment()
         h = hash(fctx.data())
@@ -46,11 +47,13 @@
     # Done
     progress.complete()
 
+
 def _ctxdata(fctx):
     # lazily load text
     orig = fctx.data()
     return orig, mdiff.splitnewlines(orig)
 
+
 def _score(fctx, otherdata):
     orig, lines = otherdata
     text = fctx.data()
@@ -65,9 +68,11 @@
     lengths = len(text) + len(orig)
     return equal * 2.0 / lengths
 
+
 def score(fctx1, fctx2):
     return _score(fctx1, _ctxdata(fctx2))
 
+
 def _findsimilarmatches(repo, added, removed, threshold):
     '''find potentially renamed files based on similar file content
 
@@ -75,8 +80,9 @@
     (before, after, score) tuples of partial matches.
     '''
     copies = {}
-    progress = repo.ui.makeprogress(_('searching for similar files'),
-                         unit=_('files'), total=len(removed))
+    progress = repo.ui.makeprogress(
+        _('searching for similar files'), unit=_('files'), total=len(removed)
+    )
     for r in removed:
         progress.increment()
         data = None
@@ -93,9 +99,11 @@
         source, bscore = v
         yield source, dest, bscore
 
+
 def _dropempty(fctxs):
     return [x for x in fctxs if x.size() > 0]
 
+
 def findrenames(repo, added, removed, threshold):
     '''find renamed files -- yields (before, after, score) tuples'''
     wctx = repo[None]
@@ -116,6 +124,7 @@
     # If the user requested similar files to be matched, search for them also.
     if threshold < 1.0:
         addedfiles = [x for x in addedfiles if x not in matchedfiles]
-        for (a, b, score) in _findsimilarmatches(repo, addedfiles,
-                                                 removedfiles, threshold):
+        for (a, b, score) in _findsimilarmatches(
+            repo, addedfiles, removedfiles, threshold
+        ):
             yield (a.path(), b.path(), score)
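
The _score helper above boils down to a Dice-style ratio: twice the byte count of the unchanged regions divided by the combined size of both texts, so identical files score 1.0 and files with nothing in common score 0.0. A rough stand-alone equivalent using difflib in place of Mercurial's mdiff (so exact scores may differ slightly from hg's):

    import difflib

    def similarity(old, new):
        """Return 2 * equal / (len(old) + len(new)), measured over lines."""
        a = old.splitlines(True)
        b = new.splitlines(True)
        matcher = difflib.SequenceMatcher(None, a, b)
        equal = sum(
            sum(len(line) for line in a[i1:i2])
            for tag, i1, i2, j1, j2 in matcher.get_opcodes()
            if tag == 'equal'
        )
        lengths = len(old) + len(new)
        return equal * 2.0 / lengths if lengths else 1.0

    assert similarity('a\nb\n', 'a\nb\n') == 1.0
    assert similarity('a\nb\n', 'c\nd\n') == 0.0
    assert 0.0 < similarity('a\nb\nc\n', 'a\nb\nx\n') < 1.0

findrenames then keeps an (added, removed) pair only when this score reaches the caller-supplied threshold.
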
--- a/mercurial/simplemerge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/simplemerge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,13 +24,13 @@
     mdiff,
     pycompat,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 class CantReprocessAndShowBase(Exception):
     pass
 
+
 def intersect(ra, rb):
     """Given two ranges return the range where they intersect or None.
 
@@ -53,23 +53,27 @@
     else:
         return None
 
+
 def compare_range(a, astart, aend, b, bstart, bend):
     """Compare a[astart:aend] == b[bstart:bend], without slicing.
     """
     if (aend - astart) != (bend - bstart):
         return False
-    for ia, ib in zip(pycompat.xrange(astart, aend),
-                      pycompat.xrange(bstart, bend)):
+    for ia, ib in zip(
+        pycompat.xrange(astart, aend), pycompat.xrange(bstart, bend)
+    ):
         if a[ia] != b[ib]:
             return False
     else:
         return True
 
+
 class Merge3Text(object):
     """3-way merge of texts.
 
     Given strings BASE, OTHER, THIS, tries to produce a combined text
     incorporating the changes from both BASE->OTHER and BASE->THIS."""
+
     def __init__(self, basetext, atext, btext, base=None, a=None, b=None):
         self.basetext = basetext
         self.atext = atext
@@ -84,16 +88,18 @@
         self.a = a
         self.b = b
 
-    def merge_lines(self,
-                    name_a=None,
-                    name_b=None,
-                    name_base=None,
-                    start_marker='<<<<<<<',
-                    mid_marker='=======',
-                    end_marker='>>>>>>>',
-                    base_marker=None,
-                    localorother=None,
-                    minimize=False):
+    def merge_lines(
+        self,
+        name_a=None,
+        name_b=None,
+        name_base=None,
+        start_marker='<<<<<<<',
+        mid_marker='=======',
+        end_marker='>>>>>>>',
+        base_marker=None,
+        localorother=None,
+        minimize=False,
+    ):
         """Return merge in cvs-like form.
         """
         self.conflicts = False
@@ -170,16 +176,18 @@
         for t in self.merge_regions():
             what = t[0]
             if what == 'unchanged':
-                yield what, self.base[t[1]:t[2]]
+                yield what, self.base[t[1] : t[2]]
             elif what == 'a' or what == 'same':
-                yield what, self.a[t[1]:t[2]]
+                yield what, self.a[t[1] : t[2]]
             elif what == 'b':
-                yield what, self.b[t[1]:t[2]]
+                yield what, self.b[t[1] : t[2]]
             elif what == 'conflict':
-                yield (what,
-                       self.base[t[1]:t[2]],
-                       self.a[t[3]:t[4]],
-                       self.b[t[5]:t[6]])
+                yield (
+                    what,
+                    self.base[t[1] : t[2]],
+                    self.a[t[3] : t[4]],
+                    self.b[t[5] : t[6]],
+                )
             else:
                 raise ValueError(what)
 
@@ -218,7 +226,7 @@
 
         for region in self.find_sync_regions():
             zmatch, zend, amatch, aend, bmatch, bend = region
-            #print 'match base [%d:%d]' % (zmatch, zend)
+            # print 'match base [%d:%d]' % (zmatch, zend)
 
             matchlen = zend - zmatch
             assert matchlen >= 0
@@ -232,16 +240,17 @@
             assert len_b >= 0
             assert len_base >= 0
 
-            #print 'unmatched a=%d, b=%d' % (len_a, len_b)
+            # print 'unmatched a=%d, b=%d' % (len_a, len_b)
 
             if len_a or len_b:
                 # try to avoid actually slicing the lists
-                equal_a = compare_range(self.a, ia, amatch,
-                                        self.base, iz, zmatch)
-                equal_b = compare_range(self.b, ib, bmatch,
-                                        self.base, iz, zmatch)
-                same = compare_range(self.a, ia, amatch,
-                                     self.b, ib, bmatch)
+                equal_a = compare_range(
+                    self.a, ia, amatch, self.base, iz, zmatch
+                )
+                equal_b = compare_range(
+                    self.b, ib, bmatch, self.base, iz, zmatch
+                )
+                same = compare_range(self.a, ia, amatch, self.b, ib, bmatch)
 
                 if same:
                     yield 'same', ia, amatch
@@ -261,7 +270,6 @@
             # if the same part of the base was deleted on both sides
             # that's OK, we can just skip it.
 
-
             if matchlen > 0:
                 assert ia == amatch
                 assert ib == bmatch
@@ -289,24 +297,34 @@
 
             # find matches at the front
             ii = 0
-            while (ii < alen and ii < blen and
-                   self.a[a1 + ii] == self.b[b1 + ii]):
+            while (
+                ii < alen and ii < blen and self.a[a1 + ii] == self.b[b1 + ii]
+            ):
                 ii += 1
             startmatches = ii
 
             # find matches at the end
             ii = 0
-            while (ii < alen and ii < blen and
-                   self.a[a2 - ii - 1] == self.b[b2 - ii - 1]):
+            while (
+                ii < alen
+                and ii < blen
+                and self.a[a2 - ii - 1] == self.b[b2 - ii - 1]
+            ):
                 ii += 1
             endmatches = ii
 
             if startmatches > 0:
                 yield 'same', a1, a1 + startmatches
 
-            yield ('conflict', z1, z2,
-                    a1 + startmatches, a2 - endmatches,
-                    b1 + startmatches, b2 - endmatches)
+            yield (
+                'conflict',
+                z1,
+                z2,
+                a1 + startmatches,
+                a2 - endmatches,
+                b1 + startmatches,
+                b2 - endmatches,
+            )
 
             if endmatches > 0:
                 yield 'same', a2 - endmatches, a2
@@ -351,13 +369,13 @@
                 bend = bsub + intlen
 
                 assert self.base[intbase:intend] == self.a[asub:aend], (
-                        (self.base[intbase:intend], self.a[asub:aend]))
+                    self.base[intbase:intend],
+                    self.a[asub:aend],
+                )
 
                 assert self.base[intbase:intend] == self.b[bsub:bend]
 
-                sl.append((intbase, intend,
-                           asub, aend,
-                           bsub, bend))
+                sl.append((intbase, intend, asub, aend, bsub, bend))
 
             # advance whichever one ends first in the base text
             if (abase + alen) < (bbase + blen):
@@ -397,6 +415,7 @@
 
         return unc
 
+
 def _verifytext(text, path, ui, opts):
     """verifies that text is non-binary (unless opts[text] is passed,
     then we just warn)"""
@@ -408,6 +427,7 @@
             raise error.Abort(msg)
     return text
 
+
 def _picklabels(defaults, overrides):
     if len(overrides) > 3:
         raise error.Abort(_("can only specify three labels."))
@@ -416,6 +436,7 @@
         result[i] = override
     return result
 
+
 def simplemerge(ui, localctx, basectx, otherctx, **opts):
     """Performs the simplemerge algorithm.
 
@@ -433,12 +454,12 @@
         # repository usually sees) might be more useful.
         return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
 
-    mode = opts.get('mode','merge')
+    mode = opts.get('mode', 'merge')
     name_a, name_b, name_base = None, None, None
     if mode != 'union':
-        name_a, name_b, name_base = _picklabels([localctx.path(),
-                                                 otherctx.path(), None],
-                                                opts.get('label', []))
+        name_a, name_b, name_base = _picklabels(
+            [localctx.path(), otherctx.path(), None], opts.get('label', [])
+        )
 
     try:
         localtext = readctx(localctx)
@@ -449,9 +470,9 @@
 
     m3 = Merge3Text(basetext, localtext, othertext)
     extrakwargs = {
-            "localorother": opts.get("localorother", None),
-            'minimize': True,
-        }
+        "localorother": opts.get("localorother", None),
+        'minimize': True,
+    }
     if mode == 'union':
         extrakwargs['start_marker'] = None
         extrakwargs['mid_marker'] = None
@@ -462,8 +483,9 @@
         extrakwargs['minimize'] = False
 
     mergedtext = ""
-    for line in m3.merge_lines(name_a=name_a, name_b=name_b,
-                               **pycompat.strkwargs(extrakwargs)):
+    for line in m3.merge_lines(
+        name_a=name_a, name_b=name_b, **pycompat.strkwargs(extrakwargs)
+    ):
         if opts.get('print'):
             ui.fout.write(line)
         else:
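
The [t[1] : t[2]] rewrites above are black applying PEP 8's slice rule: the colon acts like a binary operator, so bounds that are plain names or literals stay tight, a bound that is an expression or subscript gets a space on both sides of the colon, and an omitted bound gets no space on its empty side. A runnable illustration of each case (the data is made up):

    data = b'0123456789'
    t = (2, 5)
    files = ['a', 'b', 'c', 'd']
    keep = 2

    tight = data[2:5]                    # simple bounds: no spaces
    spaced = data[t[0] : t[1]]           # subscripted bound: spaced colon
    tail = files[: len(files) - keep]    # omitted bound: no space there

    assert tight == spaced == b'234'
    assert tail == ['a', 'b']
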
--- a/mercurial/smartset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/smartset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,15 +13,14 @@
     pycompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
+
 
 def _typename(o):
     return pycompat.sysbytes(type(o).__name__).lstrip('_')
 
+
 class abstractsmartset(object):
-
     def __nonzero__(self):
         """True if the smartset is not empty"""
         raise NotImplementedError()
@@ -125,8 +124,9 @@
 
         This is part of the mandatory API for smartset."""
         c = other.__contains__
-        return self.filter(lambda r: not c(r), condrepr=('<not %r>', other),
-                           cache=False)
+        return self.filter(
+            lambda r: not c(r), condrepr=('<not %r>', other), cache=False
+        )
 
     def filter(self, condition, condrepr=None, cache=True):
         """Returns this smartset filtered by condition as a new smartset.
@@ -163,6 +163,7 @@
             ys.append(y)
         return baseset(ys, datarepr=('slice=%d:%d %r', start, stop, self))
 
+
 class baseset(abstractsmartset):
     """Basic data structure that represents a revset and contains the basic
     operation that it should be able to perform.
@@ -222,6 +223,7 @@
     >>> rs._istopo
     True
     """
+
     def __init__(self, data=(), datarepr=None, istopo=False):
         """
         datarepr: a tuple of (format, obj, ...), a function or an object that
@@ -342,10 +344,15 @@
 
     def _fastsetop(self, other, op):
         # try to use native set operations as fast paths
-        if (type(other) is baseset and r'_set' in other.__dict__ and r'_set' in
-            self.__dict__ and self._ascending is not None):
-            s = baseset(data=getattr(self._set, op)(other._set),
-                        istopo=self._istopo)
+        if (
+            type(other) is baseset
+            and r'_set' in other.__dict__
+            and r'_set' in self.__dict__
+            and self._ascending is not None
+        ):
+            s = baseset(
+                data=getattr(self._set, op)(other._set), istopo=self._istopo
+            )
             s._ascending = self._ascending
         else:
             s = getattr(super(baseset, self), op)(other)
@@ -383,11 +390,13 @@
             s = pycompat.byterepr(l)
         return '<%s%s %s>' % (_typename(self), d, s)
 
+
 class filteredset(abstractsmartset):
     """Duck type for baseset class which iterates lazily over the revisions in
     the subset and contains a function which tests for membership in the
     revset
     """
+
     def __init__(self, subset, condition=lambda x: True, condrepr=None):
         """
         condition: a function that decide whether a revision in the subset
@@ -427,10 +436,12 @@
 
     def __nonzero__(self):
         fast = None
-        candidates = [self.fastasc if self.isascending() else None,
-                      self.fastdesc if self.isdescending() else None,
-                      self.fastasc,
-                      self.fastdesc]
+        candidates = [
+            self.fastasc if self.isascending() else None,
+            self.fastdesc if self.isdescending() else None,
+            self.fastasc,
+            self.fastdesc,
+        ]
         for candidate in candidates:
             if candidate is not None:
                 fast = candidate
@@ -484,7 +495,7 @@
         if it is not None:
             for x in it():
                 return x
-            return None #empty case
+            return None  # empty case
         else:
             x = None
             for x in self:
@@ -499,6 +510,7 @@
             xs.append(s)
         return '<%s %s>' % (_typename(self), ', '.join(xs))
 
+
 def _iterordered(ascending, iter1, iter2):
     """produce an ordered iteration from two iterators with the same order
 
@@ -535,6 +547,7 @@
         for val in it:
             yield val
 
+
 class addset(abstractsmartset):
     """Represent the addition of two sets
 
@@ -606,6 +619,7 @@
     >>> [x for x in rs]
     [5, 4, 3, 2, 0]
     """
+
     def __init__(self, revs1, revs2, ascending=None):
         self._r1 = revs1
         self._r2 = revs2
@@ -641,6 +655,7 @@
         if self._ascending is None:
             if self._genlist:
                 return iter(self._genlist)
+
             def arbitraryordergen():
                 for r in self._r1:
                     yield r
@@ -648,6 +663,7 @@
                 for r in self._r2:
                     if not inr1(r):
                         yield r
+
             return arbitraryordergen()
         # try to use our own fast iterator if it exists
         self._trysetasclist()
@@ -747,6 +763,7 @@
         d = {None: '', False: '-', True: '+'}[self._ascending]
         return '<%s%s %r, %r>' % (_typename(self), d, self._r1, self._r2)
 
+
 class generatorset(abstractsmartset):
     """Wrap a generator for lazy iteration
 
@@ -760,6 +777,7 @@
     >>> xs.last()  # cached
     4
     """
+
     def __new__(cls, gen, iterasc=None):
         if iterasc is None:
             typ = cls
@@ -830,7 +848,8 @@
         # iteration.
         genlist = self._genlist
         nextgen = self._consumegen()
-        _len, _next = len, next # cache global lookup
+        _len, _next = len, next  # cache global lookup
+
         def gen():
             i = 0
             while True:
@@ -842,6 +861,7 @@
                     except StopIteration:
                         return
                 i += 1
+
         return gen()
 
     def _consumegen(self):
@@ -911,6 +931,7 @@
         d = {False: '-', True: '+'}[self._ascending]
         return '<%s%s>' % (_typename(self), d)
 
+
 class _generatorsetasc(generatorset):
     """Special case of generatorset optimized for ascending generators."""
 
@@ -930,6 +951,7 @@
         self._cache[x] = False
         return False
 
+
 class _generatorsetdesc(generatorset):
     """Special case of generatorset optimized for descending generators."""
 
@@ -949,6 +971,7 @@
         self._cache[x] = False
         return False
 
+
 def spanset(repo, start=0, end=None):
     """Create a spanset that represents a range of repository revisions
 
@@ -964,6 +987,7 @@
         start, end = end + 1, start + 1
     return _spanset(start, end, ascending, repo.changelog.filteredrevs)
 
+
 class _spanset(abstractsmartset):
     """Duck type for baseset class which represents a range of revisions and
     can work lazily and without having all the range in memory
@@ -974,6 +998,7 @@
     - revisions filtered with this repoview will be skipped.
 
     """
+
     def __init__(self, start, end, ascending, hiddenrevs):
         self._start = start
         self._end = end
@@ -1018,8 +1043,9 @@
 
     def __contains__(self, rev):
         hidden = self._hiddenrevs
-        return ((self._start <= rev < self._end)
-                and not (hidden and rev in hidden))
+        return (self._start <= rev < self._end) and not (
+            hidden and rev in hidden
+        )
 
     def __nonzero__(self):
         for r in self:
@@ -1081,6 +1107,7 @@
         d = {False: '-', True: '+'}[self._ascending]
         return '<%s%s %d:%d>' % (_typename(self), d, self._start, self._end)
 
+
 class fullreposet(_spanset):
     """a set containing all revisions in the repo
 
@@ -1089,8 +1116,9 @@
     """
 
     def __init__(self, repo):
-        super(fullreposet, self).__init__(0, len(repo), True,
-                                          repo.changelog.filteredrevs)
+        super(fullreposet, self).__init__(
+            0, len(repo), True, repo.changelog.filteredrevs
+        )
 
     def __and__(self, other):
         """As self contains the whole repo, all of the other set should also be
--- a/mercurial/sparse.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/sparse.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,6 +30,7 @@
 # a per-repo option, possibly a repo requirement.
 enabled = False
 
+
 def parseconfig(ui, raw, action):
     """Parse sparse config file content.
 
@@ -55,8 +56,13 @@
         elif line == '[include]':
             if havesection and current != includes:
                 # TODO pass filename into this API so we can report it.
-                raise error.Abort(_('%(action)s config cannot have includes '
-                                    'after excludes') % {'action': action})
+                raise error.Abort(
+                    _(
+                        '%(action)s config cannot have includes '
+                        'after excludes'
+                    )
+                    % {'action': action}
+                )
             havesection = True
             current = includes
             continue
@@ -65,21 +71,29 @@
             current = excludes
         elif line:
             if current is None:
-                raise error.Abort(_('%(action)s config entry outside of '
-                                    'section: %(line)s')
-                                  % {'action': action, 'line': line},
-                                  hint=_('add an [include] or [exclude] line '
-                                         'to declare the entry type'))
+                raise error.Abort(
+                    _('%(action)s config entry outside of ' 'section: %(line)s')
+                    % {'action': action, 'line': line},
+                    hint=_(
+                        'add an [include] or [exclude] line '
+                        'to declare the entry type'
+                    ),
+                )
 
             if line.strip().startswith('/'):
-                ui.warn(_('warning: %(action)s profile cannot use'
-                          ' paths starting with /, ignoring %(line)s\n')
-                        % {'action': action, 'line': line})
+                ui.warn(
+                    _(
+                        'warning: %(action)s profile cannot use'
+                        ' paths starting with /, ignoring %(line)s\n'
+                    )
+                    % {'action': action, 'line': line}
+                )
                 continue
             current.add(line)
 
     return includes, excludes, profiles
 
+
 # Exists as separate function to facilitate monkeypatching.
 def readprofile(repo, profile, changeid):
     """Resolve the raw content of a sparse profile file."""
@@ -87,6 +101,7 @@
     # resolve and can be slow.
     return repo.filectx(profile, changeid=changeid).data()
 
+
 def patternsforrev(repo, rev):
     """Obtain sparse checkout patterns for the given rev.
 
@@ -102,8 +117,9 @@
         return set(), set(), set()
 
     if rev is None:
-        raise error.Abort(_('cannot parse sparse patterns from working '
-                            'directory'))
+        raise error.Abort(
+            _('cannot parse sparse patterns from working ' 'directory')
+        )
 
     includes, excludes, profiles = parseconfig(repo.ui, raw, 'sparse')
     ctx = repo[rev]
@@ -122,10 +138,10 @@
             except error.ManifestLookupError:
                 msg = (
                     "warning: sparse profile '%s' not found "
-                    "in rev %s - ignoring it\n" % (profile, ctx))
+                    "in rev %s - ignoring it\n" % (profile, ctx)
+                )
                 # experimental config: sparse.missingwarning
-                if repo.ui.configbool(
-                        'sparse', 'missingwarning'):
+                if repo.ui.configbool('sparse', 'missingwarning'):
                     repo.ui.warn(msg)
                 else:
                     repo.ui.debug(msg)
@@ -143,14 +159,18 @@
 
     return includes, excludes, profiles
 
+
 def activeconfig(repo):
     """Determine the active sparse config rules.
 
     Rules are constructed by reading the current sparse config and bringing in
     referenced profiles from parents of the working directory.
     """
-    revs = [repo.changelog.rev(node) for node in
-            repo.dirstate.parents() if node != nullid]
+    revs = [
+        repo.changelog.rev(node)
+        for node in repo.dirstate.parents()
+        if node != nullid
+    ]
 
     allincludes = set()
     allexcludes = set()
@@ -164,6 +184,7 @@
 
     return allincludes, allexcludes, allprofiles
 
+
 def configsignature(repo, includetemp=True):
     """Obtain the signature string for the current sparse configuration.
 
@@ -189,6 +210,7 @@
 
     return '%s %s' % (signature, tempsignature)
 
+
 def writeconfig(repo, includes, excludes, profiles):
     """Write the sparse config file given a sparse configuration."""
     with repo.vfs('sparse', 'wb') as fh:
@@ -209,6 +231,7 @@
 
     repo._sparsesignaturecache.clear()
 
+
 def readtemporaryincludes(repo):
     raw = repo.vfs.tryread('tempsparse')
     if not raw:
@@ -216,16 +239,19 @@
 
     return set(raw.split('\n'))
 
+
 def writetemporaryincludes(repo, includes):
     repo.vfs.write('tempsparse', '\n'.join(sorted(includes)))
     repo._sparsesignaturecache.clear()
 
+
 def addtemporaryincludes(repo, additional):
     includes = readtemporaryincludes(repo)
     for i in additional:
         includes.add(i)
     writetemporaryincludes(repo, includes)
 
+
 def prunetemporaryincludes(repo):
     if not enabled or not repo.vfs.exists('tempsparse'):
         return
@@ -248,8 +274,9 @@
 
     typeactions = mergemod.emptyactions()
     typeactions['r'] = actions
-    mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False,
-                          wantfiledata=False)
+    mergemod.applyupdates(
+        repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+    )
 
     # Fix dirstate
     for file in dropped:
@@ -257,10 +284,12 @@
 
     repo.vfs.unlink('tempsparse')
     repo._sparsesignaturecache.clear()
-    msg = _('cleaned up %d temporarily added file(s) from the '
-            'sparse checkout\n')
+    msg = _(
+        'cleaned up %d temporarily added file(s) from the ' 'sparse checkout\n'
+    )
     repo.ui.status(msg % len(tempincludes))
 
+
 def forceincludematcher(matcher, includes):
     """Returns a matcher that returns true for any of the forced includes
     before testing against the actual matcher."""
@@ -268,6 +297,7 @@
     includematcher = matchmod.includematcher('', kindpats)
     return matchmod.unionmatcher([includematcher, matcher])
 
+
 def matcher(repo, revs=None, includetemp=True):
     """Obtain a matcher for sparse working directories for the given revs.
 
@@ -281,8 +311,11 @@
         return matchmod.always()
 
     if not revs or revs == [None]:
-        revs = [repo.changelog.rev(node)
-                for node in repo.dirstate.parents() if node != nullid]
+        revs = [
+            repo.changelog.rev(node)
+            for node in repo.dirstate.parents()
+            if node != nullid
+        ]
 
     signature = configsignature(repo, includetemp=includetemp)
 
@@ -298,9 +331,14 @@
             includes, excludes, profiles = patternsforrev(repo, rev)
 
             if includes or excludes:
-                matcher = matchmod.match(repo.root, '', [],
-                                         include=includes, exclude=excludes,
-                                         default='relpath')
+                matcher = matchmod.match(
+                    repo.root,
+                    '',
+                    [],
+                    include=includes,
+                    exclude=excludes,
+                    default='relpath',
+                )
                 matchers.append(matcher)
         except IOError:
             pass
@@ -320,6 +358,7 @@
 
     return result
 
+
 def filterupdatesactions(repo, wctx, mctx, branchmerge, actions):
     """Filter updates to only lay out files that match the sparse rules."""
     if not enabled:
@@ -367,8 +406,13 @@
                 temporaryfiles.append(f1)
 
     if len(temporaryfiles) > 0:
-        repo.ui.status(_('temporarily included %d file(s) in the sparse '
-                         'checkout for merging\n') % len(temporaryfiles))
+        repo.ui.status(
+            _(
+                'temporarily included %d file(s) in the sparse '
+                'checkout for merging\n'
+            )
+            % len(temporaryfiles)
+        )
         addtemporaryincludes(repo, temporaryfiles)
 
         # Add the new files to the working copy so they can be merged, etc
@@ -382,8 +426,9 @@
 
         typeactions = mergemod.emptyactions()
         typeactions['g'] = actions
-        mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
-                              False, wantfiledata=False)
+        mergemod.applyupdates(
+            repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+        )
 
         dirstate = repo.dirstate
         for file, flags, msg in actions:
@@ -407,6 +452,7 @@
 
     return prunedactions
 
+
 def refreshwdir(repo, origstatus, origsparsematch, force=False):
     """Refreshes working directory by taking sparse config into account.
 
@@ -430,8 +476,9 @@
             abort = not force
 
     if abort:
-        raise error.Abort(_('could not update sparseness due to pending '
-                            'changes'))
+        raise error.Abort(
+            _('could not update sparseness due to pending ' 'changes')
+        )
 
     # Calculate actions
     dirstate = repo.dirstate
@@ -470,9 +517,13 @@
         repo.ui.warn(_("pending changes to '%s'\n") % file)
         abort = not force
     if abort:
-        raise error.Abort(_('cannot change sparseness due to pending '
-                            'changes (delete the files or use '
-                            '--force to bring them back dirty)'))
+        raise error.Abort(
+            _(
+                'cannot change sparseness due to pending '
+                'changes (delete the files or use '
+                '--force to bring them back dirty)'
+            )
+        )
 
     # Check for files that were only in the dirstate.
     for file, state in dirstate.iteritems():
@@ -487,8 +538,9 @@
     for f, (m, args, msg) in actions.iteritems():
         typeactions[m].append((f, args, msg))
 
-    mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False,
-                          wantfiledata=False)
+    mergemod.applyupdates(
+        repo, typeactions, repo[None], repo['.'], False, wantfiledata=False
+    )
 
     # Fix dirstate
     for file in added:
@@ -503,6 +555,7 @@
 
     return added, dropped, lookup
 
+
 def aftercommit(repo, node):
     """Perform actions after a working directory commit."""
     # This function is called unconditionally, even if sparse isn't
@@ -519,8 +572,10 @@
 
     prunetemporaryincludes(repo)
 
-def _updateconfigandrefreshwdir(repo, includes, excludes, profiles,
-                                force=False, removing=False):
+
+def _updateconfigandrefreshwdir(
+    repo, includes, excludes, profiles, force=False, removing=False
+):
     """Update the sparse config and working directory state."""
     raw = repo.vfs.tryread('sparse')
     oldincludes, oldexcludes, oldprofiles = parseconfig(repo.ui, raw, 'sparse')
@@ -555,6 +610,7 @@
         writeconfig(repo, oldincludes, oldexcludes, oldprofiles)
         raise
 
+
 def clearrules(repo, force=False):
     """Clears include/exclude rules from the sparse config.
 
@@ -570,6 +626,7 @@
 
         _updateconfigandrefreshwdir(repo, set(), set(), profiles, force=force)
 
+
 def importfromfiles(repo, opts, paths, force=False):
     """Import sparse config rules from files.
 
@@ -589,8 +646,9 @@
             with util.posixfile(util.expandpath(p), mode='rb') as fh:
                 raw = fh.read()
 
-            iincludes, iexcludes, iprofiles = parseconfig(repo.ui, raw,
-                                                          'sparse')
+            iincludes, iexcludes, iprofiles = parseconfig(
+                repo.ui, raw, 'sparse'
+            )
             oldsize = len(includes) + len(excludes) + len(profiles)
             includes.update(iincludes - aincludes)
             excludes.update(iexcludes - aexcludes)
@@ -606,15 +664,31 @@
             includecount = len(includes - aincludes)
             excludecount = len(excludes - aexcludes)
 
-            fcounts = map(len, _updateconfigandrefreshwdir(
-                repo, includes, excludes, profiles, force=force))
+            fcounts = map(
+                len,
+                _updateconfigandrefreshwdir(
+                    repo, includes, excludes, profiles, force=force
+                ),
+            )
+
+        printchanges(
+            repo.ui, opts, profilecount, includecount, excludecount, *fcounts
+        )
+
 
-        printchanges(repo.ui, opts, profilecount, includecount, excludecount,
-                     *fcounts)
-
-def updateconfig(repo, pats, opts, include=False, exclude=False, reset=False,
-                 delete=False, enableprofile=False, disableprofile=False,
-                 force=False, usereporootpaths=False):
+def updateconfig(
+    repo,
+    pats,
+    opts,
+    include=False,
+    exclude=False,
+    reset=False,
+    delete=False,
+    enableprofile=False,
+    disableprofile=False,
+    force=False,
+    usereporootpaths=False,
+):
     """Perform a sparse config update.
 
     Only one of the actions may be performed.
@@ -623,8 +697,9 @@
     """
     with repo.wlock():
         raw = repo.vfs.tryread('sparse')
-        oldinclude, oldexclude, oldprofiles = parseconfig(repo.ui, raw,
-                                                          'sparse')
+        oldinclude, oldexclude, oldprofiles = parseconfig(
+            repo.ui, raw, 'sparse'
+        )
 
         if reset:
             newinclude = set()
@@ -645,8 +720,9 @@
             for kindpat in pats:
                 kind, pat = matchmod._patsplit(kindpat, None)
                 if kind in matchmod.cwdrelativepatternkinds or kind is None:
-                    ap = ((kind + ':' if kind else '') +
-                          pathutil.canonpath(root, cwd, pat))
+                    ap = (kind + ':' if kind else '') + pathutil.canonpath(
+                        root, cwd, pat
+                    )
                     abspats.append(ap)
                 else:
                     abspats.append(kindpat)
@@ -664,39 +740,78 @@
             newinclude.difference_update(pats)
             newexclude.difference_update(pats)
 
-        profilecount = (len(newprofiles - oldprofiles) -
-                        len(oldprofiles - newprofiles))
-        includecount = (len(newinclude - oldinclude) -
-                        len(oldinclude - newinclude))
-        excludecount = (len(newexclude - oldexclude) -
-                        len(oldexclude - newexclude))
+        profilecount = len(newprofiles - oldprofiles) - len(
+            oldprofiles - newprofiles
+        )
+        includecount = len(newinclude - oldinclude) - len(
+            oldinclude - newinclude
+        )
+        excludecount = len(newexclude - oldexclude) - len(
+            oldexclude - newexclude
+        )
 
-        fcounts = map(len, _updateconfigandrefreshwdir(
-            repo, newinclude, newexclude, newprofiles, force=force,
-            removing=reset))
+        fcounts = map(
+            len,
+            _updateconfigandrefreshwdir(
+                repo,
+                newinclude,
+                newexclude,
+                newprofiles,
+                force=force,
+                removing=reset,
+            ),
+        )
 
-        printchanges(repo.ui, opts, profilecount, includecount,
-                     excludecount, *fcounts)
+        printchanges(
+            repo.ui, opts, profilecount, includecount, excludecount, *fcounts
+        )
+
 
-def printchanges(ui, opts, profilecount=0, includecount=0, excludecount=0,
-                 added=0, dropped=0, conflicting=0):
+def printchanges(
+    ui,
+    opts,
+    profilecount=0,
+    includecount=0,
+    excludecount=0,
+    added=0,
+    dropped=0,
+    conflicting=0,
+):
     """Print output summarizing sparse config changes."""
     with ui.formatter('sparse', opts) as fm:
         fm.startitem()
-        fm.condwrite(ui.verbose, 'profiles_added', _('Profiles changed: %d\n'),
-                     profilecount)
-        fm.condwrite(ui.verbose, 'include_rules_added',
-                     _('Include rules changed: %d\n'), includecount)
-        fm.condwrite(ui.verbose, 'exclude_rules_added',
-                     _('Exclude rules changed: %d\n'), excludecount)
+        fm.condwrite(
+            ui.verbose,
+            'profiles_added',
+            _('Profiles changed: %d\n'),
+            profilecount,
+        )
+        fm.condwrite(
+            ui.verbose,
+            'include_rules_added',
+            _('Include rules changed: %d\n'),
+            includecount,
+        )
+        fm.condwrite(
+            ui.verbose,
+            'exclude_rules_added',
+            _('Exclude rules changed: %d\n'),
+            excludecount,
+        )
 
         # In 'plain' verbose mode, mergemod.applyupdates already outputs what
         # files are added or removed outside of the templating formatter
         # framework. No point in repeating ourselves in that case.
         if not fm.isplain():
-            fm.condwrite(ui.verbose, 'files_added', _('Files added: %d\n'),
-                         added)
-            fm.condwrite(ui.verbose, 'files_dropped', _('Files dropped: %d\n'),
-                         dropped)
-            fm.condwrite(ui.verbose, 'files_conflicting',
-                         _('Files conflicting: %d\n'), conflicting)
+            fm.condwrite(
+                ui.verbose, 'files_added', _('Files added: %d\n'), added
+            )
+            fm.condwrite(
+                ui.verbose, 'files_dropped', _('Files dropped: %d\n'), dropped
+            )
+            fm.condwrite(
+                ui.verbose,
+                'files_conflicting',
+                _('Files conflicting: %d\n'),
+                conflicting,
+            )
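
The sparse.py hunks above are dominated by one black behavior: a call that no
longer fits on a single line is rewrapped so that either all arguments share
one indented continuation line, or each argument gets its own line with a
trailing comma. A minimal sketch of the two equivalent layouts, using a dummy
applyupdates with the same arity as the call sites above (a stand-in, not the
real mergemod.applyupdates):

def applyupdates(repo, actions, wctx, mctx, overwrite, wantfiledata=False):
    # Dummy stand-in; only the call shape matters for this sketch.
    return wantfiledata

# Pre-black style: continuation aligned under the opening parenthesis.
r1 = applyupdates('repo', {'r': []}, None, '.', False,
                  wantfiledata=False)

# Black style: arguments on an indented continuation line (or one per
# line when even that would overflow), closing parenthesis on its own line.
r2 = applyupdates(
    'repo', {'r': []}, None, '.', False, wantfiledata=False
)

assert r1 is r2 is False
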
--- a/mercurial/sshpeer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/sshpeer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,6 +25,7 @@
     stringutil,
 )
 
+
 def _serverquote(s):
     """quote a string for the remote shell ... which we assume is sh"""
     if not s:
@@ -33,6 +34,7 @@
         return s
     return "'%s'" % s.replace("'", "'\\''")
 
+
 def _forwardoutput(ui, pipe):
     """display all data currently available on pipe as remote output.
 
@@ -43,6 +45,7 @@
             for l in s.splitlines():
                 ui.status(_("remote: "), l, '\n')
 
+
 class doublepipe(object):
     """Operate a side-channel pipe in addition of a main one
 
@@ -72,8 +75,10 @@
 
         (This will only wait for data if the setup is supported by `util.poll`)
         """
-        if (isinstance(self._main, util.bufferedinputpipe) and
-            self._main.hasbuffer):
+        if (
+            isinstance(self._main, util.bufferedinputpipe)
+            and self._main.hasbuffer
+        ):
             # Main has data. Assume side is worth poking at.
             return True, True
 
@@ -137,6 +142,7 @@
     def flush(self):
         return self._main.flush()
 
+
 def _cleanuppipes(ui, pipei, pipeo, pipee):
     """Clean up pipes used by an SSH connection."""
     if pipeo:
@@ -154,6 +160,7 @@
 
         pipee.close()
 
+
 def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None):
     """Create an SSH connection to a server.
 
@@ -163,8 +170,11 @@
     cmd = '%s %s %s' % (
         sshcmd,
         args,
-        procutil.shellquote('%s -R %s serve --stdio' % (
-            _serverquote(remotecmd), _serverquote(path))))
+        procutil.shellquote(
+            '%s -R %s serve --stdio'
+            % (_serverquote(remotecmd), _serverquote(path))
+        ),
+    )
 
     ui.debug('running %s\n' % cmd)
     cmd = procutil.quotecommand(cmd)
@@ -176,17 +186,21 @@
 
     return proc, stdin, stdout, stderr
 
+
 def _clientcapabilities():
     """Return list of capabilities of this client.
 
     Returns a list of capabilities that are supported by this client.
     """
     protoparams = {'partial-pull'}
-    comps = [e.wireprotosupport().name for e in
-             util.compengines.supportedwireengines(util.CLIENTROLE)]
+    comps = [
+        e.wireprotosupport().name
+        for e in util.compengines.supportedwireengines(util.CLIENTROLE)
+    ]
     protoparams.add('comp=%s' % ','.join(comps))
     return protoparams
 
+
 def _performhandshake(ui, stdin, stdout, stderr):
     def badresponse():
         # Flush any output on stderr.
@@ -363,9 +377,11 @@
 
     return protoname, caps
 
+
 class sshv1peer(wireprotov1peer.wirepeer):
-    def __init__(self, ui, url, proc, stdin, stdout, stderr, caps,
-                 autoreadstderr=True):
+    def __init__(
+        self, ui, url, proc, stdin, stdout, stderr, caps, autoreadstderr=True
+    ):
         """Create a peer from an existing SSH connection.
 
         ``proc`` is a handle on the underlying SSH process.
@@ -438,8 +454,9 @@
     __del__ = _cleanup
 
     def _sendrequest(self, cmd, args, framed=False):
-        if (self.ui.debugflag
-            and self.ui.configbool('devel', 'debug.peer-request')):
+        if self.ui.debugflag and self.ui.configbool(
+            'devel', 'debug.peer-request'
+        ):
             dbg = self.ui.debug
             line = 'devel-peer-request: %s\n'
             dbg(line % cmd)
@@ -560,12 +577,15 @@
         if self._autoreadstderr:
             self._readerr()
 
+
 class sshv2peer(sshv1peer):
     """A peer that speakers version 2 of the transport protocol."""
+
     # Currently version 2 is identical to version 1 post handshake.
     # And handshake is performed before the peer is instantiated. So
     # we need no custom code.
 
+
 def makepeer(ui, path, proc, stdin, stdout, stderr, autoreadstderr=True):
     """Make a peer instance from existing pipes.
 
@@ -587,15 +607,33 @@
         raise
 
     if protoname == wireprototypes.SSHV1:
-        return sshv1peer(ui, path, proc, stdin, stdout, stderr, caps,
-                         autoreadstderr=autoreadstderr)
+        return sshv1peer(
+            ui,
+            path,
+            proc,
+            stdin,
+            stdout,
+            stderr,
+            caps,
+            autoreadstderr=autoreadstderr,
+        )
     elif protoname == wireprototypes.SSHV2:
-        return sshv2peer(ui, path, proc, stdin, stdout, stderr, caps,
-                         autoreadstderr=autoreadstderr)
+        return sshv2peer(
+            ui,
+            path,
+            proc,
+            stdin,
+            stdout,
+            stderr,
+            caps,
+            autoreadstderr=autoreadstderr,
+        )
     else:
         _cleanuppipes(ui, stdout, stdin, stderr)
-        raise error.RepoError(_('unknown version of SSH protocol: %s') %
-                              protoname)
+        raise error.RepoError(
+            _('unknown version of SSH protocol: %s') % protoname
+        )
+
 
 def instance(ui, path, create, intents=None, createopts=None):
     """Create an SSH peer.
@@ -625,19 +663,26 @@
         # querying the remote, there's no way of knowing if the remote even
         # supports said requested feature.
         if createopts:
-            raise error.RepoError(_('cannot create remote SSH repositories '
-                                    'with extra options'))
+            raise error.RepoError(
+                _('cannot create remote SSH repositories ' 'with extra options')
+            )
 
-        cmd = '%s %s %s' % (sshcmd, args,
-            procutil.shellquote('%s init %s' %
-                (_serverquote(remotecmd), _serverquote(remotepath))))
+        cmd = '%s %s %s' % (
+            sshcmd,
+            args,
+            procutil.shellquote(
+                '%s init %s'
+                % (_serverquote(remotecmd), _serverquote(remotepath))
+            ),
+        )
         ui.debug('running %s\n' % cmd)
         res = ui.system(cmd, blockedtag='sshpeer', environ=sshenv)
         if res != 0:
             raise error.RepoError(_('could not create remote repo'))
 
-    proc, stdin, stdout, stderr = _makeconnection(ui, sshcmd, args, remotecmd,
-                                                  remotepath, sshenv)
+    proc, stdin, stdout, stderr = _makeconnection(
+        ui, sshcmd, args, remotecmd, remotepath, sshenv
+    )
 
     peer = makepeer(ui, path, proc, stdin, stdout, stderr)
 
@@ -645,8 +690,9 @@
     # capabilities.
     if 'protocaps' in peer.capabilities():
         try:
-            peer._call("protocaps",
-                       caps=' '.join(sorted(_clientcapabilities())))
+            peer._call(
+                "protocaps", caps=' '.join(sorted(_clientcapabilities()))
+            )
         except IOError:
             peer._cleanup()
             raise error.RepoError(_('capability exchange failed'))
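
Many hunks in sshpeer.py add nothing but a lone '+' blank line. That is black
enforcing PEP 8 vertical whitespace: exactly two blank lines between top-level
definitions, one between methods. A small sketch with hypothetical helpers
(the _sketch names are illustrative, not part of sshpeer.py):

def _serverquote_sketch(s):
    # Same quoting idea as _serverquote above, reduced to one case.
    return "'%s'" % s.replace("'", "'\\''")


def _forwardoutput_sketch(line):
    # Black guarantees exactly two blank lines before this def.
    return 'remote: ' + line


print(_forwardoutput_sketch(_serverquote_sketch('ok')))
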
--- a/mercurial/sslutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/sslutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -113,6 +113,7 @@
 
             return ssl.wrap_socket(socket, **args)
 
+
 def _hostsettings(ui, hostname):
     """Obtain security settings for a hostname.
 
@@ -149,10 +150,11 @@
     def validateprotocol(protocol, key):
         if protocol not in configprotocols:
             raise error.Abort(
-                _('unsupported protocol from hostsecurity.%s: %s') %
-                (key, protocol),
-                hint=_('valid protocols: %s') %
-                     ' '.join(sorted(configprotocols)))
+                _('unsupported protocol from hostsecurity.%s: %s')
+                % (key, protocol),
+                hint=_('valid protocols: %s')
+                % ' '.join(sorted(configprotocols)),
+            )
 
     # We default to TLS 1.1+ where we can because TLS 1.0 has known
     # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
@@ -165,10 +167,15 @@
         # the bold warnings on the web site.
         # internal config: hostsecurity.disabletls10warning
         if not ui.configbool('hostsecurity', 'disabletls10warning'):
-            ui.warn(_('warning: connecting to %s using legacy security '
-                      'technology (TLS 1.0); see '
-                      'https://mercurial-scm.org/wiki/SecureConnections for '
-                      'more info\n') % bhostname)
+            ui.warn(
+                _(
+                    'warning: connecting to %s using legacy security '
+                    'technology (TLS 1.0); see '
+                    'https://mercurial-scm.org/wiki/SecureConnections for '
+                    'more info\n'
+                )
+                % bhostname
+            )
         defaultprotocol = 'tls1.0'
 
     key = 'minimumprotocol'
@@ -196,10 +203,10 @@
     fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % bhostname)
     for fingerprint in fingerprints:
         if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
-            raise error.Abort(_('invalid fingerprint for %s: %s') % (
-                                bhostname, fingerprint),
-                              hint=_('must begin with "sha1:", "sha256:", '
-                                     'or "sha512:"'))
+            raise error.Abort(
+                _('invalid fingerprint for %s: %s') % (bhostname, fingerprint),
+                hint=_('must begin with "sha1:", "sha256:", ' 'or "sha512:"'),
+            )
 
         alg, fingerprint = fingerprint.split(':', 1)
         fingerprint = fingerprint.replace(':', '').lower()
@@ -231,9 +238,14 @@
     # being performed.
     cafile = ui.config('hostsecurity', '%s:verifycertsfile' % bhostname)
     if s['certfingerprints'] and cafile:
-        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
-                  'fingerprints defined; using host fingerprints for '
-                  'verification)\n') % bhostname)
+        ui.warn(
+            _(
+                '(hostsecurity.%s:verifycertsfile ignored when host '
+                'fingerprints defined; using host fingerprints for '
+                'verification)\n'
+            )
+            % bhostname
+        )
 
     # Try to hook up CA certificate validation unless something above
     # makes it not necessary.
@@ -242,9 +254,10 @@
         if cafile:
             cafile = util.expandpath(cafile)
             if not os.path.exists(cafile):
-                raise error.Abort(_('path specified by %s does not exist: %s') %
-                                  ('hostsecurity.%s:verifycertsfile' % (
-                                      bhostname,), cafile))
+                raise error.Abort(
+                    _('path specified by %s does not exist: %s')
+                    % ('hostsecurity.%s:verifycertsfile' % (bhostname,), cafile)
+                )
             s['cafile'] = cafile
         else:
             # Find global certificates file in config.
@@ -253,8 +266,9 @@
             if cafile:
                 cafile = util.expandpath(cafile)
                 if not os.path.exists(cafile):
-                    raise error.Abort(_('could not find web.cacerts: %s') %
-                                      cafile)
+                    raise error.Abort(
+                        _('could not find web.cacerts: %s') % cafile
+                    )
             elif s['allowloaddefaultcerts']:
                 # CAs not defined in config. Try to find system bundles.
                 cafile = _defaultcacerts(ui)
@@ -281,6 +295,7 @@
 
     return s
 
+
 def protocolsettings(protocol):
     """Resolve the protocol for a config value.
 
@@ -304,10 +319,14 @@
     # full/real SSLContext available to us.
     if supportedprotocols == {'tls1.0'}:
         if protocol != 'tls1.0':
-            raise error.Abort(_('current Python does not support protocol '
-                                'setting %s') % protocol,
-                              hint=_('upgrade Python or disable setting since '
-                                     'only TLS 1.0 is supported'))
+            raise error.Abort(
+                _('current Python does not support protocol ' 'setting %s')
+                % protocol,
+                hint=_(
+                    'upgrade Python or disable setting since '
+                    'only TLS 1.0 is supported'
+                ),
+            )
 
         return ssl.PROTOCOL_TLSv1, 0, 'tls1.0'
 
@@ -333,6 +352,7 @@
 
     return ssl.PROTOCOL_SSLv23, options, protocol
 
+
 def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
     """Add SSL/TLS to a socket.
 
@@ -352,21 +372,29 @@
     if b'SSLKEYLOGFILE' in encoding.environ:
         try:
             import sslkeylog
-            sslkeylog.set_keylog(pycompat.fsdecode(
-                encoding.environ[b'SSLKEYLOGFILE']))
+
+            sslkeylog.set_keylog(
+                pycompat.fsdecode(encoding.environ[b'SSLKEYLOGFILE'])
+            )
             ui.warn(
-                b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n')
+                b'sslkeylog enabled by SSLKEYLOGFILE environment variable\n'
+            )
         except ImportError:
-            ui.warn(b'sslkeylog module missing, '
-                    b'but SSLKEYLOGFILE set in environment\n')
+            ui.warn(
+                b'sslkeylog module missing, '
+                b'but SSLKEYLOGFILE set in environment\n'
+            )
 
     for f in (keyfile, certfile):
         if f and not os.path.exists(f):
             raise error.Abort(
                 _('certificate file (%s) does not exist; cannot connect to %s')
                 % (f, pycompat.bytesurl(serverhostname)),
-                hint=_('restore missing file or fix references '
-                       'in Mercurial config'))
+                hint=_(
+                    'restore missing file or fix references '
+                    'in Mercurial config'
+                ),
+            )
 
     settings = _hostsettings(ui, serverhostname)
 
@@ -392,26 +420,31 @@
             raise error.Abort(
                 _('could not set ciphers: %s')
                 % stringutil.forcebytestr(e.args[0]),
-                hint=_('change cipher string (%s) in config') %
-                settings['ciphers'])
+                hint=_('change cipher string (%s) in config')
+                % settings['ciphers'],
+            )
 
     if certfile is not None:
+
         def password():
             f = keyfile or certfile
             return ui.getpass(_('passphrase for %s: ') % f, '')
+
         sslcontext.load_cert_chain(certfile, keyfile, password)
 
     if settings['cafile'] is not None:
         try:
             sslcontext.load_verify_locations(cafile=settings['cafile'])
         except ssl.SSLError as e:
-            if len(e.args) == 1: # pypy has different SSLError args
+            if len(e.args) == 1:  # pypy has different SSLError args
                 msg = e.args[0]
             else:
                 msg = e.args[1]
-            raise error.Abort(_('error loading CA file %s: %s') % (
-                              settings['cafile'], stringutil.forcebytestr(msg)),
-                              hint=_('file is empty or malformed?'))
+            raise error.Abort(
+                _('error loading CA file %s: %s')
+                % (settings['cafile'], stringutil.forcebytestr(msg)),
+                hint=_('file is empty or malformed?'),
+            )
         caloaded = True
     elif settings['allowloaddefaultcerts']:
         # This is a no-op on old Python.
@@ -433,13 +466,21 @@
         # When the main 20916 bug occurs, 'sslcontext.get_ca_certs()' is a
         # non-empty list, but the following conditional is otherwise True.
         try:
-            if (caloaded and settings['verifymode'] == ssl.CERT_REQUIRED and
-                modernssl and not sslcontext.get_ca_certs()):
-                ui.warn(_('(an attempt was made to load CA certificates but '
-                          'none were loaded; see '
-                          'https://mercurial-scm.org/wiki/SecureConnections '
-                          'for how to configure Mercurial to avoid this '
-                          'error)\n'))
+            if (
+                caloaded
+                and settings['verifymode'] == ssl.CERT_REQUIRED
+                and modernssl
+                and not sslcontext.get_ca_certs()
+            ):
+                ui.warn(
+                    _(
+                        '(an attempt was made to load CA certificates but '
+                        'none were loaded; see '
+                        'https://mercurial-scm.org/wiki/SecureConnections '
+                        'for how to configure Mercurial to avoid this '
+                        'error)\n'
+                    )
+                )
         except ssl.SSLError:
             pass
 
@@ -459,51 +500,76 @@
                     # client doesn't support modern TLS versions introduced
                     # several years from when this comment was written).
                     if supportedprotocols != {'tls1.0'}:
-                        ui.warn(_(
-                            '(could not communicate with %s using security '
-                            'protocols %s; if you are using a modern Mercurial '
-                            'version, consider contacting the operator of this '
-                            'server; see '
-                            'https://mercurial-scm.org/wiki/SecureConnections '
-                            'for more info)\n') % (
+                        ui.warn(
+                            _(
+                                '(could not communicate with %s using security '
+                                'protocols %s; if you are using a modern Mercurial '
+                                'version, consider contacting the operator of this '
+                                'server; see '
+                                'https://mercurial-scm.org/wiki/SecureConnections '
+                                'for more info)\n'
+                            )
+                            % (
                                 pycompat.bytesurl(serverhostname),
-                                ', '.join(sorted(supportedprotocols))))
+                                ', '.join(sorted(supportedprotocols)),
+                            )
+                        )
                     else:
-                        ui.warn(_(
-                            '(could not communicate with %s using TLS 1.0; the '
-                            'likely cause of this is the server no longer '
-                            'supports TLS 1.0 because it has known security '
-                            'vulnerabilities; see '
-                            'https://mercurial-scm.org/wiki/SecureConnections '
-                            'for more info)\n') %
-                                pycompat.bytesurl(serverhostname))
+                        ui.warn(
+                            _(
+                                '(could not communicate with %s using TLS 1.0; the '
+                                'likely cause of this is the server no longer '
+                                'supports TLS 1.0 because it has known security '
+                                'vulnerabilities; see '
+                                'https://mercurial-scm.org/wiki/SecureConnections '
+                                'for more info)\n'
+                            )
+                            % pycompat.bytesurl(serverhostname)
+                        )
                 else:
                     # We attempted TLS 1.1+. We can only get here if the client
                     # supports the configured protocol. So the likely reason is
                     # the client wants better security than the server can
                     # offer.
-                    ui.warn(_(
-                        '(could not negotiate a common security protocol (%s+) '
-                        'with %s; the likely cause is Mercurial is configured '
-                        'to be more secure than the server can support)\n') % (
-                        settings['protocolui'],
-                        pycompat.bytesurl(serverhostname)))
-                    ui.warn(_('(consider contacting the operator of this '
-                              'server and ask them to support modern TLS '
-                              'protocol versions; or, set '
-                              'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
-                              'use of legacy, less secure protocols when '
-                              'communicating with this server)\n') %
-                            pycompat.bytesurl(serverhostname))
-                    ui.warn(_(
-                        '(see https://mercurial-scm.org/wiki/SecureConnections '
-                        'for more info)\n'))
+                    ui.warn(
+                        _(
+                            '(could not negotiate a common security protocol (%s+) '
+                            'with %s; the likely cause is Mercurial is configured '
+                            'to be more secure than the server can support)\n'
+                        )
+                        % (
+                            settings['protocolui'],
+                            pycompat.bytesurl(serverhostname),
+                        )
+                    )
+                    ui.warn(
+                        _(
+                            '(consider contacting the operator of this '
+                            'server and ask them to support modern TLS '
+                            'protocol versions; or, set '
+                            'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
+                            'use of legacy, less secure protocols when '
+                            'communicating with this server)\n'
+                        )
+                        % pycompat.bytesurl(serverhostname)
+                    )
+                    ui.warn(
+                        _(
+                            '(see https://mercurial-scm.org/wiki/SecureConnections '
+                            'for more info)\n'
+                        )
+                    )
 
-            elif (e.reason == r'CERTIFICATE_VERIFY_FAILED' and
-                pycompat.iswindows):
+            elif (
+                e.reason == r'CERTIFICATE_VERIFY_FAILED' and pycompat.iswindows
+            ):
 
-                ui.warn(_('(the full certificate chain may not be available '
-                          'locally; see "hg help debugssl")\n'))
+                ui.warn(
+                    _(
+                        '(the full certificate chain may not be available '
+                        'locally; see "hg help debugssl")\n'
+                    )
+                )
         raise
 
     # check if wrap_socket failed silently because socket had been
@@ -521,8 +587,10 @@
 
     return sslsocket
 
-def wrapserversocket(sock, ui, certfile=None, keyfile=None, cafile=None,
-                     requireclientcert=False):
+
+def wrapserversocket(
+    sock, ui, certfile=None, keyfile=None, cafile=None, requireclientcert=False
+):
     """Wrap a socket for use by servers.
 
     ``certfile`` and ``keyfile`` specify the files containing the certificate's
@@ -539,8 +607,9 @@
     # doesn't have to be as detailed as for wrapsocket().
     for f in (certfile, keyfile, cafile):
         if f and not os.path.exists(f):
-            raise error.Abort(_('referenced certificate file (%s) does not '
-                                'exist') % f)
+            raise error.Abort(
+                _('referenced certificate file (%s) does not ' 'exist') % f
+            )
 
     protocol, options, _protocolui = protocolsettings('tls1.0')
 
@@ -558,8 +627,9 @@
             raise error.Abort(_('TLS 1.2 not supported by this Python'))
         protocol = ssl.PROTOCOL_TLSv1_2
     elif exactprotocol:
-        raise error.Abort(_('invalid value for serverexactprotocol: %s') %
-                          exactprotocol)
+        raise error.Abort(
+            _('invalid value for serverexactprotocol: %s') % exactprotocol
+        )
 
     if modernssl:
         # We /could/ use create_default_context() here since it doesn't load
@@ -592,9 +662,11 @@
 
     return sslcontext.wrap_socket(sock, server_side=True)
 
+
 class wildcarderror(Exception):
     """Represents an error parsing wildcards in DNS name."""
 
+
 def _dnsnamematch(dn, hostname, maxwildcards=1):
     """Match DNS names according RFC 6125 section 6.4.3.
 
@@ -615,7 +687,8 @@
     wildcards = leftmost.count('*')
     if wildcards > maxwildcards:
         raise wildcarderror(
-            _('too many wildcards in certificate DNS name: %s') % dn)
+            _('too many wildcards in certificate DNS name: %s') % dn
+        )
 
     # speed up common case w/o wildcards
     if not wildcards:
@@ -645,6 +718,7 @@
     pat = re.compile(br'\A' + br'\.'.join(pats) + br'\Z', re.IGNORECASE)
     return pat.match(hostname) is not None
 
+
 def _verifycert(cert, hostname):
     '''Verify that cert (in socket.getpeercert() format) matches hostname.
     CRLs are not handled.
@@ -695,6 +769,7 @@
     else:
         return _('no commonName or subjectAltName found in certificate')
 
+
 def _plainapplepython():
     """return true if this seems to be a pure Apple Python that
     * is unfrozen and presumably has the whole mercurial module in the file
@@ -703,12 +778,17 @@
       for using system certificate store CAs in addition to the provided
       cacerts file
     """
-    if (not pycompat.isdarwin or procutil.mainfrozen() or
-        not pycompat.sysexecutable):
+    if (
+        not pycompat.isdarwin
+        or procutil.mainfrozen()
+        or not pycompat.sysexecutable
+    ):
         return False
     exe = os.path.realpath(pycompat.sysexecutable).lower()
-    return (exe.startswith('/usr/bin/python') or
-            exe.startswith('/system/library/frameworks/python.framework/'))
+    return exe.startswith('/usr/bin/python') or exe.startswith(
+        '/system/library/frameworks/python.framework/'
+    )
+
 
 _systemcacertpaths = [
     # RHEL, CentOS, and Fedora
@@ -717,6 +797,7 @@
     '/etc/ssl/certs/ca-certificates.crt',
 ]
 
+
 def _defaultcacerts(ui):
     """return path to default CA certificates or None.
 
@@ -731,6 +812,7 @@
     # and usable, assume the user intends it to be used and use it.
     try:
         import certifi
+
         certs = certifi.where()
         if os.path.exists(certs):
             ui.debug('using ca certificates from certifi\n')
@@ -745,9 +827,13 @@
     # Assertion: this code is only called if certificates are being verified.
     if pycompat.iswindows:
         if not _canloaddefaultcerts:
-            ui.warn(_('(unable to load Windows CA certificates; see '
-                      'https://mercurial-scm.org/wiki/SecureConnections for '
-                      'how to configure Mercurial to avoid this message)\n'))
+            ui.warn(
+                _(
+                    '(unable to load Windows CA certificates; see '
+                    'https://mercurial-scm.org/wiki/SecureConnections for '
+                    'how to configure Mercurial to avoid this message)\n'
+                )
+            )
 
         return None
 
@@ -756,7 +842,8 @@
     # trick.
     if _plainapplepython():
         dummycert = os.path.join(
-            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem')
+            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem'
+        )
         if os.path.exists(dummycert):
             return dummycert
 
@@ -767,9 +854,13 @@
         # files. Also consider exporting the keychain certs to a file during
         # Mercurial install.
         if not _canloaddefaultcerts:
-            ui.warn(_('(unable to load CA certificates; see '
-                      'https://mercurial-scm.org/wiki/SecureConnections for '
-                      'how to configure Mercurial to avoid this message)\n'))
+            ui.warn(
+                _(
+                    '(unable to load CA certificates; see '
+                    'https://mercurial-scm.org/wiki/SecureConnections for '
+                    'how to configure Mercurial to avoid this message)\n'
+                )
+            )
         return None
 
     # / is writable on Windows. Out of an abundance of caution make sure
@@ -787,20 +878,30 @@
     if not _canloaddefaultcerts:
         for path in _systemcacertpaths:
             if os.path.isfile(path):
-                ui.warn(_('(using CA certificates from %s; if you see this '
-                          'message, your Mercurial install is not properly '
-                          'configured; see '
-                          'https://mercurial-scm.org/wiki/SecureConnections '
-                          'for how to configure Mercurial to avoid this '
-                          'message)\n') % path)
+                ui.warn(
+                    _(
+                        '(using CA certificates from %s; if you see this '
+                        'message, your Mercurial install is not properly '
+                        'configured; see '
+                        'https://mercurial-scm.org/wiki/SecureConnections '
+                        'for how to configure Mercurial to avoid this '
+                        'message)\n'
+                    )
+                    % path
+                )
                 return path
 
-        ui.warn(_('(unable to load CA certificates; see '
-                  'https://mercurial-scm.org/wiki/SecureConnections for '
-                  'how to configure Mercurial to avoid this message)\n'))
+        ui.warn(
+            _(
+                '(unable to load CA certificates; see '
+                'https://mercurial-scm.org/wiki/SecureConnections for '
+                'how to configure Mercurial to avoid this message)\n'
+            )
+        )
 
     return None
 
+
 def validatesocket(sock):
     """Validate a socket meets security requirements.
 
@@ -818,8 +919,9 @@
         raise error.Abort(_('%s ssl connection error') % host)
 
     if not peercert:
-        raise error.Abort(_('%s certificate error: '
-                           'no certificate received') % host)
+        raise error.Abort(
+            _('%s certificate error: ' 'no certificate received') % host
+        )
 
     if settings['disablecertverification']:
         # We don't print the certificate fingerprint because it shouldn't
@@ -828,9 +930,14 @@
         # to verify the certificate and this message would have printed the
         # fingerprint. So printing the fingerprint here adds little to no
         # value.
-        ui.warn(_('warning: connection security to %s is disabled per current '
-                  'settings; communication is susceptible to eavesdropping '
-                  'and tampering\n') % host)
+        ui.warn(
+            _(
+                'warning: connection security to %s is disabled per current '
+                'settings; communication is susceptible to eavesdropping '
+                'and tampering\n'
+            )
+            % host
+        )
         return
 
     # If a certificate fingerprint is pinned, use it and only it to
@@ -842,23 +949,29 @@
     }
 
     def fmtfingerprint(s):
-        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])
+        return ':'.join([s[x : x + 2] for x in range(0, len(s), 2)])
 
     nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])
 
     if settings['certfingerprints']:
         for hash, fingerprint in settings['certfingerprints']:
             if peerfingerprints[hash].lower() == fingerprint:
-                ui.debug('%s certificate matched fingerprint %s:%s\n' %
-                         (host, hash, fmtfingerprint(fingerprint)))
+                ui.debug(
+                    '%s certificate matched fingerprint %s:%s\n'
+                    % (host, hash, fmtfingerprint(fingerprint))
+                )
                 if settings['legacyfingerprint']:
-                    ui.warn(_('(SHA-1 fingerprint for %s found in legacy '
-                              '[hostfingerprints] section; '
-                              'if you trust this fingerprint, remove the old '
-                              'SHA-1 fingerprint from [hostfingerprints] and '
-                              'add the following entry to the new '
-                              '[hostsecurity] section: %s:fingerprints=%s)\n') %
-                            (host, host, nicefingerprint))
+                    ui.warn(
+                        _(
+                            '(SHA-1 fingerprint for %s found in legacy '
+                            '[hostfingerprints] section; '
+                            'if you trust this fingerprint, remove the old '
+                            'SHA-1 fingerprint from [hostfingerprints] and '
+                            'add the following entry to the new '
+                            '[hostsecurity] section: %s:fingerprints=%s)\n'
+                        )
+                        % (host, host, nicefingerprint)
+                    )
                 return
 
         # Pinned fingerprint didn't match. This is a fatal error.
@@ -868,25 +981,37 @@
         else:
             section = 'hostsecurity'
             nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
-        raise error.Abort(_('certificate for %s has unexpected '
-                            'fingerprint %s') % (host, nice),
-                          hint=_('check %s configuration') % section)
+        raise error.Abort(
+            _('certificate for %s has unexpected ' 'fingerprint %s')
+            % (host, nice),
+            hint=_('check %s configuration') % section,
+        )
 
     # Security is enabled but no CAs are loaded. We can't establish trust
     # for the cert so abort.
     if not sock._hgstate['caloaded']:
         raise error.Abort(
-            _('unable to verify security of %s (no loaded CA certificates); '
-              'refusing to connect') % host,
-            hint=_('see https://mercurial-scm.org/wiki/SecureConnections for '
-                   'how to configure Mercurial to avoid this error or set '
-                   'hostsecurity.%s:fingerprints=%s to trust this server') %
-                   (host, nicefingerprint))
+            _(
+                'unable to verify security of %s (no loaded CA certificates); '
+                'refusing to connect'
+            )
+            % host,
+            hint=_(
+                'see https://mercurial-scm.org/wiki/SecureConnections for '
+                'how to configure Mercurial to avoid this error or set '
+                'hostsecurity.%s:fingerprints=%s to trust this server'
+            )
+            % (host, nicefingerprint),
+        )
 
     msg = _verifycert(peercert2, shost)
     if msg:
-        raise error.Abort(_('%s certificate error: %s') % (host, msg),
-                         hint=_('set hostsecurity.%s:certfingerprints=%s '
-                                'config setting or use --insecure to connect '
-                                'insecurely') %
-                              (host, nicefingerprint))
+        raise error.Abort(
+            _('%s certificate error: %s') % (host, msg),
+            hint=_(
+                'set hostsecurity.%s:certfingerprints=%s '
+                'config setting or use --insecure to connect '
+                'insecurely'
+            )
+            % (host, nicefingerprint),
+        )
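
Throughout sslutil.py, black breaks long binary expressions before the
operator, so the '%' of a wrapped format expression now starts the
continuation line instead of trailing the previous one. A minimal sketch,
where _ is a stand-in identity function for mercurial.i18n._ (gettext):

def _(msg):
    # Identity stand-in for gettext; enough for the wrapping sketch.
    return msg


host = 'example.com'

# Pre-black: the '%' trails the wrapped call.
warning = (_('warning: connecting to %s using legacy security '
             'technology (TLS 1.0)') % host)

# Post-black: the '%' leads the continuation line, keeping operators
# visible at the left margin.
warning = (
    _(
        'warning: connecting to %s using legacy security '
        'technology (TLS 1.0)'
    )
    % host
)

assert 'example.com' in warning
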
--- a/mercurial/state.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/state.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,9 +25,8 @@
     error,
     util,
 )
-from .utils import (
-    cborutil,
-)
+from .utils import cborutil
+
 
 class cmdstate(object):
     """a wrapper class to store the state of commands like `rebase`, `graft`,
@@ -60,8 +59,9 @@
         we use third-party library cbor to serialize data to write in the file.
         """
         if not isinstance(version, int):
-            raise error.ProgrammingError("version of state file should be"
-                                         " an integer")
+            raise error.ProgrammingError(
+                "version of state file should be" " an integer"
+            )
 
         with self._repo.vfs(self.fname, 'wb', atomictemp=True) as fp:
             fp.write('%d\n' % version)
@@ -75,8 +75,9 @@
             try:
                 int(fp.readline())
             except ValueError:
-                raise error.CorruptedState("unknown version of state file"
-                                           " found")
+                raise error.CorruptedState(
+                    "unknown version of state file" " found"
+                )
 
             return cborutil.decodeall(fp.read())[0]
 
@@ -88,6 +89,7 @@
         """check whether the state file exists or not"""
         return self._repo.vfs.exists(self.fname)
 
+
 class _statecheck(object):
     """a utility class that deals with multistep operations like graft,
        histedit, bisect, update etc and check whether such commands
@@ -97,9 +99,21 @@
        multistep operation or multistep command extension.
     """
 
-    def __init__(self, opname, fname, clearable, allowcommit, reportonly,
-                 continueflag, stopflag, cmdmsg, cmdhint, statushint,
-                 abortfunc, continuefunc):
+    def __init__(
+        self,
+        opname,
+        fname,
+        clearable,
+        allowcommit,
+        reportonly,
+        continueflag,
+        stopflag,
+        cmdmsg,
+        cmdhint,
+        statushint,
+        abortfunc,
+        continuefunc,
+    ):
         self._opname = opname
         self._fname = fname
         self._clearable = clearable
@@ -118,12 +132,14 @@
         hg status --verbose
         """
         if not self._statushint:
-            hint = (_('To continue:    hg %s --continue\n'
-                      'To abort:       hg %s --abort') % (self._opname,
-                       self._opname))
+            hint = _(
+                'To continue:    hg %s --continue\n'
+                'To abort:       hg %s --abort'
+            ) % (self._opname, self._opname)
             if self._stopflag:
-                hint = hint + (_('\nTo stop:        hg %s --stop') %
-                            (self._opname))
+                hint = hint + (
+                    _('\nTo stop:        hg %s --stop') % (self._opname)
+                )
             return hint
         return self._statushint
 
@@ -132,8 +148,10 @@
         operation
         """
         if not self._cmdhint:
-                return (_("use 'hg %s --continue' or 'hg %s --abort'") %
-                        (self._opname, self._opname))
+            return _("use 'hg %s --continue' or 'hg %s --abort'") % (
+                self._opname,
+                self._opname,
+            )
         return self._cmdhint
 
     def msg(self):
@@ -155,13 +173,25 @@
         else:
             return repo.vfs.exists(self._fname)
 
+
 # A list of statecheck objects for multistep operations like graft.
 _unfinishedstates = []
 
-def addunfinished(opname, fname, clearable=False, allowcommit=False,
-                  reportonly=False, continueflag=False, stopflag=False,
-                  cmdmsg="", cmdhint="", statushint="", abortfunc=None,
-                  continuefunc=None):
+
+def addunfinished(
+    opname,
+    fname,
+    clearable=False,
+    allowcommit=False,
+    reportonly=False,
+    continueflag=False,
+    stopflag=False,
+    cmdmsg="",
+    cmdhint="",
+    statushint="",
+    abortfunc=None,
+    continuefunc=None,
+):
     """this registers a new command or operation to unfinishedstates
     opname is the name of the command or operation
     fname is the file name in which data should be stored in .hg directory.
@@ -189,27 +219,47 @@
     continuefunc stores the function required to finish an interrupted
     operation.
     """
-    statecheckobj = _statecheck(opname, fname, clearable, allowcommit,
-                                reportonly, continueflag, stopflag, cmdmsg,
-                                cmdhint, statushint, abortfunc, continuefunc)
+    statecheckobj = _statecheck(
+        opname,
+        fname,
+        clearable,
+        allowcommit,
+        reportonly,
+        continueflag,
+        stopflag,
+        cmdmsg,
+        cmdhint,
+        statushint,
+        abortfunc,
+        continuefunc,
+    )
     if opname == 'merge':
         _unfinishedstates.append(statecheckobj)
     else:
         _unfinishedstates.insert(0, statecheckobj)
 
+
 addunfinished(
-    'update', fname='updatestate', clearable=True,
+    'update',
+    fname='updatestate',
+    clearable=True,
     cmdmsg=_('last update was interrupted'),
     cmdhint=_("use 'hg update' to get a consistent checkout"),
-    statushint=_("To continue:    hg update .")
+    statushint=_("To continue:    hg update ."),
 )
 addunfinished(
-    'bisect', fname='bisect.state', allowcommit=True, reportonly=True,
-    statushint=_('To mark the changeset good:    hg bisect --good\n'
-                 'To mark the changeset bad:     hg bisect --bad\n'
-                 'To abort:                      hg bisect --reset\n')
+    'bisect',
+    fname='bisect.state',
+    allowcommit=True,
+    reportonly=True,
+    statushint=_(
+        'To mark the changeset good:    hg bisect --good\n'
+        'To mark the changeset bad:     hg bisect --bad\n'
+        'To abort:                      hg bisect --reset\n'
+    ),
 )
 
+
 def getrepostate(repo):
     # experimental config: commands.status.skipstates
     skip = set(repo.ui.configlist('commands', 'status.skipstates'))
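
Black never merges adjacent string literals, so messages that were previously
wrapped across lines survive as implicit concatenation on a single line, as in
"version of state file should be" " an integer" above. A two-line sketch of
why that seam is harmless:

# Adjacent string literals are concatenated at compile time.
msg = "version of state file should be" " an integer"
assert msg == "version of state file should be an integer"

Such seams are purely cosmetic and easy to merge by hand afterwards.
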
--- a/mercurial/statichttprepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/statichttprepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,6 +29,7 @@
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 class httprangereader(object):
     def __init__(self, url, opener):
         # we assume opener has HTTPRangeHandler
@@ -45,6 +46,7 @@
 
     def seek(self, pos):
         self.pos = pos
+
     def read(self, bytes=None):
         req = urlreq.request(pycompat.strurl(self.url))
         end = ''
@@ -67,26 +69,31 @@
             # HTTPRangeHandler does nothing if remote does not support
             # Range headers and returns the full entity. Let's slice it.
             if bytes:
-                data = data[self.pos:self.pos + bytes]
+                data = data[self.pos : self.pos + bytes]
             else:
-                data = data[self.pos:]
+                data = data[self.pos :]
         elif bytes:
             data = data[:bytes]
         self.pos += len(data)
         return data
+
     def readlines(self):
         return self.read().splitlines(True)
+
     def __iter__(self):
         return iter(self.readlines())
+
     def close(self):
         pass
 
+
 # _RangeError and _HTTPRangeHandler were originally in byterange.py,
 # which was itself extracted from urlgrabber. See the last version of
 # byterange.py from history if you need more information.
 class _RangeError(IOError):
     """Error raised when an unsatisfiable range is requested."""
 
+
 class _HTTPRangeHandler(urlreq.basehandler):
     """Handler that enables HTTP Range headers.
 
@@ -107,6 +114,7 @@
         # HTTP's Range Not Satisfiable error
         raise _RangeError('Requested Range Not Satisfiable')
 
+
 def build_opener(ui, authinfo):
     # urllib cannot handle URLs with embedded user or passwd
     urlopener = url.opener(ui, authinfo)
@@ -131,14 +139,18 @@
 
     return statichttpvfs
 
+
 class statichttppeer(localrepo.localpeer):
     def local(self):
         return None
+
     def canpush(self):
         return False
 
-class statichttprepository(localrepo.localrepository,
-                           localrepo.revlogfilestorage):
+
+class statichttprepository(
+    localrepo.localrepository, localrepo.revlogfilestorage
+):
     supported = localrepo.localrepository._basesupported
 
     def __init__(self, ui, path):
@@ -178,8 +190,9 @@
                 raise error.RepoError(msg)
 
         supportedrequirements = localrepo.gathersupportedrequirements(ui)
-        localrepo.ensurerequirementsrecognized(requirements,
-                                               supportedrequirements)
+        localrepo.ensurerequirementsrecognized(
+            requirements, supportedrequirements
+        )
         localrepo.ensurerequirementscompatible(ui, requirements)
 
         # setup store
@@ -191,8 +204,9 @@
         self.requirements = requirements
 
         rootmanifest = manifest.manifestrevlog(self.svfs)
-        self.manifestlog = manifest.manifestlog(self.svfs, self, rootmanifest,
-                                                self.narrowmatch())
+        self.manifestlog = manifest.manifestlog(
+            self.svfs, self, rootmanifest, self.narrowmatch()
+        )
         self.changelog = changelog.changelog(self.svfs)
         self._tags = None
         self.nodetagscache = None
@@ -216,14 +230,19 @@
         return statichttppeer(self)
 
     def wlock(self, wait=True):
-        raise error.LockUnavailable(0, _('lock not available'), 'lock',
-                                    _('cannot lock static-http repository'))
+        raise error.LockUnavailable(
+            0,
+            _('lock not available'),
+            'lock',
+            _('cannot lock static-http repository'),
+        )
 
     def lock(self, wait=True):
         raise error.Abort(_('cannot lock static-http repository'))
 
     def _writecaches(self):
-        pass # statichttprepository are read only
+        pass  # statichttprepository are read only
+
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
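
The httprangereader hunks show black's PEP 8 slice spacing: when a slice bound
is an expression rather than a simple name or number, spaces are added around
the colon, as in data[self.pos : self.pos + bytes] above. A short sketch of
the two cases:

data = b'0123456789'
pos, n = 2, 4

chunk = data[pos : pos + n]  # complex bounds: spaces around ':'
head = data[:n]              # simple bound: no spaces
assert chunk == b'2345'
assert head == b'0123'
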
--- a/mercurial/statprof.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/statprof.py	Sun Oct 06 09:45:02 2019 -0400
@@ -144,6 +144,7 @@
 ###########################################################################
 ## Utils
 
+
 def clock():
     times = os.times()
     return (times[0] + times[1], times[4])
@@ -152,6 +153,7 @@
 ###########################################################################
 ## Collection data structures
 
+
 class ProfileState(object):
     def __init__(self, frequency=None):
         self.reset(frequency)
@@ -196,6 +198,7 @@
             return 1
         return 0
 
+
 state = ProfileState()
 
 
@@ -214,8 +217,7 @@
 
     def __eq__(self, other):
         try:
-            return (self.lineno == other.lineno and
-                    self.path == other.path)
+            return self.lineno == other.lineno and self.path == other.path
         except:
             return False
 
@@ -248,7 +250,7 @@
 
         source = self.source
         if len(source) > length:
-            source = source[:(length - 3)] + "..."
+            source = source[: (length - 3)] + "..."
         return source
 
     def filename(self):
@@ -257,6 +259,7 @@
     def skipname(self):
         return r'%s:%s' % (self.filename(), self.function)
 
+
 class Sample(object):
     __slots__ = (r'stack', r'time')
 
@@ -269,17 +272,22 @@
         stack = []
 
         while frame:
-            stack.append(CodeSite.get(
-                pycompat.sysbytes(frame.f_code.co_filename),
-                frame.f_lineno,
-                pycompat.sysbytes(frame.f_code.co_name)))
+            stack.append(
+                CodeSite.get(
+                    pycompat.sysbytes(frame.f_code.co_filename),
+                    frame.f_lineno,
+                    pycompat.sysbytes(frame.f_code.co_name),
+                )
+            )
             frame = frame.f_back
 
         return Sample(stack, time)
 
+
 ###########################################################################
 ## SIGPROF handler
 
+
 def profile_signal_handler(signum, frame):
     if state.profile_level > 0:
         now = clock()
@@ -288,11 +296,13 @@
         timestamp = state.accumulated_time[state.timeidx]
         state.samples.append(Sample.from_frame(frame, timestamp))
 
-        signal.setitimer(signal.ITIMER_PROF,
-            state.sample_interval, 0.0)
+        signal.setitimer(signal.ITIMER_PROF, state.sample_interval, 0.0)
         state.last_start_time = now
 
+
 stopthread = threading.Event()
+
+
 def samplerthread(tid):
     while not stopthread.is_set():
         now = clock()
@@ -308,16 +318,21 @@
 
     stopthread.clear()
 
+
 ###########################################################################
 ## Profiling API
 
+
 def is_active():
     return state.profile_level > 0
 
+
 lastmechanism = None
+
+
 def start(mechanism='thread', track='cpu'):
     '''Install the profiling signal handler, and start profiling.'''
-    state.track = track # note: nesting different modes won't work
+    state.track = track  # note: nesting different modes won't work
     state.profile_level += 1
     if state.profile_level == 1:
         state.last_start_time = clock()
@@ -329,15 +344,18 @@
 
         if mechanism == 'signal':
             signal.signal(signal.SIGPROF, profile_signal_handler)
-            signal.setitimer(signal.ITIMER_PROF,
-                rpt or state.sample_interval, 0.0)
+            signal.setitimer(
+                signal.ITIMER_PROF, rpt or state.sample_interval, 0.0
+            )
         elif mechanism == 'thread':
             frame = inspect.currentframe()
             tid = [k for k, f in sys._current_frames().items() if f == frame][0]
-            state.thread = threading.Thread(target=samplerthread,
-                                 args=(tid,), name="samplerthread")
+            state.thread = threading.Thread(
+                target=samplerthread, args=(tid,), name="samplerthread"
+            )
             state.thread.start()
 
+
 def stop():
     '''Stop profiling, and uninstall the profiling signal handler.'''
     state.profile_level -= 1
@@ -358,16 +376,19 @@
 
     return state
 
+
 def save_data(path):
     with open(path, 'w+') as file:
         file.write("%f %f\n" % state.accumulated_time)
         for sample in state.samples:
             time = sample.time
             stack = sample.stack
-            sites = ['\1'.join([s.path, b'%d' % s.lineno, s.function])
-                     for s in stack]
+            sites = [
+                '\1'.join([s.path, b'%d' % s.lineno, s.function]) for s in stack
+            ]
             file.write("%d\0%s\n" % (time, '\0'.join(sites)))
 
+
 def load_data(path):
     lines = open(path, 'rb').read().splitlines()
 
@@ -380,13 +401,13 @@
         sites = []
         for rawsite in rawsites:
             siteparts = rawsite.split('\1')
-            sites.append(CodeSite.get(siteparts[0], int(siteparts[1]),
-                        siteparts[2]))
+            sites.append(
+                CodeSite.get(siteparts[0], int(siteparts[1]), siteparts[2])
+            )
 
         state.samples.append(Sample(sites, time))
 
 
-
 def reset(frequency=None):
     '''Clear out the state of the profiler.  Do not call while the
     profiler is running.
@@ -411,6 +432,7 @@
 ###########################################################################
 ## Reporting API
 
+
 class SiteStats(object):
     def __init__(self, site):
         self.site = site
@@ -453,6 +475,7 @@
 
         return [s for s in stats.itervalues()]
 
+
 class DisplayFormats:
     ByLine = 0
     ByMethod = 1
@@ -462,6 +485,7 @@
     Json = 5
     Chrome = 6
 
+
 def display(fp=None, format=3, data=None, **kwargs):
     '''Print statistics, either to stdout or the given file object.'''
     if data is None:
@@ -469,6 +493,7 @@
 
     if fp is None:
         import sys
+
         fp = sys.stdout
     if len(data.samples) == 0:
         fp.write(b'No samples recorded.\n')
@@ -496,34 +521,47 @@
         fp.write(b'Sample count: %d\n' % len(data.samples))
         fp.write(b'Total time: %f seconds (%f wall)\n' % data.accumulated_time)
 
+
 def display_by_line(data, fp):
     '''Print the profiler data with each sample line represented
     as one row in a table.  Sorted by self-time per line.'''
     stats = SiteStats.buildstats(data.samples)
     stats.sort(reverse=True, key=lambda x: x.selfseconds())
 
-    fp.write(b'%5.5s %10.10s   %7.7s  %-8.8s\n' % (
-        b'%  ', b'cumulative', b'self', b''))
-    fp.write(b'%5.5s  %9.9s  %8.8s  %-8.8s\n' % (
-        b"time", b"seconds", b"seconds", b"name"))
+    fp.write(
+        b'%5.5s %10.10s   %7.7s  %-8.8s\n'
+        % (b'%  ', b'cumulative', b'self', b'')
+    )
+    fp.write(
+        b'%5.5s  %9.9s  %8.8s  %-8.8s\n'
+        % (b"time", b"seconds", b"seconds", b"name")
+    )
 
     for stat in stats:
         site = stat.site
-        sitelabel = '%s:%d:%s' % (site.filename(),
-                                  site.lineno,
-                                  site.function)
-        fp.write(b'%6.2f %9.2f %9.2f  %s\n' % (
-            stat.selfpercent(), stat.totalseconds(),
-            stat.selfseconds(), sitelabel))
+        sitelabel = '%s:%d:%s' % (site.filename(), site.lineno, site.function)
+        fp.write(
+            b'%6.2f %9.2f %9.2f  %s\n'
+            % (
+                stat.selfpercent(),
+                stat.totalseconds(),
+                stat.selfseconds(),
+                sitelabel,
+            )
+        )
+
 
 def display_by_method(data, fp):
     '''Print the profiler data with each sample function represented
     as one row in a table.  Important lines within that function are
     output as nested rows.  Sorted by self-time per line.'''
-    fp.write(b'%5.5s %10.10s   %7.7s  %-8.8s\n' %
-          ('%  ', 'cumulative', 'self', ''))
-    fp.write(b'%5.5s  %9.9s  %8.8s  %-8.8s\n' %
-          ("time", "seconds", "seconds", "name"))
+    fp.write(
+        b'%5.5s %10.10s   %7.7s  %-8.8s\n' % ('%  ', 'cumulative', 'self', '')
+    )
+    fp.write(
+        b'%5.5s  %9.9s  %8.8s  %-8.8s\n'
+        % ("time", "seconds", "seconds", "name")
+    )
 
     stats = SiteStats.buildstats(data.samples)
 
@@ -542,11 +580,9 @@
             total_self_sec += stat.selfseconds()
             total_percent += stat.selfpercent()
 
-        functiondata.append((fname,
-                             total_cum_sec,
-                             total_self_sec,
-                             total_percent,
-                             sitestats))
+        functiondata.append(
+            (fname, total_cum_sec, total_self_sec, total_percent, sitestats)
+        )
 
     # sort by total self sec
     functiondata.sort(reverse=True, key=lambda x: x[2])
@@ -554,25 +590,36 @@
     for function in functiondata:
         if function[3] < 0.05:
             continue
-        fp.write(b'%6.2f %9.2f %9.2f  %s\n' % (
-            function[3], # total percent
-            function[1], # total cum sec
-            function[2], # total self sec
-            function[0])) # file:function
+        fp.write(
+            b'%6.2f %9.2f %9.2f  %s\n'
+            % (
+                function[3],  # total percent
+                function[1],  # total cum sec
+                function[2],  # total self sec
+                function[0],
+            )
+        )  # file:function
 
         function[4].sort(reverse=True, key=lambda i: i.selfseconds())
         for stat in function[4]:
             # only show line numbers for significant locations (>1% time spent)
             if stat.selfpercent() > 1:
                 source = stat.site.getsource(25)
-                if sys.version_info.major >= 3 and not isinstance(source, bytes):
+                if sys.version_info.major >= 3 and not isinstance(
+                    source, bytes
+                ):
                     source = pycompat.bytestr(source)
 
-                stattuple = (stat.selfpercent(), stat.selfseconds(),
-                             stat.site.lineno, source)
+                stattuple = (
+                    stat.selfpercent(),
+                    stat.selfseconds(),
+                    stat.site.lineno,
+                    source,
+                )
 
                 fp.write(b'%33.0f%% %6.2f   line %d: %s\n' % stattuple)
 
+
 def display_about_method(data, fp, function=None, **kwargs):
     if function is None:
         raise Exception("Invalid function")
@@ -587,8 +634,9 @@
 
     for sample in data.samples:
         for i, site in enumerate(sample.stack):
-            if site.function == function and (not filename
-                or site.filename() == filename):
+            if site.function == function and (
+                not filename or site.filename() == filename
+            ):
                 relevant_samples += 1
                 if i != len(sample.stack) - 1:
                     parent = sample.stack[i + 1]
@@ -605,17 +653,24 @@
     parents = [(parent, count) for parent, count in parents.iteritems()]
     parents.sort(reverse=True, key=lambda x: x[1])
     for parent, count in parents:
-        fp.write(b'%6.2f%%   %s:%s   line %s: %s\n' %
-            (count / relevant_samples * 100,
-             pycompat.fsencode(parent.filename()),
-             pycompat.sysbytes(parent.function),
-             parent.lineno,
-             pycompat.sysbytes(parent.getsource(50))))
+        fp.write(
+            b'%6.2f%%   %s:%s   line %s: %s\n'
+            % (
+                count / relevant_samples * 100,
+                pycompat.fsencode(parent.filename()),
+                pycompat.sysbytes(parent.function),
+                parent.lineno,
+                pycompat.sysbytes(parent.getsource(50)),
+            )
+        )
 
     stats = SiteStats.buildstats(data.samples)
-    stats = [s for s in stats
-               if s.site.function == function and
-               (not filename or s.site.filename() == filename)]
+    stats = [
+        s
+        for s in stats
+        if s.site.function == function
+        and (not filename or s.site.filename() == filename)
+    ]
 
     total_cum_sec = 0
     total_self_sec = 0
@@ -630,20 +685,27 @@
     fp.write(
         b'\n    %s:%s    Total: %0.2fs (%0.2f%%)    Self: %0.2fs (%0.2f%%)\n\n'
         % (
-        pycompat.sysbytes(filename or '___'),
-        pycompat.sysbytes(function),
-        total_cum_sec,
-        total_cum_percent,
-        total_self_sec,
-        total_self_percent
-        ))
+            pycompat.sysbytes(filename or '___'),
+            pycompat.sysbytes(function),
+            total_cum_sec,
+            total_cum_percent,
+            total_self_sec,
+            total_self_percent,
+        )
+    )
 
     children = [(child, count) for child, count in children.iteritems()]
     children.sort(reverse=True, key=lambda x: x[1])
     for child, count in children:
-        fp.write(b'        %6.2f%%   line %s: %s\n' %
-              (count / relevant_samples * 100, child.lineno,
-               pycompat.sysbytes(child.getsource(50))))
+        fp.write(
+            b'        %6.2f%%   line %s: %s\n'
+            % (
+                count / relevant_samples * 100,
+                child.lineno,
+                pycompat.sysbytes(child.getsource(50)),
+            )
+        )
+
 
 def display_hotpath(data, fp, limit=0.05, **kwargs):
     class HotNode(object):
@@ -677,8 +739,11 @@
 
     def _write(node, depth, multiple_siblings):
         site = node.site
-        visiblechildren = [c for c in node.children.itervalues()
-                             if c.count >= (limit * root.count)]
+        visiblechildren = [
+            c
+            for c in node.children.itervalues()
+            if c.count >= (limit * root.count)
+        ]
         if site:
             indent = depth * 2 - 1
             filename = ''
@@ -689,13 +754,18 @@
                 function = childsite.function
 
             # lots of string formatting
-            listpattern = ''.ljust(indent) +\
-                          ('\\' if multiple_siblings else '|') +\
-                          ' %4.1f%%' +\
-                          (' %5.2fs' % node.count if showtime else '') +\
-                          '  %s %s'
-            liststring = listpattern % (node.count / root.count * 100,
-                                        filename, function)
+            listpattern = (
+                ''.ljust(indent)
+                + ('\\' if multiple_siblings else '|')
+                + ' %4.1f%%'
+                + (' %5.2fs' % node.count if showtime else '')
+                + '  %s %s'
+            )
+            liststring = listpattern % (
+                node.count / root.count * 100,
+                filename,
+                function,
+            )
             codepattern = '%' + ('%d' % (55 - len(liststring))) + 's %d:  %s'
             codestring = codepattern % ('line', site.lineno, site.getsource(30))
 
@@ -720,6 +790,7 @@
     if root.count > 0:
         _write(root, 0, False)
 
+
 def write_to_flame(data, fp, scriptpath=None, outputfile=None, **kwargs):
     if scriptpath is None:
         scriptpath = encoding.environ['HOME'] + '/flamegraph.pl'
@@ -750,7 +821,10 @@
     os.system("perl ~/flamegraph.pl %s > %s" % (path, outputfile))
     fp.write(b'Written to %s\n' % outputfile)
 
+
 _pathcache = {}
+
+
 def simplifypath(path):
     '''Attempt to make the path to a Python module easier to read by
     removing whatever part of the Python search path it was found
@@ -762,11 +836,12 @@
     for p in [hgpath] + sys.path:
         prefix = p + os.sep
         if path.startswith(prefix):
-            path = path[len(prefix):]
+            path = path[len(prefix) :]
             break
     _pathcache[path] = path
     return path
 
+
 def write_to_json(data, fp):
     samples = []
 
@@ -775,9 +850,12 @@
 
         for frame in sample.stack:
             stack.append(
-                (pycompat.sysstr(frame.path),
-                 frame.lineno,
-                 pycompat.sysstr(frame.function)))
+                (
+                    pycompat.sysstr(frame.path),
+                    frame.lineno,
+                    pycompat.sysstr(frame.function),
+                )
+            )
 
         samples.append((sample.time, stack))
 
@@ -787,6 +865,7 @@
 
     fp.write(data)
 
+
 def write_to_chrome(data, fp, minthreshold=0.005, maxthreshold=0.999):
     samples = []
     laststack = collections.deque()
@@ -796,7 +875,7 @@
     # representation to save space. It's fiddly but worth it.
     # We maintain a bijection between stack and ID.
     stack2id = {}
-    id2stack = [] # will eventually be rendered
+    id2stack = []  # will eventually be rendered
 
     def stackid(stack):
         if not stack:
@@ -841,8 +920,16 @@
         if minthreshold <= duration <= maxthreshold:
             # ensure no zero-duration events
             sampletime = max(oldtime + clamp, sample.time)
-            samples.append(dict(ph=r'E', name=oldfunc, cat=oldcat, sf=oldsid,
-                                ts=sampletime*1e6, pid=0))
+            samples.append(
+                dict(
+                    ph=r'E',
+                    name=oldfunc,
+                    cat=oldcat,
+                    sf=oldsid,
+                    ts=sampletime * 1e6,
+                    pid=0,
+                )
+            )
         else:
             blacklist.add(oldidx)
 
@@ -850,10 +937,16 @@
     # events given only stack snapshots.
 
     for sample in data.samples:
-        stack = tuple(((r'%s:%d' % (simplifypath(pycompat.sysstr(frame.path)),
-                                    frame.lineno),
-                        pycompat.sysstr(frame.function))
-                       for frame in sample.stack))
+        stack = tuple(
+            (
+                (
+                    r'%s:%d'
+                    % (simplifypath(pycompat.sysstr(frame.path)), frame.lineno),
+                    pycompat.sysstr(frame.function),
+                )
+                for frame in sample.stack
+            )
+        )
         qstack = collections.deque(stack)
         if laststack == qstack:
             continue
@@ -867,23 +960,35 @@
             laststack.appendleft(f)
             path, name = f
             sid = stackid(tuple(laststack))
-            samples.append(dict(ph=r'B', name=name, cat=path,
-                                ts=sample.time*1e6, sf=sid, pid=0))
+            samples.append(
+                dict(
+                    ph=r'B',
+                    name=name,
+                    cat=path,
+                    ts=sample.time * 1e6,
+                    sf=sid,
+                    pid=0,
+                )
+            )
         laststack = collections.deque(stack)
     while laststack:
         poplast()
-    events = [sample for idx, sample in enumerate(samples)
-              if idx not in blacklist]
-    frames = collections.OrderedDict((str(k), v)
-                                     for (k,v) in enumerate(id2stack))
+    events = [
+        sample for idx, sample in enumerate(samples) if idx not in blacklist
+    ]
+    frames = collections.OrderedDict(
+        (str(k), v) for (k, v) in enumerate(id2stack)
+    )
     data = json.dumps(dict(traceEvents=events, stackFrames=frames), indent=1)
     if not isinstance(data, bytes):
         data = data.encode('utf-8')
     fp.write(data)
     fp.write('\n')
 
+
 def printusage():
-    print(r"""
+    print(
+        r"""
 The statprof command line allows you to inspect the last profile's results in
 the following forms:
 
@@ -900,7 +1005,9 @@
     flame [-s --script-path] [-o --output-file path]
         Writes out a flamegraph to output-file (defaults to ~/flamegraph.svg)
         Requires that ~/flamegraph.pl exist.
-        (Specify alternate script path with --script-path.)""")
+        (Specify alternate script path with --script-path.)"""
+    )
+
 
 def main(argv=None):
     if argv is None:
@@ -932,8 +1039,11 @@
 
     # process options
     try:
-        opts, args = pycompat.getoptb(sys.argv[optstart:], "hl:f:o:p:",
-                                   ["help", "limit=", "file=", "output-file=", "script-path="])
+        opts, args = pycompat.getoptb(
+            sys.argv[optstart:],
+            "hl:f:o:p:",
+            ["help", "limit=", "file=", "output-file=", "script-path="],
+        )
     except getopt.error as msg:
         print(msg)
         printusage()
@@ -966,5 +1076,6 @@
 
     return 0
 
+
 if __name__ == r"__main__":
     sys.exit(main())
--- a/mercurial/store.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/store.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,6 +30,7 @@
 # It is done to prevent loading large fncache files into memory
 fncache_chunksize = 10 ** 6
 
+
 def _matchtrackedpath(path, matcher):
     """parses a fncache entry and returns whether the entry is tracking a path
     matched by matcher or not.
@@ -40,12 +41,13 @@
         return True
     path = decodedir(path)
     if path.startswith('data/'):
-        return matcher(path[len('data/'):-len('.i')])
+        return matcher(path[len('data/') : -len('.i')])
     elif path.startswith('meta/'):
-        return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')])
+        return matcher.visitdir(path[len('meta/') : -len('/00manifest.i')])
 
     raise error.ProgrammingError("cannot decode path %s" % path)
 
+
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def _encodedir(path):
@@ -59,13 +61,16 @@
     >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
     'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
     '''
-    return (path
-            .replace(".hg/", ".hg.hg/")
-            .replace(".i/", ".i.hg/")
-            .replace(".d/", ".d.hg/"))
+    return (
+        path.replace(".hg/", ".hg.hg/")
+        .replace(".i/", ".i.hg/")
+        .replace(".d/", ".d.hg/")
+    )
+
 
 encodedir = getattr(parsers, 'encodedir', _encodedir)
 
+
 def decodedir(path):
     '''
     >>> decodedir(b'data/foo.i')
@@ -77,10 +82,12 @@
     '''
     if ".hg/" not in path:
         return path
-    return (path
-            .replace(".d.hg/", ".d/")
-            .replace(".i.hg/", ".i/")
-            .replace(".hg.hg/", ".hg/"))
+    return (
+        path.replace(".d.hg/", ".d/")
+        .replace(".i.hg/", ".i/")
+        .replace(".hg.hg/", ".hg/")
+    )
+
 
 def _reserved():
     ''' characters that are problematic for filesystems
@@ -99,6 +106,7 @@
     for x in winreserved:
         yield x
 
+
 def _buildencodefun():
     '''
     >>> enc, dec = _buildencodefun()
@@ -137,24 +145,31 @@
     dmap = {}
     for k, v in cmap.iteritems():
         dmap[v] = k
+
     def decode(s):
         i = 0
         while i < len(s):
             for l in pycompat.xrange(1, 4):
                 try:
-                    yield dmap[s[i:i + l]]
+                    yield dmap[s[i : i + l]]
                     i += l
                     break
                 except KeyError:
                     pass
             else:
                 raise KeyError
-    return (lambda s: ''.join([cmap[s[c:c + 1]]
-                               for c in pycompat.xrange(len(s))]),
-            lambda s: ''.join(list(decode(s))))
+
+    return (
+        lambda s: ''.join(
+            [cmap[s[c : c + 1]] for c in pycompat.xrange(len(s))]
+        ),
+        lambda s: ''.join(list(decode(s))),
+    )
+
 
 _encodefname, _decodefname = _buildencodefun()
 
+
 def encodefilename(s):
     '''
     >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
@@ -162,6 +177,7 @@
     '''
     return _encodefname(encodedir(s))
 
+
 def decodefilename(s):
     '''
     >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
@@ -169,6 +185,7 @@
     '''
     return decodedir(_decodefname(s))
 
+
 def _buildlowerencodefun():
     '''
     >>> f = _buildlowerencodefun()
@@ -187,15 +204,20 @@
         cmap[xchr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z") + 1):
         cmap[xchr(x)] = xchr(x).lower()
+
     def lowerencode(s):
         return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
+
     return lowerencode
 
+
 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
 
 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
-_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
-_winres4 = ('com', 'lpt')               # length 4 (with trailing 1..9)
+_winres3 = ('aux', 'con', 'prn', 'nul')  # length 3
+_winres4 = ('com', 'lpt')  # length 4 (with trailing 1..9)
+
+
 def _auxencode(path, dotencode):
     '''
     Encodes filenames containing names reserved by Windows or which end in
@@ -228,9 +250,9 @@
             l = n.find('.')
             if l == -1:
                 l = len(n)
-            if ((l == 3 and n[:3] in _winres3) or
-                (l == 4 and n[3:4] <= '9' and n[3:4] >= '1'
-                        and n[:3] in _winres4)):
+            if (l == 3 and n[:3] in _winres3) or (
+                l == 4 and n[3:4] <= '9' and n[3:4] >= '1' and n[:3] in _winres4
+            ):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2:3])
                 n = n[0:2] + ec + n[3:]
@@ -240,13 +262,15 @@
             path[i] = n[:-1] + "~%02x" % ord(n[-1:])
     return path
 
+
 _maxstorepathlen = 120
 _dirprefixlen = 8
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
 
+
 def _hashencode(path, dotencode):
     digest = node.hex(hashlib.sha1(path).digest())
-    le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
+    le = lowerencode(path[5:]).split('/')  # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
     _root, ext = os.path.splitext(basename)
@@ -275,6 +299,7 @@
         res = 'dh/' + dirs + filler + digest + ext
     return res
 
+
 def _hybridencode(path, dotencode):
     '''encodes path with a length limit
 
@@ -313,6 +338,7 @@
         res = _hashencode(path, dotencode)
     return res
 
+
 def _pathencode(path):
     de = encodedir(path)
     if len(path) > _maxstorepathlen:
@@ -323,30 +349,39 @@
         return _hashencode(de, True)
     return res
 
+
 _pathencode = getattr(parsers, 'pathencode', _pathencode)
 
+
 def _plainhybridencode(f):
     return _hybridencode(f, False)
 
+
 def _calcmode(vfs):
     try:
         # files in .hg/ will be created using this mode
         mode = vfs.stat().st_mode
-            # avoid some useless chmods
+        # avoid some useless chmods
         if (0o777 & ~util.umask) == (0o777 & mode):
             mode = None
     except OSError:
         mode = None
     return mode
 
-_data = ('bookmarks narrowspec data meta 00manifest.d 00manifest.i'
-         ' 00changelog.d 00changelog.i phaseroots obsstore')
+
+_data = (
+    'bookmarks narrowspec data meta 00manifest.d 00manifest.i'
+    ' 00changelog.d 00changelog.i phaseroots obsstore'
+)
+
 
 def isrevlog(f, kind, st):
     return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')
 
+
 class basicstore(object):
     '''base class for local repository stores'''
+
     def __init__(self, path, vfstype):
         vfs = vfstype(path)
         self.path = vfs.base
@@ -386,8 +421,7 @@
 
     def manifestlog(self, repo, storenarrowmatch):
         rootstore = manifest.manifestrevlog(self.vfs)
-        return manifest.manifestlog(
-            self.vfs, repo, rootstore, storenarrowmatch)
+        return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)
 
     def datafiles(self, matcher=None):
         return self._walk('data', True) + self._walk('meta', True)
@@ -431,6 +465,7 @@
             path = path + "/"
         return self.vfs.exists(path)
 
+
 class encodedstore(basicstore):
     def __init__(self, path, vfstype):
         vfs = vfstype(path + '/store')
@@ -455,8 +490,10 @@
         return self.path + '/' + encodefilename(f)
 
     def copylist(self):
-        return (['requires', '00changelog.i'] +
-                ['store/' + f for f in _data.split()])
+        return ['requires', '00changelog.i'] + [
+            'store/' + f for f in _data.split()
+        ]
+
 
 class fncache(object):
     # the filename used to be partially encoded
@@ -492,8 +529,8 @@
             chunk += c
             try:
                 p = chunk.rindex(b'\n')
-                self.entries.update(decodedir(chunk[:p + 1]).splitlines())
-                chunk = chunk[p + 1:]
+                self.entries.update(decodedir(chunk[: p + 1]).splitlines())
+                chunk = chunk[p + 1 :]
             except ValueError:
                 # substring '\n' not found, maybe the entry is bigger than the
                 # chunksize, so let's keep iterating
@@ -504,9 +541,12 @@
             if warn:
                 warn(msg + '\n')
             else:
-                raise error.Abort(msg,
-                                  hint=_("use 'hg debugrebuildfncache' to "
-                                         "rebuild the fncache"))
+                raise error.Abort(
+                    msg,
+                    hint=_(
+                        "use 'hg debugrebuildfncache' to " "rebuild the fncache"
+                    ),
+                )
         self._checkentries(fp, warn)
         fp.close()
 
@@ -573,6 +613,7 @@
             self._load()
         return iter(self.entries | self.addls)
 
+
 class _fncachevfs(vfsmod.proxyvfs):
     def __init__(self, vfs, fnc, encode):
         vfsmod.proxyvfs.__init__(self, vfs)
@@ -581,8 +622,9 @@
 
     def __call__(self, path, mode='r', *args, **kw):
         encoded = self.encode(path)
-        if mode not in ('r', 'rb') and (path.startswith('data/') or
-                                        path.startswith('meta/')):
+        if mode not in ('r', 'rb') and (
+            path.startswith('data/') or path.startswith('meta/')
+        ):
             # do not trigger a fncache load when adding a file that already is
             # known to exist.
             notload = self.fncache.entries is None and self.vfs.exists(encoded)
@@ -601,6 +643,7 @@
         else:
             return self.vfs.join(path)
 
+
 class fncachestore(basicstore):
     def __init__(self, path, vfstype, dotencode):
         if dotencode:
@@ -637,10 +680,11 @@
                     raise
 
     def copylist(self):
-        d = ('bookmarks narrowspec data meta dh fncache phaseroots obsstore'
-             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
-        return (['requires', '00changelog.i'] +
-                ['store/' + f for f in d.split()])
+        d = (
+            'bookmarks narrowspec data meta dh fncache phaseroots obsstore'
+            ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
+        )
+        return ['requires', '00changelog.i'] + ['store/' + f for f in d.split()]
 
     def write(self, tr):
         self.fncache.write(tr)
--- a/mercurial/streamclone.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/streamclone.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,9 +12,7 @@
 import struct
 
 from .i18n import _
-from .interfaces import (
-    repository,
-)
+from .interfaces import repository
 from . import (
     cacheutil,
     error,
@@ -25,6 +23,7 @@
     util,
 )
 
+
 def canperformstreamclone(pullop, bundle2=False):
     """Whether it is possible to perform a streaming clone as part of pull.
 
@@ -44,8 +43,8 @@
         if 'v2' in pullop.remotebundle2caps.get('stream', []):
             bundle2supported = True
         # else
-            # Server doesn't support bundle2 stream clone or doesn't support
-            # the versions we support. Fall back and possibly allow legacy.
+        # Server doesn't support bundle2 stream clone or doesn't support
+        # the versions we support. Fall back and possibly allow legacy.
 
     # Ensures legacy code path uses available bundle2.
     if bundle2supported and not bundle2:
@@ -87,26 +86,37 @@
         streamreqs = remote.capable('streamreqs')
         # This is weird and shouldn't happen with modern servers.
         if not streamreqs:
-            pullop.repo.ui.warn(_(
-                'warning: stream clone requested but server has them '
-                'disabled\n'))
+            pullop.repo.ui.warn(
+                _(
+                    'warning: stream clone requested but server has them '
+                    'disabled\n'
+                )
+            )
             return False, None
 
         streamreqs = set(streamreqs.split(','))
         # Server requires something we don't support. Bail.
         missingreqs = streamreqs - repo.supportedformats
         if missingreqs:
-            pullop.repo.ui.warn(_(
-                'warning: stream clone requested but client is missing '
-                'requirements: %s\n') % ', '.join(sorted(missingreqs)))
             pullop.repo.ui.warn(
-                _('(see https://www.mercurial-scm.org/wiki/MissingRequirement '
-                  'for more information)\n'))
+                _(
+                    'warning: stream clone requested but client is missing '
+                    'requirements: %s\n'
+                )
+                % ', '.join(sorted(missingreqs))
+            )
+            pullop.repo.ui.warn(
+                _(
+                    '(see https://www.mercurial-scm.org/wiki/MissingRequirement '
+                    'for more information)\n'
+                )
+            )
             return False, None
         requirements = streamreqs
 
     return True, requirements
 
+
 def maybeperformlegacystreamclone(pullop):
     """Possibly perform a legacy stream clone operation.
 
@@ -147,7 +157,8 @@
         resp = int(l)
     except ValueError:
         raise error.ResponseError(
-            _('unexpected response from remote server:'), l)
+            _('unexpected response from remote server:'), l
+        )
     if resp == 1:
         raise error.Abort(_('operation forbidden by server'))
     elif resp == 2:
@@ -160,7 +171,8 @@
         filecount, bytecount = map(int, l.split(' ', 1))
     except (ValueError, TypeError):
         raise error.ResponseError(
-            _('unexpected response from remote server:'), l)
+            _('unexpected response from remote server:'), l
+        )
 
     with repo.lock():
         consumev1(repo, fp, filecount, bytecount)
@@ -169,9 +181,11 @@
         #                    new format-related remote requirements
         # requirements from the streamed-in repository
         repo.requirements = requirements | (
-                repo.requirements - repo.supportedformats)
+            repo.requirements - repo.supportedformats
+        )
         repo.svfs.options = localrepo.resolvestorevfsoptions(
-            repo.ui, repo.requirements, repo.features)
+            repo.ui, repo.requirements, repo.features
+        )
         repo._writerequirements()
 
         if rbranchmap:
@@ -179,6 +193,7 @@
 
         repo.invalidate()
 
+
 def allowservergeneration(repo):
     """Whether streaming clones are allowed from the server."""
     if repository.REPO_FEATURE_STREAM_CLONE not in repo.features:
@@ -195,10 +210,12 @@
 
     return True
 
+
 # This is its own function so extensions can override it.
 def _walkstreamfiles(repo, matcher=None):
     return repo.store.walk(matcher)
 
+
 def generatev1(repo):
     """Emit content for version 1 of a streaming clone.
 
@@ -228,8 +245,9 @@
                 entries.append((name, size))
                 total_bytes += size
 
-    repo.ui.debug('%d files, %d bytes to transfer\n' %
-                  (len(entries), total_bytes))
+    repo.ui.debug(
+        '%d files, %d bytes to transfer\n' % (len(entries), total_bytes)
+    )
 
     svfs = repo.svfs
     debugflag = repo.ui.debugflag
@@ -251,6 +269,7 @@
 
     return len(entries), total_bytes, emitrevlogdata()
 
+
 def generatev1wireproto(repo):
     """Emit content for version 1 of streaming clone suitable for the wire.
 
@@ -278,6 +297,7 @@
     for chunk in it:
         yield chunk
 
+
 def generatebundlev1(repo, compression='UN'):
     """Emit content for version 1 of a stream clone bundle.
 
@@ -311,8 +331,9 @@
         yield compression
 
         filecount, bytecount, it = generatev1(repo)
-        repo.ui.status(_('writing %d bytes for %d files\n') %
-                         (bytecount, filecount))
+        repo.ui.status(
+            _('writing %d bytes for %d files\n') % (bytecount, filecount)
+        )
 
         yield struct.pack('>QQ', filecount, bytecount)
         yield struct.pack('>H', len(requires) + 1)
@@ -321,8 +342,9 @@
         # This is where we'll add compression in the future.
         assert compression == 'UN'
 
-        progress = repo.ui.makeprogress(_('bundle'), total=bytecount,
-                                        unit=_('bytes'))
+        progress = repo.ui.makeprogress(
+            _('bundle'), total=bytecount, unit=_('bytes')
+        )
         progress.update(0)
 
         for chunk in it:
@@ -333,6 +355,7 @@
 
     return requirements, gen()
 
+
 def consumev1(repo, fp, filecount, bytecount):
     """Apply the contents from version 1 of a streaming clone file handle.
 
@@ -343,10 +366,13 @@
     handled by this function.
     """
     with repo.lock():
-        repo.ui.status(_('%d files to transfer, %s of data\n') %
-                       (filecount, util.bytecount(bytecount)))
-        progress = repo.ui.makeprogress(_('clone'), total=bytecount,
-                                        unit=_('bytes'))
+        repo.ui.status(
+            _('%d files to transfer, %s of data\n')
+            % (filecount, util.bytecount(bytecount))
+        )
+        progress = repo.ui.makeprogress(
+            _('clone'), total=bytecount, unit=_('bytes')
+        )
         progress.update(0)
         start = util.timer()
 
@@ -374,10 +400,12 @@
                         size = int(size)
                     except (ValueError, TypeError):
                         raise error.ResponseError(
-                            _('unexpected response from remote server:'), l)
+                            _('unexpected response from remote server:'), l
+                        )
                     if repo.ui.debugflag:
-                        repo.ui.debug('adding %s (%s)\n' %
-                                      (name, util.bytecount(size)))
+                        repo.ui.debug(
+                            'adding %s (%s)\n' % (name, util.bytecount(size))
+                        )
                     # for backwards compat, name was partially encoded
                     path = store.decodedir(name)
                     with repo.svfs(path, 'w', backgroundclose=True) as ofp:
@@ -393,28 +421,41 @@
         if elapsed <= 0:
             elapsed = 0.001
         progress.complete()
-        repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
-                       (util.bytecount(bytecount), elapsed,
-                        util.bytecount(bytecount / elapsed)))
+        repo.ui.status(
+            _('transferred %s in %.1f seconds (%s/sec)\n')
+            % (
+                util.bytecount(bytecount),
+                elapsed,
+                util.bytecount(bytecount / elapsed),
+            )
+        )
+
 
 def readbundle1header(fp):
     compression = fp.read(2)
     if compression != 'UN':
-        raise error.Abort(_('only uncompressed stream clone bundles are '
-            'supported; got %s') % compression)
+        raise error.Abort(
+            _('only uncompressed stream clone bundles are ' 'supported; got %s')
+            % compression
+        )
 
     filecount, bytecount = struct.unpack('>QQ', fp.read(16))
     requireslen = struct.unpack('>H', fp.read(2))[0]
     requires = fp.read(requireslen)
 
     if not requires.endswith('\0'):
-        raise error.Abort(_('malformed stream clone bundle: '
-                            'requirements not properly encoded'))
+        raise error.Abort(
+            _(
+                'malformed stream clone bundle: '
+                'requirements not properly encoded'
+            )
+        )
 
     requirements = set(requires.rstrip('\0').split(','))
 
     return filecount, bytecount, requirements
 
+
 def applybundlev1(repo, fp):
     """Apply the content from a stream clone bundle version 1.
 
@@ -422,37 +463,42 @@
     is at the 2 byte compression identifier.
     """
     if len(repo):
-        raise error.Abort(_('cannot apply stream clone bundle on non-empty '
-                            'repo'))
+        raise error.Abort(
+            _('cannot apply stream clone bundle on non-empty ' 'repo')
+        )
 
     filecount, bytecount, requirements = readbundle1header(fp)
     missingreqs = requirements - repo.supportedformats
     if missingreqs:
-        raise error.Abort(_('unable to apply stream clone: '
-                            'unsupported format: %s') %
-                            ', '.join(sorted(missingreqs)))
+        raise error.Abort(
+            _('unable to apply stream clone: ' 'unsupported format: %s')
+            % ', '.join(sorted(missingreqs))
+        )
 
     consumev1(repo, fp, filecount, bytecount)
 
+
 class streamcloneapplier(object):
     """Class to manage applying streaming clone bundles.
 
     We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle
     readers to perform bundle type-specific functionality.
     """
+
     def __init__(self, fh):
         self._fh = fh
 
     def apply(self, repo):
         return applybundlev1(repo, self._fh)
 
+
 # type of file to stream
-_fileappend = 0 # append only file
-_filefull = 1   # full snapshot file
+_fileappend = 0  # append only file
+_filefull = 1  # full snapshot file
 
 # Source of the file
-_srcstore = 's' # store (svfs)
-_srccache = 'c' # cache (cache)
+_srcstore = 's'  # store (svfs)
+_srccache = 'c'  # cache (cache)
 
 # This is its own function so extensions can override it.
 def _walkstreamfullstorefiles(repo):
@@ -462,6 +508,7 @@
         fnames.append('phaseroots')
     return fnames
 
+
 def _filterfull(entry, copy, vfsmap):
     """actually copy the snapshot files"""
     src, name, ftype, data = entry
@@ -469,22 +516,26 @@
         return entry
     return (src, name, ftype, copy(vfsmap[src].join(name)))
 
+
 @contextlib.contextmanager
 def maketempcopies():
     """return a function to temporary copy file"""
     files = []
     try:
+
         def copy(src):
             fd, dst = pycompat.mkstemp()
             os.close(fd)
             files.append(dst)
             util.copyfiles(src, dst, hardlink=True)
             return dst
+
         yield copy
     finally:
         for tmp in files:
             util.tryunlink(tmp)
 
+
 def _makemap(repo):
     """make a (src -> vfs) map for the repo"""
     vfsmap = {
@@ -497,16 +548,18 @@
 
     return vfsmap
 
+
 def _emit2(repo, entries, totalfilesize):
     """actually emit the stream bundle"""
     vfsmap = _makemap(repo)
-    progress = repo.ui.makeprogress(_('bundle'), total=totalfilesize,
-                                    unit=_('bytes'))
+    progress = repo.ui.makeprogress(
+        _('bundle'), total=totalfilesize, unit=_('bytes')
+    )
     progress.update(0)
     with maketempcopies() as copy, progress:
         # copy is delayed until we are in the try
         entries = [_filterfull(e, copy, vfsmap) for e in entries]
-        yield None # this release the lock on the repository
+        yield None  # this release the lock on the repository
         seen = 0
 
         for src, name, ftype, data in entries:
@@ -533,6 +586,7 @@
             finally:
                 fp.close()
 
+
 def generatev2(repo, includes, excludes, includeobsmarkers):
     """Emit content for version 2 of a streaming clone.
 
@@ -578,6 +632,7 @@
 
     return len(entries), totalfilesize, chunks
 
+
 @contextlib.contextmanager
 def nested(*ctxs):
     this = ctxs[0]
@@ -589,6 +644,7 @@
         else:
             yield
 
+
 def consumev2(repo, fp, filecount, filesize):
     """Apply the contents from a version 2 streaming clone.
 
@@ -596,19 +652,21 @@
     method.
     """
     with repo.lock():
-        repo.ui.status(_('%d files to transfer, %s of data\n') %
-                       (filecount, util.bytecount(filesize)))
+        repo.ui.status(
+            _('%d files to transfer, %s of data\n')
+            % (filecount, util.bytecount(filesize))
+        )
 
         start = util.timer()
-        progress = repo.ui.makeprogress(_('clone'), total=filesize,
-                                        unit=_('bytes'))
+        progress = repo.ui.makeprogress(
+            _('clone'), total=filesize, unit=_('bytes')
+        )
         progress.update(0)
 
         vfsmap = _makemap(repo)
 
         with repo.transaction('clone'):
-            ctxs = (vfs.backgroundclosing(repo.ui)
-                    for vfs in vfsmap.values())
+            ctxs = (vfs.backgroundclosing(repo.ui) for vfs in vfsmap.values())
             with nested(*ctxs):
                 for i in range(filecount):
                     src = util.readexactly(fp, 1)
@@ -619,8 +677,10 @@
                     name = util.readexactly(fp, namelen)
 
                     if repo.ui.debugflag:
-                        repo.ui.debug('adding [%s] %s (%s)\n' %
-                                      (src, name, util.bytecount(datalen)))
+                        repo.ui.debug(
+                            'adding [%s] %s (%s)\n'
+                            % (src, name, util.bytecount(datalen))
+                        )
 
                     with vfs(name, 'w') as ofp:
                         for chunk in util.filechunkiter(fp, limit=datalen):
@@ -634,19 +694,26 @@
         elapsed = util.timer() - start
         if elapsed <= 0:
             elapsed = 0.001
-        repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
-                       (util.bytecount(progress.pos), elapsed,
-                        util.bytecount(progress.pos / elapsed)))
+        repo.ui.status(
+            _('transferred %s in %.1f seconds (%s/sec)\n')
+            % (
+                util.bytecount(progress.pos),
+                elapsed,
+                util.bytecount(progress.pos / elapsed),
+            )
+        )
         progress.complete()
 
+
 def applybundlev2(repo, fp, filecount, filesize, requirements):
     from . import localrepo
 
     missingreqs = [r for r in requirements if r not in repo.supported]
     if missingreqs:
-        raise error.Abort(_('unable to apply stream clone: '
-                            'unsupported format: %s') %
-                          ', '.join(sorted(missingreqs)))
+        raise error.Abort(
+            _('unable to apply stream clone: ' 'unsupported format: %s')
+            % ', '.join(sorted(missingreqs))
+        )
 
     consumev2(repo, fp, filecount, filesize)
 
@@ -654,7 +721,9 @@
     #                    new format-related remote requirements
     # requirements from the streamed-in repository
     repo.requirements = set(requirements) | (
-            repo.requirements - repo.supportedformats)
+        repo.requirements - repo.supportedformats
+    )
     repo.svfs.options = localrepo.resolvestorevfsoptions(
-        repo.ui, repo.requirements, repo.features)
+        repo.ui, repo.requirements, repo.features
+    )
     repo._writerequirements()
--- a/mercurial/subrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/subrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -47,6 +47,7 @@
 _abssource = subrepoutil._abssource
 propertycache = util.propertycache
 
+
 def _expandedabspath(path):
     '''
     get a path or url and if it is a path expand it and return an absolute path
@@ -57,17 +58,21 @@
         path = util.normpath(os.path.abspath(u.path))
     return path
 
+
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
     return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
 
+
 class SubrepoAbort(error.Abort):
     """Exception class used to avoid handling a subrepo error more than once"""
+
     def __init__(self, *args, **kw):
         self.subrepo = kw.pop(r'subrepo', None)
         self.cause = kw.pop(r'cause', None)
         error.Abort.__init__(self, *args, **kw)
 
+
 def annotatesubrepoerror(func):
     def decoratedmethod(self, *args, **kargs):
         try:
@@ -77,30 +82,39 @@
             raise ex
         except error.Abort as ex:
             subrepo = subrelpath(self)
-            errormsg = (stringutil.forcebytestr(ex) + ' '
-                        + _('(in subrepository "%s")') % subrepo)
+            errormsg = (
+                stringutil.forcebytestr(ex)
+                + ' '
+                + _('(in subrepository "%s")') % subrepo
+            )
             # avoid handling this exception by raising a SubrepoAbort exception
-            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
-                               cause=sys.exc_info())
+            raise SubrepoAbort(
+                errormsg, hint=ex.hint, subrepo=subrepo, cause=sys.exc_info()
+            )
         return res
+
     return decoratedmethod
 
+
 def _updateprompt(ui, sub, dirty, local, remote):
     if dirty:
-        msg = (_(' subrepository sources for %s differ\n'
-                 'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
-                 'what do you want to do?'
-                 '$$ &Local $$ &Remote')
-               % (subrelpath(sub), local, remote))
+        msg = _(
+            ' subrepository sources for %s differ\n'
+            'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+            'what do you want to do?'
+            '$$ &Local $$ &Remote'
+        ) % (subrelpath(sub), local, remote)
     else:
-        msg = (_(' subrepository sources for %s differ (in checked out '
-                 'version)\n'
-                 'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
-                 'what do you want to do?'
-                 '$$ &Local $$ &Remote')
-               % (subrelpath(sub), local, remote))
+        msg = _(
+            ' subrepository sources for %s differ (in checked out '
+            'version)\n'
+            'you can use (l)ocal source (%s) or (r)emote source (%s).\n'
+            'what do you want to do?'
+            '$$ &Local $$ &Remote'
+        ) % (subrelpath(sub), local, remote)
     return ui.promptchoice(msg, 0)
 
+
 def _sanitize(ui, vfs, ignore):
     for dirname, dirs, names in vfs.walk():
         for i, d in enumerate(dirs):
@@ -111,41 +125,55 @@
             continue
         for f in names:
             if f.lower() == 'hgrc':
-                ui.warn(_("warning: removing potentially hostile 'hgrc' "
-                          "in '%s'\n") % vfs.join(dirname))
+                ui.warn(
+                    _(
+                        "warning: removing potentially hostile 'hgrc' "
+                        "in '%s'\n"
+                    )
+                    % vfs.join(dirname)
+                )
                 vfs.unlink(vfs.reljoin(dirname, f))
 
+
 def _auditsubrepopath(repo, path):
     # sanity check for potentially unsafe paths such as '~' and '$FOO'
     if path.startswith('~') or '$' in path or util.expandpath(path) != path:
-        raise error.Abort(_('subrepo path contains illegal component: %s')
-                          % path)
+        raise error.Abort(
+            _('subrepo path contains illegal component: %s') % path
+        )
     # auditor doesn't check if the path itself is a symlink
     pathutil.pathauditor(repo.root)(path)
     if repo.wvfs.islink(path):
         raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
 
+
 SUBREPO_ALLOWED_DEFAULTS = {
     'hg': True,
     'git': False,
     'svn': False,
 }
 
+
 def _checktype(ui, kind):
     # subrepos.allowed is a master kill switch. If disabled, subrepos are
     # disabled period.
     if not ui.configbool('subrepos', 'allowed', True):
-        raise error.Abort(_('subrepos not enabled'),
-                          hint=_("see 'hg help config.subrepos' for details"))
+        raise error.Abort(
+            _('subrepos not enabled'),
+            hint=_("see 'hg help config.subrepos' for details"),
+        )
 
     default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
     if not ui.configbool('subrepos', '%s:allowed' % kind, default):
-        raise error.Abort(_('%s subrepos not allowed') % kind,
-                          hint=_("see 'hg help config.subrepos' for details"))
+        raise error.Abort(
+            _('%s subrepos not allowed') % kind,
+            hint=_("see 'hg help config.subrepos' for details"),
+        )
 
     if kind not in types:
         raise error.Abort(_('unknown subrepo type %s') % kind)
 
+
 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
     """return instance of the right subrepo class for subrepo in path"""
     # subrepo inherently violates our import layering rules
@@ -154,6 +182,7 @@
     # scripts that don't use our demand-loading
     global hg
     from . import hg as h
+
     hg = h
 
     repo = ctx.repo()
@@ -164,6 +193,7 @@
         state = (state[0], ctx.subrev(path), state[2])
     return types[state[2]](ctx, path, state[:2], allowcreate)
 
+
 def nullsubrepo(ctx, path, pctx):
     """return an empty subrepo in pctx for the extant subrepo in ctx"""
     # subrepo inherently violates our import layering rules
@@ -172,6 +202,7 @@
     # scripts that don't use our demand-loading
     global hg
     from . import hg as h
+
     hg = h
 
     repo = ctx.repo()
@@ -183,10 +214,11 @@
         subrev = "0" * 40
     return types[state[2]](pctx, path, (state[0], subrev), True)
 
+
 # subrepo classes need to implement the following abstract class:
 
+
 class abstractsubrepo(object):
-
     def __init__(self, ctx, path):
         """Initialize abstractsubrepo part
 
@@ -233,14 +265,14 @@
         This returns None, otherwise.
         """
         if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
-            return _('uncommitted changes in subrepository "%s"'
-                     ) % subrelpath(self)
+            return _('uncommitted changes in subrepository "%s"') % subrelpath(
+                self
+            )
 
     def bailifchanged(self, ignoreupdate=False, hint=None):
         """raise Abort if subrepository is ``dirty()``
         """
-        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
-                                       missing=True)
+        dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
         if dirtyreason:
             raise error.Abort(dirtyreason, hint=hint)
 
@@ -338,15 +370,17 @@
             files = self.files()
         total = len(files)
         relpath = subrelpath(self)
-        progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
-                                        unit=_('files'), total=total)
+        progress = self.ui.makeprogress(
+            _('archiving (%s)') % relpath, unit=_('files'), total=total
+        )
         progress.update(0)
         for name in files:
             flags = self.fileflags(name)
             mode = 'x' in flags and 0o755 or 0o644
             symlink = 'l' in flags
-            archiver.addfile(prefix + name, mode, symlink,
-                             self.filedata(name, decode))
+            archiver.addfile(
+                prefix + name, mode, symlink, self.filedata(name, decode)
+            )
             progress.increment()
         progress.complete()
         return total
@@ -360,19 +394,31 @@
     def forget(self, match, prefix, uipathfn, dryrun, interactive):
         return ([], [])
 
-    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
-                    dryrun, warnings):
+    def removefiles(
+        self,
+        matcher,
+        prefix,
+        uipathfn,
+        after,
+        force,
+        subrepos,
+        dryrun,
+        warnings,
+    ):
         """remove the matched files from the subrepository and the filesystem,
         possibly by force and/or after the file has been removed from the
         filesystem.  Return 0 on success, 1 on any warning.
         """
-        warnings.append(_("warning: removefiles not implemented (%s)")
-                        % self._path)
+        warnings.append(
+            _("warning: removefiles not implemented (%s)") % self._path
+        )
         return 1
 
     def revert(self, substate, *pats, **opts):
-        self.ui.warn(_('%s: reverting %s subrepos is unsupported\n')
-                     % (substate[0], substate[2]))
+        self.ui.warn(
+            _('%s: reverting %s subrepos is unsupported\n')
+            % (substate[0], substate[2])
+        )
         return []
 
     def shortid(self, revid):
@@ -401,6 +447,7 @@
         """
         return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
 
+
 class hgsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state, allowcreate):
         super(hgsubrepo, self).__init__(ctx, path)
@@ -411,13 +458,15 @@
         # repository constructor does expand variables in path, which is
         # unsafe since subrepo path might come from untrusted source.
         if os.path.realpath(util.expandpath(root)) != root:
-            raise error.Abort(_('subrepo path contains illegal component: %s')
-                              % path)
+            raise error.Abort(
+                _('subrepo path contains illegal component: %s') % path
+            )
         self._repo = hg.repository(r.baseui, root, create=create)
         if self._repo.root != root:
-            raise error.ProgrammingError('failed to reject unsafe subrepo '
-                                         'path: %s (expanded to %s)'
-                                         % (root, self._repo.root))
+            raise error.ProgrammingError(
+                'failed to reject unsafe subrepo '
+                'path: %s (expanded to %s)' % (root, self._repo.root)
+            )
 
         # Propagate the parent's --hidden option
         if r is r.unfiltered():
@@ -519,8 +568,9 @@
 
     @annotatesubrepoerror
     def add(self, ui, match, prefix, uipathfn, explicitonly, **opts):
-        return cmdutil.add(ui, self._repo, match, prefix, uipathfn,
-                           explicitonly, **opts)
+        return cmdutil.add(
+            ui, self._repo, match, prefix, uipathfn, explicitonly, **opts
+        )
 
     @annotatesubrepoerror
     def addremove(self, m, prefix, uipathfn, opts):
@@ -535,8 +585,9 @@
     def cat(self, match, fm, fntemplate, prefix, **opts):
         rev = self._state[1]
         ctx = self._repo[rev]
-        return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
-                           prefix, **opts)
+        return cmdutil.cat(
+            self.ui, self._repo, ctx, match, fm, fntemplate, prefix, **opts
+        )
 
     @annotatesubrepoerror
     def status(self, rev2, **opts):
@@ -546,8 +597,10 @@
             ctx2 = self._repo[rev2]
             return self._repo.status(ctx1, ctx2, **opts)
         except error.RepoLookupError as inst:
-            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
-                         % (inst, subrelpath(self)))
+            self.ui.warn(
+                _('warning: error "%s" in subrepository "%s"\n')
+                % (inst, subrelpath(self))
+            )
             return scmutil.status([], [], [], [], [], [], [])
 
     @annotatesubrepoerror
@@ -558,12 +611,22 @@
             # in hex format
             if node2 is not None:
                 node2 = node.bin(node2)
-            logcmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2,
-                                      match, prefix=prefix, listsubrepos=True,
-                                      **opts)
+            logcmdutil.diffordiffstat(
+                ui,
+                self._repo,
+                diffopts,
+                node1,
+                node2,
+                match,
+                prefix=prefix,
+                listsubrepos=True,
+                **opts
+            )
         except error.RepoLookupError as inst:
-            self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
-                          % (inst, subrelpath(self)))
+            self.ui.warn(
+                _('warning: error "%s" in subrepository "%s"\n')
+                % (inst, subrelpath(self))
+            )
 
     @annotatesubrepoerror
     def archive(self, archiver, prefix, match=None, decode=True):
@@ -573,27 +636,27 @@
             files = [f for f in files if match(f)]
         rev = self._state[1]
         ctx = self._repo[rev]
-        scmutil.prefetchfiles(self._repo, [ctx.rev()],
-                              scmutil.matchfiles(self._repo, files))
+        scmutil.prefetchfiles(
+            self._repo, [ctx.rev()], scmutil.matchfiles(self._repo, files)
+        )
         total = abstractsubrepo.archive(self, archiver, prefix, match)
         for subpath in ctx.substate:
             s = subrepo(ctx, subpath, True)
             submatch = matchmod.subdirmatcher(subpath, match)
             subprefix = prefix + subpath + '/'
-            total += s.archive(archiver, subprefix, submatch,
-                               decode)
+            total += s.archive(archiver, subprefix, submatch, decode)
         return total
 
     @annotatesubrepoerror
     def dirty(self, ignoreupdate=False, missing=False):
         r = self._state[1]
-        if r == '' and not ignoreupdate: # no state recorded
+        if r == '' and not ignoreupdate:  # no state recorded
             return True
         w = self._repo[None]
         if r != w.p1().hex() and not ignoreupdate:
             # different version checked out
             return True
-        return w.dirty(missing=missing) # working directory changed
+        return w.dirty(missing=missing)  # working directory changed
 
     def basestate(self):
         return self._repo['.'].hex()
@@ -610,7 +673,7 @@
         self.ui.debug("committing subrepo %s\n" % subrelpath(self))
         n = self._repo.commit(text, user, date)
         if not n:
-            return self._repo['.'].hex() # different version checked out
+            return self._repo['.'].hex()  # different version checked out
         return node.hex(n)
 
     @annotatesubrepoerror
@@ -649,11 +712,17 @@
             # A simpler option is for the user to configure clone pooling, and
             # work with that.
             if parentrepo.shared() and hg.islocal(srcurl):
-                self.ui.status(_('sharing subrepo %s from %s\n')
-                               % (subrelpath(self), srcurl))
-                shared = hg.share(self._repo._subparent.baseui,
-                                  getpeer(), self._repo.root,
-                                  update=False, bookmarks=False)
+                self.ui.status(
+                    _('sharing subrepo %s from %s\n')
+                    % (subrelpath(self), srcurl)
+                )
+                shared = hg.share(
+                    self._repo._subparent.baseui,
+                    getpeer(),
+                    self._repo.root,
+                    update=False,
+                    bookmarks=False,
+                )
                 self._repo = shared.local()
             else:
                 # TODO: find a common place for this and this code in the
@@ -670,17 +739,26 @@
                 else:
                     shareopts = {}
 
-                self.ui.status(_('cloning subrepo %s from %s\n')
-                               % (subrelpath(self), util.hidepassword(srcurl)))
-                other, cloned = hg.clone(self._repo._subparent.baseui, {},
-                                         getpeer(), self._repo.root,
-                                         update=False, shareopts=shareopts)
+                self.ui.status(
+                    _('cloning subrepo %s from %s\n')
+                    % (subrelpath(self), util.hidepassword(srcurl))
+                )
+                other, cloned = hg.clone(
+                    self._repo._subparent.baseui,
+                    {},
+                    getpeer(),
+                    self._repo.root,
+                    update=False,
+                    shareopts=shareopts,
+                )
                 self._repo = cloned.local()
             self._initrepo(parentrepo, source, create=True)
             self._cachestorehash(srcurl)
         else:
-            self.ui.status(_('pulling subrepo %s from %s\n')
-                           % (subrelpath(self), util.hidepassword(srcurl)))
+            self.ui.status(
+                _('pulling subrepo %s from %s\n')
+                % (subrelpath(self), util.hidepassword(srcurl))
+            )
             cleansub = self.storeclean(srcurl)
             exchange.pull(self._repo, getpeer())
             if cleansub:
@@ -700,7 +778,8 @@
             if ctx.hidden():
                 urepo.ui.warn(
                     _('revision %s in subrepository "%s" is hidden\n')
-                    % (revision[0:12], self._path))
+                    % (revision[0:12], self._path)
+                )
                 repo = urepo
         hg.updaterepo(repo, revision, overwrite)
 
@@ -713,12 +792,14 @@
 
         def mergefunc():
             if anc == cur and dst.branch() == cur.branch():
-                self.ui.debug('updating subrepository "%s"\n'
-                              % subrelpath(self))
+                self.ui.debug(
+                    'updating subrepository "%s"\n' % subrelpath(self)
+                )
                 hg.update(self._repo, state[1])
             elif anc == dst:
-                self.ui.debug('skipping subrepository "%s"\n'
-                              % subrelpath(self))
+                self.ui.debug(
+                    'skipping subrepository "%s"\n' % subrelpath(self)
+                )
             else:
                 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
                 hg.merge(self._repo, state[1], remind=False)
@@ -741,7 +822,7 @@
 
         # push subrepos depth-first for coherent ordering
         c = self._repo['.']
-        subs = c.substate # only repos that are committed
+        subs = c.substate  # only repos that are committed
         for s in sorted(subs):
             if c.sub(s).push(opts) == 0:
                 return False
@@ -751,10 +832,13 @@
             if self.storeclean(dsturl):
                 self.ui.status(
                     _('no changes made to subrepo %s since last push to %s\n')
-                    % (subrelpath(self), util.hidepassword(dsturl)))
+                    % (subrelpath(self), util.hidepassword(dsturl))
+                )
                 return None
-        self.ui.status(_('pushing subrepo %s to %s\n') %
-            (subrelpath(self), util.hidepassword(dsturl)))
+        self.ui.status(
+            _('pushing subrepo %s to %s\n')
+            % (subrelpath(self), util.hidepassword(dsturl))
+        )
         other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
         res = exchange.push(self._repo, other, force, newbranch=newbranch)
 
@@ -825,8 +909,10 @@
                 pm = matchmod.prefixdirmatcher(subpath, sm, badfn=badfn)
                 matchers.append(pm)
             except error.LookupError:
-                self.ui.status(_("skipping missing subrepository: %s\n")
-                               % self.wvfs.reljoin(reporelpath(self), subpath))
+                self.ui.status(
+                    _("skipping missing subrepository: %s\n")
+                    % self.wvfs.reljoin(reporelpath(self), subpath)
+                )
         if len(matchers) == 1:
             return matchers[0]
         return matchmod.unionmatcher(matchers)
@@ -837,14 +923,40 @@
 
     @annotatesubrepoerror
     def forget(self, match, prefix, uipathfn, dryrun, interactive):
-        return cmdutil.forget(self.ui, self._repo, match, prefix, uipathfn,
-                              True, dryrun=dryrun, interactive=interactive)
+        return cmdutil.forget(
+            self.ui,
+            self._repo,
+            match,
+            prefix,
+            uipathfn,
+            True,
+            dryrun=dryrun,
+            interactive=interactive,
+        )
 
     @annotatesubrepoerror
-    def removefiles(self, matcher, prefix, uipathfn, after, force, subrepos,
-                    dryrun, warnings):
-        return cmdutil.remove(self.ui, self._repo, matcher, prefix, uipathfn,
-                              after, force, subrepos, dryrun)
+    def removefiles(
+        self,
+        matcher,
+        prefix,
+        uipathfn,
+        after,
+        force,
+        subrepos,
+        dryrun,
+        warnings,
+    ):
+        return cmdutil.remove(
+            self.ui,
+            self._repo,
+            matcher,
+            prefix,
+            uipathfn,
+            after,
+            force,
+            subrepos,
+            dryrun,
+        )
 
     @annotatesubrepoerror
     def revert(self, substate, *pats, **opts):
@@ -888,6 +1000,7 @@
         # scripts that don't use our demand-loading
         global hg
         from . import hg as h
+
         hg = h
 
         # Nothing prevents a user from sharing in a repo, and then making that a
@@ -906,14 +1019,18 @@
                 # Since hidden revisions aren't pushed/pulled, it seems worth an
                 # explicit warning.
                 ui = self._repo.ui
-                ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
-                        (self._relpath, node.short(self._ctx.node())))
+                ui.warn(
+                    _("subrepo '%s' is hidden in revision %s\n")
+                    % (self._relpath, node.short(self._ctx.node()))
+                )
             return 0
         except error.RepoLookupError:
             # A missing subrepo revision may be a case of needing to pull it, so
             # don't treat this as an error.
-            self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
-                               (self._relpath, node.short(self._ctx.node())))
+            self._repo.ui.warn(
+                _("subrepo '%s' not found in revision %s\n")
+                % (self._relpath, node.short(self._ctx.node()))
+            )
             return 0
 
     @propertycache
@@ -929,14 +1046,16 @@
         # Keep consistent dir separators by avoiding vfs.join(self._path)
         return reporelpath(self._repo)
 
+
 class svnsubrepo(abstractsubrepo):
     def __init__(self, ctx, path, state, allowcreate):
         super(svnsubrepo, self).__init__(ctx, path)
         self._state = state
         self._exe = procutil.findexe('svn')
         if not self._exe:
-            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
-                             % self._path)
+            raise error.Abort(
+                _("'svn' executable not found for subrepo '%s'") % self._path
+            )
 
     def _svncommand(self, commands, filename='', failok=False):
         cmd = [self._exe]
@@ -953,8 +1072,9 @@
                 cmd.append('--non-interactive')
         cmd.extend(commands)
         if filename is not None:
-            path = self.wvfs.reljoin(self._ctx.repo().origroot,
-                                     self._path, filename)
+            path = self.wvfs.reljoin(
+                self._ctx.repo().origroot, self._path, filename
+            )
             cmd.append(path)
         env = dict(encoding.environ)
         # Avoid localized output, preserve current locale for everything else.
@@ -963,16 +1083,22 @@
             env['LANG'] = lc_all
             del env['LC_ALL']
         env['LC_MESSAGES'] = 'C'
-        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
-                             bufsize=-1, close_fds=procutil.closefds,
-                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                             env=procutil.tonativeenv(env), **extrakw)
+        p = subprocess.Popen(
+            pycompat.rapply(procutil.tonativestr, cmd),
+            bufsize=-1,
+            close_fds=procutil.closefds,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            env=procutil.tonativeenv(env),
+            **extrakw
+        )
         stdout, stderr = map(util.fromnativeeol, p.communicate())
         stderr = stderr.strip()
         if not failok:
             if p.returncode:
-                raise error.Abort(stderr or 'exited with code %d'
-                                  % p.returncode)
+                raise error.Abort(
+                    stderr or 'exited with code %d' % p.returncode
+                )
             if stderr:
                 self.ui.warn(stderr + '\n')
         return stdout, stderr
@@ -1000,8 +1126,10 @@
             rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
             commits = entries[0].getElementsByTagName(r'commit')
             if commits:
-                lastrev = pycompat.bytestr(
-                    commits[0].getAttribute(r'revision')) or '0'
+                lastrev = (
+                    pycompat.bytestr(commits[0].getAttribute(r'revision'))
+                    or '0'
+                )
         return (lastrev, rev)
 
     def _wcrev(self):
@@ -1027,8 +1155,12 @@
                 externals.append(path)
             elif item == r'missing':
                 missing.append(path)
-            if (item not in (r'', r'normal', r'unversioned', r'external')
-                or props not in (r'', r'none', r'normal')):
+            if item not in (
+                r'',
+                r'normal',
+                r'unversioned',
+                r'external',
+            ) or props not in (r'', r'none', r'normal'):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -1092,8 +1224,10 @@
     @annotatesubrepoerror
     def remove(self):
         if self.dirty():
-            self.ui.warn(_('not removing repo %s because '
-                           'it has changes.\n') % self._path)
+            self.ui.warn(
+                _('not removing repo %s because ' 'it has changes.\n')
+                % self._path
+            )
             return
         self.ui.note(_('removing subrepo %s\n') % self._path)
 
@@ -1121,8 +1255,9 @@
         status, err = self._svncommand(args, failok=True)
         _sanitize(self.ui, self.wvfs, '.svn')
         if not re.search('Checked out revision [0-9]+.', status):
-            if ('is already a working copy for a different URL' in err
-                and (self._wcchanged()[:2] == (False, False))):
+            if 'is already a working copy for a different URL' in err and (
+                self._wcchanged()[:2] == (False, False)
+            ):
                 # obstructed but clean working copy, so just blow it away.
                 self.remove()
                 self.get(state, overwrite=False)
@@ -1153,9 +1288,11 @@
             kind = pycompat.bytestr(e.getAttribute(r'kind'))
             if kind != 'file':
                 continue
-            name = r''.join(c.data for c
-                            in e.getElementsByTagName(r'name')[0].childNodes
-                            if c.nodeType == c.TEXT_NODE)
+            name = r''.join(
+                c.data
+                for c in e.getElementsByTagName(r'name')[0].childNodes
+                if c.nodeType == c.TEXT_NODE
+            )
             paths.append(name.encode('utf8'))
         return paths
 
@@ -1179,23 +1316,33 @@
             genericerror = _("error executing git for subrepo '%s': %s")
             notfoundhint = _("check git is installed and in your PATH")
             if e.errno != errno.ENOENT:
-                raise error.Abort(genericerror % (
-                    self._path, encoding.strtolocal(e.strerror)))
+                raise error.Abort(
+                    genericerror % (self._path, encoding.strtolocal(e.strerror))
+                )
             elif pycompat.iswindows:
                 try:
                     self._gitexecutable = 'git.cmd'
                     out, err = self._gitnodir(['--version'])
                 except OSError as e2:
                     if e2.errno == errno.ENOENT:
-                        raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
-                            " for subrepo '%s'") % self._path,
-                            hint=notfoundhint)
+                        raise error.Abort(
+                            _(
+                                "couldn't find 'git' or 'git.cmd'"
+                                " for subrepo '%s'"
+                            )
+                            % self._path,
+                            hint=notfoundhint,
+                        )
                     else:
-                        raise error.Abort(genericerror % (self._path,
-                            encoding.strtolocal(e2.strerror)))
+                        raise error.Abort(
+                            genericerror
+                            % (self._path, encoding.strtolocal(e2.strerror))
+                        )
             else:
-                raise error.Abort(_("couldn't find git for subrepo '%s'")
-                    % self._path, hint=notfoundhint)
+                raise error.Abort(
+                    _("couldn't find git for subrepo '%s'") % self._path,
+                    hint=notfoundhint,
+                )
         versionstatus = self._checkversion(out)
         if versionstatus == 'unknown':
             self.ui.warn(_('cannot retrieve git version\n'))
@@ -1256,8 +1403,9 @@
         return self._gitdir(commands, env=env, stream=stream)[0]
 
     def _gitdir(self, commands, env=None, stream=False):
-        return self._gitnodir(commands, env=env, stream=stream,
-                              cwd=self._abspath)
+        return self._gitnodir(
+            commands, env=env, stream=stream, cwd=self._abspath
+        )
 
     def _gitnodir(self, commands, env=None, stream=False, cwd=None):
         """Calls the git command
@@ -1282,13 +1430,17 @@
             # insert the argument in the front,
             # the end of git diff arguments is used for paths
             commands.insert(1, '--color')
-        p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
-                                             [self._gitexecutable] + commands),
-                             bufsize=-1,
-                             cwd=pycompat.rapply(procutil.tonativestr, cwd),
-                             env=procutil.tonativeenv(env),
-                             close_fds=procutil.closefds,
-                             stdout=subprocess.PIPE, stderr=errpipe)
+        p = subprocess.Popen(
+            pycompat.rapply(
+                procutil.tonativestr, [self._gitexecutable] + commands
+            ),
+            bufsize=-1,
+            cwd=pycompat.rapply(procutil.tonativestr, cwd),
+            env=procutil.tonativeenv(env),
+            close_fds=procutil.closefds,
+            stdout=subprocess.PIPE,
+            stderr=errpipe,
+        )
         if stream:
             return p.stdout, None
 
@@ -1302,8 +1454,10 @@
             if command in ('cat-file', 'symbolic-ref'):
                 return retdata, p.returncode
             # for all others, abort
-            raise error.Abort(_('git %s error %d in %s') %
-                             (command, p.returncode, self._relpath))
+            raise error.Abort(
+                _('git %s error %d in %s')
+                % (command, p.returncode, self._relpath)
+            )
 
         return retdata, p.returncode
 
@@ -1349,15 +1503,17 @@
         branch2rev = {}
         rev2branch = {}
 
-        out = self._gitcommand(['for-each-ref', '--format',
-                                '%(objectname) %(refname)'])
+        out = self._gitcommand(
+            ['for-each-ref', '--format', '%(objectname) %(refname)']
+        )
         for line in out.split('\n'):
             revision, ref = line.split(' ')
-            if (not ref.startswith('refs/heads/') and
-                not ref.startswith('refs/remotes/')):
+            if not ref.startswith('refs/heads/') and not ref.startswith(
+                'refs/remotes/'
+            ):
                 continue
             if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
-                continue # ignore remote/HEAD redirects
+                continue  # ignore remote/HEAD redirects
             branch2rev[ref] = revision
             rev2branch.setdefault(revision, []).append(ref)
         return branch2rev, rev2branch
@@ -1373,8 +1529,9 @@
             remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
             if remote:
                 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
-                tracking['refs/remotes/%s/%s' %
-                         (remote, ref.split('/', 2)[2])] = b
+                tracking[
+                    'refs/remotes/%s/%s' % (remote, ref.split('/', 2)[2])
+                ] = b
         return tracking
 
     def _abssource(self, source):
@@ -1392,18 +1549,23 @@
             util.checksafessh(source)
 
             source = self._abssource(source)
-            self.ui.status(_('cloning subrepo %s from %s\n') %
-                            (self._relpath, source))
+            self.ui.status(
+                _('cloning subrepo %s from %s\n') % (self._relpath, source)
+            )
             self._gitnodir(['clone', source, self._abspath])
         if self._githavelocally(revision):
             return
-        self.ui.status(_('pulling subrepo %s from %s\n') %
-                        (self._relpath, self._gitremote('origin')))
+        self.ui.status(
+            _('pulling subrepo %s from %s\n')
+            % (self._relpath, self._gitremote('origin'))
+        )
         # try only origin: the originally cloned repo
         self._gitcommand(['fetch'])
         if not self._githavelocally(revision):
-            raise error.Abort(_('revision %s does not exist in subrepository '
-                                '"%s"\n') % (revision, self._relpath))
+            raise error.Abort(
+                _('revision %s does not exist in subrepository ' '"%s"\n')
+                % (revision, self._relpath)
+            )
 
     @annotatesubrepoerror
     def dirty(self, ignoreupdate=False, missing=False):
@@ -1458,10 +1620,13 @@
 
         def rawcheckout():
             # no branch to checkout, check it out with no branch
-            self.ui.warn(_('checking out detached HEAD in '
-                           'subrepository "%s"\n') % self._relpath)
-            self.ui.warn(_('check out a git branch if you intend '
-                            'to make changes\n'))
+            self.ui.warn(
+                _('checking out detached HEAD in ' 'subrepository "%s"\n')
+                % self._relpath
+            )
+            self.ui.warn(
+                _('check out a git branch if you intend ' 'to make changes\n')
+            )
             checkout(['-q', revision])
 
         if revision not in rev2branch:
@@ -1519,8 +1684,9 @@
         if date:
             # git's date parser silently ignores when seconds < 1e9
             # convert to ISO8601
-            env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
-                                                  '%Y-%m-%dT%H:%M:%S %1%2')
+            env['GIT_AUTHOR_DATE'] = dateutil.datestr(
+                date, '%Y-%m-%dT%H:%M:%S %1%2'
+            )
         self._gitcommand(cmd, env=env)
         # make sure commit works otherwise HEAD might not exist under certain
         # circumstances
@@ -1536,7 +1702,7 @@
 
         def mergefunc():
             if base == revision:
-                self.get(state) # fast forward merge
+                self.get(state)  # fast forward merge
             elif base != self._state[1]:
                 self._gitcommand(['merge', '--no-commit', revision])
             _sanitize(self.ui, self.wvfs, '.git')
@@ -1544,8 +1710,9 @@
         if self.dirty():
             if self._gitstate() != revision:
                 dirty = self._gitstate() == self._state[1] or code != 0
-                if _updateprompt(self.ui, self, dirty,
-                                 self._state[1][:7], revision[:7]):
+                if _updateprompt(
+                    self.ui, self, dirty, self._state[1][:7], revision[:7]
+                ):
                     mergefunc()
         else:
             mergefunc()
@@ -1577,17 +1744,28 @@
         if current:
             # determine if the current branch is even useful
             if not self._gitisancestor(self._state[1], current):
-                self.ui.warn(_('unrelated git branch checked out '
-                               'in subrepository "%s"\n') % self._relpath)
+                self.ui.warn(
+                    _(
+                        'unrelated git branch checked out '
+                        'in subrepository "%s"\n'
+                    )
+                    % self._relpath
+                )
                 return False
-            self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
-                           (current.split('/', 2)[2], self._relpath))
+            self.ui.status(
+                _('pushing branch %s of subrepository "%s"\n')
+                % (current.split('/', 2)[2], self._relpath)
+            )
             ret = self._gitdir(cmd + ['origin', current])
             return ret[1] == 0
         else:
-            self.ui.warn(_('no branch checked out in subrepository "%s"\n'
-                           'cannot push revision %s\n') %
-                          (self._relpath, self._state[1]))
+            self.ui.warn(
+                _(
+                    'no branch checked out in subrepository "%s"\n'
+                    'cannot push revision %s\n'
+                )
+                % (self._relpath, self._state[1])
+            )
             return False
 
     @annotatesubrepoerror
@@ -1613,7 +1791,7 @@
             exact = match.exact(f)
             command = ["add"]
             if exact:
-                command.append("-f") #should be added, even if ignored
+                command.append("-f")  # should be added, even if ignored
             if ui.verbose or not exact:
                 ui.status(_('adding %s\n') % uipathfn(f))
 
@@ -1634,8 +1812,10 @@
         if self._gitmissing():
             return
         if self.dirty():
-            self.ui.warn(_('not removing repo %s because '
-                           'it has changes.\n') % self._relpath)
+            self.ui.warn(
+                _('not removing repo %s because ' 'it has changes.\n')
+                % self._relpath
+            )
             return
         # we can't fully delete the repository as it may contain
         # local-only history
@@ -1662,8 +1842,9 @@
         tarstream = self._gitcommand(['archive', revision], stream=True)
         tar = tarfile.open(fileobj=tarstream, mode=r'r|')
         relpath = subrelpath(self)
-        progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
-                                        unit=_('files'))
+        progress = self.ui.makeprogress(
+            _('archiving (%s)') % relpath, unit=_('files')
+        )
         progress.update(0)
         for info in tar:
             if info.isdir():
@@ -1681,12 +1862,11 @@
         progress.complete()
         return total
 
-
     @annotatesubrepoerror
     def cat(self, match, fm, fntemplate, prefix, **opts):
         rev = self._state[1]
         if match.anypats():
-            return 1 #No support for include/exclude yet
+            return 1  # No support for include/exclude yet
 
         if not match.files():
             return 1
@@ -1694,13 +1874,13 @@
         # TODO: add support for non-plain formatter (see cmdutil.cat())
         for f in match.files():
             output = self._gitcommand(["show", "%s:%s" % (rev, f)])
-            fp = cmdutil.makefileobj(self._ctx, fntemplate,
-                                     pathname=self.wvfs.reljoin(prefix, f))
+            fp = cmdutil.makefileobj(
+                self._ctx, fntemplate, pathname=self.wvfs.reljoin(prefix, f)
+            )
             fp.write(output)
             fp.close()
         return 0
 
-
     @annotatesubrepoerror
     def status(self, rev2, **opts):
         rev1 = self._state[1]
@@ -1718,7 +1898,7 @@
             tab = line.find('\t')
             if tab == -1:
                 continue
-            status, f = line[tab - 1:tab], line[tab + 1:]
+            status, f = line[tab - 1 : tab], line[tab + 1 :]
             if status == 'M':
                 modified.append(f)
             elif status == 'A':
@@ -1743,7 +1923,7 @@
             if not line:
                 continue
             st = line[0:2]
-            #moves and copies show 2 files on one line
+            # moves and copies show 2 files on one line
             if line.find('\0') >= 0:
                 filename1, filename2 = line[3:].split('\0')
             else:
@@ -1765,8 +1945,9 @@
                 if not f in changedfiles:
                     clean.append(f)
 
-        return scmutil.status(modified, added, removed, deleted,
-                              unknown, ignored, clean)
+        return scmutil.status(
+            modified, added, removed, deleted, unknown, ignored, clean
+        )
 
     @annotatesubrepoerror
     def diff(self, ui, diffopts, node2, match, prefix, **opts):
@@ -1779,18 +1960,22 @@
             cmd.append('-U%d' % diffopts.context)
 
         if diffopts.noprefix:
-            cmd.extend(['--src-prefix=%s/' % prefix,
-                        '--dst-prefix=%s/' % prefix])
+            cmd.extend(
+                ['--src-prefix=%s/' % prefix, '--dst-prefix=%s/' % prefix]
+            )
         else:
-            cmd.extend(['--src-prefix=a/%s/' % prefix,
-                        '--dst-prefix=b/%s/' % prefix])
+            cmd.extend(
+                ['--src-prefix=a/%s/' % prefix, '--dst-prefix=b/%s/' % prefix]
+            )
 
         if diffopts.ignorews:
             cmd.append('--ignore-all-space')
         if diffopts.ignorewsamount:
             cmd.append('--ignore-space-change')
-        if (self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4)
-            and diffopts.ignoreblanklines):
+        if (
+            self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4)
+            and diffopts.ignoreblanklines
+        ):
             cmd.append('--ignore-blank-lines')
 
         cmd.append(node1)
@@ -1820,10 +2005,13 @@
                 # backuppath() expects a path relative to the parent repo (the
                 # repo that ui.origbackuppath is relative to)
                 parentname = os.path.join(self._path, name)
-                bakname = scmutil.backuppath(self.ui, self._subparent,
-                                             parentname)
-                self.ui.note(_('saving current version of %s as %s\n') %
-                        (name, os.path.relpath(bakname)))
+                bakname = scmutil.backuppath(
+                    self.ui, self._subparent, parentname
+                )
+                self.ui.note(
+                    _('saving current version of %s as %s\n')
+                    % (name, os.path.relpath(bakname))
+                )
                 util.rename(self.wvfs.join(name), bakname)
 
         if not opts.get(r'dry_run'):
@@ -1833,8 +2021,9 @@
     def shortid(self, revid):
         return revid[:7]
 
+
 types = {
     'hg': hgsubrepo,
     'svn': svnsubrepo,
     'git': gitsubrepo,
-    }
+}
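
The subrepo.py hunks above are almost entirely one mechanical
transformation: a call that overflows black's 88-column limit is
exploded to one argument per line, with a "magic" trailing comma so the
layout stays stable under later edits. A minimal runnable sketch of the
rule, using hypothetical names purely for illustration:

    def archive_files(archiver, prefix, match, decode, progress=None):
        """Hypothetical stand-in so the sketch runs as written."""
        return 0

    archiver, prefix, match, decode, progress = None, 'p/', None, True, None

    # before: hand-wrapped continuation lines, aligned under the paren
    total = archive_files(archiver, prefix, match,
                          decode, progress=progress)

    # after: black gives each argument its own line and appends a
    # trailing comma once the call no longer fits on a single line
    total = archive_files(
        archiver,
        prefix,
        match,
        decode,
        progress=progress,
    )

One visible exception: calls ending in **opts or **extrakw (the
diffordiffstat and subprocess.Popen hunks, for example) get no trailing
comma, presumably because a trailing comma after **kwargs in a call is
a syntax error under Python 2, which this tree still supported.
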
--- a/mercurial/subrepoutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/subrepoutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,12 +21,11 @@
     phases,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 nullstate = ('', '', 'empty')
 
+
 def state(ctx, ui):
     """return a state dict, mapping subrepo paths configured in .hgsub
     to tuple: (source from .hgsub, revision from .hgsubstate, kind
@@ -34,6 +33,7 @@
     """
     p = config.config()
     repo = ctx.repo()
+
     def read(f, sections=None, remap=None):
         if f in ctx:
             try:
@@ -42,13 +42,17 @@
                 if err.errno != errno.ENOENT:
                     raise
                 # handle missing subrepo spec files as removed
-                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
-                        repo.pathto(f))
+                ui.warn(
+                    _("warning: subrepo spec file \'%s\' not found\n")
+                    % repo.pathto(f)
+                )
                 return
             p.parse(f, data, sections, remap, read)
         else:
-            raise error.Abort(_("subrepo spec file \'%s\' not found") %
-                             repo.pathto(f))
+            raise error.Abort(
+                _("subrepo spec file \'%s\' not found") % repo.pathto(f)
+            )
+
     if '.hgsub' in ctx:
         read('.hgsub')
 
@@ -65,9 +69,13 @@
                 try:
                     revision, path = l.split(" ", 1)
                 except ValueError:
-                    raise error.Abort(_("invalid subrepository revision "
-                                       "specifier in \'%s\' line %d")
-                                     % (repo.pathto('.hgsubstate'), (i + 1)))
+                    raise error.Abort(
+                        _(
+                            "invalid subrepository revision "
+                            "specifier in \'%s\' line %d"
+                        )
+                        % (repo.pathto('.hgsubstate'), (i + 1))
+                    )
                 rev[path] = revision
         except IOError as err:
             if err.errno != errno.ENOENT:
@@ -85,9 +93,13 @@
             try:
                 src = re.sub(pattern, repl, src, 1)
             except re.error as e:
-                raise error.Abort(_("bad subrepository pattern in %s: %s")
-                                 % (p.source('subpaths', pattern),
-                                    stringutil.forcebytestr(e)))
+                raise error.Abort(
+                    _("bad subrepository pattern in %s: %s")
+                    % (
+                        p.source('subpaths', pattern),
+                        stringutil.forcebytestr(e),
+                    )
+                )
         return src
 
     state = {}
@@ -98,7 +110,7 @@
                 raise error.Abort(_('missing ] in subrepository source'))
             kind, src = src.split(']', 1)
             kind = kind[1:]
-            src = src.lstrip() # strip any extra whitespace after ']'
+            src = src.lstrip()  # strip any extra whitespace after ']'
 
         if not util.url(src).isabs():
             parent = _abssource(repo, abort=False)
@@ -120,16 +132,21 @@
 
     return state
 
+
 def writestate(repo, state):
     """rewrite .hgsubstate in (outer) repo with these subrepo states"""
-    lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state)
-                                                if state[s][1] != nullstate[1]]
+    lines = [
+        '%s %s\n' % (state[s][1], s)
+        for s in sorted(state)
+        if state[s][1] != nullstate[1]
+    ]
     repo.wwrite('.hgsubstate', ''.join(lines), '')
 
+
 def submerge(repo, wctx, mctx, actx, overwrite, labels=None):
     """delegated from merge.applyupdates: merging of .hgsubstate file
     in working context, merging context and ancestor context"""
-    if mctx == actx: # backwards?
+    if mctx == actx:  # backwards?
         actx = wctx.p1()
     s1 = wctx.substate
     s2 = mctx.substate
@@ -146,36 +163,41 @@
     promptssrc = filemerge.partextras(labels)
     for s, l in sorted(s1.iteritems()):
         a = sa.get(s, nullstate)
-        ld = l # local state with possible dirty flag for compares
+        ld = l  # local state with possible dirty flag for compares
         if wctx.sub(s).dirty():
             ld = (l[0], l[1] + "+")
-        if wctx == actx: # overwrite
+        if wctx == actx:  # overwrite
             a = ld
 
         prompts = promptssrc.copy()
         prompts['s'] = s
         if s in s2:
             r = s2[s]
-            if ld == r or r == a: # no change or local is newer
+            if ld == r or r == a:  # no change or local is newer
                 sm[s] = l
                 continue
-            elif ld == a: # other side changed
+            elif ld == a:  # other side changed
                 debug(s, "other changed, get", r)
                 wctx.sub(s).get(r, overwrite)
                 sm[s] = r
-            elif ld[0] != r[0]: # sources differ
+            elif ld[0] != r[0]:  # sources differ
                 prompts['lo'] = l[0]
                 prompts['ro'] = r[0]
                 if repo.ui.promptchoice(
-                    _(' subrepository sources for %(s)s differ\n'
-                      'you can use (l)ocal%(l)s source (%(lo)s)'
-                      ' or (r)emote%(o)s source (%(ro)s).\n'
-                      'what do you want to do?'
-                      '$$ &Local $$ &Remote') % prompts, 0):
+                    _(
+                        ' subrepository sources for %(s)s differ\n'
+                        'you can use (l)ocal%(l)s source (%(lo)s)'
+                        ' or (r)emote%(o)s source (%(ro)s).\n'
+                        'what do you want to do?'
+                        '$$ &Local $$ &Remote'
+                    )
+                    % prompts,
+                    0,
+                ):
                     debug(s, "prompt changed, get", r)
                     wctx.sub(s).get(r, overwrite)
                     sm[s] = r
-            elif ld[1] == a[1]: # local side is unchanged
+            elif ld[1] == a[1]:  # local side is unchanged
                 debug(s, "other side changed, get", r)
                 wctx.sub(s).get(r, overwrite)
                 sm[s] = r
@@ -185,13 +207,17 @@
                 prompts['sl'] = srepo.shortid(l[1])
                 prompts['sr'] = srepo.shortid(r[1])
                 option = repo.ui.promptchoice(
-                    _(' subrepository %(s)s diverged (local revision: %(sl)s, '
-                      'remote revision: %(sr)s)\n'
-                      'you can (m)erge, keep (l)ocal%(l)s or keep '
-                      '(r)emote%(o)s.\n'
-                      'what do you want to do?'
-                      '$$ &Merge $$ &Local $$ &Remote')
-                    % prompts, 0)
+                    _(
+                        ' subrepository %(s)s diverged (local revision: %(sl)s, '
+                        'remote revision: %(sr)s)\n'
+                        'you can (m)erge, keep (l)ocal%(l)s or keep '
+                        '(r)emote%(o)s.\n'
+                        'what do you want to do?'
+                        '$$ &Merge $$ &Local $$ &Remote'
+                    )
+                    % prompts,
+                    0,
+                )
                 if option == 0:
                     wctx.sub(s).merge(r)
                     sm[s] = l
@@ -203,19 +229,24 @@
                     wctx.sub(s).get(r, overwrite)
                     sm[s] = r
                     debug(s, "get remote subrepo revision", r)
-        elif ld == a: # remote removed, local unchanged
+        elif ld == a:  # remote removed, local unchanged
             debug(s, "remote removed, remove")
             wctx.sub(s).remove()
-        elif a == nullstate: # not present in remote or ancestor
+        elif a == nullstate:  # not present in remote or ancestor
             debug(s, "local added, keep")
             sm[s] = l
             continue
         else:
             if repo.ui.promptchoice(
-                _(' local%(l)s changed subrepository %(s)s'
-                  ' which remote%(o)s removed\n'
-                  'use (c)hanged version or (d)elete?'
-                  '$$ &Changed $$ &Delete') % prompts, 0):
+                _(
+                    ' local%(l)s changed subrepository %(s)s'
+                    ' which remote%(o)s removed\n'
+                    'use (c)hanged version or (d)elete?'
+                    '$$ &Changed $$ &Delete'
+                )
+                % prompts,
+                0,
+            ):
                 debug(s, "prompt remove")
                 wctx.sub(s).remove()
 
@@ -229,11 +260,19 @@
         elif r != sa[s]:
             prompts = promptssrc.copy()
             prompts['s'] = s
-            if repo.ui.promptchoice(
-                _(' remote%(o)s changed subrepository %(s)s'
-                  ' which local%(l)s removed\n'
-                  'use (c)hanged version or (d)elete?'
-                  '$$ &Changed $$ &Delete') % prompts, 0) == 0:
+            if (
+                repo.ui.promptchoice(
+                    _(
+                        ' remote%(o)s changed subrepository %(s)s'
+                        ' which local%(l)s removed\n'
+                        'use (c)hanged version or (d)elete?'
+                        '$$ &Changed $$ &Delete'
+                    )
+                    % prompts,
+                    0,
+                )
+                == 0
+            ):
                 debug(s, "prompt recreate", r)
                 mctx.sub(s).get(r)
                 sm[s] = r
@@ -242,6 +281,7 @@
     writestate(repo, sm)
     return sm
 
+
 def precommit(ui, wctx, status, match, force=False):
     """Calculate .hgsubstate changes that should be applied before committing
 
@@ -274,12 +314,15 @@
                     continue
                 if not force:
                     raise error.Abort(
-                        _("commit with new subrepo %s excluded") % s)
+                        _("commit with new subrepo %s excluded") % s
+                    )
             dirtyreason = wctx.sub(s).dirtyreason(True)
             if dirtyreason:
                 if not ui.configbool('ui', 'commitsubrepos'):
-                    raise error.Abort(dirtyreason,
-                        hint=_("use --subrepos for recursive commit"))
+                    raise error.Abort(
+                        dirtyreason,
+                        hint=_("use --subrepos for recursive commit"),
+                    )
                 subs.append(s)
                 commitsubs.add(s)
             else:
@@ -293,31 +336,35 @@
             r = [s for s in p.substate if s not in newstate]
             subs += [s for s in r if match(s)]
         if subs:
-            if (not match('.hgsub') and
-                '.hgsub' in (wctx.modified() + wctx.added())):
+            if not match('.hgsub') and '.hgsub' in (
+                wctx.modified() + wctx.added()
+            ):
                 raise error.Abort(_("can't commit subrepos without .hgsub"))
             status.modified.insert(0, '.hgsubstate')
 
     elif '.hgsub' in status.removed:
         # clean up .hgsubstate when .hgsub is removed
-        if ('.hgsubstate' in wctx and
-            '.hgsubstate' not in (status.modified + status.added +
-                                  status.removed)):
+        if '.hgsubstate' in wctx and '.hgsubstate' not in (
+            status.modified + status.added + status.removed
+        ):
             status.removed.insert(0, '.hgsubstate')
 
     return subs, commitsubs, newstate
 
+
 def reporelpath(repo):
     """return path to this (sub)repo as seen from outermost repo"""
     parent = repo
     while util.safehasattr(parent, '_subparent'):
         parent = parent._subparent
-    return repo.root[len(pathutil.normasprefix(parent.root)):]
+    return repo.root[len(pathutil.normasprefix(parent.root)) :]
+
 
 def subrelpath(sub):
     """return path to this subrepo as seen from outermost repo"""
     return sub._relpath
 
+
 def _abssource(repo, push=False, abort=True):
     """return pull/push path of repo - either based on parent repo .hgsub info
     or on the top repo config. Abort or return None if no source found."""
@@ -332,7 +379,7 @@
             parent.path = posixpath.join(parent.path or '', source.path)
             parent.path = posixpath.normpath(parent.path)
             return bytes(parent)
-    else: # recursion reached top repo
+    else:  # recursion reached top repo
         path = None
         if util.safehasattr(repo, '_subtoppath'):
             path = repo._subtoppath
@@ -365,6 +412,7 @@
     if abort:
         raise error.Abort(_("default path for subrepository not found"))
 
+
 def newcommitphase(ui, ctx):
     commitphase = phases.newcommitphase(ui)
     substate = getattr(ctx, "substate", None)
@@ -372,8 +420,9 @@
         return commitphase
     check = ui.config('phases', 'checksubrepos')
     if check not in ('ignore', 'follow', 'abort'):
-        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
-                         % (check))
+        raise error.Abort(
+            _('invalid phases.checksubrepos configuration: %s') % check
+        )
     if check == 'ignore':
         return commitphase
     maxphase = phases.public
@@ -386,12 +435,23 @@
             maxsub = s
     if commitphase < maxphase:
         if check == 'abort':
-            raise error.Abort(_("can't commit in %s phase"
-                               " conflicting %s from subrepository %s") %
-                             (phases.phasenames[commitphase],
-                              phases.phasenames[maxphase], maxsub))
-        ui.warn(_("warning: changes are committed in"
-                  " %s phase from subrepository %s\n") %
-                (phases.phasenames[maxphase], maxsub))
+            raise error.Abort(
+                _(
+                    "can't commit in %s phase"
+                    " conflicting %s from subrepository %s"
+                )
+                % (
+                    phases.phasenames[commitphase],
+                    phases.phasenames[maxphase],
+                    maxsub,
+                )
+            )
+        ui.warn(
+            _(
+                "warning: changes are committed in"
+                " %s phase from subrepository %s\n"
+            )
+            % (phases.phasenames[maxphase], maxsub)
+        )
         return maxphase
     return commitphase
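
The subrepoutil.py hunks (and the gitsubrepo.status hunk earlier) also
show black's PEP 8 spacing rules for slices and inline comments:
roughly, the slice colon is treated as a binary operator, so any bound
that is more than a plain name or literal gets a space on each side,
even when the other bound is empty, and inline comments are padded to
at least two spaces before the '#'. A small runnable sketch:

    line = 'M\tfilename'
    tab = line.find('\t')

    status = line[tab - 1 : tab]  # complex bound: spaces around the colon
    rest = line[tab + 1 :]  # an empty upper bound still gets the space
    head = line[:1]  # simple bounds stay tight
    width = len(rest)  # exactly two spaces before an inline comment
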
--- a/mercurial/tagmerge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/tagmerge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -78,13 +78,14 @@
     hex,
     nullid,
 )
-from .import (
+from . import (
     tags as tagsmod,
     util,
 )
 
 hexnullid = hex(nullid)
 
+
 def readtagsformerge(ui, repo, lines, fn='', keeplinenums=False):
     '''read the .hgtags file into a structure that is suitable for merging
 
@@ -92,14 +93,16 @@
     with each tag. This is done because only the line numbers of the first
     parent are useful for merging.
     '''
-    filetags = tagsmod._readtaghist(ui, repo, lines, fn=fn, recode=None,
-                                    calcnodelines=True)[1]
+    filetags = tagsmod._readtaghist(
+        ui, repo, lines, fn=fn, recode=None, calcnodelines=True
+    )[1]
     for tagname, taginfo in filetags.items():
         if not keeplinenums:
             for el in taginfo:
                 el[1] = None
     return filetags
 
+
 def grouptagnodesbyline(tagnodes):
     '''
     Group nearby nodes (i.e. those that must be written next to each other)
@@ -134,6 +137,7 @@
             prevlinenum = linenum
     return groupednodes
 
+
 def writemergedtags(fcd, mergedtags):
     '''
     write the merged tags while trying to minimize the diff to the first parent
@@ -169,6 +173,7 @@
     mergedtagstring = '\n'.join([tags for rank, tags in finaltags if tags])
     fcd.write(mergedtagstring + '\n', fcd.flags())
 
+
 def singletagmerge(p1nodes, p2nodes):
     '''
     merge the nodes corresponding to a single tag
@@ -214,6 +219,7 @@
     # whole list of lr nodes
     return lrnodes + hrnodes[commonidx:]
 
+
 def merge(repo, fcd, fco, fca):
     '''
     Merge the tags of two revisions, taking into account the base tags
@@ -223,14 +229,14 @@
     # read the p1, p2 and base tags
     # only keep the line numbers for the p1 tags
     p1tags = readtagsformerge(
-        ui, repo, fcd.data().splitlines(), fn="p1 tags",
-        keeplinenums=True)
+        ui, repo, fcd.data().splitlines(), fn="p1 tags", keeplinenums=True
+    )
     p2tags = readtagsformerge(
-        ui, repo, fco.data().splitlines(), fn="p2 tags",
-        keeplinenums=False)
+        ui, repo, fco.data().splitlines(), fn="p2 tags", keeplinenums=False
+    )
     basetags = readtagsformerge(
-        ui, repo, fca.data().splitlines(), fn="base tags",
-        keeplinenums=False)
+        ui, repo, fca.data().splitlines(), fn="base tags", keeplinenums=False
+    )
 
     # recover the list of "lost tags" (i.e. those that were found on the base
     # revision but not on one of the revisions being merged)
@@ -259,9 +265,13 @@
 
     if conflictedtags:
         numconflicts = len(conflictedtags)
-        ui.warn(_('automatic .hgtags merge failed\n'
-            'the following %d tags are in conflict: %s\n')
-            % (numconflicts, ', '.join(sorted(conflictedtags))))
+        ui.warn(
+            _(
+                'automatic .hgtags merge failed\n'
+                'the following %d tags are in conflict: %s\n'
+            )
+            % (numconflicts, ', '.join(sorted(conflictedtags)))
+        )
         return True, 1
 
     writemergedtags(fcd, mergedtags)
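
Most of the remaining churn, both in tagmerge.py above and in the
tags.py hunks that follow, is blank-line and import normalization:
exactly two blank lines appear between module-level definitions, the
single-name parenthesized form 'from .utils import (stringutil,)'
collapses to one line, and 'from .import (' gains its missing space. A
schematic before/after with placeholder names:

    # before this pass:
    #   from .utils import (
    #       stringutil,
    #   )
    #   def first(): pass
    #   def second(): pass

    # after: a one-line import, then two blank lines between each
    # top-level definition
    def first():
        pass


    def second():
        pass
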
--- a/mercurial/tags.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/tags.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,9 +30,7 @@
     scmutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 # Tags computation can be expensive and caches exist to make it fast in
 # the common case.
@@ -83,6 +81,7 @@
 # The most recent changeset (in terms of revlog ordering for the head
 # setting it) for each tag is last.
 
+
 def fnoderevs(ui, repo, revs):
     """return the list of '.hgtags' fnodes used in a set revisions
 
@@ -95,6 +94,7 @@
     fnodes = _filterfnodes(fnodes, nodes)
     return fnodes
 
+
 def _nulltonone(value):
     """convert nullid to None
 
@@ -104,6 +104,7 @@
         return None
     return value
 
+
 def difftags(ui, repo, oldfnodes, newfnodes):
     """list differences between tags expressed in two set of file-nodes
 
@@ -134,6 +135,7 @@
     entries.sort()
     return entries
 
+
 def writediff(fp, difflist):
     """write tags diff information to a file.
 
@@ -172,6 +174,7 @@
             fp.write(updateold % (old, tag))
             fp.write(updatenew % (new, tag))
 
+
 def findglobaltags(ui, repo):
     '''Find global tags in a repo: return a tagsmap
 
@@ -190,8 +193,9 @@
         return alltags
 
     for head in reversed(heads):  # oldest to newest
-        assert head in repo.changelog.nodemap, (
-               "tag cache returned bogus head %s" % short(head))
+        assert (
+            head in repo.changelog.nodemap
+        ), "tag cache returned bogus head %s" % short(head)
     fnodes = _filterfnodes(tagfnode, reversed(heads))
     alltags = _tagsfromfnodes(ui, repo, fnodes)
 
@@ -200,6 +204,7 @@
         _writetagcache(ui, repo, valid, alltags)
     return alltags
 
+
 def _filterfnodes(tagfnode, nodes):
     """return a list of unique fnodes
 
@@ -215,6 +220,7 @@
             fnodes.append(fnode)
     return fnodes
 
+
 def _tagsfromfnodes(ui, repo, fnodes):
     """return a tagsmap from a list of file-node
 
@@ -232,6 +238,7 @@
         _updatetags(filetags, alltags)
     return alltags
 
+
 def readlocaltags(ui, repo, alltags, tagtypes):
     '''Read local tags in repo. Update alltags and tagtypes.'''
     try:
@@ -244,8 +251,8 @@
     # localtags is in the local encoding; re-encode to UTF-8 on
     # input for consistency with the rest of this module.
     filetags = _readtags(
-        ui, repo, data.splitlines(), "localtags",
-        recode=encoding.fromlocal)
+        ui, repo, data.splitlines(), "localtags", recode=encoding.fromlocal
+    )
 
     # remove tags pointing to invalid nodes
     cl = repo.changelog
@@ -257,6 +264,7 @@
 
     _updatetags(filetags, alltags, 'local', tagtypes)
 
+
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
     '''Read tag definitions from a file (or any source of lines).
 
@@ -314,6 +322,7 @@
         bintaghist[name].append(nodebin)
     return bintaghist, hextaglines
 
+
 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
     '''Read tag definitions from a file (or any source of lines).
 
@@ -323,8 +332,9 @@
     is the list of node ids previously associated with it (in file order).
     All node ids are binary, not hex.
     '''
-    filetags, nodelines = _readtaghist(ui, repo, lines, fn, recode=recode,
-                                       calcnodelines=calcnodelines)
+    filetags, nodelines = _readtaghist(
+        ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
+    )
     # util.sortdict().__setitem__ is much slower at replacing then inserting
     # new entries. The difference can matter if there are thousands of tags.
     # Create a new sortdict to avoid the performance penalty.
@@ -333,6 +343,7 @@
         newtags[tag] = (taghist[-1], taghist[:-1])
     return newtags
 
+
 def _updatetags(filetags, alltags, tagtype=None, tagtypes=None):
     """Incorporate the tag info read from one file into dictionnaries
 
@@ -357,14 +368,18 @@
         # otherwise we win because we're tip-most
         anode, ahist = nodehist
         bnode, bhist = alltags[name]
-        if (bnode != anode and anode in bhist and
-            (bnode not in ahist or len(bhist) > len(ahist))):
+        if (
+            bnode != anode
+            and anode in bhist
+            and (bnode not in ahist or len(bhist) > len(ahist))
+        ):
             anode = bnode
         elif tagtype is not None:
             tagtypes[name] = tagtype
         ahist.extend([n for n in bhist if n not in ahist])
         alltags[name] = anode, ahist
 
+
 def _filename(repo):
     """name of a tagcache file for a given repo or repoview"""
     filename = 'tags2'
@@ -372,6 +387,7 @@
         filename = '%s-%s' % (filename, repo.filtername)
     return filename
 
+
 def _readtagcache(ui, repo):
     '''Read the tag cache.
 
@@ -419,14 +435,16 @@
     # (Unchanged tip trivially means no changesets have been added.
     # But, thanks to localrepository.destroyed(), it also means none
     # have been destroyed by strip or rollback.)
-    if (cacherev == tiprev
-            and cachenode == tipnode
-            and cachehash == scmutil.filteredhash(repo, tiprev)):
+    if (
+        cacherev == tiprev
+        and cachenode == tipnode
+        and cachehash == scmutil.filteredhash(repo, tiprev)
+    ):
         tags = _readtags(ui, repo, cachelines, cachefile.name)
         cachefile.close()
         return (None, None, None, tags, False)
     if cachefile:
-        cachefile.close()               # ignore rest of file
+        cachefile.close()  # ignore rest of file
 
     valid = (tiprev, tipnode, scmutil.filteredhash(repo, tiprev))
 
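A minimal sketch of the three-way validity test rewrapped above (helper and variable names are illustrative): the cached tags are only reusable when revision, node, and filtered hash all still match, which is exactly the tuple stored as `valid`:

    def cacheisvalid(cached, current):
        cacherev, cachenode, cachehash = cached
        tiprev, tipnode, tiphash = current
        return (
            cacherev == tiprev
            and cachenode == tipnode
            and cachehash == tiphash
        )

    print(cacheisvalid((5, b'n5', None), (5, b'n5', None)))  # True
    print(cacheisvalid((5, b'n5', None), (6, b'n6', None)))  # False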
@@ -454,7 +472,6 @@
         # potentially expensive search.
         return ([], {}, valid, None, True)
 
-
     # Now we have to look up the .hgtags filenode for every new head.
     # This is the most expensive part of finding tags, so performance
     # depends primarily on the size of newheads.  Worst case: no cache
@@ -466,6 +483,7 @@
     # cachefnode to get to each .hgtags revision quickly.
     return (repoheads, cachefnode, valid, None, True)
 
+
 def _getfnodes(ui, repo, nodes):
     """return .hgtags fnodes for a list of changeset nodes
 
@@ -483,11 +501,16 @@
     fnodescache.write()
 
     duration = util.timer() - starttime
-    ui.log('tagscache',
-           '%d/%d cache hits/lookups in %0.4f seconds\n',
-           fnodescache.hitcount, fnodescache.lookupcount, duration)
+    ui.log(
+        'tagscache',
+        '%d/%d cache hits/lookups in %0.4f seconds\n',
+        fnodescache.hitcount,
+        fnodescache.lookupcount,
+        duration,
+    )
     return cachefnode
 
+
 def _writetagcache(ui, repo, valid, cachetags):
     filename = _filename(repo)
     try:
@@ -495,8 +518,12 @@
     except (OSError, IOError):
         return
 
-    ui.log('tagscache', 'writing .hg/cache/%s with %d tags\n',
-           filename, len(cachetags))
+    ui.log(
+        'tagscache',
+        'writing .hg/cache/%s with %d tags\n',
+        filename,
+        len(cachetags),
+    )
 
     if valid[2]:
         cachefile.write('%d %s %s\n' % (valid[0], hex(valid[1]), hex(valid[2])))
@@ -517,6 +544,7 @@
     except (OSError, IOError):
         pass
 
+
 def tag(repo, names, node, message, local, user, date, editor=False):
     '''tag a revision with one or more symbolic names.
 
@@ -541,26 +569,30 @@
     if not local:
         m = matchmod.exact(['.hgtags'])
         if any(repo.status(match=m, unknown=True, ignored=True)):
-            raise error.Abort(_('working copy of .hgtags is changed'),
-                             hint=_('please commit .hgtags manually'))
+            raise error.Abort(
+                _('working copy of .hgtags is changed'),
+                hint=_('please commit .hgtags manually'),
+            )
 
     with repo.wlock():
-        repo.tags() # instantiate the cache
-        _tag(repo, names, node, message, local, user, date,
-             editor=editor)
+        repo.tags()  # instantiate the cache
+        _tag(repo, names, node, message, local, user, date, editor=editor)
+
 
-def _tag(repo, names, node, message, local, user, date, extra=None,
-         editor=False):
+def _tag(
+    repo, names, node, message, local, user, date, extra=None, editor=False
+):
     if isinstance(names, bytes):
         names = (names,)
 
     branches = repo.branchmap()
     for name in names:
-        repo.hook('pretag', throw=True, node=hex(node), tag=name,
-                  local=local)
+        repo.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
         if name in branches:
-            repo.ui.warn(_("warning: tag %s conflicts with existing"
-            " branch name\n") % name)
+            repo.ui.warn(
+                _("warning: tag %s conflicts with existing" " branch name\n")
+                % name
+            )
 
     def writetags(fp, names, munge, prevtags):
         fp.seek(0, io.SEEK_END)
@@ -572,8 +604,7 @@
             else:
                 m = name
 
-            if (repo._tagscache.tagtypes and
-                name in repo._tagscache.tagtypes):
+            if repo._tagscache.tagtypes and name in repo._tagscache.tagtypes:
                 old = repo.tags().get(name, nullid)
                 fp.write('%s %s\n' % (hex(old), m))
             fp.write('%s %s\n' % (hex(node), m))
@@ -614,18 +645,21 @@
         repo[None].add(['.hgtags'])
 
     m = matchmod.exact(['.hgtags'])
-    tagnode = repo.commit(message, user, date, extra=extra, match=m,
-                          editor=editor)
+    tagnode = repo.commit(
+        message, user, date, extra=extra, match=m, editor=editor
+    )
 
     for name in names:
         repo.hook('tag', node=hex(node), tag=name, local=local)
 
     return tagnode
 
+
 _fnodescachefile = 'hgtagsfnodes1'
-_fnodesrecsize = 4 + 20 # changeset fragment + filenode
+_fnodesrecsize = 4 + 20  # changeset fragment + filenode
 _fnodesmissingrec = '\xff' * 24
 
+
 class hgtagsfnodescache(object):
     """Persistent cache mapping revisions to .hgtags filenodes.
 
@@ -645,6 +679,7 @@
     Instances behave like lists. ``c[i]`` works where i is a rev or
     changeset node. Missing indexes are populated automatically on access.
     """
+
     def __init__(self, repo):
         assert repo.filtername is None
 
@@ -654,7 +689,6 @@
         self.lookupcount = 0
         self.hitcount = 0
 
-
         try:
             data = repo.cachevfs.read(_fnodescachefile)
         except (OSError, IOError):
@@ -703,7 +737,7 @@
         self.lookupcount += 1
 
         offset = rev * _fnodesrecsize
-        record = '%s' % self._raw[offset:offset + _fnodesrecsize]
+        record = '%s' % self._raw[offset : offset + _fnodesrecsize]
         properprefix = node[0:4]
 
         # Validate and return existing entry.
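The reformatted slices above index fixed-width records. A minimal sketch of that layout, assuming only what the constants above state (a 4-byte changeset-hash prefix plus a 20-byte filenode per record):

    _fnodesrecsize = 4 + 20  # changeset fragment + filenode

    def readrecord(raw, rev):
        # each revision's record starts at rev * 24
        offset = rev * _fnodesrecsize
        record = raw[offset : offset + _fnodesrecsize]
        return record[:4], record[4:]  # (node prefix, .hgtags filenode)

    raw = bytearray(b'\xff' * (3 * _fnodesrecsize))  # three missing records
    prefix, fnode = readrecord(raw, 1)
    print(len(prefix), len(fnode))  # 4 20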
@@ -766,7 +800,7 @@
     def _writeentry(self, offset, prefix, fnode):
         # Slices on array instances only accept other array.
         entry = bytearray(prefix + fnode)
-        self._raw[offset:offset + _fnodesrecsize] = entry
+        self._raw[offset : offset + _fnodesrecsize] = entry
         # self._dirtyoffset could be None.
         self._dirtyoffset = min(self._dirtyoffset or 0, offset or 0)
 
@@ -779,7 +813,7 @@
         if self._dirtyoffset is None:
             return
 
-        data = self._raw[self._dirtyoffset:]
+        data = self._raw[self._dirtyoffset :]
         if not data:
             return
 
@@ -788,8 +822,11 @@
         try:
             lock = repo.wlock(wait=False)
         except error.LockError:
-            repo.ui.log('tagscache', 'not writing .hg/cache/%s because '
-                        'lock cannot be acquired\n' % (_fnodescachefile))
+            repo.ui.log(
+                'tagscache',
+                'not writing .hg/cache/%s because '
+                'lock cannot be acquired\n' % _fnodescachefile,
+            )
             return
 
         try:
@@ -799,19 +836,23 @@
                 actualoffset = f.tell()
                 if actualoffset < self._dirtyoffset:
                     self._dirtyoffset = actualoffset
-                    data = self._raw[self._dirtyoffset:]
+                    data = self._raw[self._dirtyoffset :]
                 f.seek(self._dirtyoffset)
                 f.truncate()
-                repo.ui.log('tagscache',
-                            'writing %d bytes to cache/%s\n' % (
-                            len(data), _fnodescachefile))
+                repo.ui.log(
+                    'tagscache',
+                    'writing %d bytes to cache/%s\n'
+                    % (len(data), _fnodescachefile),
+                )
                 f.write(data)
                 self._dirtyoffset = None
             finally:
                 f.close()
         except (IOError, OSError) as inst:
-            repo.ui.log('tagscache',
-                        "couldn't write cache/%s: %s\n" % (
-                            _fnodescachefile, stringutil.forcebytestr(inst)))
+            repo.ui.log(
+                'tagscache',
+                "couldn't write cache/%s: %s\n"
+                % (_fnodescachefile, stringutil.forcebytestr(inst)),
+            )
         finally:
             lock.release()
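The hunk above keeps the cache write best-effort: if the working-directory lock is busy, log and bail rather than block. A minimal sketch of the pattern, with threading.Lock standing in for repo.wlock(wait=False) (an assumption for illustration):

    import threading

    cachelock = threading.Lock()  # stand-in for repo.wlock(wait=False)

    def writecache(data, log=print):
        # non-blocking acquire: skip the write if someone else holds it
        if not cachelock.acquire(blocking=False):
            log('not writing cache because lock cannot be acquired')
            return
        try:
            log('writing %d bytes to cache' % len(data))
        finally:
            cachelock.release()

    writecache(b'payload')  # writing 7 bytes to cache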
--- a/mercurial/templatefilters.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/templatefilters.py	Sun Oct 06 09:45:02 2019 -0400
@@ -42,6 +42,7 @@
 
 templatefilter = registrar.templatefilter(filters)
 
+
 @templatefilter('addbreaks', intype=bytes)
 def addbreaks(text):
     """Any text. Add an XHTML "<br />" tag before the end of
@@ -49,13 +50,17 @@
     """
     return text.replace('\n', '<br/>\n')
 
-agescales = [("year", 3600 * 24 * 365, 'Y'),
-             ("month", 3600 * 24 * 30, 'M'),
-             ("week", 3600 * 24 * 7, 'W'),
-             ("day", 3600 * 24, 'd'),
-             ("hour", 3600, 'h'),
-             ("minute", 60, 'm'),
-             ("second", 1, 's')]
+
+agescales = [
+    ("year", 3600 * 24 * 365, 'Y'),
+    ("month", 3600 * 24 * 30, 'M'),
+    ("week", 3600 * 24 * 7, 'W'),
+    ("day", 3600 * 24, 'd'),
+    ("hour", 3600, 'h'),
+    ("minute", 60, 'm'),
+    ("second", 1, 's'),
+]
+
 
 @templatefilter('age', intype=templateutil.date)
 def age(date, abbrev=False):
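A minimal sketch of how the age filter (whose body continues in the next hunks) walks the agescales table just reformatted: largest unit first, report the first count of at least two, with seconds as the fallback:

    agescales = [
        ("year", 3600 * 24 * 365, 'Y'),
        ("month", 3600 * 24 * 30, 'M'),
        ("week", 3600 * 24 * 7, 'W'),
        ("day", 3600 * 24, 'd'),
        ("hour", 3600, 'h'),
        ("minute", 60, 'm'),
        ("second", 1, 's'),
    ]

    def age(delta):
        # simplified: no abbreviation or future-date handling
        for term, scale, _abbrev in agescales:
            n = delta // scale
            if n >= 2 or scale == 1:
                return '%d %s%s ago' % (n, term, '' if n == 1 else 's')

    print(age(3 * 24 * 3600))  # 3 days ago
    print(age(90))             # 90 seconds ago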
@@ -67,6 +72,7 @@
         if c == 1:
             return t
         return t + "s"
+
     def fmt(t, c, a):
         if abbrev:
             return "%d%s" % (c, a)
@@ -92,6 +98,7 @@
                 return '%s from now' % fmt(t, n, a)
             return '%s ago' % fmt(t, n, a)
 
+
 @templatefilter('basename', intype=bytes)
 def basename(path):
     """Any text. Treats the text as a path, and returns the last
@@ -100,11 +107,13 @@
     """
     return os.path.basename(path)
 
+
 @templatefilter('cbor')
 def cbor(obj):
     """Any object. Serializes the object to CBOR bytes."""
     return b''.join(cborutil.streamencode(obj))
 
+
 @templatefilter('commondir')
 def commondir(filelist):
     """List of text. Treats each list item as file name with /
@@ -118,17 +127,19 @@
     For example, ["foo/bar/baz", "foo/baz/bar"] becomes "foo" and
     ["foo/bar", "baz"] becomes "".
     """
+
     def common(a, b):
         if len(a) > len(b):
-            a = b[:len(a)]
+            a = b[: len(a)]
         elif len(b) > len(a):
-            b = b[:len(a)]
+            b = b[: len(a)]
         if a == b:
             return a
         for i in pycompat.xrange(len(a)):
             if a[i] != b[i]:
                 return a[:i]
         return a
+
     try:
         if not filelist:
             return ""
@@ -144,6 +155,7 @@
     except TypeError:
         raise error.ParseError(_('argument is not a list of text'))
 
+
 @templatefilter('count')
 def count(i):
     """List or text. Returns the length as an integer."""
@@ -152,6 +164,7 @@
     except TypeError:
         raise error.ParseError(_('not countable'))
 
+
 @templatefilter('dirname', intype=bytes)
 def dirname(path):
     """Any text. Treats the text as a path, and strips the last
@@ -159,6 +172,7 @@
     """
     return os.path.dirname(path)
 
+
 @templatefilter('domain', intype=bytes)
 def domain(author):
     """Any text. Finds the first string that looks like an email
@@ -168,12 +182,13 @@
     f = author.find('@')
     if f == -1:
         return ''
-    author = author[f + 1:]
+    author = author[f + 1 :]
     f = author.find('>')
     if f >= 0:
         author = author[:f]
     return author
 
+
 @templatefilter('email', intype=bytes)
 def email(text):
     """Any text. Extracts the first string that looks like an email
@@ -182,6 +197,7 @@
     """
     return stringutil.email(text)
 
+
 @templatefilter('escape', intype=bytes)
 def escape(text):
     """Any text. Replaces the special XML/XHTML characters "&", "<"
@@ -189,9 +205,11 @@
     """
     return url.escape(text.replace('\0', ''), True)
 
+
 para_re = None
 space_re = None
 
+
 def fill(text, width, initindent='', hangindent=''):
     '''fill many paragraphs with optional indentation.'''
     global para_re, space_re
@@ -208,27 +226,40 @@
                 w = len(uctext)
                 while w > 0 and uctext[w - 1].isspace():
                     w -= 1
-                yield (encoding.unitolocal(uctext[:w]),
-                       encoding.unitolocal(uctext[w:]))
+                yield (
+                    encoding.unitolocal(uctext[:w]),
+                    encoding.unitolocal(uctext[w:]),
+                )
                 break
-            yield text[start:m.start(0)], m.group(1)
+            yield text[start : m.start(0)], m.group(1)
             start = m.end(1)
 
-    return "".join([stringutil.wrap(space_re.sub(' ',
-                                                 stringutil.wrap(para, width)),
-                                    width, initindent, hangindent) + rest
-                    for para, rest in findparas()])
+    return "".join(
+        [
+            stringutil.wrap(
+                space_re.sub(' ', stringutil.wrap(para, width)),
+                width,
+                initindent,
+                hangindent,
+            )
+            + rest
+            for para, rest in findparas()
+        ]
+    )
+
 
 @templatefilter('fill68', intype=bytes)
 def fill68(text):
     """Any text. Wraps the text to fit in 68 columns."""
     return fill(text, 68)
 
+
 @templatefilter('fill76', intype=bytes)
 def fill76(text):
     """Any text. Wraps the text to fit in 76 columns."""
     return fill(text, 76)
 
+
 @templatefilter('firstline', intype=bytes)
 def firstline(text):
     """Any text. Returns the first line of text."""
@@ -237,6 +268,7 @@
     except IndexError:
         return ''
 
+
 @templatefilter('hex', intype=bytes)
 def hexfilter(text):
     """Any text. Convert a binary Mercurial node identifier into
@@ -244,6 +276,7 @@
     """
     return node.hex(text)
 
+
 @templatefilter('hgdate', intype=templateutil.date)
 def hgdate(text):
     """Date. Returns the date as a pair of numbers: "1157407993
@@ -251,6 +284,7 @@
     """
     return "%d %d" % text
 
+
 @templatefilter('isodate', intype=templateutil.date)
 def isodate(text):
     """Date. Returns the date in ISO 8601 format: "2009-08-18 13:00
@@ -258,6 +292,7 @@
     """
     return dateutil.datestr(text, '%Y-%m-%d %H:%M %1%2')
 
+
 @templatefilter('isodatesec', intype=templateutil.date)
 def isodatesec(text):
     """Date. Returns the date in ISO 8601 format, including
@@ -266,11 +301,13 @@
     """
     return dateutil.datestr(text, '%Y-%m-%d %H:%M:%S %1%2')
 
+
 def indent(text, prefix):
     '''indent each non-empty line of text after first with prefix.'''
     lines = text.splitlines()
     num_lines = len(lines)
     endswithnewline = text[-1:] == '\n'
+
     def indenter():
         for i in pycompat.xrange(num_lines):
             l = lines[i]
@@ -279,8 +316,10 @@
             yield l
             if i < num_lines - 1 or endswithnewline:
                 yield '\n'
+
     return "".join(indenter())
 
+
 @templatefilter('json')
 def json(obj, paranoid=True):
     """Any object. Serializes the object to a JSON formatted text."""
@@ -296,27 +335,33 @@
         return '"%s"' % encoding.jsonescape(obj, paranoid=paranoid)
     elif isinstance(obj, type(u'')):
         raise error.ProgrammingError(
-            'Mercurial only does output with bytes: %r' % obj)
+            'Mercurial only does output with bytes: %r' % obj
+        )
     elif util.safehasattr(obj, 'keys'):
-        out = ['"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid),
-                             json(v, paranoid))
-               for k, v in sorted(obj.iteritems())]
+        out = [
+            '"%s": %s'
+            % (encoding.jsonescape(k, paranoid=paranoid), json(v, paranoid))
+            for k, v in sorted(obj.iteritems())
+        ]
         return '{' + ', '.join(out) + '}'
     elif util.safehasattr(obj, '__iter__'):
         out = [json(i, paranoid) for i in obj]
         return '[' + ', '.join(out) + ']'
     raise error.ProgrammingError('cannot encode %r' % obj)
 
+
 @templatefilter('lower', intype=bytes)
 def lower(text):
     """Any text. Converts the text to lowercase."""
     return encoding.lower(text)
 
+
 @templatefilter('nonempty', intype=bytes)
 def nonempty(text):
     """Any text. Returns '(none)' if the string is empty."""
     return text or "(none)"
 
+
 @templatefilter('obfuscate', intype=bytes)
 def obfuscate(text):
     """Any text. Returns the input text rendered as a sequence of
@@ -325,6 +370,7 @@
     text = unicode(text, pycompat.sysstr(encoding.encoding), r'replace')
     return ''.join(['&#%d;' % ord(c) for c in text])
 
+
 @templatefilter('permissions', intype=bytes)
 def permissions(flags):
     if "l" in flags:
@@ -333,6 +379,7 @@
         return "-rwxr-xr-x"
     return "-rw-r--r--"
 
+
 @templatefilter('person', intype=bytes)
 def person(author):
     """Any text. Returns the name before an email address,
@@ -340,6 +387,7 @@
     """
     return stringutil.person(author)
 
+
 @templatefilter('revescape', intype=bytes)
 def revescape(text):
     """Any text. Escapes all "special" characters, except @.
@@ -348,6 +396,7 @@
     """
     return urlreq.quote(text, safe='/@').replace('/', '%252F')
 
+
 @templatefilter('rfc3339date', intype=templateutil.date)
 def rfc3339date(text):
     """Date. Returns a date using the Internet date format
@@ -355,6 +404,7 @@
     """
     return dateutil.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2")
 
+
 @templatefilter('rfc822date', intype=templateutil.date)
 def rfc822date(text):
     """Date. Returns a date using the same format used in email
@@ -362,6 +412,7 @@
     """
     return dateutil.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2")
 
+
 @templatefilter('short', intype=bytes)
 def short(text):
     """Changeset hash. Returns the short form of a changeset hash,
@@ -369,6 +420,7 @@
     """
     return text[:12]
 
+
 @templatefilter('shortbisect', intype=bytes)
 def shortbisect(label):
     """Any text. Treats `label` as a bisection status, and
@@ -380,25 +432,30 @@
         return label[0:1].upper()
     return ' '
 
+
 @templatefilter('shortdate', intype=templateutil.date)
 def shortdate(text):
     """Date. Returns a date like "2006-09-18"."""
     return dateutil.shortdate(text)
 
+
 @templatefilter('slashpath', intype=bytes)
 def slashpath(path):
     """Any text. Replaces the native path separator with slash."""
     return util.pconvert(path)
 
+
 @templatefilter('splitlines', intype=bytes)
 def splitlines(text):
     """Any text. Split text into a list of lines."""
     return templateutil.hybridlist(text.splitlines(), name='line')
 
+
 @templatefilter('stringescape', intype=bytes)
 def stringescape(text):
     return stringutil.escapestr(text)
 
+
 @templatefilter('stringify', intype=bytes)
 def stringify(thing):
     """Any type. Turns the value into text by converting values into
@@ -406,6 +463,7 @@
     """
     return thing  # coerced by the intype
 
+
 @templatefilter('stripdir', intype=bytes)
 def stripdir(text):
     """Treat the text as path and strip a directory level, if
@@ -417,6 +475,7 @@
     else:
         return dir
 
+
 @templatefilter('tabindent', intype=bytes)
 def tabindent(text):
     """Any text. Returns the text, with every non-empty line
@@ -424,11 +483,13 @@
     """
     return indent(text, '\t')
 
+
 @templatefilter('upper', intype=bytes)
 def upper(text):
     """Any text. Converts the text to uppercase."""
     return encoding.upper(text)
 
+
 @templatefilter('urlescape', intype=bytes)
 def urlescape(text):
     """Any text. Escapes all "special" characters. For example,
@@ -436,32 +497,38 @@
     """
     return urlreq.quote(text)
 
+
 @templatefilter('user', intype=bytes)
 def userfilter(text):
     """Any text. Returns a short representation of a user name or email
     address."""
     return stringutil.shortuser(text)
 
+
 @templatefilter('emailuser', intype=bytes)
 def emailuser(text):
     """Any text. Returns the user portion of an email address."""
     return stringutil.emailuser(text)
 
+
 @templatefilter('utf8', intype=bytes)
 def utf8(text):
     """Any text. Converts from the local character encoding to UTF-8."""
     return encoding.fromlocal(text)
 
+
 @templatefilter('xmlescape', intype=bytes)
 def xmlescape(text):
-    text = (text
-            .replace('&', '&amp;')
-            .replace('<', '&lt;')
-            .replace('>', '&gt;')
-            .replace('"', '&quot;')
-            .replace("'", '&#39;')) # &apos; invalid in HTML
+    text = (
+        text.replace('&', '&amp;')
+        .replace('<', '&lt;')
+        .replace('>', '&gt;')
+        .replace('"', '&quot;')
+        .replace("'", '&#39;')
+    )  # &apos; invalid in HTML
     return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)
 
+
 def websub(text, websubtable):
     """:websub: Any text. Only applies to hgweb. Applies the regular
     expression replacements defined in the websub section.
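A minimal sketch of the websub loop whose body follows in the next hunk: each table entry pairs a compiled pattern with a replacement format, applied in order (the sample rule is invented for illustration):

    import re

    def websub(text, websubtable):
        for regexp, fmt in websubtable:
            text = regexp.sub(fmt, text)
        return text

    table = [(re.compile(r'issue(\d+)'), r'<a href="/bug/\1">issue \1</a>')]
    print(websub('fixes issue42', table))
    # fixes <a href="/bug/42">issue 42</a>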
@@ -471,11 +538,13 @@
             text = regexp.sub(format, text)
     return text
 
+
 def loadfilter(ui, extname, registrarobj):
     """Load template filter from specified registrarobj
     """
     for name, func in registrarobj._table.iteritems():
         filters[name] = func
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = filters.values()
--- a/mercurial/templatefuncs.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/templatefuncs.py	Sun Oct 06 09:45:02 2019 -0400
@@ -49,6 +49,7 @@
 funcs = {}
 templatefunc = registrar.templatefunc(funcs)
 
+
 @templatefunc('date(date[, fmt])')
 def date(context, mapping, args):
     """Format a date. See :hg:`help dates` for formatting
@@ -58,9 +59,13 @@
         # i18n: "date" is a keyword
         raise error.ParseError(_("date expects one or two arguments"))
 
-    date = evaldate(context, mapping, args[0],
-                    # i18n: "date" is a keyword
-                    _("date expects a date information"))
+    date = evaldate(
+        context,
+        mapping,
+        args[0],
+        # i18n: "date" is a keyword
+        _("date expects a date information"),
+    )
     fmt = None
     if len(args) == 2:
         fmt = evalstring(context, mapping, args[1])
@@ -69,6 +74,7 @@
     else:
         return dateutil.datestr(date, fmt)
 
+
 @templatefunc('dict([[key=]value...])', argspec='*args **kwargs')
 def dict_(context, mapping, args):
     """Construct a dict from key-value pairs. A key may be omitted if
@@ -83,12 +89,16 @@
             raise error.ParseError(_("duplicated dict key '%s' inferred") % k)
         data[k] = evalfuncarg(context, mapping, v)
 
-    data.update((k, evalfuncarg(context, mapping, v))
-                for k, v in args['kwargs'].iteritems())
+    data.update(
+        (k, evalfuncarg(context, mapping, v))
+        for k, v in args['kwargs'].iteritems()
+    )
     return templateutil.hybriddict(data)
 
-@templatefunc('diff([includepattern [, excludepattern]])',
-              requires={'ctx', 'ui'})
+
+@templatefunc(
+    'diff([includepattern [, excludepattern]])', requires={'ctx', 'ui'}
+)
 def diff(context, mapping, args):
     """Show a diff, optionally
     specifying files to include or exclude."""
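A minimal sketch of the dict() template function shown above: positional pairs land first, duplicate inferred keys are rejected, then keyword arguments are merged in (the templater's evalfuncarg machinery is simplified away):

    def dict_(*pairs, **kwargs):
        data = {}
        for k, v in pairs:
            if k in data or k in kwargs:
                raise ValueError("duplicated dict key '%s' inferred" % k)
            data[k] = v
        data.update(kwargs)
        return data

    print(dict_(('rev', 5), node='abc123'))
    # {'rev': 5, 'node': 'abc123'}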
@@ -106,11 +116,13 @@
     ctx = context.resource(mapping, 'ctx')
     ui = context.resource(mapping, 'ui')
     diffopts = diffutil.diffallopts(ui)
-    chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)),
-                      opts=diffopts)
+    chunks = ctx.diff(
+        match=ctx.match([], getpatterns(0), getpatterns(1)), opts=diffopts
+    )
 
     return ''.join(chunks)
 
+
 @templatefunc('extdata(source)', argspec='source', requires={'ctx', 'cache'})
 def extdata(context, mapping, args):
     """Show a text read from the specified extdata source. (EXPERIMENTAL)"""
@@ -122,8 +134,10 @@
     if not source:
         sym = templateutil.findsymbolicname(args['source'])
         if sym:
-            raise error.ParseError(_('empty data source specified'),
-                                   hint=_("did you mean extdata('%s')?") % sym)
+            raise error.ParseError(
+                _('empty data source specified'),
+                hint=_("did you mean extdata('%s')?") % sym,
+            )
         else:
             raise error.ParseError(_('empty data source specified'))
     cache = context.resource(mapping, 'cache').setdefault('extdata', {})
@@ -134,6 +148,7 @@
         data = cache[source] = scmutil.extdatasource(ctx.repo(), source)
     return data.get(ctx.rev(), '')
 
+
 @templatefunc('files(pattern)', requires={'ctx'})
 def files(context, mapping, args):
     """All files of the current changeset matching the pattern. See
@@ -148,6 +163,7 @@
     files = list(ctx.matches(m))
     return templateutil.compatfileslist(context, mapping, "file", files)
 
+
 @templatefunc('fill(text[, width[, initialindent[, hangindent]]])')
 def fill(context, mapping, args):
     """Fill many
@@ -161,9 +177,13 @@
     initindent = ''
     hangindent = ''
     if 2 <= len(args) <= 4:
-        width = evalinteger(context, mapping, args[1],
-                            # i18n: "fill" is a keyword
-                            _("fill expects an integer width"))
+        width = evalinteger(
+            context,
+            mapping,
+            args[1],
+            # i18n: "fill" is a keyword
+            _("fill expects an integer width"),
+        )
         try:
             initindent = evalstring(context, mapping, args[2])
             hangindent = evalstring(context, mapping, args[3])
@@ -172,6 +192,7 @@
 
     return templatefilters.fill(text, width, initindent, hangindent)
 
+
 @templatefunc('filter(iterable[, expr])')
 def filter_(context, mapping, args):
     """Remove empty elements from a list or a dict. If expr specified, it's
@@ -181,16 +202,21 @@
         raise error.ParseError(_("filter expects one or two arguments"))
     iterable = evalwrapped(context, mapping, args[0])
     if len(args) == 1:
+
         def select(w):
             return w.tobool(context, mapping)
+
     else:
+
         def select(w):
             if not isinstance(w, templateutil.mappable):
                 raise error.ParseError(_("not filterable by expression"))
             lm = context.overlaymap(mapping, w.tomap(context))
             return evalboolean(context, lm, args[1])
+
     return iterable.filter(context, mapping, select)
 
+
 @templatefunc('formatnode(node)', requires={'ui'})
 def formatnode(context, mapping, args):
     """Obtain the preferred form of a changeset hash. (DEPRECATED)"""
@@ -204,6 +230,7 @@
         return node
     return templatefilters.short(node)
 
+
 @templatefunc('mailmap(author)', requires={'repo', 'cache'})
 def mailmap(context, mapping, args):
     """Return the author, updated according to the value
@@ -222,9 +249,11 @@
 
     return stringutil.mapname(cache['mailmap'], author)
 
+
 @templatefunc(
     'pad(text, width[, fillchar=\' \'[, left=False[, truncate=False]]])',
-    argspec='text width fillchar left truncate')
+    argspec='text width fillchar left truncate',
+)
 def pad(context, mapping, args):
     """Pad text with a
     fill character."""
@@ -232,9 +261,13 @@
         # i18n: "pad" is a keyword
         raise error.ParseError(_("pad() expects two to four arguments"))
 
-    width = evalinteger(context, mapping, args['width'],
-                        # i18n: "pad" is a keyword
-                        _("pad() expects an integer width"))
+    width = evalinteger(
+        context,
+        mapping,
+        args['width'],
+        # i18n: "pad" is a keyword
+        _("pad() expects an integer width"),
+    )
 
     text = evalstring(context, mapping, args['text'])
 
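A minimal sketch of pad()'s fill step, whose tail is visible at the end of the next hunk: compute the shortfall against the requested width and pad on the requested side (truncation handling is elided):

    def pad(text, width, fillchar=' ', left=False):
        fillwidth = width - len(text)
        if fillwidth <= 0:
            return text
        if left:
            return fillchar * fillwidth + text
        return text + fillchar * fillwidth

    print('[%s]' % pad('abc', 6))             # [abc   ]
    print('[%s]' % pad('abc', 6, left=True))  # [   abc]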
@@ -261,6 +294,7 @@
     else:
         return text + fillchar * fillwidth
 
+
 @templatefunc('indent(text, indentchars[, firstline])')
 def indent(context, mapping, args):
     """Indents all non-empty lines
@@ -282,6 +316,7 @@
     # the indent function doesn't indent the first line, so we do it here
     return templatefilters.indent(firstline + text, indent)
 
+
 @templatefunc('get(dict, key)')
 def get(context, mapping, args):
     """Get an attribute/key from an object. Some keywords
@@ -300,24 +335,28 @@
         hint = _("get() expects a dict as first argument")
         raise error.ParseError(bytes(err), hint=hint)
 
+
 @templatefunc('config(section, name[, default])', requires={'ui'})
 def config(context, mapping, args):
     """Returns the requested hgrc config option as a string."""
     fn = context.resource(mapping, 'ui').config
     return _config(context, mapping, args, fn, evalstring)
 
+
 @templatefunc('configbool(section, name[, default])', requires={'ui'})
 def configbool(context, mapping, args):
     """Returns the requested hgrc config option as a boolean."""
     fn = context.resource(mapping, 'ui').configbool
     return _config(context, mapping, args, fn, evalboolean)
 
+
 @templatefunc('configint(section, name[, default])', requires={'ui'})
 def configint(context, mapping, args):
     """Returns the requested hgrc config option as an integer."""
     fn = context.resource(mapping, 'ui').configint
     return _config(context, mapping, args, fn, evalinteger)
 
+
 def _config(context, mapping, args, configfn, defaultfn):
     if not (2 <= len(args) <= 3):
         raise error.ParseError(_("config expects two or three arguments"))
@@ -333,6 +372,7 @@
     else:
         return configfn(section, name)
 
+
 @templatefunc('if(expr, then[, else])')
 def if_(context, mapping, args):
     """Conditionally execute based on the result of
@@ -347,6 +387,7 @@
     elif len(args) == 3:
         return evalrawexp(context, mapping, args[2])
 
+
 @templatefunc('ifcontains(needle, haystack, then[, else])')
 def ifcontains(context, mapping, args):
     """Conditionally execute based
@@ -367,6 +408,7 @@
     elif len(args) == 4:
         return evalrawexp(context, mapping, args[3])
 
+
 @templatefunc('ifeq(expr1, expr2, then[, else])')
 def ifeq(context, mapping, args):
     """Conditionally execute based on
@@ -382,6 +424,7 @@
     elif len(args) == 4:
         return evalrawexp(context, mapping, args[3])
 
+
 @templatefunc('join(list, sep)')
 def join(context, mapping, args):
     """Join items in a list with a delimiter."""
@@ -395,6 +438,7 @@
         joiner = evalstring(context, mapping, args[1])
     return joinset.join(context, mapping, joiner)
 
+
 @templatefunc('label(label, expr)', requires={'ui'})
 def label(context, mapping, args):
     """Apply a label to generated content. Content with
@@ -412,6 +456,7 @@
 
     return ui.label(thing, label)
 
+
 @templatefunc('latesttag([pattern])')
 def latesttag(context, mapping, args):
     """The global tags matching the given pattern on the
@@ -429,6 +474,7 @@
         pattern = evalstring(context, mapping, args[0])
     return templatekw.showlatesttags(context, mapping, pattern)
 
+
 @templatefunc('localdate(date[, tz])')
 def localdate(context, mapping, args):
     """Converts a date to the specified timezone.
@@ -437,9 +483,13 @@
         # i18n: "localdate" is a keyword
         raise error.ParseError(_("localdate expects one or two arguments"))
 
-    date = evaldate(context, mapping, args[0],
-                    # i18n: "localdate" is a keyword
-                    _("localdate expects a date information"))
+    date = evaldate(
+        context,
+        mapping,
+        args[0],
+        # i18n: "localdate" is a keyword
+        _("localdate expects a date information"),
+    )
     if len(args) >= 2:
         tzoffset = None
         tz = evalfuncarg(context, mapping, args[1])
@@ -457,6 +507,7 @@
         tzoffset = dateutil.makedate()[1]
     return templateutil.date((date[0], tzoffset))
 
+
 @templatefunc('max(iterable)')
 def max_(context, mapping, args, **kwargs):
     """Return the max of an iterable"""
@@ -472,6 +523,7 @@
         hint = _("max first argument should be an iterable")
         raise error.ParseError(bytes(err), hint=hint)
 
+
 @templatefunc('min(iterable)')
 def min_(context, mapping, args, **kwargs):
     """Return the min of an iterable"""
@@ -487,6 +539,7 @@
         hint = _("min first argument should be an iterable")
         raise error.ParseError(bytes(err), hint=hint)
 
+
 @templatefunc('mod(a, b)')
 def mod(context, mapping, args):
     """Calculate a mod b such that a / b + a mod b == a"""
@@ -495,8 +548,10 @@
         raise error.ParseError(_("mod expects two arguments"))
 
     func = lambda a, b: a % b
-    return templateutil.runarithmetic(context, mapping,
-                                      (func, args[0], args[1]))
+    return templateutil.runarithmetic(
+        context, mapping, (func, args[0], args[1])
+    )
+
 
 @templatefunc('obsfateoperations(markers)')
 def obsfateoperations(context, mapping, args):
@@ -515,6 +570,7 @@
         errmsg = _("obsfateoperations first argument should be an iterable")
         raise error.ParseError(errmsg)
 
+
 @templatefunc('obsfatedate(markers)')
 def obsfatedate(context, mapping, args):
     """Compute obsfate related information based on markers (EXPERIMENTAL)"""
@@ -533,6 +589,7 @@
         errmsg = _("obsfatedate first argument should be an iterable")
         raise error.ParseError(errmsg)
 
+
 @templatefunc('obsfateusers(markers)')
 def obsfateusers(context, mapping, args):
     """Compute obsfate related information based on markers (EXPERIMENTAL)"""
@@ -547,10 +604,12 @@
         return templateutil.hybridlist(data, name='user')
     except (TypeError, KeyError, ValueError):
         # i18n: "obsfateusers" is a keyword
-        msg = _("obsfateusers first argument should be an iterable of "
-                "obsmakers")
+        msg = _(
+            "obsfateusers first argument should be an iterable of " "obsmakers"
+        )
         raise error.ParseError(msg)
 
+
 @templatefunc('obsfateverb(successors, markers)')
 def obsfateverb(context, mapping, args):
     """Compute obsfate related information based on successors (EXPERIMENTAL)"""
@@ -568,6 +627,7 @@
         errmsg = _("obsfateverb first argument should be countable")
         raise error.ParseError(errmsg)
 
+
 @templatefunc('relpath(path)', requires={'repo'})
 def relpath(context, mapping, args):
     """Convert a repository-absolute path into a filesystem path relative to
@@ -580,6 +640,7 @@
     path = evalstring(context, mapping, args[0])
     return repo.pathto(path)
 
+
 @templatefunc('revset(query[, formatargs...])', requires={'repo', 'cache'})
 def revset(context, mapping, args):
     """Execute a revision set query. See
@@ -608,6 +669,7 @@
             revsetcache[raw] = revs
     return templatekw.showrevslist(context, mapping, "revision", revs)
 
+
 @templatefunc('rstdoc(text, style)')
 def rstdoc(context, mapping, args):
     """Format reStructuredText."""
@@ -620,6 +682,7 @@
 
     return minirst.format(text, style=style, keep=['verbose'])
 
+
 @templatefunc('search(pattern, text)')
 def search(context, mapping, args):
     """Look for the first text matching the regular expression pattern.
@@ -636,14 +699,18 @@
         # i18n: "search" is a keyword
         raise error.ParseError(_(b'search got an invalid pattern: %s') % pat)
     # named groups shouldn't shadow *reserved* resource keywords
-    badgroups = (context.knownresourcekeys()
-                 & set(pycompat.byteskwargs(patre.groupindex)))
+    badgroups = context.knownresourcekeys() & set(
+        pycompat.byteskwargs(patre.groupindex)
+    )
     if badgroups:
         raise error.ParseError(
             # i18n: "search" is a keyword
             _(b'invalid group %(group)s in search pattern: %(pat)s')
-            % {b'group': b', '.join("'%s'" % g for g in sorted(badgroups)),
-               b'pat': pat})
+            % {
+                b'group': b', '.join("'%s'" % g for g in sorted(badgroups)),
+                b'pat': pat,
+            }
+        )
 
     match = patre.search(src)
     if not match:
@@ -654,6 +721,7 @@
     lm.update(pycompat.byteskwargs(match.groupdict()))
     return templateutil.mappingdict(lm, tmpl=b'{0}')
 
+
 @templatefunc('separate(sep, args...)', argspec='sep *args')
 def separate(context, mapping, args):
     """Add a separator between non-empty arguments."""
@@ -673,6 +741,7 @@
             yield sep
         yield argstr
 
+
 @templatefunc('shortest(node, minlength=4)', requires={'repo', 'cache'})
 def shortest(context, mapping, args):
     """Obtain the shortest representation of
@@ -685,9 +754,13 @@
 
     minlength = 4
     if len(args) > 1:
-        minlength = evalinteger(context, mapping, args[1],
-                                # i18n: "shortest" is a keyword
-                                _("shortest() expects an integer minlength"))
+        minlength = evalinteger(
+            context,
+            mapping,
+            args[1],
+            # i18n: "shortest" is a keyword
+            _("shortest() expects an integer minlength"),
+        )
 
     repo = context.resource(mapping, 'repo')
     if len(hexnode) > 40:
@@ -712,6 +785,7 @@
     except error.RepoLookupError:
         return hexnode
 
+
 @templatefunc('strip(text[, chars])')
 def strip(context, mapping, args):
     """Strip characters from a string. By default,
@@ -726,6 +800,7 @@
         return text.strip(chars)
     return text.strip()
 
+
 @templatefunc('sub(pattern, replacement, expression)')
 def sub(context, mapping, args):
     """Perform text substitution
@@ -748,6 +823,7 @@
         # i18n: "sub" is a keyword
         raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)
 
+
 @templatefunc('startswith(pattern, text)')
 def startswith(context, mapping, args):
     """Returns the value from the "text" argument
@@ -762,17 +838,23 @@
         return text
     return ''
 
+
 @templatefunc('word(number, text[, separator])')
 def word(context, mapping, args):
     """Return the nth word from a string."""
     if not (2 <= len(args) <= 3):
         # i18n: "word" is a keyword
-        raise error.ParseError(_("word expects two or three arguments, got %d")
-                               % len(args))
+        raise error.ParseError(
+            _("word expects two or three arguments, got %d") % len(args)
+        )
 
-    num = evalinteger(context, mapping, args[0],
-                      # i18n: "word" is a keyword
-                      _("word expects an integer index"))
+    num = evalinteger(
+        context,
+        mapping,
+        args[0],
+        # i18n: "word" is a keyword
+        _("word expects an integer index"),
+    )
     text = evalstring(context, mapping, args[1])
     if len(args) == 3:
         splitter = evalstring(context, mapping, args[2])
@@ -785,11 +867,13 @@
     else:
         return tokens[num]
 
+
 def loadfunction(ui, extname, registrarobj):
     """Load template function from specified registrarobj
     """
     for name, func in registrarobj._table.iteritems():
         funcs[name] = func
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = funcs.values()
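A minimal sketch of the word() lookup whose tail appears a few hunks above: split on the optional separator and return the requested token, with '' for an out-of-range index (the integer validation done by the real function is elided):

    def word(num, text, splitter=None):
        tokens = text.split(splitter)  # None means any whitespace
        if num >= len(tokens) or num < -len(tokens):
            return ''
        return tokens[num]

    print(word(1, 'alpha beta gamma'))  # beta
    print(word(5, 'alpha beta gamma'))  # (empty string)
    print(word(1, 'a,b,c', ','))        # b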
--- a/mercurial/templatekw.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/templatekw.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,9 +29,7 @@
     templateutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 _hybrid = templateutil.hybrid
 hybriddict = templateutil.hybriddict
@@ -40,6 +38,7 @@
 compatlist = templateutil.compatlist
 _showcompatlist = templateutil._showcompatlist
 
+
 def getlatesttags(context, mapping, pattern=None):
     '''return date, distance and name for the latest tag of rev'''
     repo = context.resource(mapping, 'repo')
@@ -66,9 +65,11 @@
         if rev in latesttags:
             continue
         ctx = repo[rev]
-        tags = [t for t in ctx.tags()
-                if (repo.tagtype(t) and repo.tagtype(t) != 'local'
-                    and match(t))]
+        tags = [
+            t
+            for t in ctx.tags()
+            if (repo.tagtype(t) and repo.tagtype(t) != 'local' and match(t))
+        ]
         if tags:
             latesttags[rev] = ctx.date()[0], 0, [t for t in sorted(tags)]
             continue
@@ -80,6 +81,7 @@
                     # comparison in this case.
                     pdate, pdist, ptag = max(ptags)
                 else:
+
                     def key(x):
                         tag = x[2][0]
                         if ctx.rev() is None:
@@ -93,6 +95,7 @@
                         # Smallest number of changes since tag wins. Date is
                         # used as tiebreaker.
                         return [-changessincetag, x[0]]
+
                     pdate, pdist, ptag = max(ptags, key=key)
             else:
                 pdate, pdist, ptag = ptags[0]
@@ -104,29 +107,37 @@
         latesttags[rev] = pdate, pdist + 1, ptag
     return latesttags[rev]
 
+
 def getlogcolumns():
     """Return a dict of log column labels"""
     _ = pycompat.identity  # temporarily disable gettext
     # i18n: column positioning for "hg log"
-    columns = _('bookmark:    %s\n'
-                'branch:      %s\n'
-                'changeset:   %s\n'
-                'copies:      %s\n'
-                'date:        %s\n'
-                'extra:       %s=%s\n'
-                'files+:      %s\n'
-                'files-:      %s\n'
-                'files:       %s\n'
-                'instability: %s\n'
-                'manifest:    %s\n'
-                'obsolete:    %s\n'
-                'parent:      %s\n'
-                'phase:       %s\n'
-                'summary:     %s\n'
-                'tag:         %s\n'
-                'user:        %s\n')
-    return dict(zip([s.split(':', 1)[0] for s in columns.splitlines()],
-                    i18n._(columns).splitlines(True)))
+    columns = _(
+        'bookmark:    %s\n'
+        'branch:      %s\n'
+        'changeset:   %s\n'
+        'copies:      %s\n'
+        'date:        %s\n'
+        'extra:       %s=%s\n'
+        'files+:      %s\n'
+        'files-:      %s\n'
+        'files:       %s\n'
+        'instability: %s\n'
+        'manifest:    %s\n'
+        'obsolete:    %s\n'
+        'parent:      %s\n'
+        'phase:       %s\n'
+        'summary:     %s\n'
+        'tag:         %s\n'
+        'user:        %s\n'
+    )
+    return dict(
+        zip(
+            [s.split(':', 1)[0] for s in columns.splitlines()],
+            i18n._(columns).splitlines(True),
+        )
+    )
+
 
 # basic internal templates
 _changeidtmpl = '{rev}:{node|formatnode}'
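A minimal sketch of the zip trick in getlogcolumns() above: keys are the text before each ':', values are the complete lines with their newlines kept via splitlines(True) (the i18n round-trip is dropped here):

    columns = (
        'branch:      %s\n'
        'changeset:   %s\n'
        'user:        %s\n'
    )

    logcolumns = dict(
        zip(
            [s.split(':', 1)[0] for s in columns.splitlines()],
            columns.splitlines(True),
        )
    )
    print(logcolumns['user'])  # prints 'user:        %s' plus its newline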
@@ -137,7 +148,7 @@
     'manifest': _changeidtmpl,
     'file_copy': '{name} ({source})',
     'envvar': '{key}={value}',
-    'extra': '{key}={value|stringescape}'
+    'extra': '{key}={value|stringescape}',
 }
 # filecopy is preserved for compatibility reasons
 defaulttempl['filecopy'] = defaulttempl['file_copy']
@@ -146,11 +157,13 @@
 keywords = {}
 templatekeyword = registrar.templatekeyword(keywords)
 
+
 @templatekeyword('author', requires={'ctx'})
 def showauthor(context, mapping):
     """Alias for ``{user}``"""
     return showuser(context, mapping)
 
+
 @templatekeyword('bisect', requires={'repo', 'ctx'})
 def showbisect(context, mapping):
     """String. The changeset bisection status."""
@@ -158,6 +171,7 @@
     ctx = context.resource(mapping, 'ctx')
     return hbisect.label(repo, ctx.node())
 
+
 @templatekeyword('branch', requires={'ctx'})
 def showbranch(context, mapping):
     """String. The name of the branch on which the changeset was
@@ -166,6 +180,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.branch()
 
+
 @templatekeyword('branches', requires={'ctx'})
 def showbranches(context, mapping):
     """List of strings. The name of the branch on which the
@@ -175,10 +190,12 @@
     ctx = context.resource(mapping, 'ctx')
     branch = ctx.branch()
     if branch != 'default':
-        return compatlist(context, mapping, 'branch', [branch],
-                          plural='branches')
+        return compatlist(
+            context, mapping, 'branch', [branch], plural='branches'
+        )
     return compatlist(context, mapping, 'branch', [], plural='branches')
 
+
 @templatekeyword('bookmarks', requires={'repo', 'ctx'})
 def showbookmarks(context, mapping):
     """List of strings. Any bookmarks associated with the
@@ -192,6 +209,7 @@
     f = _showcompatlist(context, mapping, 'bookmark', bookmarks)
     return _hybrid(f, bookmarks, makemap, pycompat.identity)
 
+
 @templatekeyword('children', requires={'ctx'})
 def showchildren(context, mapping):
     """List of strings. The children of the changeset."""
@@ -199,6 +217,7 @@
     childrevs = ['%d:%s' % (cctx.rev(), cctx) for cctx in ctx.children()]
     return compatlist(context, mapping, 'children', childrevs, element='child')
 
+
 # Deprecated, but kept alive for help generation purposes.
 @templatekeyword('currentbookmark', requires={'repo', 'ctx'})
 def showcurrentbookmark(context, mapping):
@@ -206,6 +225,7 @@
     (DEPRECATED)"""
     return showactivebookmark(context, mapping)
 
+
 @templatekeyword('activebookmark', requires={'repo', 'ctx'})
 def showactivebookmark(context, mapping):
     """String. The active bookmark, if it is associated with the changeset."""
@@ -216,6 +236,7 @@
         return active
     return ''
 
+
 @templatekeyword('date', requires={'ctx'})
 def showdate(context, mapping):
     """Date information. The date when the changeset was committed."""
@@ -224,6 +245,7 @@
     # python-hglib splits date at decimal separator.
     return templateutil.date(ctx.date(), showfmt='%d.0%d')
 
+
 @templatekeyword('desc', requires={'ctx'})
 def showdescription(context, mapping):
     """String. The text of the changeset description."""
@@ -237,6 +259,7 @@
     else:
         return s.strip()
 
+
 @templatekeyword('diffstat', requires={'ui', 'ctx'})
 def showdiffstat(context, mapping):
     """String. Statistics of changes with the following format:
@@ -250,6 +273,7 @@
     maxname, maxtotal, adds, removes, binary = patch.diffstatsum(stats)
     return '%d: +%d/-%d' % (len(stats), adds, removes)
 
+
 @templatekeyword('envvars', requires={'ui'})
 def showenvvars(context, mapping):
     """A dictionary of environment variables. (EXPERIMENTAL)"""
@@ -258,6 +282,7 @@
     env = util.sortdict((k, env[k]) for k in sorted(env))
     return compatdict(context, mapping, 'envvar', env, plural='envvars')
 
+
 @templatekeyword('extras', requires={'ctx'})
 def showextras(context, mapping):
     """List of dicts with key, value entries of the 'extras'
@@ -268,19 +293,26 @@
     makemap = lambda k: {'key': k, 'value': extras[k]}
     c = [makemap(k) for k in extras]
     f = _showcompatlist(context, mapping, 'extra', c, plural='extras')
-    return _hybrid(f, extras, makemap,
-                   lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])))
+    return _hybrid(
+        f,
+        extras,
+        makemap,
+        lambda k: '%s=%s' % (k, stringutil.escapestr(extras[k])),
+    )
+
 
 def _getfilestatus(context, mapping, listall=False):
     ctx = context.resource(mapping, 'ctx')
     revcache = context.resource(mapping, 'revcache')
     if 'filestatus' not in revcache or revcache['filestatusall'] < listall:
-        stat = ctx.p1().status(ctx, listignored=listall, listclean=listall,
-                               listunknown=listall)
+        stat = ctx.p1().status(
+            ctx, listignored=listall, listclean=listall, listunknown=listall
+        )
         revcache['filestatus'] = stat
         revcache['filestatusall'] = listall
     return revcache['filestatus']
 
+
 def _getfilestatusmap(context, mapping, listall=False):
     revcache = context.resource(mapping, 'revcache')
     if 'filestatusmap' not in revcache or revcache['filestatusall'] < listall:
@@ -290,8 +322,8 @@
             statmap.update((f, char) for f in files)
     return revcache['filestatusmap']  # {path: statchar}
 
-@templatekeyword('file_copies',
-                 requires={'repo', 'ctx', 'cache', 'revcache'})
+
+@templatekeyword('file_copies', requires={'repo', 'ctx', 'cache', 'revcache'})
 def showfilecopies(context, mapping):
     """List of strings. Files copied in this changeset with
     their sources.
@@ -305,8 +337,10 @@
             cache['getcopies'] = scmutil.getcopiesfn(repo)
         getcopies = cache['getcopies']
         copies = getcopies(ctx)
-    return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
-                                             copies)
+    return templateutil.compatfilecopiesdict(
+        context, mapping, 'file_copy', copies
+    )
+
 
 # showfilecopiesswitch() displays file copies only if copy records are
 # provided before calling the templater, usually with a --copies
@@ -317,29 +351,37 @@
     only if the --copied switch is set.
     """
     copies = context.resource(mapping, 'revcache').get('copies') or []
-    return templateutil.compatfilecopiesdict(context, mapping, 'file_copy',
-                                             copies)
+    return templateutil.compatfilecopiesdict(
+        context, mapping, 'file_copy', copies
+    )
+
 
 @templatekeyword('file_adds', requires={'ctx', 'revcache'})
 def showfileadds(context, mapping):
     """List of strings. Files added by this changeset."""
     ctx = context.resource(mapping, 'ctx')
-    return templateutil.compatfileslist(context, mapping, 'file_add',
-                                        ctx.filesadded())
+    return templateutil.compatfileslist(
+        context, mapping, 'file_add', ctx.filesadded()
+    )
+
 
 @templatekeyword('file_dels', requires={'ctx', 'revcache'})
 def showfiledels(context, mapping):
     """List of strings. Files removed by this changeset."""
     ctx = context.resource(mapping, 'ctx')
-    return templateutil.compatfileslist(context, mapping, 'file_del',
-                                        ctx.filesremoved())
+    return templateutil.compatfileslist(
+        context, mapping, 'file_del', ctx.filesremoved()
+    )
+
 
 @templatekeyword('file_mods', requires={'ctx', 'revcache'})
 def showfilemods(context, mapping):
     """List of strings. Files modified by this changeset."""
     ctx = context.resource(mapping, 'ctx')
-    return templateutil.compatfileslist(context, mapping, 'file_mod',
-                                        ctx.filesmodified())
+    return templateutil.compatfileslist(
+        context, mapping, 'file_mod', ctx.filesmodified()
+    )
+
 
 @templatekeyword('files', requires={'ctx'})
 def showfiles(context, mapping):
@@ -349,6 +391,7 @@
     ctx = context.resource(mapping, 'ctx')
     return templateutil.compatfileslist(context, mapping, 'file', ctx.files())
 
+
 @templatekeyword('graphnode', requires={'repo', 'ctx'})
 def showgraphnode(context, mapping):
     """String. The character representing the changeset node in an ASCII
@@ -357,9 +400,11 @@
     ctx = context.resource(mapping, 'ctx')
     return getgraphnode(repo, ctx)
 
+
 def getgraphnode(repo, ctx):
     return getgraphnodecurrent(repo, ctx) or getgraphnodesymbol(ctx)
 
+
 def getgraphnodecurrent(repo, ctx):
     wpnodes = repo.dirstate.parents()
     if wpnodes[1] == nullid:
@@ -369,6 +414,7 @@
     else:
         return ''
 
+
 def getgraphnodesymbol(ctx):
     if ctx.obsolete():
         return 'x'
@@ -379,18 +425,21 @@
     else:
         return 'o'
 
+
 @templatekeyword('graphwidth', requires=())
 def showgraphwidth(context, mapping):
     """Integer. The width of the graph drawn by 'log --graph' or zero."""
     # just hosts documentation; should be overridden by template mapping
     return 0
 
+
 @templatekeyword('index', requires=())
 def showindex(context, mapping):
     """Integer. The current iteration of the loop. (0 indexed)"""
     # just hosts documentation; should be overridden by template mapping
     raise error.Abort(_("can't use index in this context"))
 
+
 @templatekeyword('latesttag', requires={'repo', 'ctx', 'cache'})
 def showlatesttag(context, mapping):
     """List of strings. The global tags on the most recent globally
@@ -399,6 +448,7 @@
     """
     return showlatesttags(context, mapping, None)
 
+
 def showlatesttags(context, mapping, pattern):
     """helper method for the latesttag keyword and function"""
     latesttags = getlatesttags(context, mapping, pattern)
@@ -409,19 +459,21 @@
     makemap = lambda v: {
         'changes': _showchangessincetag,
         'distance': latesttags[1],
-        'latesttag': v,   # BC with {latesttag % '{latesttag}'}
-        'tag': v
+        'latesttag': v,  # BC with {latesttag % '{latesttag}'}
+        'tag': v,
     }
 
     tags = latesttags[2]
     f = _showcompatlist(context, mapping, 'latesttag', tags, separator=':')
     return _hybrid(f, tags, makemap, pycompat.identity)
 
+
 @templatekeyword('latesttagdistance', requires={'repo', 'ctx', 'cache'})
 def showlatesttagdistance(context, mapping):
     """Integer. Longest path to the latest tag."""
     return getlatesttags(context, mapping)[1]
 
+
 @templatekeyword('changessincelatesttag', requires={'repo', 'ctx', 'cache'})
 def showchangessincelatesttag(context, mapping):
     """Integer. All ancestors not in the latest tag."""
@@ -429,6 +481,7 @@
     mapping = context.overlaymap(mapping, {'tag': tag})
     return _showchangessincetag(context, mapping)
 
+
 def _showchangessincetag(context, mapping):
     repo = context.resource(mapping, 'repo')
     ctx = context.resource(mapping, 'ctx')
@@ -443,9 +496,11 @@
 
     return len(repo.revs('only(%ld, %s)', revs, tag)) + offset
 
+
 # teach templater latesttags.changes is switched to (context, mapping) API
 _showchangessincetag._requires = {'repo', 'ctx'}
 
+
 @templatekeyword('manifest', requires={'repo', 'ctx'})
 def showmanifest(context, mapping):
     repo = context.resource(mapping, 'repo')
@@ -459,8 +514,10 @@
     mhex = hex(mnode)
     mapping = context.overlaymap(mapping, {'rev': mrev, 'node': mhex})
     f = context.process('manifest', mapping)
-    return templateutil.hybriditem(f, None, f,
-                                   lambda x: {'rev': mrev, 'node': mhex})
+    return templateutil.hybriditem(
+        f, None, f, lambda x: {'rev': mrev, 'node': mhex}
+    )
+
 
 @templatekeyword('obsfate', requires={'ui', 'repo', 'ctx'})
 def showobsfate(context, mapping):
@@ -475,20 +532,24 @@
     values = []
 
     for x in succsandmarkers.tovalue(context, mapping):
-        v = obsutil.obsfateprinter(ui, repo, x['successors'], x['markers'],
-                                   scmutil.formatchangeid)
+        v = obsutil.obsfateprinter(
+            ui, repo, x['successors'], x['markers'], scmutil.formatchangeid
+        )
         values.append(v)
 
     return compatlist(context, mapping, "fate", values)
 
+
 def shownames(context, mapping, namespace):
     """helper method to generate a template keyword for a namespace"""
     repo = context.resource(mapping, 'repo')
     ctx = context.resource(mapping, 'ctx')
     ns = repo.names[namespace]
     names = ns.names(repo, ctx.node())
-    return compatlist(context, mapping, ns.templatename, names,
-                      plural=namespace)
+    return compatlist(
+        context, mapping, ns.templatename, names, plural=namespace
+    )
+
 
 @templatekeyword('namespaces', requires={'repo', 'ctx'})
 def shownamespaces(context, mapping):
@@ -498,6 +559,7 @@
     ctx = context.resource(mapping, 'ctx')
 
     namespaces = util.sortdict()
+
     def makensmapfn(ns):
         # 'name' for iterating over namespaces, templatename for local reference
         return lambda v: {'name': v, ns.templatename: v}
@@ -519,6 +581,7 @@
 
     return _hybrid(f, namespaces, makemap, pycompat.identity)
 
+
 @templatekeyword('negrev', requires={'repo', 'ctx'})
 def shownegrev(context, mapping):
     """Integer. The repository-local changeset negative revision number,
@@ -530,6 +593,7 @@
     repo = context.resource(mapping, 'repo')
     return rev - len(repo)
 
+
 @templatekeyword('node', requires={'ctx'})
 def shownode(context, mapping):
     """String. The changeset identification hash, as a 40 hexadecimal
@@ -538,6 +602,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.hex()
 
+
 @templatekeyword('obsolete', requires={'ctx'})
 def showobsolete(context, mapping):
     """String. Whether the changeset is obsolete. (EXPERIMENTAL)"""
@@ -546,12 +611,14 @@
         return 'obsolete'
     return ''
 
+
 @templatekeyword('path', requires={'fctx'})
 def showpath(context, mapping):
     """String. Repository-absolute path of the current file. (EXPERIMENTAL)"""
     fctx = context.resource(mapping, 'fctx')
     return fctx.path()
 
+
 @templatekeyword('peerurls', requires={'repo'})
 def showpeerurls(context, mapping):
     """A dictionary of repository locations defined in the [paths] section
@@ -560,13 +627,16 @@
     # see commands.paths() for naming of dictionary keys
     paths = repo.ui.paths
     urls = util.sortdict((k, p.rawloc) for k, p in sorted(paths.iteritems()))
+
     def makemap(k):
         p = paths[k]
         d = {'name': k, 'url': p.rawloc}
         d.update((o, v) for o, v in sorted(p.suboptions.iteritems()))
         return d
+
     return _hybrid(None, urls, makemap, lambda k: '%s=%s' % (k, urls[k]))
 
+
 @templatekeyword("predecessors", requires={'repo', 'ctx'})
 def showpredecessors(context, mapping):
     """Returns the list of the closest visible predecessors. (EXPERIMENTAL)"""
@@ -575,9 +645,13 @@
     predecessors = sorted(obsutil.closestpredecessors(repo, ctx.node()))
     predecessors = pycompat.maplist(hex, predecessors)
 
-    return _hybrid(None, predecessors,
-                   lambda x: {'ctx': repo[x]},
-                   lambda x: scmutil.formatchangeid(repo[x]))
+    return _hybrid(
+        None,
+        predecessors,
+        lambda x: {'ctx': repo[x]},
+        lambda x: scmutil.formatchangeid(repo[x]),
+    )
+
 
 @templatekeyword('reporoot', requires={'repo'})
 def showreporoot(context, mapping):
@@ -585,12 +659,14 @@
     repo = context.resource(mapping, 'repo')
     return repo.root
 
+
 @templatekeyword('size', requires={'fctx'})
 def showsize(context, mapping):
     """Integer. Size of the current file in bytes. (EXPERIMENTAL)"""
     fctx = context.resource(mapping, 'fctx')
     return fctx.size()
 
+
 # requires 'fctx' to denote {status} depends on (ctx, path) pair
 @templatekeyword('status', requires={'ctx', 'fctx', 'revcache'})
 def showstatus(context, mapping):
@@ -604,6 +680,7 @@
         statmap = _getfilestatusmap(context, mapping, listall=True)
     return statmap.get(path)
 
+
 @templatekeyword("successorssets", requires={'repo', 'ctx'})
 def showsuccessorssets(context, mapping):
     """Returns a string of sets of successors for a changectx. Format used
@@ -619,8 +696,12 @@
 
     data = []
     for ss in ssets:
-        h = _hybrid(None, ss, lambda x: {'ctx': repo[x]},
-                    lambda x: scmutil.formatchangeid(repo[x]))
+        h = _hybrid(
+            None,
+            ss,
+            lambda x: {'ctx': repo[x]},
+            lambda x: scmutil.formatchangeid(repo[x]),
+        )
         data.append(h)
 
     # Format the successorssets
@@ -630,8 +711,10 @@
     def gen(data):
         yield "; ".join(render(d) for d in data)
 
-    return _hybrid(gen(data), data, lambda x: {'successorset': x},
-                   pycompat.identity)
+    return _hybrid(
+        gen(data), data, lambda x: {'successorset': x}, pycompat.identity
+    )
+
 
 @templatekeyword("succsandmarkers", requires={'repo', 'ctx'})
 def showsuccsandmarkers(context, mapping):
@@ -655,9 +738,12 @@
         successors = i['successors']
 
         successors = [hex(n) for n in successors]
-        successors = _hybrid(None, successors,
-                             lambda x: {'ctx': repo[x]},
-                             lambda x: scmutil.formatchangeid(repo[x]))
+        successors = _hybrid(
+            None,
+            successors,
+            lambda x: {'ctx': repo[x]},
+            lambda x: scmutil.formatchangeid(repo[x]),
+        )
 
         # Format markers
         finalmarkers = []
@@ -674,6 +760,7 @@
 
     return templateutil.mappinglist(data)
 
+
 @templatekeyword('p1', requires={'ctx'})
 def showp1(context, mapping):
     """Changeset. The changeset's first parent. ``{p1.rev}`` for the revision
@@ -681,6 +768,7 @@
     ctx = context.resource(mapping, 'ctx')
     return templateutil.mappingdict({'ctx': ctx.p1()}, tmpl=_changeidtmpl)
 
+
 @templatekeyword('p2', requires={'ctx'})
 def showp2(context, mapping):
     """Changeset. The changeset's second parent. ``{p2.rev}`` for the revision
@@ -688,6 +776,7 @@
     ctx = context.resource(mapping, 'ctx')
     return templateutil.mappingdict({'ctx': ctx.p2()}, tmpl=_changeidtmpl)
 
+
 @templatekeyword('p1rev', requires={'ctx'})
 def showp1rev(context, mapping):
     """Integer. The repository-local revision number of the changeset's
@@ -695,6 +784,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.p1().rev()
 
+
 @templatekeyword('p2rev', requires={'ctx'})
 def showp2rev(context, mapping):
     """Integer. The repository-local revision number of the changeset's
@@ -702,6 +792,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.p2().rev()
 
+
 @templatekeyword('p1node', requires={'ctx'})
 def showp1node(context, mapping):
     """String. The identification hash of the changeset's first parent,
@@ -710,6 +801,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.p1().hex()
 
+
 @templatekeyword('p2node', requires={'ctx'})
 def showp2node(context, mapping):
     """String. The identification hash of the changeset's second
@@ -718,6 +810,7 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.p2().hex()
 
+
 @templatekeyword('parents', requires={'repo', 'ctx'})
 def showparents(context, mapping):
     """List of strings. The parents of the changeset in "rev:node"
@@ -727,13 +820,19 @@
     ctx = context.resource(mapping, 'ctx')
     pctxs = scmutil.meaningfulparents(repo, ctx)
     prevs = [p.rev() for p in pctxs]
-    parents = [[('rev', p.rev()),
-                ('node', p.hex()),
-                ('phase', p.phasestr())]
-               for p in pctxs]
+    parents = [
+        [('rev', p.rev()), ('node', p.hex()), ('phase', p.phasestr())]
+        for p in pctxs
+    ]
     f = _showcompatlist(context, mapping, 'parent', parents)
-    return _hybrid(f, prevs, lambda x: {'ctx': repo[x]},
-                   lambda x: scmutil.formatchangeid(repo[x]), keytype=int)
+    return _hybrid(
+        f,
+        prevs,
+        lambda x: {'ctx': repo[x]},
+        lambda x: scmutil.formatchangeid(repo[x]),
+        keytype=int,
+    )
+
 
 @templatekeyword('phase', requires={'ctx'})
 def showphase(context, mapping):
@@ -741,18 +840,21 @@
     ctx = context.resource(mapping, 'ctx')
     return ctx.phasestr()
 
+
 @templatekeyword('phaseidx', requires={'ctx'})
 def showphaseidx(context, mapping):
     """Integer. The changeset phase index. (ADVANCED)"""
     ctx = context.resource(mapping, 'ctx')
     return ctx.phase()
 
+
 @templatekeyword('rev', requires={'ctx'})
 def showrev(context, mapping):
     """Integer. The repository-local changeset revision number."""
     ctx = context.resource(mapping, 'ctx')
     return scmutil.intrev(ctx)
 
+
 def showrevslist(context, mapping, name, revs):
     """helper to generate a list of revisions in which a mapped template will
     be evaluated"""
@@ -761,9 +863,15 @@
     def f():
         srevs = ['%d' % r for r in revs]
         return _showcompatlist(context, mapping, name, srevs)
-    return _hybrid(f, revs,
-                   lambda x: {name: x, 'ctx': repo[x]},
-                   pycompat.identity, keytype=int)
+
+    return _hybrid(
+        f,
+        revs,
+        lambda x: {name: x, 'ctx': repo[x]},
+        pycompat.identity,
+        keytype=int,
+    )
+
 
 @templatekeyword('subrepos', requires={'ctx'})
 def showsubrepos(context, mapping):
@@ -776,12 +884,13 @@
     subrepos = []
     for sub in substate:
         if sub not in psubstate or substate[sub] != psubstate[sub]:
-            subrepos.append(sub) # modified or newly added in ctx
+            subrepos.append(sub)  # modified or newly added in ctx
     for sub in psubstate:
         if sub not in substate:
-            subrepos.append(sub) # removed in ctx
+            subrepos.append(sub)  # removed in ctx
     return compatlist(context, mapping, 'subrepo', sorted(subrepos))
 
+
 # don't remove "showtags" definition, even though namespaces will put
 # a helper function for "tags" keyword into "keywords" map automatically,
 # because online help text is built without namespaces initialization
@@ -790,26 +899,35 @@
     """List of strings. Any tags associated with the changeset."""
     return shownames(context, mapping, 'tags')
 
+
 @templatekeyword('termwidth', requires={'ui'})
 def showtermwidth(context, mapping):
     """Integer. The width of the current terminal."""
     ui = context.resource(mapping, 'ui')
     return ui.termwidth()
 
+
 @templatekeyword('user', requires={'ctx'})
 def showuser(context, mapping):
     """String. The unmodified author of the changeset."""
     ctx = context.resource(mapping, 'ctx')
     return ctx.user()
 
+
 @templatekeyword('instabilities', requires={'ctx'})
 def showinstabilities(context, mapping):
     """List of strings. Evolution instabilities affecting the changeset.
     (EXPERIMENTAL)
     """
     ctx = context.resource(mapping, 'ctx')
-    return compatlist(context, mapping, 'instability', ctx.instabilities(),
-                      plural='instabilities')
+    return compatlist(
+        context,
+        mapping,
+        'instability',
+        ctx.instabilities(),
+        plural='instabilities',
+    )
+
 
 @templatekeyword('verbosity', requires={'ui'})
 def showverbosity(context, mapping):
@@ -825,6 +943,7 @@
         return 'verbose'
     return ''
 
+
 @templatekeyword('whyunstable', requires={'repo', 'ctx'})
 def showwhyunstable(context, mapping):
     """List of dicts explaining all instabilities of a changeset.
@@ -841,20 +960,27 @@
     for entry in entries:
         if entry.get('divergentnodes'):
             dnodes = entry['divergentnodes']
-            dnhybrid = _hybrid(None, [dnode.hex() for dnode in dnodes],
-                               lambda x: {'ctx': repo[x]},
-                               lambda x: formatnode(repo[x]))
+            dnhybrid = _hybrid(
+                None,
+                [dnode.hex() for dnode in dnodes],
+                lambda x: {'ctx': repo[x]},
+                lambda x: formatnode(repo[x]),
+            )
             entry['divergentnodes'] = dnhybrid
 
-    tmpl = ('{instability}:{if(divergentnodes, " ")}{divergentnodes} '
-            '{reason} {node|short}')
+    tmpl = (
+        '{instability}:{if(divergentnodes, " ")}{divergentnodes} '
+        '{reason} {node|short}'
+    )
     return templateutil.mappinglist(entries, tmpl=tmpl, sep='\n')
 
+
 def loadkeyword(ui, extname, registrarobj):
     """Load template keyword from specified registrarobj
     """
     for name, func in registrarobj._table.iteritems():
         keywords[name] = func
 
+
 # tell hggettext to extract docstrings from these functions:
 i18nfunctions = keywords.values()
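
The templatekw.py hunks above are dominated by one transformation: when a call overflows the configured line length, black folds the arguments onto a single indented continuation line, and if that still overflows it gives each argument its own line with a trailing comma and a dedented closing parenthesis. A minimal sketch of the two shapes, with hypothetical names and illustrative argument lengths (this is not code from the patch):

    def show(context, mapping, name, values, plural=None, separator=' '):
        """Hypothetical stand-in for the _showcompatlist-style helpers."""
        return separator.join(values)

    # Shape 1: all arguments fit on a single indented continuation line.
    short = show(
        None, {}, 'tag', ['1.0', '1.1'], plural='tags', separator=', '
    )

    # Shape 2: still too long, so one argument per line plus trailing comma.
    long = show(
        None,
        {},
        'a-much-longer-keyword-name',
        ['value-one', 'value-two', 'value-three'],
        plural='several values rendered together',
        separator='; ',
    )
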
--- a/mercurial/templater.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/templater.py	Sun Oct 06 09:45:02 2019 -0400
@@ -79,9 +79,7 @@
     templateutil,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 # template parsing
 
@@ -105,6 +103,7 @@
     "end": (0, None, None, None, None),
 }
 
+
 def tokenize(program, start, end, term=None):
     """Parse a template expression into a stream of tokens, which must end
     with term if specified"""
@@ -112,22 +111,22 @@
     program = pycompat.bytestr(program)
     while pos < end:
         c = program[pos]
-        if c.isspace(): # skip inter-token whitespace
+        if c.isspace():  # skip inter-token whitespace
             pass
-        elif c in "(=,).%|+-*/": # handle simple operators
+        elif c in "(=,).%|+-*/":  # handle simple operators
             yield (c, None, pos)
-        elif c in '"\'': # handle quoted templates
+        elif c in '"\'':  # handle quoted templates
             s = pos + 1
             data, pos = _parsetemplate(program, s, end, c)
             yield ('template', data, s)
             pos -= 1
-        elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'):
+        elif c == 'r' and program[pos : pos + 2] in ("r'", 'r"'):
             # handle quoted strings
             c = program[pos + 1]
             s = pos = pos + 2
-            while pos < end: # find closing quote
+            while pos < end:  # find closing quote
                 d = program[pos]
-                if d == '\\': # skip over escaped characters
+                if d == '\\':  # skip over escaped characters
                     pos += 2
                     continue
                 if d == c:
@@ -145,8 +144,12 @@
                 pos += 1
             yield ('integer', program[s:pos], s)
             pos -= 1
-        elif (c == '\\' and program[pos:pos + 2] in (br"\'", br'\"')
-              or c == 'r' and program[pos:pos + 3] in (br"r\'", br'r\"')):
+        elif (
+            c == '\\'
+            and program[pos : pos + 2] in (br"\'", br'\"')
+            or c == 'r'
+            and program[pos : pos + 3] in (br"r\'", br'r\"')
+        ):
             # handle escaped quoted strings for compatibility with 2.9.2-3.4,
             # where some of nested templates were preprocessed as strings and
             # then compiled. therefore, \"...\" was allowed. (issue4733)
@@ -162,11 +165,11 @@
                 token = 'string'
             else:
                 token = 'template'
-            quote = program[pos:pos + 2]
+            quote = program[pos : pos + 2]
             s = pos = pos + 2
-            while pos < end: # find closing escaped quote
+            while pos < end:  # find closing escaped quote
                 if program.startswith('\\\\\\', pos, end):
-                    pos += 4 # skip over double escaped characters
+                    pos += 4  # skip over double escaped characters
                     continue
                 if program.startswith(quote, pos, end):
                     # interpret as if it were a part of an outer string
@@ -182,7 +185,7 @@
         elif c.isalnum() or c in '_':
             s = pos
             pos += 1
-            while pos < end: # find end of symbol
+            while pos < end:  # find end of symbol
                 d = program[pos]
                 if not (d.isalnum() or d == "_"):
                     break
@@ -200,6 +203,7 @@
         raise error.ParseError(_("unterminated template expansion"), start)
     yield ('end', None, pos)
 
+
 def _parsetemplate(tmpl, start, stop, quote=''):
     r"""
     >>> _parsetemplate(b'foo{bar}"baz', 0, 12)
@@ -225,6 +229,7 @@
             raise error.ProgrammingError('unexpected type: %s' % typ)
     raise error.ProgrammingError('unterminated scanning of template')
 
+
 def scantemplate(tmpl, raw=False):
     r"""Scan (type, start, end) positions of outermost elements in template
 
@@ -253,6 +258,7 @@
             last = (typ, pos)
     raise error.ProgrammingError('unterminated scanning of template')
 
+
 def _scantemplate(tmpl, start, stop, quote='', raw=False):
     """Parse template string into chunks of strings and template expressions"""
     sepchars = '{' + quote
@@ -261,20 +267,21 @@
     p = parser.parser(elements)
     try:
         while pos < stop:
-            n = min((tmpl.find(c, pos, stop)
-                     for c in pycompat.bytestr(sepchars)),
-                    key=lambda n: (n < 0, n))
+            n = min(
+                (tmpl.find(c, pos, stop) for c in pycompat.bytestr(sepchars)),
+                key=lambda n: (n < 0, n),
+            )
             if n < 0:
                 yield ('string', unescape(tmpl[pos:stop]), pos)
                 pos = stop
                 break
-            c = tmpl[n:n + 1]
+            c = tmpl[n : n + 1]
             bs = 0  # count leading backslashes
             if not raw:
                 bs = (n - pos) - len(tmpl[pos:n].rstrip('\\'))
             if bs % 2 == 1:
                 # escaped (e.g. '\{', '\\\{', but not '\\{')
-                yield ('string', unescape(tmpl[pos:n - 1]) + c, pos)
+                yield ('string', unescape(tmpl[pos : n - 1]) + c, pos)
                 pos = n + 1
                 continue
             if n > pos:
@@ -303,11 +310,13 @@
             # failed to parse, but in a hint we get a open paren at the
             # start. Therefore, we print "loc + 1" spaces (instead of "loc")
             # to line up the caret with the location of the error.
-            inst.hint = (tmpl + '\n'
-                         + ' ' * (loc + 1 + offset) + '^ ' + _('here'))
+            inst.hint = (
+                tmpl + '\n' + ' ' * (loc + 1 + offset) + '^ ' + _('here')
+            )
         raise
     yield ('end', None, pos)
 
+
 def _unnesttemplatelist(tree):
     """Expand list of templates to node tuple
 
@@ -342,12 +351,14 @@
     else:
         return (op,) + xs
 
+
 def parse(tmpl):
     """Parse template string into tree"""
     parsed, pos = _parsetemplate(tmpl, 0, len(tmpl))
     assert pos == len(tmpl), 'unquoted template should be consumed'
     return _unnesttemplatelist(('template', parsed))
 
+
 def _parseexpr(expr):
     """Parse a template expression into tree
 
@@ -370,9 +381,11 @@
         raise error.ParseError(_('invalid token'), pos)
     return _unnesttemplatelist(tree)
 
+
 def prettyformat(tree):
     return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
 
+
 def compileexp(exp, context, curmethods):
     """Compile parsed template tree to (func, data) pair"""
     if not exp:
@@ -380,13 +393,16 @@
     t = exp[0]
     return curmethods[t](exp, context)
 
+
 # template evaluation
 
+
 def getsymbol(exp):
     if exp[0] == 'symbol':
         return exp[1]
     raise error.ParseError(_("expected a symbol, got '%s'") % exp[0])
 
+
 def getlist(x):
     if not x:
         return []
@@ -394,6 +410,7 @@
         return getlist(x[1]) + [x[2]]
     return [x]
 
+
 def gettemplate(exp, context):
     """Compile given template tree or load named template from map file;
     returns (func, data) pair"""
@@ -406,13 +423,16 @@
         return context._load(exp[1])
     raise error.ParseError(_("expected template specifier"))
 
+
 def _runrecursivesymbol(context, mapping, key):
     raise error.Abort(_("recursive reference '%s' in template") % key)
 
+
 def buildtemplate(exp, context):
     ctmpl = [compileexp(e, context, methods) for e in exp[1:]]
     return (templateutil.runtemplate, ctmpl)
 
+
 def buildfilter(exp, context):
     n = getsymbol(exp[2])
     if n in context._filters:
@@ -425,25 +445,30 @@
         return (f, args)
     raise error.ParseError(_("unknown function '%s'") % n)
 
+
 def buildmap(exp, context):
     darg = compileexp(exp[1], context, methods)
     targ = gettemplate(exp[2], context)
     return (templateutil.runmap, (darg, targ))
 
+
 def buildmember(exp, context):
     darg = compileexp(exp[1], context, methods)
     memb = getsymbol(exp[2])
     return (templateutil.runmember, (darg, memb))
 
+
 def buildnegate(exp, context):
     arg = compileexp(exp[1], context, exprmethods)
     return (templateutil.runnegate, arg)
 
+
 def buildarithmetic(exp, context, func):
     left = compileexp(exp[1], context, exprmethods)
     right = compileexp(exp[2], context, exprmethods)
     return (templateutil.runarithmetic, (func, left, right))
 
+
 def buildfunc(exp, context):
     n = getsymbol(exp[1])
     if n in context._funcs:
@@ -458,6 +483,7 @@
         return (templateutil.runfilter, (args[0], f))
     raise error.ParseError(_("unknown function '%s'") % n)
 
+
 def _buildfuncargs(exp, context, curmethods, funcname, argspec):
     """Compile parsed tree of function arguments into list or dict of
     (func, data) pairs
@@ -473,9 +499,12 @@
     >>> list(args.keys()), list(args[b'opts'].keys())
     (['opts'], ['opts', 'k'])
     """
+
     def compiledict(xs):
-        return util.sortdict((k, compileexp(x, context, curmethods))
-                             for k, x in xs.iteritems())
+        return util.sortdict(
+            (k, compileexp(x, context, curmethods)) for k, x in xs.iteritems()
+        )
+
     def compilelist(xs):
         return [compileexp(x, context, curmethods) for x in xs]
 
@@ -485,8 +514,13 @@
 
     # function with argspec: return dict of named args
     _poskeys, varkey, _keys, optkey = argspec = parser.splitargspec(argspec)
-    treeargs = parser.buildargsdict(getlist(exp), funcname, argspec,
-                                    keyvaluenode='keyvalue', keynode='symbol')
+    treeargs = parser.buildargsdict(
+        getlist(exp),
+        funcname,
+        argspec,
+        keyvaluenode='keyvalue',
+        keynode='symbol',
+    )
     compargs = util.sortdict()
     if varkey:
         compargs[varkey] = compilelist(treeargs.pop(varkey))
@@ -495,12 +529,17 @@
     compargs.update(compiledict(treeargs))
     return compargs
 
+
 def buildkeyvaluepair(exp, content):
     raise error.ParseError(_("can't use a key-value pair in this context"))
 
+
 def buildlist(exp, context):
-    raise error.ParseError(_("can't use a list in this context"),
-                           hint=_('check place of comma and parens'))
+    raise error.ParseError(
+        _("can't use a list in this context"),
+        hint=_('check place of comma and parens'),
+    )
+
 
 # methods to interpret function arguments or inner expressions (e.g. {_(x)})
 exprmethods = {
@@ -520,14 +559,16 @@
     "negate": buildnegate,
     "*": lambda e, c: buildarithmetic(e, c, lambda a, b: a * b),
     "/": lambda e, c: buildarithmetic(e, c, lambda a, b: a // b),
-    }
+}
 
 # methods to interpret top-level template (e.g. {x}, {x|_}, {x % "y"})
 methods = exprmethods.copy()
 methods["integer"] = exprmethods["symbol"]  # '{1}' as variable
 
+
 class _aliasrules(parser.basealiasrules):
     """Parsing and expansion rule set of template aliases"""
+
     _section = _('template alias')
     _parse = staticmethod(_parseexpr)
 
@@ -540,19 +581,23 @@
         if tree[0] == '|' and tree[2][0] == 'symbol':
             return tree[2][1], [tree[1]]
 
+
 def expandaliases(tree, aliases):
     """Return new tree of aliases are expanded"""
     aliasmap = _aliasrules.buildmap(aliases)
     return _aliasrules.expand(aliasmap, tree)
 
+
 # template engine
 
+
 def unquotestring(s):
     '''unwrap quotes if any; otherwise return the string unmodified'''
     if len(s) < 2 or s[0] not in "'\"" or s[0] != s[-1]:
         return s
     return s[1:-1]
 
+
 class resourcemapper(object):
     """Mapper of internal template resources"""
 
@@ -575,6 +620,7 @@
         """Return a dict of additional mapping items which should be paired
         with the given new mapping"""
 
+
 class nullresourcemapper(resourcemapper):
     def availablekeys(self, mapping):
         return set()
@@ -588,6 +634,7 @@
     def populatemap(self, context, origmapping, newmapping):
         return {}
 
+
 class engine(object):
     '''template expansion engine.
 
@@ -630,12 +677,18 @@
         # new resources, so the defaults will be re-evaluated (issue5612)
         knownres = self._resources.knownkeys()
         newres = self._resources.availablekeys(newmapping)
-        mapping = {k: v for k, v in origmapping.iteritems()
-                   if (k in knownres  # not a symbol per self.symbol()
-                       or newres.isdisjoint(self._defaultrequires(k)))}
+        mapping = {
+            k: v
+            for k, v in origmapping.iteritems()
+            if (
+                k in knownres  # not a symbol per self.symbol()
+                or newres.isdisjoint(self._defaultrequires(k))
+            )
+        }
         mapping.update(newmapping)
         mapping.update(
-            self._resources.populatemap(self, origmapping, newmapping))
+            self._resources.populatemap(self, origmapping, newmapping)
+        )
         return mapping
 
     def _defaultrequires(self, key):
@@ -668,7 +721,8 @@
         v = self._resources.lookup(mapping, key)
         if v is None:
             raise templateutil.ResourceUnavailable(
-                _('template resource not available: %s') % key)
+                _('template resource not available: %s') % key
+            )
         return v
 
     def _load(self, t):
@@ -679,7 +733,7 @@
             self._cache[t] = (_runrecursivesymbol, t)
             try:
                 self._cache[t] = compileexp(x, self, methods)
-            except: # re-raises
+            except:  # re-raises
                 del self._cache[t]
                 raise
         return self._cache[t]
@@ -725,6 +779,7 @@
             mapping = extramapping
         return templateutil.flatten(self, mapping, func(self, mapping, data))
 
+
 def stylelist():
     paths = templatepaths()
     if not paths:
@@ -739,11 +794,14 @@
             stylelist.append(split[1])
     return ", ".join(sorted(stylelist))
 
+
 def _readmapfile(mapfile):
     """Load template elements from the given map file"""
     if not os.path.exists(mapfile):
-        raise error.Abort(_("style '%s' not found") % mapfile,
-                          hint=_("available styles: %s") % stylelist())
+        raise error.Abort(
+            _("style '%s' not found") % mapfile,
+            hint=_("available styles: %s") % stylelist(),
+        )
 
     base = os.path.dirname(mapfile)
     conf = config.config(includepaths=templatepaths())
@@ -774,18 +832,21 @@
 
     for key, val in conf['templates'].items():
         if not val:
-            raise error.ParseError(_('missing value'),
-                                   conf.source('templates', key))
+            raise error.ParseError(
+                _('missing value'), conf.source('templates', key)
+            )
         if val[0] in "'\"":
             if val[0] != val[-1]:
-                raise error.ParseError(_('unmatched quotes'),
-                                       conf.source('templates', key))
+                raise error.ParseError(
+                    _('unmatched quotes'), conf.source('templates', key)
+                )
             cache[key] = unquotestring(val)
         elif key != '__base__':
             tmap[key] = os.path.join(base, val)
     aliases.extend(conf['templatealias'].items())
     return cache, tmap, aliases
 
+
 class loader(object):
     """Load template fragments optionally from a map file"""
 
@@ -806,11 +867,13 @@
                 self.cache[t] = util.readfile(self._map[t])
             except KeyError as inst:
                 raise templateutil.TemplateNotFound(
-                    _('"%s" not in template map') % inst.args[0])
+                    _('"%s" not in template map') % inst.args[0]
+                )
             except IOError as inst:
-                reason = (_('template file %s: %s')
-                          % (self._map[t],
-                             stringutil.forcebytestr(inst.args[1])))
+                reason = _('template file %s: %s') % (
+                    self._map[t],
+                    stringutil.forcebytestr(inst.args[1]),
+                )
                 raise IOError(inst.args[0], encoding.strfromlocal(reason))
         return self._parse(self.cache[t])
 
@@ -827,7 +890,7 @@
         if op == 'symbol':
             s = tree[1]
             if s in syms[0]:
-                return # avoid recursion: s -> cache[s] -> s
+                return  # avoid recursion: s -> cache[s] -> s
             syms[0].add(s)
             if s in self.cache or s in self._map:
                 # s may be a reference for named template
@@ -857,10 +920,18 @@
         self._findsymbolsused(self.load(t), syms)
         return syms
 
+
 class templater(object):
-
-    def __init__(self, filters=None, defaults=None, resources=None,
-                 cache=None, aliases=(), minchunk=1024, maxchunk=65536):
+    def __init__(
+        self,
+        filters=None,
+        defaults=None,
+        resources=None,
+        cache=None,
+        aliases=(),
+        minchunk=1024,
+        maxchunk=65536,
+    ):
         """Create template engine optionally with preloaded template fragments
 
         - ``filters``: a dict of functions to transform a value into another.
@@ -882,8 +953,16 @@
         self._minchunk, self._maxchunk = minchunk, maxchunk
 
     @classmethod
-    def frommapfile(cls, mapfile, filters=None, defaults=None, resources=None,
-                    cache=None, minchunk=1024, maxchunk=65536):
+    def frommapfile(
+        cls,
+        mapfile,
+        filters=None,
+        defaults=None,
+        resources=None,
+        cache=None,
+        minchunk=1024,
+        maxchunk=65536,
+    ):
         """Create templater from the specified map file"""
         t = cls(filters, defaults, resources, cache, [], minchunk, maxchunk)
         cache, tmap, aliases = _readmapfile(mapfile)
@@ -941,17 +1020,19 @@
         yields chunks"""
         stream = self._proc.process(t, mapping)
         if self._minchunk:
-            stream = util.increasingchunks(stream, min=self._minchunk,
-                                           max=self._maxchunk)
+            stream = util.increasingchunks(
+                stream, min=self._minchunk, max=self._maxchunk
+            )
         return stream
 
+
 def templatepaths():
     '''return locations used for template files.'''
     pathsrel = ['templates']
-    paths = [os.path.normpath(os.path.join(util.datapath, f))
-             for f in pathsrel]
+    paths = [os.path.normpath(os.path.join(util.datapath, f)) for f in pathsrel]
     return [p for p in paths if os.path.isdir(p)]
 
+
 def templatepath(name):
     '''return location of template file. returns None if not found.'''
     for p in templatepaths():
@@ -960,6 +1041,7 @@
             return f
     return None
 
+
 def stylemap(styles, paths=None):
     """Return path to mapfile for a given style.
 
@@ -979,10 +1061,13 @@
 
     for style in styles:
         # only plain name is allowed to honor template paths
-        if (not style
+        if (
+            not style
             or style in (pycompat.oscurdir, pycompat.ospardir)
             or pycompat.ossep in style
-            or pycompat.osaltsep and pycompat.osaltsep in style):
+            or pycompat.osaltsep
+            and pycompat.osaltsep in style
+        ):
             continue
         locations = [os.path.join(style, 'map'), 'map-' + style]
         locations.append('map')
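
One templater.py change that is easy to misread as noise is the slice spacing. Following PEP 8, black keeps the colon tight when both slice bounds are simple names or literals, but pads it with spaces when either bound is a compound expression; that is why program[s:pos] survives untouched while program[pos:pos + 2] becomes program[pos : pos + 2]. A tiny self-contained illustration (the variables are made up):

    program = b'r"quoted string"'
    pos, s, end = 0, 2, len(program)

    head = program[s:end]  # simple bounds: the colon stays tight
    quote = program[pos : pos + 2]  # compound bound: spaces on both sides
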
--- a/mercurial/templateutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/templateutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,12 +21,15 @@
     stringutil,
 )
 
+
 class ResourceUnavailable(error.Abort):
     pass
 
+
 class TemplateNotFound(error.Abort):
     pass
 
+
 class wrapped(object):
     """Object requiring extra conversion prior to displaying or processing
     as value
@@ -103,6 +106,7 @@
         A returned value must be serializable by templaterfilters.json().
         """
 
+
 class mappable(object):
     """Object which can be converted to a single template mapping"""
 
@@ -113,6 +117,7 @@
     def tomap(self, context):
         """Create a single template mapping representing this"""
 
+
 class wrappedbytes(wrapped):
     """Wrapper for byte string"""
 
@@ -124,8 +129,9 @@
         return item in self._value
 
     def getmember(self, context, mapping, key):
-        raise error.ParseError(_('%r is not a dictionary')
-                               % pycompat.bytestr(self._value))
+        raise error.ParseError(
+            _('%r is not a dictionary') % pycompat.bytestr(self._value)
+        )
 
     def getmin(self, context, mapping):
         return self._getby(context, mapping, min)
@@ -139,12 +145,14 @@
         return func(pycompat.iterbytestr(self._value))
 
     def filter(self, context, mapping, select):
-        raise error.ParseError(_('%r is not filterable')
-                               % pycompat.bytestr(self._value))
+        raise error.ParseError(
+            _('%r is not filterable') % pycompat.bytestr(self._value)
+        )
 
     def itermaps(self, context):
-        raise error.ParseError(_('%r is not iterable of mappings')
-                               % pycompat.bytestr(self._value))
+        raise error.ParseError(
+            _('%r is not iterable of mappings') % pycompat.bytestr(self._value)
+        )
 
     def join(self, context, mapping, sep):
         return joinitems(pycompat.iterbytestr(self._value), sep)
@@ -158,6 +166,7 @@
     def tovalue(self, context, mapping):
         return self._value
 
+
 class wrappedvalue(wrapped):
     """Generic wrapper for pure non-list/dict/bytes value"""
 
@@ -180,8 +189,9 @@
         raise error.ParseError(_("%r is not iterable") % self._value)
 
     def itermaps(self, context):
-        raise error.ParseError(_('%r is not iterable of mappings')
-                               % self._value)
+        raise error.ParseError(
+            _('%r is not iterable of mappings') % self._value
+        )
 
     def join(self, context, mapping, sep):
         raise error.ParseError(_('%r is not iterable') % self._value)
@@ -202,6 +212,7 @@
     def tovalue(self, context, mapping):
         return self._value
 
+
 class date(mappable, wrapped):
     """Wrapper for date tuple"""
 
@@ -241,6 +252,7 @@
     def tovalue(self, context, mapping):
         return (self._unixtime, self._tzoffset)
 
+
 class hybrid(wrapped):
     """Wrapper for list or dict to support legacy template
 
@@ -293,8 +305,11 @@
 
     def filter(self, context, mapping, select):
         if util.safehasattr(self._values, 'get'):
-            values = {k: v for k, v in self._values.iteritems()
-                      if select(self._wrapvalue(k, v))}
+            values = {
+                k: v
+                for k, v in self._values.iteritems()
+                if select(self._wrapvalue(k, v))
+            }
         else:
             values = [v for v in self._values if select(self._wrapvalue(v, v))]
         return hybrid(None, values, self._makemap, self._joinfmt, self._keytype)
@@ -324,10 +339,12 @@
         # TODO: make it non-recursive for trivial lists/dicts
         xs = self._values
         if util.safehasattr(xs, 'get'):
-            return {k: unwrapvalue(context, mapping, v)
-                    for k, v in xs.iteritems()}
+            return {
+                k: unwrapvalue(context, mapping, v) for k, v in xs.iteritems()
+            }
         return [unwrapvalue(context, mapping, x) for x in xs]
 
+
 class hybriditem(mappable, wrapped):
     """Wrapper for non-list/dict object to support map operation
 
@@ -386,6 +403,7 @@
     def tovalue(self, context, mapping):
         return _unthunk(context, mapping, self._value)
 
+
 class _mappingsequence(wrapped):
     """Wrapper for sequence of template mappings
 
@@ -437,10 +455,16 @@
         for nm in self.itermaps(context):
             # drop internal resources (recursively) which shouldn't be displayed
             lm = context.overlaymap(mapping, nm)
-            items.append({k: unwrapvalue(context, lm, v)
-                          for k, v in nm.iteritems() if k not in knownres})
+            items.append(
+                {
+                    k: unwrapvalue(context, lm, v)
+                    for k, v in nm.iteritems()
+                    if k not in knownres
+                }
+            )
         return items
 
+
 class mappinggenerator(_mappingsequence):
     """Wrapper for generator of template mappings
 
@@ -459,6 +483,7 @@
     def tobool(self, context, mapping):
         return _nonempty(self.itermaps(context))
 
+
 class mappinglist(_mappingsequence):
     """Wrapper for list of template mappings"""
 
@@ -472,6 +497,7 @@
     def tobool(self, context, mapping):
         return bool(self._mappings)
 
+
 class mappingdict(mappable, _mappingsequence):
     """Wrapper for a single template mapping
 
@@ -495,6 +521,7 @@
     def tovalue(self, context, mapping):
         return super(mappingdict, self).tovalue(context, mapping)[0]
 
+
 class mappingnone(wrappedvalue):
     """Wrapper for None, but supports map operation
 
@@ -508,6 +535,7 @@
     def itermaps(self, context):
         return iter([])
 
+
 class mappedgenerator(wrapped):
     """Wrapper for generator of strings which acts as a list
 
@@ -568,14 +596,20 @@
     def tovalue(self, context, mapping):
         return [stringify(context, mapping, x) for x in self._gen(context)]
 
+
 def hybriddict(data, key='key', value='value', fmt=None, gen=None):
     """Wrap data to support both dict-like and string-like operations"""
     prefmt = pycompat.identity
     if fmt is None:
         fmt = '%s=%s'
         prefmt = pycompat.bytestr
-    return hybrid(gen, data, lambda k: {key: k, value: data[k]},
-                  lambda k: fmt % (prefmt(k), prefmt(data[k])))
+    return hybrid(
+        gen,
+        data,
+        lambda k: {key: k, value: data[k]},
+        lambda k: fmt % (prefmt(k), prefmt(data[k])),
+    )
+
 
 def hybridlist(data, name, fmt=None, gen=None):
     """Wrap data to support both list-like and string-like operations"""
@@ -585,8 +619,18 @@
         prefmt = pycompat.bytestr
     return hybrid(gen, data, lambda x: {name: x}, lambda x: fmt % prefmt(x))
 
-def compatdict(context, mapping, name, data, key='key', value='value',
-               fmt=None, plural=None, separator=' '):
+
+def compatdict(
+    context,
+    mapping,
+    name,
+    data,
+    key='key',
+    value='value',
+    fmt=None,
+    plural=None,
+    separator=' ',
+):
     """Wrap data like hybriddict(), but also supports old-style list template
 
     This exists for backward compatibility with the old-style template. Use
@@ -596,8 +640,17 @@
     f = _showcompatlist(context, mapping, name, c, plural, separator)
     return hybriddict(data, key=key, value=value, fmt=fmt, gen=f)
 
-def compatlist(context, mapping, name, data, element=None, fmt=None,
-               plural=None, separator=' '):
+
+def compatlist(
+    context,
+    mapping,
+    name,
+    data,
+    element=None,
+    fmt=None,
+    plural=None,
+    separator=' ',
+):
     """Wrap data like hybridlist(), but also supports old-style list template
 
     This exists for backward compatibility with the old-style template. Use
@@ -606,6 +659,7 @@
     f = _showcompatlist(context, mapping, name, data, plural, separator)
     return hybridlist(data, name=element or name, fmt=fmt, gen=f)
 
+
 def compatfilecopiesdict(context, mapping, name, copies):
     """Wrap list of (dest, source) file names to support old-style list
     template and field names
@@ -617,9 +671,13 @@
     c = [{'name': k, 'source': v} for k, v in copies]
     f = _showcompatlist(context, mapping, name, c, plural='file_copies')
     copies = util.sortdict(copies)
-    return hybrid(f, copies,
-                  lambda k: {'name': k, 'path': k, 'source': copies[k]},
-                  lambda k: '%s (%s)' % (k, copies[k]))
+    return hybrid(
+        f,
+        copies,
+        lambda k: {'name': k, 'path': k, 'source': copies[k]},
+        lambda k: '%s (%s)' % (k, copies[k]),
+    )
+
 
 def compatfileslist(context, mapping, name, files):
     """Wrap list of file names to support old-style list template and field
@@ -629,8 +687,8 @@
     keywords.
     """
     f = _showcompatlist(context, mapping, name, files)
-    return hybrid(f, files, lambda x: {'file': x, 'path': x},
-                  pycompat.identity)
+    return hybrid(f, files, lambda x: {'file': x, 'path': x}, pycompat.identity)
+
 
 def _showcompatlist(context, mapping, name, values, plural=None, separator=' '):
     """Return a generator that renders old-style list template
@@ -673,6 +731,7 @@
     startname = 'start_' + plural
     if context.preload(startname):
         yield context.process(startname, mapping)
+
     def one(v, tag=name):
         vmapping = {}
         try:
@@ -689,6 +748,7 @@
                 vmapping[name] = v
         vmapping = context.overlaymap(mapping, vmapping)
         return context.process(tag, vmapping)
+
     lastname = 'last_' + name
     if context.preload(lastname):
         last = values.pop()
@@ -702,6 +762,7 @@
     if context.preload(endname):
         yield context.process(endname, mapping)
 
+
 def flatten(context, mapping, thing):
     """Yield a single stream from a possibly nested set of iterators"""
     if isinstance(thing, wrapped):
@@ -711,8 +772,10 @@
     elif isinstance(thing, str):
         # We can only hit this on Python 3, and it's here to guard
         # against infinite recursion.
-        raise error.ProgrammingError('Mercurial IO including templates is done'
-                                     ' with bytes, not strings, got %r' % thing)
+        raise error.ProgrammingError(
+            'Mercurial IO including templates is done'
+            ' with bytes, not strings, got %r' % thing
+        )
     elif thing is None:
         pass
     elif not util.safehasattr(thing, '__iter__'):
@@ -731,12 +794,14 @@
                 for j in flatten(context, mapping, i):
                     yield j
 
+
 def stringify(context, mapping, thing):
     """Turn values into bytes by converting into text and concatenating them"""
     if isinstance(thing, bytes):
         return thing  # retain localstr to be round-tripped
     return b''.join(flatten(context, mapping, thing))
 
+
 def findsymbolicname(arg):
     """Find symbolic name for the given compiled expression; returns None
     if nothing found reliably"""
@@ -749,6 +814,7 @@
         else:
             return None
 
+
 def _nonempty(xiter):
     try:
         next(xiter)
@@ -756,23 +822,27 @@
     except StopIteration:
         return False
 
+
 def _unthunk(context, mapping, thing):
     """Evaluate a lazy byte string into value"""
     if not isinstance(thing, types.GeneratorType):
         return thing
     return stringify(context, mapping, thing)
 
+
 def evalrawexp(context, mapping, arg):
     """Evaluate given argument as a bare template object which may require
     further processing (such as folding generator of strings)"""
     func, data = arg
     return func(context, mapping, data)
 
+
 def evalwrapped(context, mapping, arg):
     """Evaluate given argument to wrapped object"""
     thing = evalrawexp(context, mapping, arg)
     return makewrapped(context, mapping, thing)
 
+
 def makewrapped(context, mapping, thing):
     """Lift object to a wrapped type"""
     if isinstance(thing, wrapped):
@@ -782,10 +852,12 @@
         return wrappedbytes(thing)
     return wrappedvalue(thing)
 
+
 def evalfuncarg(context, mapping, arg):
     """Evaluate given argument as value type"""
     return unwrapvalue(context, mapping, evalrawexp(context, mapping, arg))
 
+
 def unwrapvalue(context, mapping, thing):
     """Move the inner value object out of the wrapper"""
     if isinstance(thing, wrapped):
@@ -794,6 +866,7 @@
     # such as date tuple, but filter does not want generator.
     return _unthunk(context, mapping, thing)
 
+
 def evalboolean(context, mapping, arg):
     """Evaluate given argument as boolean, but also takes boolean literals"""
     func, data = arg
@@ -806,12 +879,14 @@
         thing = func(context, mapping, data)
     return makewrapped(context, mapping, thing).tobool(context, mapping)
 
+
 def evaldate(context, mapping, arg, err=None):
     """Evaluate given argument as a date tuple or a date string; returns
     a (unixtime, offset) tuple"""
     thing = evalrawexp(context, mapping, arg)
     return unwrapdate(context, mapping, thing, err)
 
+
 def unwrapdate(context, mapping, thing, err=None):
     if isinstance(thing, date):
         return thing.tovalue(context, mapping)
@@ -826,10 +901,12 @@
             raise
         raise error.ParseError(err)
 
+
 def evalinteger(context, mapping, arg, err=None):
     thing = evalrawexp(context, mapping, arg)
     return unwrapinteger(context, mapping, thing, err)
 
+
 def unwrapinteger(context, mapping, thing, err=None):
     thing = unwrapvalue(context, mapping, thing)
     try:
@@ -837,9 +914,11 @@
     except (TypeError, ValueError):
         raise error.ParseError(err or _('not an integer'))
 
+
 def evalstring(context, mapping, arg):
     return stringify(context, mapping, evalrawexp(context, mapping, arg))
 
+
 def evalstringliteral(context, mapping, arg):
     """Evaluate given argument as string template, but returns symbol name
     if it is unknown"""
@@ -850,6 +929,7 @@
         thing = func(context, mapping, data)
     return stringify(context, mapping, thing)
 
+
 _unwrapfuncbytype = {
     None: unwrapvalue,
     bytes: stringify,
@@ -857,6 +937,7 @@
     int: unwrapinteger,
 }
 
+
 def unwrapastype(context, mapping, thing, typ):
     """Move the inner value object out of the wrapper and coerce its type"""
     try:
@@ -865,17 +946,22 @@
         raise error.ProgrammingError('invalid type specified: %r' % typ)
     return f(context, mapping, thing)
 
+
 def runinteger(context, mapping, data):
     return int(data)
 
+
 def runstring(context, mapping, data):
     return data
 
+
 def _recursivesymbolblocker(key):
     def showrecursion(context, mapping):
         raise error.Abort(_("recursive reference '%s' in template") % key)
+
     return showrecursion
 
+
 def runsymbol(context, mapping, key, default=''):
     v = context.symbol(mapping, key)
     if v is None:
@@ -896,10 +982,12 @@
             return None
     return v
 
+
 def runtemplate(context, mapping, template):
     for arg in template:
         yield evalrawexp(context, mapping, arg)
 
+
 def runfilter(context, mapping, data):
     arg, filt = data
     thing = evalrawexp(context, mapping, arg)
@@ -910,13 +998,17 @@
     except error.ParseError as e:
         raise error.ParseError(bytes(e), hint=_formatfiltererror(arg, filt))
 
+
 def _formatfiltererror(arg, filt):
     fn = pycompat.sysbytes(filt.__name__)
     sym = findsymbolicname(arg)
     if not sym:
         return _("incompatible use of template filter '%s'") % fn
-    return (_("template filter '%s' is not compatible with keyword '%s'")
-            % (fn, sym))
+    return _("template filter '%s' is not compatible with keyword '%s'") % (
+        fn,
+        sym,
+    )
+
 
 def _iteroverlaymaps(context, origmapping, newmappings):
     """Generate combined mappings from the original mapping and an iterable
@@ -926,6 +1018,7 @@
         lm['index'] = i
         yield lm
 
+
 def _applymap(context, mapping, d, darg, targ):
     try:
         diter = d.itermaps(context)
@@ -938,11 +1031,13 @@
     for lm in _iteroverlaymaps(context, mapping, diter):
         yield evalrawexp(context, lm, targ)
 
+
 def runmap(context, mapping, data):
     darg, targ = data
     d = evalwrapped(context, mapping, darg)
     return mappedgenerator(_applymap, args=(mapping, d, darg, targ))
 
+
 def runmember(context, mapping, data):
     darg, memb = data
     d = evalwrapped(context, mapping, darg)
@@ -958,22 +1053,28 @@
         hint = _("keyword '%s' does not support member operation") % sym
         raise error.ParseError(bytes(err), hint=hint)
 
+
 def runnegate(context, mapping, data):
-    data = evalinteger(context, mapping, data,
-                       _('negation needs an integer argument'))
+    data = evalinteger(
+        context, mapping, data, _('negation needs an integer argument')
+    )
     return -data
 
+
 def runarithmetic(context, mapping, data):
     func, left, right = data
-    left = evalinteger(context, mapping, left,
-                       _('arithmetic only defined on integers'))
-    right = evalinteger(context, mapping, right,
-                        _('arithmetic only defined on integers'))
+    left = evalinteger(
+        context, mapping, left, _('arithmetic only defined on integers')
+    )
+    right = evalinteger(
+        context, mapping, right, _('arithmetic only defined on integers')
+    )
     try:
         return func(left, right)
     except ZeroDivisionError:
         raise error.Abort(_('division by zero is not defined'))
 
+
 def joinitems(itemiter, sep):
     """Join items with the separator; Returns generator of bytes"""
     first = True
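
Most of the bare '+' lines in these hunks are black enforcing the PEP 8 blank-line and comment-spacing rules: two blank lines around top-level functions and classes, one blank line after a nested def or a class docstring, and two spaces before inline comments. Schematically, for a hypothetical module (not code from the patch):

    import os


    def outer():  # two blank lines precede every top-level def
        def helper():  # a nested helper...
            return os.sep

        return helper()  # ...is followed by one blank line


    class example(object):
        """A class docstring gets a blank line after it."""

        attr = 1  # inline comments get two spaces before the '#'
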
--- a/mercurial/testing/storage.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/testing/storage.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,17 +18,16 @@
     error,
     mdiff,
 )
-from ..interfaces import (
-    repository,
-)
-from ..utils import (
-    storageutil,
-)
+from ..interfaces import repository
+from ..utils import storageutil
+
 
 class basetestcase(unittest.TestCase):
     if not getattr(unittest.TestCase, r'assertRaisesRegex', False):
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
+
 
 class ifileindextests(basetestcase):
     """Generic tests for the ifileindex interface.
@@ -38,6 +37,7 @@
 
     Use ``makeifileindextests()`` to create an instance of this type.
     """
+
     def testempty(self):
         f = self._makefilefn()
         self.assertEqual(len(f), 0, 'new file store has 0 length by default')
@@ -398,6 +398,7 @@
         self.assertEqual(f.children(node4), [])
         self.assertEqual(f.children(node5), [])
 
+
 class ifiledatatests(basetestcase):
     """Generic tests for the ifiledata interface.
 
@@ -406,12 +407,15 @@
 
     Use ``makeifiledatatests()`` to create an instance of this type.
     """
+
     def testempty(self):
         f = self._makefilefn()
 
         self.assertEqual(f.storageinfo(), {})
-        self.assertEqual(f.storageinfo(revisionscount=True, trackedsize=True),
-                         {'revisionscount': 0, 'trackedsize': 0})
+        self.assertEqual(
+            f.storageinfo(revisionscount=True, trackedsize=True),
+            {'revisionscount': 0, 'trackedsize': 0},
+        )
 
         self.assertEqual(f.size(nullrev), 0)
 
@@ -466,8 +470,10 @@
             node = f.add(fulltext, None, tr, 0, nullid, nullid)
 
         self.assertEqual(f.storageinfo(), {})
-        self.assertEqual(f.storageinfo(revisionscount=True, trackedsize=True),
-                         {'revisionscount': 1, 'trackedsize': len(fulltext)})
+        self.assertEqual(
+            f.storageinfo(revisionscount=True, trackedsize=True),
+            {'revisionscount': 1, 'trackedsize': len(fulltext)},
+        )
 
         self.assertEqual(f.size(0), len(fulltext))
 
@@ -537,7 +543,8 @@
             {
                 'revisionscount': 3,
                 'trackedsize': len(fulltext0) + len(fulltext1) + len(fulltext2),
-            })
+            },
+        )
 
         self.assertEqual(f.size(0), len(fulltext0))
         self.assertEqual(f.size(1), len(fulltext1))
@@ -603,9 +610,10 @@
         self.assertEqual(rev.basenode, node0)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' +
-                         fulltext1)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' + fulltext1,
+        )
 
         rev = next(gen)
 
@@ -616,9 +624,10 @@
         self.assertEqual(rev.basenode, node1)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' +
-                         fulltext2)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' + fulltext2,
+        )
 
         with self.assertRaises(StopIteration):
             next(gen)
@@ -646,9 +655,10 @@
         self.assertEqual(rev.basenode, node0)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' +
-                         fulltext1)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' + fulltext1,
+        )
 
         rev = next(gen)
 
@@ -659,9 +669,10 @@
         self.assertEqual(rev.basenode, node1)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' +
-                         fulltext2)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' + fulltext2,
+        )
 
         with self.assertRaises(StopIteration):
             next(gen)
@@ -672,14 +683,14 @@
 
         # nodesorder=storage is recognized. But we can't test it thoroughly
         # because behavior is storage-dependent.
-        res = list(f.emitrevisions([node2, node1, node0],
-                                         nodesorder='storage'))
+        res = list(f.emitrevisions([node2, node1, node0], nodesorder='storage'))
         self.assertEqual(len(res), 3)
         self.assertEqual({o.node for o in res}, {node0, node1, node2})
 
         # nodesorder=nodes forces the order.
-        gen = f.emitrevisions([node2, node0], nodesorder='nodes',
-                              revisiondata=True)
+        gen = f.emitrevisions(
+            [node2, node0], nodesorder='nodes', revisiondata=True
+        )
 
         rev = next(gen)
         self.assertEqual(rev.node, node2)
@@ -719,16 +730,18 @@
         self.assertEqual(rev.basenode, node1)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' +
-                         fulltext2)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x01\x00\x00\x04\x02' + fulltext2,
+        )
 
         with self.assertRaises(StopIteration):
             next(gen)
 
         # assumehaveparentrevisions=True allows delta against initial revision.
-        gen = f.emitrevisions([node2, node1],
-                              revisiondata=True, assumehaveparentrevisions=True)
+        gen = f.emitrevisions(
+            [node2, node1], revisiondata=True, assumehaveparentrevisions=True
+        )
 
         rev = next(gen)
         self.assertEqual(rev.node, node1)
@@ -737,14 +750,16 @@
         self.assertEqual(rev.basenode, node0)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' +
-                         fulltext1)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x01' + fulltext1,
+        )
 
         # CG_DELTAMODE_PREV forces a delta against the previous revision.
         # Special case for initial revision.
-        gen = f.emitrevisions([node0], revisiondata=True,
-                              deltamode=repository.CG_DELTAMODE_PREV)
+        gen = f.emitrevisions(
+            [node0], revisiondata=True, deltamode=repository.CG_DELTAMODE_PREV
+        )
 
         rev = next(gen)
         self.assertEqual(rev.node, node0)
@@ -753,15 +768,19 @@
         self.assertEqual(rev.basenode, nullid)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' +
-                         fulltext0)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' + fulltext0,
+        )
 
         with self.assertRaises(StopIteration):
             next(gen)
 
-        gen = f.emitrevisions([node0, node2], revisiondata=True,
-                              deltamode=repository.CG_DELTAMODE_PREV)
+        gen = f.emitrevisions(
+            [node0, node2],
+            revisiondata=True,
+            deltamode=repository.CG_DELTAMODE_PREV,
+        )
 
         rev = next(gen)
         self.assertEqual(rev.node, node0)
@@ -770,9 +789,10 @@
         self.assertEqual(rev.basenode, nullid)
         self.assertIsNone(rev.baserevisionsize)
         self.assertIsNone(rev.revision)
-        self.assertEqual(rev.delta,
-                         b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' +
-                         fulltext0)
+        self.assertEqual(
+            rev.delta,
+            b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' + fulltext0,
+        )
 
         rev = next(gen)
         self.assertEqual(rev.node, node2)
@@ -798,17 +818,21 @@
             b'copyrev': b'b' * 40,
         }
 
-        stored1 = b''.join([
-            b'\x01\ncopy: source0\n',
-            b'copyrev: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n\x01\n',
-            fulltext1,
-        ])
+        stored1 = b''.join(
+            [
+                b'\x01\ncopy: source0\n',
+                b'copyrev: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n\x01\n',
+                fulltext1,
+            ]
+        )
 
-        stored2 = b''.join([
-            b'\x01\ncopy: source1\n',
-            b'copyrev: bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n\x01\n',
-            fulltext2,
-        ])
+        stored2 = b''.join(
+            [
+                b'\x01\ncopy: source1\n',
+                b'copyrev: bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\n\x01\n',
+                fulltext2,
+            ]
+        )
 
         f = self._makefilefn()
         with self._maketransactionfn() as tr:
@@ -847,11 +871,13 @@
             b'copy': b'source0',
             b'copyrev': b'b' * 40,
         }
-        stored1 = b''.join([
-            b'\x01\ncopy: source0\n',
-            b'copyrev: %s\n' % (b'b' * 40),
-            b'\x01\n\x01\nbar',
-        ])
+        stored1 = b''.join(
+            [
+                b'\x01\ncopy: source0\n',
+                b'copyrev: %s\n' % (b'b' * 40),
+                b'\x01\n\x01\nbar',
+            ]
+        )
 
         f = self._makefilefn()
         with self._maketransactionfn() as tr:
@@ -888,8 +914,9 @@
             node0 = f.add(fulltext0, None, tr, 0, nullid, nullid)
             node1 = b'\xaa' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1,
-                                   rawtext=fulltext1)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, rawtext=fulltext1
+            )
 
         self.assertEqual(len(f), 2)
         self.assertEqual(f.parents(node1), (node0, nullid))
@@ -922,8 +949,9 @@
             node0 = f.add(fulltext0, None, tr, 0, nullid, nullid)
             node1 = b'\xaa' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1,
-                                   rawtext=fulltext1)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, rawtext=fulltext1
+            )
 
         with self.assertRaises(error.StorageError):
             f.rawdata(node1)
@@ -943,8 +971,9 @@
             node0 = f.add(fulltext0, None, tr, 0, nullid, nullid)
             node1 = b'\xaa' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1,
-                                   rawtext=fulltext1)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, rawtext=fulltext1
+            )
 
         with self.assertRaises(error.StorageError):
             f.read(node1)
@@ -963,8 +992,9 @@
             node0 = f.add(fulltext0, None, tr, 0, nullid, nullid)
             node1 = b'\xaa' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1,
-                                   rawtext=fulltext1)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, rawtext=fulltext1
+            )
 
         with self.assertRaises(error.StorageError):
             f.read(node1)
@@ -973,8 +1003,9 @@
 
         with self._maketransactionfn() as tr:
             delta = mdiff.textdiff(fulltext1, fulltext2)
-            self._addrawrevisionfn(f, tr, node2, node1, nullid,
-                                   2, delta=(1, delta))
+            self._addrawrevisionfn(
+                f, tr, node2, node1, nullid, 2, delta=(1, delta)
+            )
 
         self.assertEqual(len(f), 3)
 
@@ -985,9 +1016,7 @@
     def testcensored(self):
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({
-            b'censored': b'tombstone',
-        }, b'')
+        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -995,8 +1024,9 @@
             # The node value doesn't matter since we can't verify it.
             node1 = b'\xbb' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1, stored1,
-                                   censored=True)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, stored1, censored=True
+            )
 
         self.assertTrue(f.iscensored(1))
 
@@ -1015,9 +1045,7 @@
 
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({
-            b'censored': b'tombstone',
-        }, b'')
+        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1025,12 +1053,14 @@
             # The node value doesn't matter since we can't verify it.
             node1 = b'\xbb' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1, stored1,
-                                   censored=True)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, stored1, censored=True
+            )
 
         with self.assertRaises(error.CensoredNodeError):
             f.rawdata(1)
 
+
 class ifilemutationtests(basetestcase):
     """Generic tests for the ifilemutation interface.
 
@@ -1039,6 +1069,7 @@
 
     Use ``makeifilemutationtests()`` to create an instance of this type.
     """
+
     def testaddnoop(self):
         f = self._makefilefn()
         with self._maketransactionfn() as tr:
@@ -1073,6 +1104,7 @@
         f = self._makefilefn()
 
         callbackargs = []
+
         def cb(*args, **kwargs):
             callbackargs.append((args, kwargs))
 
@@ -1101,9 +1133,13 @@
         with self._maketransactionfn() as tr:
             nodes = f.addgroup(deltas, linkmapper, tr, addrevisioncb=cb)
 
-        self.assertEqual(nodes, [
-            b'\x49\xd8\xcb\xb1\x5c\xe2\x57\x92\x04\x47'
-            b'\x00\x6b\x46\x97\x8b\x7a\xf9\x80\xa9\x79'])
+        self.assertEqual(
+            nodes,
+            [
+                b'\x49\xd8\xcb\xb1\x5c\xe2\x57\x92\x04\x47'
+                b'\x00\x6b\x46\x97\x8b\x7a\xf9\x80\xa9\x79'
+            ],
+        )
 
         self.assertEqual(len(callbackargs), 1)
         self.assertEqual(callbackargs[0][0][1], nodes[0])
@@ -1149,9 +1185,7 @@
         # Attempt to apply a delta made against a censored revision.
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({
-            b'censored': b'tombstone',
-        }, b'')
+        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
@@ -1159,8 +1193,9 @@
             # The node value doesn't matter since we can't verify it.
             node1 = b'\xbb' * 20
 
-            self._addrawrevisionfn(f, tr, node1, node0, nullid, 1, stored1,
-                                   censored=True)
+            self._addrawrevisionfn(
+                f, tr, node1, node0, nullid, 1, stored1, censored=True
+            )
 
         delta = mdiff.textdiff(b'bar\n' * 30, (b'bar\n' * 30) + b'baz\n')
         deltas = [(b'\xcc' * 20, node1, nullid, b'\x01' * 20, node1, delta, 0)]
@@ -1306,6 +1341,7 @@
         with self.assertRaises(error.LookupError):
             f.rev(node1)
 
+
 def makeifileindextests(makefilefn, maketransactionfn, addrawrevisionfn):
     """Create a unittest.TestCase class suitable for testing file storage.
 
@@ -1331,6 +1367,7 @@
     }
     return type(r'ifileindextests', (ifileindextests,), d)
 
+
 def makeifiledatatests(makefilefn, maketransactionfn, addrawrevisionfn):
     d = {
         r'_makefilefn': makefilefn,
@@ -1339,6 +1376,7 @@
     }
     return type(r'ifiledatatests', (ifiledatatests,), d)
 
+
 def makeifilemutationtests(makefilefn, maketransactionfn, addrawrevisionfn):
     d = {
         r'_makefilefn': makefilefn,
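
Note: the hunks above all instantiate one wrapping rule. A call that exceeds
the formatter's line length (88 columns by default; assumed here) is exploded
to one argument per line with a trailing comma, and a call that fits is joined
back onto a single line. A toy sketch of the two shapes, hypothetical names
throughout:

    def assert_equal(actual, expected, msg):
        # Minimal stand-in for the unittest assertion used in the hunks above.
        assert actual == expected, msg

    prefix, fulltext = b'\x00\x00\x04\x01', b'fulltext'

    # Short enough to fit: stays on one line.
    assert_equal(prefix + fulltext, b'\x00\x00\x04\x01fulltext', 'delta')

    # Too long to fit: one argument per line, magic trailing comma.
    assert_equal(
        prefix + fulltext,
        b'\x00\x00\x04\x01fulltext',
        'delta bytes should match the expected raw delta header plus fulltext',
    )
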
--- a/mercurial/transaction.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/transaction.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,43 +21,54 @@
     pycompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 version = 2
 
 # These are the file generators that should only be executed after the
 # finalizers are done, since they rely on the output of the finalizers (like
 # the changelog having been written).
-postfinalizegenerators = {
-    'bookmarks',
-    'dirstate'
-}
+postfinalizegenerators = {'bookmarks', 'dirstate'}
 
-gengroupall='all'
-gengroupprefinalize='prefinalize'
-gengrouppostfinalize='postfinalize'
+gengroupall = 'all'
+gengroupprefinalize = 'prefinalize'
+gengrouppostfinalize = 'postfinalize'
+
 
 def active(func):
     def _active(self, *args, **kwds):
         if self._count == 0:
-            raise error.Abort(_(
-                'cannot use transaction when it is already committed/aborted'))
+            raise error.Abort(
+                _('cannot use transaction when it is already committed/aborted')
+            )
         return func(self, *args, **kwds)
+
     return _active
 
-def _playback(journal, report, opener, vfsmap, entries, backupentries,
-              unlink=True, checkambigfiles=None):
+
+def _playback(
+    journal,
+    report,
+    opener,
+    vfsmap,
+    entries,
+    backupentries,
+    unlink=True,
+    checkambigfiles=None,
+):
     for f, o, _ignore in entries:
         if o or not unlink:
             checkambig = checkambigfiles and (f, '') in checkambigfiles
             try:
                 fp = opener(f, 'a', checkambig=checkambig)
                 if fp.tell() < o:
-                    raise error.Abort(_(
+                    raise error.Abort(
+                        _(
                             "attempted to truncate %s to %d bytes, but it was "
-                            "already %d bytes\n") % (f, o, fp.tell()))
+                            "already %d bytes\n"
+                        )
+                        % (f, o, fp.tell())
+                    )
                 fp.truncate(o)
                 fp.close()
             except IOError:
@@ -73,8 +84,7 @@
     backupfiles = []
     for l, f, b, c in backupentries:
         if l not in vfsmap and c:
-            report("couldn't handle %s: unknown cache location %s\n"
-                        % (b, l))
+            report("couldn't handle %s: unknown cache location %s\n" % (b, l))
         vfs = vfsmap[l]
         try:
             if f and b:
@@ -109,10 +119,22 @@
         # only pure backup files remain, it is safe to ignore any error
         pass
 
+
 class transaction(util.transactional):
-    def __init__(self, report, opener, vfsmap, journalname, undoname=None,
-                 after=None, createmode=None, validator=None, releasefn=None,
-                 checkambigfiles=None, name=r'<unnamed>'):
+    def __init__(
+        self,
+        report,
+        opener,
+        vfsmap,
+        journalname,
+        undoname=None,
+        after=None,
+        createmode=None,
+        validator=None,
+        releasefn=None,
+        checkambigfiles=None,
+        name=r'<unnamed>',
+    ):
         """Begin a new transaction
 
         Begins a new transaction that allows rolling back writes in the event of
@@ -197,8 +219,11 @@
 
     def __repr__(self):
         name = r'/'.join(self._names)
-        return (r'<transaction name=%s, count=%d, usages=%d>' %
-                (name, self._count, self._usages))
+        return r'<transaction name=%s, count=%d, usages=%d>' % (
+            name,
+            self._count,
+            self._usages,
+        )
 
     def __del__(self):
         if self._journal:
@@ -290,8 +315,7 @@
         self._addbackupentry((location, '', tmpfile, False))
 
     @active
-    def addfilegenerator(self, genid, filenames, genfunc, order=0,
-                         location=''):
+    def addfilegenerator(self, genid, filenames, genfunc, order=0, location=''):
         """add a function to generates some files at transaction commit
 
         The `genfunc` argument is a function capable of generating proper
@@ -334,8 +358,8 @@
             # for generation at closing, check if it's before or after finalize
             postfinalize = group == gengrouppostfinalize
             if (
-                    group != gengroupall
-                    and (id in postfinalizegenerators) != postfinalize
+                group != gengroupall
+                and (id in postfinalizegenerators) != postfinalize
             ):
                 continue
 
@@ -350,8 +374,9 @@
                     else:
                         self.addbackup(name, location=location)
                         checkambig = (name, location) in self._checkambigfiles
-                    files.append(vfs(name, 'w', atomictemp=True,
-                                     checkambig=checkambig))
+                    files.append(
+                        vfs(name, 'w', atomictemp=True, checkambig=checkambig)
+                    )
                 genfunc(*files)
                 for f in files:
                     f.close()
@@ -469,7 +494,7 @@
         '''commit the transaction'''
         if self._count == 1:
             self._validator(self)  # will raise exception if needed
-            self._validator = None # Help prevent cycles.
+            self._validator = None  # Help prevent cycles.
             self._generatefiles(group=gengroupprefinalize)
             categories = sorted(self._finalizecallback)
             for cat in categories:
@@ -486,8 +511,9 @@
         # cleanup temporary files
         for l, f, b, c in self._backupentries:
             if l not in self._vfsmap and c:
-                self._report("couldn't remove %s: unknown cache location %s\n"
-                             % (b, l))
+                self._report(
+                    "couldn't remove %s: unknown cache location %s\n" % (b, l)
+                )
                 continue
             vfs = self._vfsmap[l]
             if not f and b and vfs.exists(b):
@@ -497,21 +523,23 @@
                     if not c:
                         raise
                     # Abort may be raised by a read-only opener
-                    self._report("couldn't remove %s: %s\n"
-                                 % (vfs.join(b), inst))
+                    self._report(
+                        "couldn't remove %s: %s\n" % (vfs.join(b), inst)
+                    )
         self._entries = []
         self._writeundo()
         if self._after:
             self._after()
-            self._after = None # Help prevent cycles.
+            self._after = None  # Help prevent cycles.
         if self._opener.isfile(self._backupjournal):
             self._opener.unlink(self._backupjournal)
         if self._opener.isfile(self._journal):
             self._opener.unlink(self._journal)
         for l, _f, b, c in self._backupentries:
             if l not in self._vfsmap and c:
-                self._report("couldn't remove %s: unknown cache location"
-                             "%s\n" % (b, l))
+                self._report(
+                    "couldn't remove %s: unknown cache location %s\n" % (b, l)
+                )
                 continue
             vfs = self._vfsmap[l]
             if b and vfs.exists(b):
@@ -521,13 +549,14 @@
                     if not c:
                         raise
                     # Abort may be raised by a read-only opener
-                    self._report("couldn't remove %s: %s\n"
-                                 % (vfs.join(b), inst))
+                    self._report(
+                        "couldn't remove %s: %s\n" % (vfs.join(b), inst)
+                    )
         self._backupentries = []
         self._journal = None
 
-        self._releasefn(self, True) # notify success of closing transaction
-        self._releasefn = None # Help prevent cycles.
+        self._releasefn(self, True)  # notify success of closing transaction
+        self._releasefn = None  # Help prevent cycles.
 
         # run post close action
         categories = sorted(self._postclosecallback)
@@ -547,8 +576,9 @@
         """write transaction data for possible future undo call"""
         if self._undoname is None:
             return
-        undobackupfile = self._opener.open("%s.backupfiles" % self._undoname,
-                                           'w')
+        undobackupfile = self._opener.open(
+            "%s.backupfiles" % self._undoname, 'w'
+        )
         undobackupfile.write('%d\n' % version)
         for l, f, b, c in self._backupentries:
             if not f:  # temporary file
@@ -557,8 +587,10 @@
                 u = ''
             else:
                 if l not in self._vfsmap and c:
-                    self._report("couldn't remove %s: unknown cache location"
-                                 "%s\n" % (b, l))
+                    self._report(
+                        "couldn't remove %s: unknown cache location %s\n" % (b, l)
+                    )
                     continue
                 vfs = self._vfsmap[l]
                 base, name = vfs.split(b)
@@ -569,7 +601,6 @@
             undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c))
         undobackupfile.close()
 
-
     def _abort(self):
         self._count = 0
         self._usages = 0
@@ -591,18 +622,27 @@
                     self._abortcallback[cat](self)
                 # Prevent double usage and help clear cycles.
                 self._abortcallback = None
-                _playback(self._journal, self._report, self._opener,
-                          self._vfsmap, self._entries, self._backupentries,
-                          False, checkambigfiles=self._checkambigfiles)
+                _playback(
+                    self._journal,
+                    self._report,
+                    self._opener,
+                    self._vfsmap,
+                    self._entries,
+                    self._backupentries,
+                    False,
+                    checkambigfiles=self._checkambigfiles,
+                )
                 self._report(_("rollback completed\n"))
             except BaseException as exc:
                 self._report(_("rollback failed - please run hg recover\n"))
-                self._report(_("(failure reason: %s)\n")
-                             % stringutil.forcebytestr(exc))
+                self._report(
+                    _("(failure reason: %s)\n") % stringutil.forcebytestr(exc)
+                )
         finally:
             self._journal = None
-            self._releasefn(self, False) # notify failure of transaction
-            self._releasefn = None # Help prevent cycles.
+            self._releasefn(self, False)  # notify failure of transaction
+            self._releasefn = None  # Help prevent cycles.
+
 
 def rollback(opener, vfsmap, file, report, checkambigfiles=None):
     """Rolls back the transaction contained in the given file
@@ -631,8 +671,7 @@
             f, o = l.split('\0')
             entries.append((f, int(o), None))
         except ValueError:
-            report(
-                _("couldn't read journal entry %r!\n") % pycompat.bytestr(l))
+            report(_("couldn't read journal entry %r!\n") % pycompat.bytestr(l))
 
     backupjournal = "%s.backupfiles" % file
     if opener.exists(backupjournal):
@@ -648,8 +687,19 @@
                         l, f, b, c = line.split('\0')
                         backupentries.append((l, f, b, bool(c)))
             else:
-                report(_("journal was created by a different version of "
-                         "Mercurial\n"))
+                report(
+                    _(
+                        "journal was created by a different version of "
+                        "Mercurial\n"
+                    )
+                )
 
-    _playback(file, report, opener, vfsmap, entries, backupentries,
-              checkambigfiles=checkambigfiles)
+    _playback(
+        file,
+        report,
+        opener,
+        vfsmap,
+        entries,
+        backupentries,
+        checkambigfiles=checkambigfiles,
+    )
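
Note: transaction.py shows the same rule applied to def statements. When a
signature no longer fits, every parameter moves onto its own line, again with
a trailing comma, and call sites are re-wrapped to match. A minimal sketch
with a hypothetical function, assuming the default 88-column limit:

    def playback_sketch(
        journal,
        report,
        opener,
        vfsmap,
        entries,
        backupentries,
        unlink=True,
        checkambigfiles=None,
    ):
        # Body elided; only the signature shape is the point here.
        return journal, unlink

    result = playback_sketch(
        'journal', print, open, {}, [], [], checkambigfiles=None
    )
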
--- a/mercurial/treediscovery.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/treediscovery.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
     pycompat,
 )
 
+
 def findcommonincoming(repo, remote, heads=None, force=False):
     """Return a tuple (common, fetch, heads) used to identify the common
     subset of nodes between repo and remote.
@@ -79,27 +80,27 @@
             if n[0] in seen:
                 continue
 
-            repo.ui.debug("examining %s:%s\n"
-                          % (short(n[0]), short(n[1])))
-            if n[0] == nullid: # found the end of the branch
+            repo.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1])))
+            if n[0] == nullid:  # found the end of the branch
                 pass
             elif n in seenbranch:
                 repo.ui.debug("branch already found\n")
                 continue
-            elif n[1] and knownnode(n[1]): # do we know the base?
-                repo.ui.debug("found incomplete branch %s:%s\n"
-                              % (short(n[0]), short(n[1])))
-                search.append(n[0:2]) # schedule branch range for scanning
+            elif n[1] and knownnode(n[1]):  # do we know the base?
+                repo.ui.debug(
+                    "found incomplete branch %s:%s\n"
+                    % (short(n[0]), short(n[1]))
+                )
+                search.append(n[0:2])  # schedule branch range for scanning
                 seenbranch.add(n)
             else:
                 if n[1] not in seen and n[1] not in fetch:
                     if knownnode(n[2]) and knownnode(n[3]):
-                        repo.ui.debug("found new changeset %s\n" %
-                                      short(n[1]))
-                        fetch.add(n[1]) # earliest unknown
+                        repo.ui.debug("found new changeset %s\n" % short(n[1]))
+                        fetch.add(n[1])  # earliest unknown
                     for p in n[2:4]:
                         if knownnode(p):
-                            base.add(p) # latest known
+                            base.add(p)  # latest known
 
                 for p in n[2:4]:
                     if p not in req and not knownnode(p):
@@ -110,17 +111,19 @@
         if r:
             reqcnt += 1
             progress.increment()
-            repo.ui.debug("request %d: %s\n" %
-                        (reqcnt, " ".join(map(short, r))))
+            repo.ui.debug(
+                "request %d: %s\n" % (reqcnt, " ".join(map(short, r)))
+            )
             for p in pycompat.xrange(0, len(r), 10):
                 with remote.commandexecutor() as e:
-                    branches = e.callcommand('branches', {
-                        'nodes': r[p:p + 10],
-                    }).result()
+                    branches = e.callcommand(
+                        'branches', {'nodes': r[p : p + 10],}
+                    ).result()
 
                 for b in branches:
-                    repo.ui.debug("received %s:%s\n" %
-                                  (short(b[0]), short(b[1])))
+                    repo.ui.debug(
+                        "received %s:%s\n" % (short(b[0]), short(b[1]))
+                    )
                     unknown.append(b)
 
     # do binary search on the branches we found
@@ -140,13 +143,16 @@
                 repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                 if knownnode(i):
                     if f <= 2:
-                        repo.ui.debug("found new branch changeset %s\n" %
-                                          short(p))
+                        repo.ui.debug(
+                            "found new branch changeset %s\n" % short(p)
+                        )
                         fetch.add(p)
                         base.add(i)
                     else:
-                        repo.ui.debug("narrowed branch search to %s:%s\n"
-                                      % (short(p), short(i)))
+                        repo.ui.debug(
+                            "narrowed branch search to %s:%s\n"
+                            % (short(p), short(i))
+                        )
                         newsearch.append((p, i))
                     break
                 p, f = i, f * 2
@@ -155,8 +161,7 @@
     # sanity check our fetch list
     for f in fetch:
         if knownnode(f):
-            raise error.RepoError(_("already have changeset ")
-                                  + short(f[:4]))
+            raise error.RepoError(_("already have changeset ") + short(f[:4]))
 
     base = list(base)
     if base == [nullid]:
@@ -165,8 +170,11 @@
         else:
             raise error.Abort(_("repository is unrelated"))
 
-    repo.ui.debug("found new changesets starting at " +
-                 " ".join([short(f) for f in fetch]) + "\n")
+    repo.ui.debug(
+        "found new changesets starting at "
+        + " ".join([short(f) for f in fetch])
+        + "\n"
+    )
 
     progress.complete()
     repo.ui.debug("%d total queries\n" % reqcnt)
--- a/mercurial/txnutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/txnutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,9 +9,8 @@
 
 import errno
 
-from . import (
-    encoding,
-)
+from . import encoding
+
 
 def mayhavepending(root):
     '''return whether 'root' may have pending changes, which are
@@ -19,6 +18,7 @@
     '''
     return root == encoding.environ.get('HG_PENDING')
 
+
 def trypending(root, vfs, filename, **kwargs):
     '''Open a file to be read according to the HG_PENDING environment variable
 
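
Note: txnutil.py collects the remaining whole-file normalizations: an import
of a single name loses its parentheses and collapses onto one line, and
top-level definitions are separated by exactly two blank lines. Sketched on a
stand-in module, hypothetical names throughout:

    from os import path


    def mayhavepending_sketch(root):
        # Stand-in predicate; the real check consults the environment.
        return root == path.sep


    def trypending_sketch(root):
        return mayhavepending_sketch(root)

    print(trypending_sketch(path.sep))
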
--- a/mercurial/ui.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/ui.py	Sun Oct 06 09:45:02 2019 -0400
@@ -46,8 +46,9 @@
 urlreq = util.urlreq
 
 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
-_keepalnum = ''.join(c for c in map(pycompat.bytechr, range(256))
-                     if not c.isalnum())
+_keepalnum = ''.join(
+    c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
+)
 
 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
 tweakrc = b"""
@@ -78,8 +79,7 @@
 """
 
 samplehgrcs = {
-    'user':
-b"""# example user config (see 'hg help config' for more info)
+    'user': b"""# example user config (see 'hg help config' for more info)
 [ui]
 # name and email, e.g.
 # username = Jane Doe <jdoe@example.com>
@@ -106,9 +106,7 @@
 # rebase =
 # uncommit =
 """,
-
-    'cloned':
-b"""# example repository config (see 'hg help config' for more info)
+    'cloned': b"""# example repository config (see 'hg help config' for more info)
 [paths]
 default = %s
 
@@ -123,9 +121,7 @@
 # name and email (local to this repository, optional), e.g.
 # username = Jane Doe <jdoe@example.com>
 """,
-
-    'local':
-b"""# example repository config (see 'hg help config' for more info)
+    'local': b"""# example repository config (see 'hg help config' for more info)
 [paths]
 # path aliases to other clones of this repo in URLs or filesystem paths
 # (see 'hg help config.paths' for more info)
@@ -139,9 +135,7 @@
 # name and email (local to this repository, optional), e.g.
 # username = Jane Doe <jdoe@example.com>
 """,
-
-    'global':
-b"""# example system-wide hg config (see 'hg help config' for more info)
+    'global': b"""# example system-wide hg config (see 'hg help config' for more info)
 
 [ui]
 # uncomment to disable color in command output
@@ -161,14 +155,18 @@
 """,
 }
 
+
 def _maybestrurl(maybebytes):
     return pycompat.rapply(pycompat.strurl, maybebytes)
 
+
 def _maybebytesurl(maybestr):
     return pycompat.rapply(pycompat.bytesurl, maybestr)
 
+
 class httppasswordmgrdbproxy(object):
     """Delays loading urllib2 until it's needed."""
+
     def __init__(self):
         self._mgr = None
 
@@ -179,17 +177,23 @@
 
     def add_password(self, realm, uris, user, passwd):
         return self._get_mgr().add_password(
-            _maybestrurl(realm), _maybestrurl(uris),
-            _maybestrurl(user), _maybestrurl(passwd))
+            _maybestrurl(realm),
+            _maybestrurl(uris),
+            _maybestrurl(user),
+            _maybestrurl(passwd),
+        )
 
     def find_user_password(self, realm, uri):
         mgr = self._get_mgr()
-        return _maybebytesurl(mgr.find_user_password(_maybestrurl(realm),
-                                                     _maybestrurl(uri)))
+        return _maybebytesurl(
+            mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
+        )
+
 
 def _catchterm(*args):
     raise error.SignalInterrupt
 
+
 # unique object used to detect that no default value has been provided when
 # retrieving a configuration value.
 _unset = object()
@@ -197,6 +201,7 @@
 # _reqexithandlers: callbacks run at the end of a request
 _reqexithandlers = []
 
+
 class ui(object):
     def __init__(self, src=None):
         """Create a fresh new ui object if no src given
@@ -216,9 +221,9 @@
         self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
         self._reportuntrusted = True
         self._knownconfig = configitems.coreitems
-        self._ocfg = config.config() # overlay
-        self._tcfg = config.config() # trusted
-        self._ucfg = config.config() # untrusted
+        self._ocfg = config.config()  # overlay
+        self._tcfg = config.config()  # trusted
+        self._ucfg = config.config()  # untrusted
         self._trustusers = set()
         self._trustgroups = set()
         self.callhooks = True
@@ -349,7 +354,8 @@
             yield
         finally:
             self._blockedtimes[key + '_blocked'] += (
-                (util.timer() - starttime) * 1000)
+                util.timer() - starttime
+            ) * 1000
 
     @contextlib.contextmanager
     def uninterruptible(self):
@@ -361,8 +367,9 @@
         that control-C etc can be blocked if desired.
         """
         enabled = self.configbool('experimental', 'nointerrupt')
-        if (enabled and
-            self.configbool('experimental', 'nointerrupt-interactiveonly')):
+        if enabled and self.configbool(
+            'experimental', 'nointerrupt-interactiveonly'
+        ):
             enabled = self.interactive()
         if self._uninterruptible or not enabled:
             # if nointerrupt support is turned off, the process isn't
@@ -370,11 +377,14 @@
             # block, do nothing.
             yield
             return
+
         def warn():
             self.warn(_("shutting down cleanly\n"))
             self.warn(
-                _("press ^C again to terminate immediately (dangerous)\n"))
+                _("press ^C again to terminate immediately (dangerous)\n")
+            )
             return True
+
         with procutil.uninterruptible(warn):
             try:
                 self._uninterruptible = True
@@ -400,16 +410,19 @@
             return True
 
         if self._reportuntrusted:
-            self.warn(_('not trusting file %s from untrusted '
-                        'user %s, group %s\n') % (f, user, group))
+            self.warn(
+                _('not trusting file %s from untrusted ' 'user %s, group %s\n')
+                % (f, user, group)
+            )
         return False
 
-    def readconfig(self, filename, root=None, trust=False,
-                   sections=None, remap=None):
+    def readconfig(
+        self, filename, root=None, trust=False, sections=None, remap=None
+    ):
         try:
             fp = open(filename, r'rb')
         except IOError:
-            if not sections: # ignore unless we were looking for something
+            if not sections:  # ignore unless we were looking for something
                 return
             raise
 
@@ -425,9 +438,18 @@
             self.warn(_("ignored: %s\n") % stringutil.forcebytestr(inst))
 
         if self.plain():
-            for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
-                      'logtemplate', 'message-output', 'statuscopies', 'style',
-                      'traceback', 'verbose'):
+            for k in (
+                'debug',
+                'fallbackencoding',
+                'quiet',
+                'slash',
+                'logtemplate',
+                'message-output',
+                'statuscopies',
+                'style',
+                'traceback',
+                'verbose',
+            ):
                 if k in cfg['ui']:
                     del cfg['ui'][k]
             for k, v in cfg.items('defaults'):
@@ -469,8 +491,10 @@
                         continue
                     if '%%' in p:
                         s = self.configsource('paths', n) or 'none'
-                        self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
-                                  % (n, p, s))
+                        self.warn(
+                            _("(deprecated '%%' in path %s=%s from %s)\n")
+                            % (n, p, s)
+                        )
                         p = p.replace('%%', '%')
                     p = util.expandpath(p)
                     if not util.hasscheme(p) and not os.path.isabs(p):
@@ -485,8 +509,9 @@
             self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
             if self.verbose and self.quiet:
                 self.quiet = self.verbose = False
-            self._reportuntrusted = self.debugflag or self.configbool("ui",
-                "report_untrusted")
+            self._reportuntrusted = self.debugflag or self.configbool(
+                "ui", "report_untrusted"
+            )
             self.tracebackflag = self.configbool('ui', 'traceback')
             self.logblockedtimes = self.configbool('ui', 'logblockedtimes')
 
@@ -504,9 +529,12 @@
                 self.setlogger(b'debug', logger)
 
     def backupconfig(self, section, item):
-        return (self._ocfg.backup(section, item),
-                self._tcfg.backup(section, item),
-                self._ucfg.backup(section, item),)
+        return (
+            self._ocfg.backup(section, item),
+            self._tcfg.backup(section, item),
+            self._ucfg.backup(section, item),
+        )
+
     def restoreconfig(self, data):
         self._ocfg.restore(data[0])
         self._tcfg.restore(data[1])
@@ -526,8 +554,9 @@
 
     def config(self, section, name, default=_unset, untrusted=False):
         """return the plain string version of a config"""
-        value = self._config(section, name, default=default,
-                             untrusted=untrusted)
+        value = self._config(
+            section, name, default=default, untrusted=untrusted
+        )
         if value is _unset:
             return None
         return value
@@ -544,7 +573,7 @@
             else:
                 itemdefault = item.default
         else:
-            msg = ("accessing unregistered config item: '%s.%s'")
+            msg = "accessing unregistered config item: '%s.%s'"
             msg %= (section, name)
             self.develwarn(msg, 2, 'warn-config-unknown')
 
@@ -558,11 +587,15 @@
                 self.develwarn(msg, 2, 'warn-config-default')
             else:
                 value = itemdefault
-        elif (item is not None
-              and item.default is not configitems.dynamicdefault
-              and default != itemdefault):
-            msg = ("specifying a mismatched default value for a registered "
-                   "config item: '%s.%s' '%s'")
+        elif (
+            item is not None
+            and item.default is not configitems.dynamicdefault
+            and default != itemdefault
+        ):
+            msg = (
+                "specifying a mismatched default value for a registered "
+                "config item: '%s.%s' '%s'"
+            )
             msg %= (section, name, pycompat.bytestr(default))
             self.develwarn(msg, 2, 'warn-config-default')
 
@@ -576,8 +609,10 @@
             for s, n in alternates:
                 uvalue = self._ucfg.get(s, n)
                 if uvalue is not None and uvalue != value:
-                    self.debug("ignoring untrusted configuration option "
-                               "%s.%s = %s\n" % (s, n, uvalue))
+                    self.debug(
+                        "ignoring untrusted configuration option "
+                        "%s.%s = %s\n" % (s, n, uvalue)
+                    )
         return value
 
     def configsuboptions(self, section, name, default=_unset, untrusted=False):
@@ -596,14 +631,16 @@
         prefix = '%s:' % name
         for k, v in data.items(section):
             if k.startswith(prefix):
-                sub[k[len(prefix):]] = v
+                sub[k[len(prefix) :]] = v
 
         if self.debugflag and not untrusted and self._reportuntrusted:
             for k, v in sub.items():
                 uvalue = self._ucfg.get(section, '%s:%s' % (name, k))
                 if uvalue is not None and uvalue != v:
-                    self.debug('ignoring untrusted configuration option '
-                               '%s:%s.%s = %s\n' % (section, name, k, uvalue))
+                    self.debug(
+                        'ignoring untrusted configuration option '
+                        '%s:%s.%s = %s\n' % (section, name, k, uvalue)
+                    )
 
         return main, sub
 
@@ -651,12 +688,14 @@
             return v
         b = stringutil.parsebool(v)
         if b is None:
-            raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
-                                    % (section, name, v))
+            raise error.ConfigError(
+                _("%s.%s is not a boolean ('%s')") % (section, name, v)
+            )
         return b
 
-    def configwith(self, convert, section, name, default=_unset,
-                   desc=None, untrusted=False):
+    def configwith(
+        self, convert, section, name, default=_unset, desc=None, untrusted=False
+    ):
         """parse a configuration element with a conversion function
 
         >>> u = ui(); s = b'foo'
@@ -681,14 +720,15 @@
 
         v = self.config(section, name, default, untrusted)
         if v is None:
-            return v # do not attempt to convert None
+            return v  # do not attempt to convert None
         try:
             return convert(v)
         except (ValueError, error.ParseError):
             if desc is None:
                 desc = pycompat.sysbytes(convert.__name__)
-            raise error.ConfigError(_("%s.%s is not a valid %s ('%s')")
-                                    % (section, name, desc, v))
+            raise error.ConfigError(
+                _("%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
+            )
 
     def configint(self, section, name, default=_unset, untrusted=False):
         """parse a configuration element as an integer
@@ -709,8 +749,9 @@
         ConfigError: foo.invalid is not a valid integer ('somevalue')
         """
 
-        return self.configwith(int, section, name, default, 'integer',
-                               untrusted)
+        return self.configwith(
+            int, section, name, default, 'integer', untrusted
+        )
 
     def configbytes(self, section, name, default=_unset, untrusted=False):
         """parse a configuration element as a quantity in bytes
@@ -744,8 +785,10 @@
         try:
             return util.sizetoint(value)
         except error.ParseError:
-            raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
-                                    % (section, name, value))
+            raise error.ConfigError(
+                _("%s.%s is not a byte quantity ('%s')")
+                % (section, name, value)
+            )
 
     def configlist(self, section, name, default=_unset, untrusted=False):
         """parse a configuration element as a list of comma/space separated
@@ -760,8 +803,9 @@
         ['this', 'is', 'a small', 'test']
         """
         # default is not always a list
-        v = self.configwith(config.parselist, section, name, default,
-                               'list', untrusted)
+        v = self.configwith(
+            config.parselist, section, name, default, 'list', untrusted
+        )
         if isinstance(v, bytes):
             return config.parselist(v)
         elif v is None:
@@ -777,8 +821,9 @@
         (0, 0)
         """
         if self.config(section, name, default, untrusted):
-            return self.configwith(dateutil.parsedate, section, name, default,
-                                   'date', untrusted)
+            return self.configwith(
+                dateutil.parsedate, section, name, default, 'date', untrusted
+            )
         if default is _unset:
             return None
         return default
@@ -808,8 +853,10 @@
         if self.debugflag and not untrusted and self._reportuntrusted:
             for k, v in self._ucfg.items(section):
                 if self._tcfg.get(section, k) != v:
-                    self.debug("ignoring untrusted configuration option "
-                               "%s.%s = %s\n" % (section, k, v))
+                    self.debug(
+                        "ignoring untrusted configuration option "
+                        "%s.%s = %s\n" % (section, k, v)
+                    )
         return items
 
     def walkconfig(self, untrusted=False):
@@ -834,11 +881,14 @@
         - False if feature is disabled by default and not included in HGPLAIN
         - True otherwise
         '''
-        if ('HGPLAIN' not in encoding.environ and
-                'HGPLAINEXCEPT' not in encoding.environ):
+        if (
+            'HGPLAIN' not in encoding.environ
+            and 'HGPLAINEXCEPT' not in encoding.environ
+        ):
             return False
-        exceptions = encoding.environ.get('HGPLAINEXCEPT',
-                '').strip().split(',')
+        exceptions = (
+            encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
+        )
         # TODO: add support for HGPLAIN=+feature,-feature syntax
         if '+strictflags' not in encoding.environ.get('HGPLAIN', '').split(','):
             exceptions.append('strictflags')
@@ -869,18 +919,22 @@
             user = self.prompt(_("enter a commit username:"), default=None)
         if user is None and not self.interactive():
             try:
-                user = '%s@%s' % (procutil.getuser(),
-                                  encoding.strtolocal(socket.getfqdn()))
+                user = '%s@%s' % (
+                    procutil.getuser(),
+                    encoding.strtolocal(socket.getfqdn()),
+                )
                 self.warn(_("no username found, using '%s' instead\n") % user)
             except KeyError:
                 pass
         if not user:
-            raise error.Abort(_('no username supplied'),
-                             hint=_("use 'hg config --edit' "
-                                    'to set your username'))
+            raise error.Abort(
+                _('no username supplied'),
+                hint=_("use 'hg config --edit' " 'to set your username'),
+            )
         if "\n" in user:
-            raise error.Abort(_("username %r contains a newline\n")
-                              % pycompat.bytestr(user))
+            raise error.Abort(
+                _("username %r contains a newline\n") % pycompat.bytestr(user)
+            )
         return user
 
     def shortuser(self, user):
@@ -1043,7 +1097,8 @@
             raise error.StdioError(err)
         finally:
             self._blockedtimes['stdio_blocked'] += (
-                (util.timer() - starttime) * 1000)
+                util.timer() - starttime
+            ) * 1000
 
     def write_err(self, *args, **opts):
         self._write(self._ferr, *args, **opts)
@@ -1087,14 +1142,18 @@
             if dest is self._ferr and not getattr(self._ferr, 'closed', False):
                 dest.flush()
         except IOError as err:
-            if (dest is self._ferr
-                and err.errno in (errno.EPIPE, errno.EIO, errno.EBADF)):
+            if dest is self._ferr and err.errno in (
+                errno.EPIPE,
+                errno.EIO,
+                errno.EBADF,
+            ):
                 # no way to report the error, so ignore it
                 return
             raise error.StdioError(err)
         finally:
             self._blockedtimes['stdio_blocked'] += (
-                (util.timer() - starttime) * 1000)
+                util.timer() - starttime
+            ) * 1000
 
     def _writemsg(self, dest, *args, **opts):
         _writemsgwith(self._write, dest, *args, **opts)
@@ -1119,7 +1178,8 @@
                         raise error.StdioError(err)
         finally:
             self._blockedtimes['stdio_blocked'] += (
-                (util.timer() - starttime) * 1000)
+                util.timer() - starttime
+            ) * 1000
 
     def _isatty(self, fh):
         if self.configbool('ui', 'nontty'):
@@ -1175,8 +1235,7 @@
           command: The full, non-aliased name of the command. That is, "log"
                    not "history, "summary" not "summ", etc.
         """
-        if (self._disablepager
-            or self.pageractive):
+        if self._disablepager or self.pageractive:
             # whether to use the pager is already determined
             return
 
@@ -1193,7 +1252,8 @@
             or self.plain()
             or self._buffers
             # TODO: expose debugger-enabled on the UI object
-            or '--debugger' in pycompat.sysargv):
+            or '--debugger' in pycompat.sysargv
+        ):
             # We only want to paginate if the ui appears to be
             # interactive, the user didn't say HGPLAIN or
             # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
@@ -1208,8 +1268,9 @@
             if name not in encoding.environ:
                 pagerenv[name] = value
 
-        self.debug('starting pager for command %s\n' %
-                   stringutil.pprint(command))
+        self.debug(
+            'starting pager for command %s\n' % stringutil.pprint(command)
+        )
         self.flush()
 
         wasformatted = self.formatted()
@@ -1257,22 +1318,29 @@
             # determine which one to use.
             fullcmd = procutil.findexe(command)
             if not fullcmd:
-                self.warn(_("missing pager command '%s', skipping pager\n")
-                          % command)
+                self.warn(
+                    _("missing pager command '%s', skipping pager\n") % command
+                )
                 return False
 
             command = fullcmd
 
         try:
             pager = subprocess.Popen(
-                procutil.tonativestr(command), shell=shell, bufsize=-1,
-                close_fds=procutil.closefds, stdin=subprocess.PIPE,
-                stdout=procutil.stdout, stderr=procutil.stderr,
-                env=procutil.tonativeenv(procutil.shellenviron(env)))
+                procutil.tonativestr(command),
+                shell=shell,
+                bufsize=-1,
+                close_fds=procutil.closefds,
+                stdin=subprocess.PIPE,
+                stdout=procutil.stdout,
+                stderr=procutil.stderr,
+                env=procutil.tonativeenv(procutil.shellenviron(env)),
+            )
         except OSError as e:
             if e.errno == errno.ENOENT and not shell:
-                self.warn(_("missing pager command '%s', skipping pager\n")
-                          % command)
+                self.warn(
+                    _("missing pager command '%s', skipping pager\n") % command
+                )
                 return False
             raise
 
@@ -1332,14 +1400,8 @@
         alldefaults = frozenset(["text", "curses"])
 
         featureinterfaces = {
-            "chunkselector": [
-                "text",
-                "curses",
-            ],
-            "histedit": [
-                "text",
-                "curses",
-            ],
+            "chunkselector": ["text", "curses",],
+            "histedit": ["text", "curses",],
         }
 
         # Feature-specific interface
@@ -1352,8 +1414,8 @@
             # Programming error, not user error. We need a use case to
             # define the right thing to do here.
             raise ValueError(
-                "Feature %s does not handle all default interfaces" %
-                feature)
+                "Feature %s does not handle all default interfaces" % feature
+            )
 
         if self.plain() or encoding.environ.get('TERM') == 'dumb':
             return "text"
@@ -1371,14 +1433,17 @@
 
         if i is not None and defaultinterface != i:
             if f is not None:
-                self.warn(_("invalid value for ui.interface: %s\n") %
-                          (i,))
+                self.warn(_("invalid value for ui.interface: %s\n") % (i,))
             else:
-                self.warn(_("invalid value for ui.interface: %s (using %s)\n") %
-                         (i, choseninterface))
+                self.warn(
+                    _("invalid value for ui.interface: %s (using %s)\n")
+                    % (i, choseninterface)
+                )
         if f is not None and choseninterface != f:
-            self.warn(_("invalid value for ui.interface.%s: %s (using %s)\n") %
-                      (feature, f, choseninterface))
+            self.warn(
+                _("invalid value for ui.interface.%s: %s (using %s)\n")
+                % (feature, f, choseninterface)
+            )
 
         return choseninterface
 
@@ -1447,14 +1512,18 @@
         # because they have to be text streams with *no buffering*. Instead,
         # we use rawinput() only if call_readline() will be invoked by
         # PyOS_Readline(), so no I/O will be made at Python layer.
-        usereadline = (self._isatty(self._fin) and self._isatty(self._fout)
-                       and procutil.isstdin(self._fin)
-                       and procutil.isstdout(self._fout))
+        usereadline = (
+            self._isatty(self._fin)
+            and self._isatty(self._fout)
+            and procutil.isstdin(self._fin)
+            and procutil.isstdout(self._fout)
+        )
         if usereadline:
             try:
                 # magically add command line editing support, where
                 # available
                 import readline
+
                 # force demandimport to really load the module
                 readline.read_history_file
                 # windows sometimes raises something other than ImportError
@@ -1464,8 +1533,9 @@
         if self._colormode == 'win32' or not usereadline:
             if not promptopts:
                 promptopts = {}
-            self._writemsgnobuf(self._fmsgout, prompt, type='prompt',
-                                **promptopts)
+            self._writemsgnobuf(
+                self._fmsgout, prompt, type='prompt', **promptopts
+            )
             self.flush()
             prompt = ' '
         else:
@@ -1500,8 +1570,9 @@
         default = opts[r'default']
         if not self.interactive():
             self._writemsg(self._fmsgout, msg, ' ', type='prompt', **opts)
-            self._writemsg(self._fmsgout, default or '', "\n",
-                           type='promptecho')
+            self._writemsg(
+                self._fmsgout, default or '', "\n", type='promptecho'
+            )
             return default
         try:
             r = self._readline(prompt=msg, promptopts=opts)
@@ -1536,9 +1607,11 @@
         m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
         msg = m.group(1)
         choices = [p.strip(' ') for p in m.group(2).split('$$')]
+
         def choicetuple(s):
             ampidx = s.index('&')
-            return s[ampidx + 1:ampidx + 2].lower(), s.replace('&', '', 1)
+            return s[ampidx + 1 : ampidx + 2].lower(), s.replace('&', '', 1)
+
         return (msg, [choicetuple(s) for s in choices])
 
     def promptchoice(self, prompt, default=0):
@@ -1565,8 +1638,12 @@
         if not self.interactive():
             return default
         try:
-            self._writemsg(self._fmsgerr, prompt or _('password: '),
-                           type='prompt', password=True)
+            self._writemsg(
+                self._fmsgerr,
+                prompt or _('password: '),
+                type='prompt',
+                password=True,
+            )
             # disable getpass() only if explicitly specified. it's still valid
             # to interact with tty even if fin is not a tty.
             with self.timeblockedsection('stdio'):
@@ -1619,19 +1696,31 @@
             self._writemsg(self._fmsgout, type='debug', *msg, **opts)
             self.log(b'debug', b'%s', b''.join(msg))
 
-    def edit(self, text, user, extra=None, editform=None, pending=None,
-             repopath=None, action=None):
+    def edit(
+        self,
+        text,
+        user,
+        extra=None,
+        editform=None,
+        pending=None,
+        repopath=None,
+        action=None,
+    ):
         if action is None:
-            self.develwarn('action is None but will soon be a required '
-                           'parameter to ui.edit()')
+            self.develwarn(
+                'action is None but will soon be a required '
+                'parameter to ui.edit()'
+            )
         extra_defaults = {
             'prefix': 'editor',
             'suffix': '.txt',
         }
         if extra is not None:
             if extra.get('suffix') is not None:
-                self.develwarn('extra.suffix is not None but will soon be '
-                               'ignored by ui.edit()')
+                self.develwarn(
+                    'extra.suffix is not None but will soon be '
+                    'ignored by ui.edit()'
+                )
             extra_defaults.update(extra)
         extra = extra_defaults
 
@@ -1645,9 +1734,9 @@
         rdir = None
         if self.configbool('experimental', 'editortmpinhg'):
             rdir = repopath
-        (fd, name) = pycompat.mkstemp(prefix='hg-' + extra['prefix'] + '-',
-                                      suffix=suffix,
-                                      dir=rdir)
+        (fd, name) = pycompat.mkstemp(
+            prefix='hg-' + extra['prefix'] + '-', suffix=suffix, dir=rdir
+        )
         try:
             f = os.fdopen(fd, r'wb')
             f.write(util.tonativeeol(text))
@@ -1667,10 +1756,13 @@
 
             editor = self.geteditor()
 
-            self.system("%s \"%s\"" % (editor, name),
-                        environ=environ,
-                        onerr=error.Abort, errprefix=_("edit failed"),
-                        blockedtag='editor')
+            self.system(
+                "%s \"%s\"" % (editor, name),
+                environ=environ,
+                onerr=error.Abort,
+                errprefix=_("edit failed"),
+                blockedtag='editor',
+            )
 
             f = open(name, r'rb')
             t = util.fromnativeeol(f.read())
@@ -1680,8 +1772,15 @@
 
         return t
 
-    def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None,
-               blockedtag=None):
+    def system(
+        self,
+        cmd,
+        environ=None,
+        cwd=None,
+        onerr=None,
+        errprefix=None,
+        blockedtag=None,
+    ):
         '''execute shell command with appropriate output stream. command
         output will be redirected if fout is not stdout.
 
@@ -1699,8 +1798,10 @@
         with self.timeblockedsection(blockedtag):
             rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
         if rc and onerr:
-            errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
-                                procutil.explainexit(rc))
+            errmsg = '%s %s' % (
+                os.path.basename(cmd.split(None, 1)[0]),
+                procutil.explainexit(rc),
+            )
             if errprefix:
                 errmsg = '%s: %s' % (errprefix, errmsg)
             raise onerr(errmsg)
@@ -1726,10 +1827,12 @@
                 exconly = traceback.format_exception_only(cause[0], cause[1])
 
                 # exclude frame where 'exc' was chained and rethrown from exctb
-                self.write_err('Traceback (most recent call last):\n',
-                               ''.join(exctb[:-1]),
-                               ''.join(causetb),
-                               ''.join(exconly))
+                self.write_err(
+                    'Traceback (most recent call last):\n',
+                    ''.join(exctb[:-1]),
+                    ''.join(causetb),
+                    ''.join(exconly),
+                )
             else:
                 output = traceback.format_exception(exc[0], exc[1], exc[2])
                 self.write_err(encoding.strtolocal(r''.join(output)))
@@ -1744,21 +1847,25 @@
             editor = 'E'
         else:
             editor = 'vi'
-        return (encoding.environ.get("HGEDITOR") or
-                self.config("ui", "editor", editor))
+        return encoding.environ.get("HGEDITOR") or self.config(
+            "ui", "editor", editor
+        )
 
     @util.propertycache
     def _progbar(self):
         """setup the progbar singleton to the ui object"""
-        if (self.quiet or self.debugflag
-                or self.configbool('progress', 'disable')
-                or not progress.shouldprint(self)):
+        if (
+            self.quiet
+            or self.debugflag
+            or self.configbool('progress', 'disable')
+            or not progress.shouldprint(self)
+        ):
             return None
         return getprogbar(self)
 
     def _progclear(self):
         """clear progress bar output if any. use it before any output"""
-        if not haveprogbar(): # nothing loaded yet
+        if not haveprogbar():  # nothing loaded yet
             return
         if self._progbar is not None and self._progbar.printed:
             self._progbar.clear()
@@ -1778,8 +1885,7 @@
         All topics should be marked closed by setting pos to None at
         termination.
         '''
-        self.deprecwarn("use ui.makeprogress() instead of ui.progress()",
-                        "5.1")
+        self.deprecwarn("use ui.makeprogress() instead of ui.progress()", "5.1")
         progress = self.makeprogress(topic, unit, total)
         if pos is not None:
             progress.update(pos, item=item)
@@ -1795,13 +1901,23 @@
             # time) from progbar. we might want to support update delay to
             # reduce the cost of transferring progress messages.
             def updatebar(topic, pos, item, unit, total):
-                self._fmsgerr.write(None, type=b'progress', topic=topic,
-                                    pos=pos, item=item, unit=unit, total=total)
+                self._fmsgerr.write(
+                    None,
+                    type=b'progress',
+                    topic=topic,
+                    pos=pos,
+                    item=item,
+                    unit=unit,
+                    total=total,
+                )
+
         elif self._progbar is not None:
             updatebar = self._progbar.progress
         else:
+
             def updatebar(topic, pos, item, unit, total):
                 pass
+
         return scmutil.progress(self, updatebar, topic, unit, total)
 
     def getlogger(self, name):
@@ -1829,8 +1945,9 @@
         '''
         if not self._loggers:
             return
-        activeloggers = [l for l in self._loggers.itervalues()
-                         if l.tracked(event)]
+        activeloggers = [
+            l for l in self._loggers.itervalues() if l.tracked(event)
+        ]
         if not activeloggers:
             return
         msg = msgfmt % msgargs
@@ -1868,20 +1985,22 @@
             if config is None or not self.configbool('devel', config):
                 return
         msg = 'devel-warn: ' + msg
-        stacklevel += 1 # get in develwarn
+        stacklevel += 1  # get in develwarn
         if self.tracebackflag:
             util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
-            self.log('develwarn', '%s at:\n%s' %
-                     (msg, ''.join(util.getstackframes(stacklevel))))
+            self.log(
+                'develwarn',
+                '%s at:\n%s' % (msg, ''.join(util.getstackframes(stacklevel))),
+            )
         else:
             curframe = inspect.currentframe()
             calframe = inspect.getouterframes(curframe, 2)
             fname, lineno, fmsg = calframe[stacklevel][1:4]
             fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
-            self.write_err('%s at: %s:%d (%s)\n'
-                           % (msg, fname, lineno, fmsg))
-            self.log('develwarn', '%s at: %s:%d (%s)\n',
-                     msg, fname, lineno, fmsg)
+            self.write_err('%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
+            self.log(
+                'develwarn', '%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
+            )
             curframe = calframe = None  # avoid cycles
 
     def deprecwarn(self, msg, version, stacklevel=2):
@@ -1890,11 +2009,15 @@
         - msg: message explaining what is deprecated and how to upgrade,
         - version: last version where the API will be supported,
         """
-        if not (self.configbool('devel', 'all-warnings')
-                or self.configbool('devel', 'deprec-warn')):
+        if not (
+            self.configbool('devel', 'all-warnings')
+            or self.configbool('devel', 'deprec-warn')
+        ):
             return
-        msg += ("\n(compatibility will be dropped after Mercurial-%s,"
-                " update your code.)") % version
+        msg += (
+            "\n(compatibility will be dropped after Mercurial-%s,"
+            " update your code.)"
+        ) % version
         self.develwarn(msg, stacklevel=stacklevel, config='deprec-warn')
 
     def exportableenviron(self):
@@ -1922,12 +2045,14 @@
             if ('ui', 'quiet') in overrides:
                 self.fixconfig(section='ui')
 
+
 class paths(dict):
     """Represents a collection of paths and their configs.
 
     Data is initially derived from ui instances and the config files they have
     loaded.
     """
+
     def __init__(self, ui):
         dict.__init__(self)
 
@@ -1973,11 +2098,12 @@
                 # We don't pass sub-options in, so no need to pass ui instance.
                 return path(None, None, rawloc=name)
             except ValueError:
-                raise error.RepoError(_('repository %s does not exist') %
-                                        name)
+                raise error.RepoError(_('repository %s does not exist') % name)
+
 
 _pathsuboptions = {}
 
+
 def pathsuboption(option, attr):
     """Decorator used to declare a path sub-option.
 
@@ -1992,11 +2118,14 @@
     This decorator can be used to perform additional verification of
     sub-options and to change the type of sub-options.
     """
+
     def register(func):
         _pathsuboptions[option] = (attr, func)
         return func
+
     return register
 
+
 @pathsuboption('pushurl', 'pushloc')
 def pushurlpathoption(ui, path, value):
     u = util.url(value)
@@ -2008,16 +2137,20 @@
     # Don't support the #foo syntax in the push URL to declare branch to
     # push.
     if u.fragment:
-        ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
-                  'ignoring)\n') % path.name)
+        ui.warn(
+            _('("#fragment" in paths.%s:pushurl not supported; ' 'ignoring)\n')
+            % path.name
+        )
         u.fragment = None
 
     return bytes(u)
 
+
 @pathsuboption('pushrev', 'pushrev')
 def pushrevpathoption(ui, path, value):
     return value
 
+
 class path(object):
     """Represents an individual path and its configuration."""
 
@@ -2053,8 +2186,9 @@
         # When given a raw location but not a symbolic name, validate the
         # location is valid.
         if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
-            raise ValueError('location is not a URL or path to a local '
-                             'repo: %s' % rawloc)
+            raise ValueError(
+                'location is not a URL or path to a local ' 'repo: %s' % rawloc
+            )
 
         suboptions = suboptions or {}
 
@@ -2093,10 +2227,12 @@
                 d[subopt] = value
         return d
 
+
 # we instantiate one globally shared progress bar to avoid
 # competing progress bars when multiple UI objects get created
 _progresssingleton = None
 
+
 def getprogbar(ui):
     global _progresssingleton
     if _progresssingleton is None:
@@ -2105,9 +2241,11 @@
         _progresssingleton = progress.progbar(ui)
     return _progresssingleton
 
+
 def haveprogbar():
     return _progresssingleton is not None
 
+
 def _selectmsgdests(ui):
     name = ui.config(b'ui', b'message-output')
     if name == b'channel':
@@ -2123,6 +2261,7 @@
         return ui.ferr, ui.ferr
     raise error.Abort(b'invalid ui.message-output destination: %s' % name)
 
+
 def _writemsgwith(write, dest, *args, **opts):
     """Write ui message with the given ui._write*() function
 
--- a/mercurial/unionrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/unionrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,6 +30,7 @@
     vfs as vfsmod,
 )
 
+
 class unionrevlog(revlog.revlog):
     def __init__(self, opener, indexfile, revlog2, linkmapper):
         # How it works:
@@ -44,20 +45,20 @@
 
         n = len(self)
         self.repotiprev = n - 1
-        self.bundlerevs = set() # used by 'bundle()' revset expression
+        self.bundlerevs = set()  # used by 'bundle()' revset expression
         for rev2 in self.revlog2:
             rev = self.revlog2.index[rev2]
             # rev numbers - in revlog2, very different from self.rev
             _start, _csize, rsize, base, linkrev, p1rev, p2rev, node = rev
             flags = _start & 0xFFFF
 
-            if linkmapper is None: # link is to same revlog
-                assert linkrev == rev2 # we never link back
+            if linkmapper is None:  # link is to same revlog
+                assert linkrev == rev2  # we never link back
                 link = n
-            else: # rev must be mapped from repo2 cl to unified cl by linkmapper
+            else:  # rev must be mapped from repo2 cl to unified cl by linkmapper
                 link = linkmapper(linkrev)
 
-            if linkmapper is not None: # link is to same revlog
+            if linkmapper is not None:  # the base must be mapped too
                 base = linkmapper(base)
 
             if node in self.nodemap:
@@ -70,8 +71,16 @@
 
             # TODO: it's probably wrong to set compressed length to None, but
             # I have no idea if csize is valid in the base revlog context.
-            e = (flags, None, rsize, base,
-                 link, self.rev(p1node), self.rev(p2node), node)
+            e = (
+                flags,
+                None,
+                rsize,
+                base,
+                link,
+                self.rev(p1node),
+                self.rev(p2node),
+                node,
+            )
             self.index.append(e)
             self.nodemap[node] = n
             self.bundlerevs.add(n)
@@ -87,7 +96,8 @@
         if rev1 > self.repotiprev and rev2 > self.repotiprev:
             return self.revlog2.revdiff(
                 self.revlog2.rev(self.node(rev1)),
-                self.revlog2.rev(self.node(rev2)))
+                self.revlog2.rev(self.node(rev2)),
+            )
         elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
             return super(unionrevlog, self).revdiff(rev1, rev2)
 
@@ -111,35 +121,50 @@
 
     def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
         raise NotImplementedError
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
-                 maybemissingparents=False):
+
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        maybemissingparents=False,
+    ):
         raise NotImplementedError
+
     def strip(self, minlink, transaction):
         raise NotImplementedError
+
     def checksize(self):
         raise NotImplementedError
 
+
 class unionchangelog(unionrevlog, changelog.changelog):
     def __init__(self, opener, opener2):
         changelog.changelog.__init__(self, opener)
         linkmapper = None
         changelog2 = changelog.changelog(opener2)
-        unionrevlog.__init__(self, opener, self.indexfile, changelog2,
-                             linkmapper)
+        unionrevlog.__init__(
+            self, opener, self.indexfile, changelog2, linkmapper
+        )
+
 
 class unionmanifest(unionrevlog, manifest.manifestrevlog):
     def __init__(self, opener, opener2, linkmapper):
         manifest.manifestrevlog.__init__(self, opener)
         manifest2 = manifest.manifestrevlog(opener2)
-        unionrevlog.__init__(self, opener, self.indexfile, manifest2,
-                             linkmapper)
+        unionrevlog.__init__(
+            self, opener, self.indexfile, manifest2, linkmapper
+        )
+
 
 class unionfilelog(filelog.filelog):
     def __init__(self, opener, path, opener2, linkmapper, repo):
         filelog.filelog.__init__(self, opener, path)
         filelog2 = filelog.filelog(opener2, path)
-        self._revlog = unionrevlog(opener, self.indexfile,
-                                   filelog2._revlog, linkmapper)
+        self._revlog = unionrevlog(
+            opener, self.indexfile, filelog2._revlog, linkmapper
+        )
         self._repo = repo
         self.repotiprev = self._revlog.repotiprev
         self.revlog2 = self._revlog.revlog2
@@ -151,16 +176,19 @@
         node = self.node(rev)
         return self.revlog2.iscensored(self.revlog2.rev(node))
 
+
 class unionpeer(localrepo.localpeer):
     def canpush(self):
         return False
 
+
 class unionrepository(object):
     """Represents the union of data in 2 repositories.
 
     Instances are not usable if constructed directly. Use ``instance()``
     or ``makeunionrepository()`` to create a usable instance.
     """
+
     def __init__(self, repo2, url):
         self.repo2 = repo2
         self._url = url
@@ -173,10 +201,12 @@
 
     @localrepo.unfilteredpropertycache
     def manifestlog(self):
-        rootstore = unionmanifest(self.svfs, self.repo2.svfs,
-                                  self.unfiltered()._clrev)
-        return manifest.manifestlog(self.svfs, self, rootstore,
-                                    self.narrowmatch())
+        rootstore = unionmanifest(
+            self.svfs, self.repo2.svfs, self.unfiltered()._clrev
+        )
+        return manifest.manifestlog(
+            self.svfs, self, rootstore, self.narrowmatch()
+        )
 
     def _clrev(self, rev2):
         """map from repo2 changelog rev to temporary rev in self.changelog"""
@@ -187,8 +217,9 @@
         return self._url
 
     def file(self, f):
-        return unionfilelog(self.svfs, f, self.repo2.svfs,
-                            self.unfiltered()._clrev, self)
+        return unionfilelog(
+            self.svfs, f, self.repo2.svfs, self.unfiltered()._clrev, self
+        )
 
     def close(self):
         self.repo2.close()
@@ -200,7 +231,8 @@
         return unionpeer(self)
 
     def getcwd(self):
-        return encoding.getcwd() # always outside the repo
+        return encoding.getcwd()  # always outside the repo
+
 
 def instance(ui, path, create, intents=None, createopts=None):
     if create:
@@ -220,7 +252,7 @@
         else:
             cwd = pathutil.normasprefix(cwd)
             if parentpath.startswith(cwd):
-                parentpath = parentpath[len(cwd):]
+                parentpath = parentpath[len(cwd) :]
     if path.startswith('union:'):
         s = path.split(":", 1)[1].split("+", 1)
         if len(s) == 1:
@@ -232,13 +264,16 @@
 
     return makeunionrepository(ui, repopath, repopath2)
 
+
 def makeunionrepository(ui, repopath1, repopath2):
     """Make a union repository object from 2 local repo paths."""
     repo1 = localrepo.instance(ui, repopath1, create=False)
     repo2 = localrepo.instance(ui, repopath2, create=False)
 
-    url = 'union:%s+%s' % (util.expandpath(repopath1),
-                           util.expandpath(repopath2))
+    url = 'union:%s+%s' % (
+        util.expandpath(repopath1),
+        util.expandpath(repopath2),
+    )
 
     class derivedunionrepository(unionrepository, repo1.__class__):
         pass
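
For reference, the ``union:`` scheme handled in ``instance()`` above is just
two local repository paths joined by ``+`` after the scheme; a sketch of the
split it performs, using made-up paths:

    # decompose a union: path into its two repository paths
    path = 'union:/srv/repo1+/srv/repo2'
    repopath, repopath2 = path.split(':', 1)[1].split('+', 1)
    print(repopath, repopath2)  # /srv/repo1 /srv/repo2

(The ``len(s) == 1`` branch, whose body falls outside the excerpt, handles the
single-path form.)
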
--- a/mercurial/upgrade.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/upgrade.py	Sun Oct 06 09:45:02 2019 -0400
@@ -24,9 +24,7 @@
     vfs as vfsmod,
 )
 
-from .utils import (
-    compression,
-)
+from .utils import compression
 
 # list of requirements that request a clone of all revlog if added/removed
 RECLONES_REQUIREMENTS = {
@@ -34,6 +32,7 @@
     localrepo.SPARSEREVLOG_REQUIREMENT,
 }
 
+
 def requiredsourcerequirements(repo):
     """Obtain requirements required to be present to upgrade a repo.
 
@@ -47,6 +46,7 @@
         'store',
     }
 
+
 def blocksourcerequirements(repo):
     """Obtain requirements that will prevent an upgrade from occurring.
 
@@ -64,6 +64,7 @@
         'shared',
     }
 
+
 def supportremovedrequirements(repo):
     """Obtain requirements that can be removed during an upgrade.
 
@@ -82,6 +83,7 @@
                 supported.add(b'revlog-compression-zstd')
     return supported
 
+
 def supporteddestrequirements(repo):
     """Obtain requirements that upgrade supports in the destination.
 
@@ -106,6 +108,7 @@
                 supported.add(b'revlog-compression-zstd')
     return supported
 
+
 def allowednewrequirements(repo):
     """Obtain requirements that can be added to a repository during upgrade.
 
@@ -130,12 +133,15 @@
                 supported.add(b'revlog-compression-zstd')
     return supported
 
+
 def preservedrequirements(repo):
     return set()
 
+
 deficiency = 'deficiency'
 optimisation = 'optimization'
 
+
 class improvement(object):
     """Represents an improvement that can be made as part of an upgrade.
 
@@ -160,6 +166,7 @@
        Message intended for humans explaining what an upgrade addressing this
        issue will do. Should be worded in the future tense.
     """
+
     def __init__(self, name, type, description, upgrademessage):
         self.name = name
         self.type = type
@@ -178,14 +185,18 @@
     def __hash__(self):
         return hash(self.name)
 
+
 allformatvariant = []
 
+
 def registerformatvariant(cls):
     allformatvariant.append(cls)
     return cls
 
+
 class formatvariant(improvement):
     """an improvement subclass dedicated to repository format"""
+
     type = deficiency
     ### The following attributes should be defined for each class:
 
@@ -218,6 +229,7 @@
         """current value of the variant in the configuration"""
         raise NotImplementedError()
 
+
 class requirementformatvariant(formatvariant):
     """formatvariant based on a 'requirement' name.
 
@@ -231,7 +243,8 @@
     @staticmethod
     def _newreporequirements(ui):
         return localrepo.newreporequirements(
-            ui, localrepo.defaultcreateopts(ui))
+            ui, localrepo.defaultcreateopts(ui)
+        )
 
     @classmethod
     def fromrepo(cls, repo):
@@ -243,6 +256,7 @@
         assert cls._requirement is not None
         return cls._requirement in cls._newreporequirements(repo.ui)
 
+
 @registerformatvariant
 class fncache(requirementformatvariant):
     name = 'fncache'
@@ -251,12 +265,17 @@
 
     default = True
 
-    description = _('long and reserved filenames may not work correctly; '
-                    'repository performance is sub-optimal')
+    description = _(
+        'long and reserved filenames may not work correctly; '
+        'repository performance is sub-optimal'
+    )
 
-    upgrademessage = _('repository will be more resilient to storing '
-                       'certain paths and performance of certain '
-                       'operations should be improved')
+    upgrademessage = _(
+        'repository will be more resilient to storing '
+        'certain paths and performance of certain '
+        'operations should be improved'
+    )
+
 
 @registerformatvariant
 class dotencode(requirementformatvariant):
@@ -266,11 +285,16 @@
 
     default = True
 
-    description = _('storage of filenames beginning with a period or '
-                    'space may not work correctly')
+    description = _(
+        'storage of filenames beginning with a period or '
+        'space may not work correctly'
+    )
 
-    upgrademessage = _('repository will be better able to store files '
-                       'beginning with a space or period')
+    upgrademessage = _(
+        'repository will be better able to store files '
+        'beginning with a space or period'
+    )
+
 
 @registerformatvariant
 class generaldelta(requirementformatvariant):
@@ -280,19 +304,24 @@
 
     default = True
 
-    description = _('deltas within internal storage are unable to '
-                    'choose optimal revisions; repository is larger and '
-                    'slower than it could be; interaction with other '
-                    'repositories may require extra network and CPU '
-                    'resources, making "hg push" and "hg pull" slower')
+    description = _(
+        'deltas within internal storage are unable to '
+        'choose optimal revisions; repository is larger and '
+        'slower than it could be; interaction with other '
+        'repositories may require extra network and CPU '
+        'resources, making "hg push" and "hg pull" slower'
+    )
 
-    upgrademessage = _('repository storage will be able to create '
-                       'optimal deltas; new repository data will be '
-                       'smaller and read times should decrease; '
-                       'interacting with other repositories using this '
-                       'storage model should require less network and '
-                       'CPU resources, making "hg push" and "hg pull" '
-                       'faster')
+    upgrademessage = _(
+        'repository storage will be able to create '
+        'optimal deltas; new repository data will be '
+        'smaller and read times should decrease; '
+        'interacting with other repositories using this '
+        'storage model should require less network and '
+        'CPU resources, making "hg push" and "hg pull" '
+        'faster'
+    )
+
 
 @registerformatvariant
 class sparserevlog(requirementformatvariant):
@@ -302,19 +331,23 @@
 
     default = True
 
-    description = _('in order to limit disk reading and memory usage on older '
-                    'version, the span of a delta chain from its root to its '
-                    'end is limited, whatever the relevant data in this span. '
-                    'This can severly limit Mercurial ability to build good '
-                    'chain of delta resulting is much more storage space being '
-                    'taken and limit reusability of on disk delta during '
-                    'exchange.'
-                   )
+    description = _(
+        'in order to limit disk reading and memory usage on older '
+        'versions, the span of a delta chain from its root to its '
+        'end is limited, whatever the relevant data in this span. '
+        'This can severely limit the ability of Mercurial to build '
+        'good delta chains, resulting in much more storage space '
+        'being taken and limiting the reusability of on-disk deltas '
+        'during exchange.'
+    )
 
-    upgrademessage = _('Revlog supports delta chain with more unused data '
-                       'between payload. These gaps will be skipped at read '
-                       'time. This allows for better delta chains, making a '
-                       'better compression and faster exchange with server.')
+    upgrademessage = _(
+        'Revlog will support delta chains with more unused data '
+        'between payloads. These gaps will be skipped at read '
+        'time. This allows for better delta chains, making for '
+        'better compression and faster exchange with the server.'
+    )
+
 
 @registerformatvariant
 class sidedata(requirementformatvariant):
@@ -324,25 +357,32 @@
 
     default = False
 
-    description = _('Allows storage of extra data alongside a revision, '
-                    'unlocking various caching options.')
+    description = _(
+        'Allows storage of extra data alongside a revision, '
+        'unlocking various caching options.'
+    )
 
     upgrademessage = _('Allows storage of extra data alongside a revision.')
 
+
 @registerformatvariant
 class removecldeltachain(formatvariant):
     name = 'plain-cl-delta'
 
     default = True
 
-    description = _('changelog storage is using deltas instead of '
-                    'raw entries; changelog reading and any '
-                    'operation relying on changelog data are slower '
-                    'than they could be')
+    description = _(
+        'changelog storage is using deltas instead of '
+        'raw entries; changelog reading and any '
+        'operation relying on changelog data are slower '
+        'than they could be'
+    )
 
-    upgrademessage = _('changelog storage will be reformated to '
-                       'store raw entries; changelog reading will be '
-                       'faster; changelog size may be reduced')
+    upgrademessage = _(
+        'changelog storage will be reformatted to '
+        'store raw entries; changelog reading will be '
+        'faster; changelog size may be reduced'
+    )
 
     @staticmethod
     def fromrepo(repo):
@@ -356,16 +396,20 @@
     def fromconfig(repo):
         return True
 
+
 @registerformatvariant
 class compressionengine(formatvariant):
     name = 'compression'
     default = 'zlib'
 
-    description = _('Compresion algorithm used to compress data. '
-                    'Some engine are faster than other')
+    description = _(
+        'Compression algorithm used to compress data. '
+        'Some engines are faster than others'
+    )
 
-    upgrademessage = _('revlog content will be recompressed with the new '
-                       'algorithm.')
+    upgrademessage = _(
+        'revlog content will be recompressed with the new ' 'algorithm.'
+    )
 
     @classmethod
     def fromrepo(cls, repo):
@@ -384,6 +428,7 @@
     def fromconfig(cls, repo):
         return repo.ui.config('format', 'revlog-compression')
 
+
 @registerformatvariant
 class compressionlevel(formatvariant):
     name = 'compression-level'
@@ -417,6 +462,7 @@
             return 'default'
         return bytes(level)
 
+
 def finddeficiencies(repo):
     """returns a list of deficiencies that the repo suffer from"""
     deficiencies = []
@@ -431,6 +477,7 @@
 
     return deficiencies
 
+
 # search without '-' to support older form on newer client.
 #
 # We don't enforce backward compatibility for debug command so this
@@ -444,68 +491,98 @@
     'redeltafulladd': 're-delta-fulladd',
 }
 
+
 def findoptimizations(repo):
     """Determine optimisation that could be used during upgrade"""
     # These are unconditionally added. There is logic later that figures out
     # which ones to apply.
     optimizations = []
 
-    optimizations.append(improvement(
-        name='re-delta-parent',
-        type=optimisation,
-        description=_('deltas within internal storage will be recalculated to '
-                      'choose an optimal base revision where this was not '
-                      'already done; the size of the repository may shrink and '
-                      'various operations may become faster; the first time '
-                      'this optimization is performed could slow down upgrade '
-                      'execution considerably; subsequent invocations should '
-                      'not run noticeably slower'),
-        upgrademessage=_('deltas within internal storage will choose a new '
-                         'base revision if needed')))
+    optimizations.append(
+        improvement(
+            name='re-delta-parent',
+            type=optimisation,
+            description=_(
+                'deltas within internal storage will be recalculated to '
+                'choose an optimal base revision where this was not '
+                'already done; the size of the repository may shrink and '
+                'various operations may become faster; the first time '
+                'this optimization is performed could slow down upgrade '
+                'execution considerably; subsequent invocations should '
+                'not run noticeably slower'
+            ),
+            upgrademessage=_(
+                'deltas within internal storage will choose a new '
+                'base revision if needed'
+            ),
+        )
+    )
 
-    optimizations.append(improvement(
-        name='re-delta-multibase',
-        type=optimisation,
-        description=_('deltas within internal storage will be recalculated '
-                      'against multiple base revision and the smallest '
-                      'difference will be used; the size of the repository may '
-                      'shrink significantly when there are many merges; this '
-                      'optimization will slow down execution in proportion to '
-                      'the number of merges in the repository and the amount '
-                      'of files in the repository; this slow down should not '
-                      'be significant unless there are tens of thousands of '
-                      'files and thousands of merges'),
-        upgrademessage=_('deltas within internal storage will choose an '
-                         'optimal delta by computing deltas against multiple '
-                         'parents; may slow down execution time '
-                         'significantly')))
+    optimizations.append(
+        improvement(
+            name='re-delta-multibase',
+            type=optimisation,
+            description=_(
+                'deltas within internal storage will be recalculated '
+                'against multiple base revisions and the smallest '
+                'difference will be used; the size of the repository may '
+                'shrink significantly when there are many merges; this '
+                'optimization will slow down execution in proportion to '
+                'the number of merges in the repository and the number '
+                'of files in the repository; this slowdown should not '
+                'be significant unless there are tens of thousands of '
+                'files and thousands of merges'
+            ),
+            upgrademessage=_(
+                'deltas within internal storage will choose an '
+                'optimal delta by computing deltas against multiple '
+                'parents; may slow down execution time '
+                'significantly'
+            ),
+        )
+    )
 
-    optimizations.append(improvement(
-        name='re-delta-all',
-        type=optimisation,
-        description=_('deltas within internal storage will always be '
-                      'recalculated without reusing prior deltas; this will '
-                      'likely make execution run several times slower; this '
-                      'optimization is typically not needed'),
-        upgrademessage=_('deltas within internal storage will be fully '
-                         'recomputed; this will likely drastically slow down '
-                         'execution time')))
+    optimizations.append(
+        improvement(
+            name='re-delta-all',
+            type=optimisation,
+            description=_(
+                'deltas within internal storage will always be '
+                'recalculated without reusing prior deltas; this will '
+                'likely make execution run several times slower; this '
+                'optimization is typically not needed'
+            ),
+            upgrademessage=_(
+                'deltas within internal storage will be fully '
+                'recomputed; this will likely drastically slow down '
+                'execution time'
+            ),
+        )
+    )
 
-    optimizations.append(improvement(
-        name='re-delta-fulladd',
-        type=optimisation,
-        description=_('every revision will be re-added as if it was new '
-                      'content. It will go through the full storage '
-                      'mechanism giving extensions a chance to process it '
-                      '(eg. lfs). This is similar to "re-delta-all" but even '
-                      'slower since more logic is involved.'),
-        upgrademessage=_('each revision will be added as new content to the '
-                         'internal storage; this will likely drastically slow '
-                         'down execution time, but some extensions might need '
-                         'it')))
+    optimizations.append(
+        improvement(
+            name='re-delta-fulladd',
+            type=optimisation,
+            description=_(
+                'every revision will be re-added as if it was new '
+                'content. It will go through the full storage '
+                'mechanism giving extensions a chance to process it '
+                '(e.g. lfs). This is similar to "re-delta-all" but even '
+                'slower since more logic is involved.'
+            ),
+            upgrademessage=_(
+                'each revision will be added as new content to the '
+                'internal storage; this will likely drastically slow '
+                'down execution time, but some extensions might need '
+                'it'
+            ),
+        )
+    )
 
     return optimizations
 
+
 def determineactions(repo, deficiencies, sourcereqs, destreqs):
     """Determine upgrade actions that will be performed.
 
@@ -538,6 +615,7 @@
 
     return newactions
 
+
 def _revlogfrompath(repo, path):
     """Obtain a revlog from a repo path.
 
@@ -546,12 +624,13 @@
     if path == '00changelog.i':
         return changelog.changelog(repo.svfs)
     elif path.endswith('00manifest.i'):
-        mandir = path[:-len('00manifest.i')]
+        mandir = path[: -len('00manifest.i')]
         return manifest.manifestrevlog(repo.svfs, tree=mandir)
     else:
-        #reverse of "/".join(("data", path + ".i"))
+        # reverse of "/".join(("data", path + ".i"))
         return filelog.filelog(repo.svfs, path[5:-2])
 
+
 def _copyrevlog(tr, destrepo, oldrl, unencodedname):
     """copy all relevant files for `oldrl` into `destrepo` store
 
@@ -571,26 +650,30 @@
     newdata = newvfs.join(newrl.datafile)
 
     with newvfs(newrl.indexfile, 'w'):
-        pass # create all the directories
+        pass  # create all the directories
 
     util.copyfile(oldindex, newindex)
     copydata = oldrl.opener.exists(oldrl.datafile)
     if copydata:
         util.copyfile(olddata, newdata)
 
-    if not (unencodedname.endswith('00changelog.i')
-            or unencodedname.endswith('00manifest.i')):
+    if not (
+        unencodedname.endswith('00changelog.i')
+        or unencodedname.endswith('00manifest.i')
+    ):
         destrepo.svfs.fncache.add(unencodedname)
         if copydata:
             destrepo.svfs.fncache.add(unencodedname[:-2] + '.d')
 
+
 UPGRADE_CHANGELOG = object()
 UPGRADE_MANIFEST = object()
 UPGRADE_FILELOG = object()
 
-UPGRADE_ALL_REVLOGS = frozenset([UPGRADE_CHANGELOG,
-                                 UPGRADE_MANIFEST,
-                                 UPGRADE_FILELOG])
+UPGRADE_ALL_REVLOGS = frozenset(
+    [UPGRADE_CHANGELOG, UPGRADE_MANIFEST, UPGRADE_FILELOG]
+)
+
 
 def matchrevlog(revlogfilter, entry):
     """check is a revlog is selected for cloning
@@ -602,8 +685,16 @@
         return UPGRADE_MANIFEST in revlogfilter
     return UPGRADE_FILELOG in revlogfilter
 
-def _clonerevlogs(ui, srcrepo, dstrepo, tr, deltareuse, forcedeltabothparents,
-                  revlogs=UPGRADE_ALL_REVLOGS):
+
+def _clonerevlogs(
+    ui,
+    srcrepo,
+    dstrepo,
+    tr,
+    deltareuse,
+    forcedeltabothparents,
+    revlogs=UPGRADE_ALL_REVLOGS,
+):
     """Copy revlogs between 2 repos."""
     revcount = 0
     srcsize = 0
@@ -634,8 +725,12 @@
 
         rl = _revlogfrompath(srcrepo, unencoded)
 
-        info = rl.storageinfo(exclusivefiles=True, revisionscount=True,
-                              trackedsize=True, storedsize=True)
+        info = rl.storageinfo(
+            exclusivefiles=True,
+            revisionscount=True,
+            trackedsize=True,
+            storedsize=True,
+        )
 
         revcount += info['revisionscount'] or 0
         datasize = info['storedsize'] or 0
@@ -665,14 +760,21 @@
     if not revcount:
         return
 
-    ui.write(_('migrating %d total revisions (%d in filelogs, %d in manifests, '
-               '%d in changelog)\n') %
-             (revcount, frevcount, mrevcount, crevcount))
-    ui.write(_('migrating %s in store; %s tracked data\n') % (
-             (util.bytecount(srcsize), util.bytecount(srcrawsize))))
+    ui.write(
+        _(
+            'migrating %d total revisions (%d in filelogs, %d in manifests, '
+            '%d in changelog)\n'
+        )
+        % (revcount, frevcount, mrevcount, crevcount)
+    )
+    ui.write(
+        _('migrating %s in store; %s tracked data\n')
+        % ((util.bytecount(srcsize), util.bytecount(srcrawsize)))
+    )
 
     # Used to keep track of progress.
     progress = None
+
     def oncopiedrevision(rl, rev, node):
         progress.increment()
 
@@ -686,49 +788,88 @@
         oldrl = _revlogfrompath(srcrepo, unencoded)
 
         if isinstance(oldrl, changelog.changelog) and 'c' not in seen:
-            ui.write(_('finished migrating %d manifest revisions across %d '
-                       'manifests; change in size: %s\n') %
-                     (mrevcount, mcount, util.bytecount(mdstsize - msrcsize)))
+            ui.write(
+                _(
+                    'finished migrating %d manifest revisions across %d '
+                    'manifests; change in size: %s\n'
+                )
+                % (mrevcount, mcount, util.bytecount(mdstsize - msrcsize))
+            )
 
-            ui.write(_('migrating changelog containing %d revisions '
-                       '(%s in store; %s tracked data)\n') %
-                     (crevcount, util.bytecount(csrcsize),
-                      util.bytecount(crawsize)))
+            ui.write(
+                _(
+                    'migrating changelog containing %d revisions '
+                    '(%s in store; %s tracked data)\n'
+                )
+                % (
+                    crevcount,
+                    util.bytecount(csrcsize),
+                    util.bytecount(crawsize),
+                )
+            )
             seen.add('c')
-            progress = srcrepo.ui.makeprogress(_('changelog revisions'),
-                                               total=crevcount)
+            progress = srcrepo.ui.makeprogress(
+                _('changelog revisions'), total=crevcount
+            )
         elif isinstance(oldrl, manifest.manifestrevlog) and 'm' not in seen:
-            ui.write(_('finished migrating %d filelog revisions across %d '
-                       'filelogs; change in size: %s\n') %
-                     (frevcount, fcount, util.bytecount(fdstsize - fsrcsize)))
+            ui.write(
+                _(
+                    'finished migrating %d filelog revisions across %d '
+                    'filelogs; change in size: %s\n'
+                )
+                % (frevcount, fcount, util.bytecount(fdstsize - fsrcsize))
+            )
 
-            ui.write(_('migrating %d manifests containing %d revisions '
-                       '(%s in store; %s tracked data)\n') %
-                     (mcount, mrevcount, util.bytecount(msrcsize),
-                      util.bytecount(mrawsize)))
+            ui.write(
+                _(
+                    'migrating %d manifests containing %d revisions '
+                    '(%s in store; %s tracked data)\n'
+                )
+                % (
+                    mcount,
+                    mrevcount,
+                    util.bytecount(msrcsize),
+                    util.bytecount(mrawsize),
+                )
+            )
             seen.add('m')
             if progress:
                 progress.complete()
-            progress = srcrepo.ui.makeprogress(_('manifest revisions'),
-                                               total=mrevcount)
+            progress = srcrepo.ui.makeprogress(
+                _('manifest revisions'), total=mrevcount
+            )
         elif 'f' not in seen:
-            ui.write(_('migrating %d filelogs containing %d revisions '
-                       '(%s in store; %s tracked data)\n') %
-                     (fcount, frevcount, util.bytecount(fsrcsize),
-                      util.bytecount(frawsize)))
+            ui.write(
+                _(
+                    'migrating %d filelogs containing %d revisions '
+                    '(%s in store; %s tracked data)\n'
+                )
+                % (
+                    fcount,
+                    frevcount,
+                    util.bytecount(fsrcsize),
+                    util.bytecount(frawsize),
+                )
+            )
             seen.add('f')
             if progress:
                 progress.complete()
-            progress = srcrepo.ui.makeprogress(_('file revisions'),
-                                               total=frevcount)
+            progress = srcrepo.ui.makeprogress(
+                _('file revisions'), total=frevcount
+            )
 
         if matchrevlog(revlogs, unencoded):
-            ui.note(_('cloning %d revisions from %s\n')
-                    % (len(oldrl), unencoded))
+            ui.note(
+                _('cloning %d revisions from %s\n') % (len(oldrl), unencoded)
+            )
             newrl = _revlogfrompath(dstrepo, unencoded)
-            oldrl.clone(tr, newrl, addrevisioncb=oncopiedrevision,
-                        deltareuse=deltareuse,
-                        forcedeltabothparents=forcedeltabothparents)
+            oldrl.clone(
+                tr,
+                newrl,
+                addrevisioncb=oncopiedrevision,
+                deltareuse=deltareuse,
+                forcedeltabothparents=forcedeltabothparents,
+            )
         else:
             msg = _('blindly copying %s containing %i revisions\n')
             ui.note(msg % (unencoded, len(oldrl)))
@@ -750,11 +891,19 @@
 
     progress.complete()
 
-    ui.write(_('finished migrating %d changelog revisions; change in size: '
-               '%s\n') % (crevcount, util.bytecount(cdstsize - csrcsize)))
+    ui.write(
+        _('finished migrating %d changelog revisions; change in size: ' '%s\n')
+        % (crevcount, util.bytecount(cdstsize - csrcsize))
+    )
 
-    ui.write(_('finished migrating %d total revisions; total change in store '
-               'size: %s\n') % (revcount, util.bytecount(dstsize - srcsize)))
+    ui.write(
+        _(
+            'finished migrating %d total revisions; total change in store '
+            'size: %s\n'
+        )
+        % (revcount, util.bytecount(dstsize - srcsize))
+    )
+
 
 def _filterstorefile(srcrepo, dstrepo, requirements, path, mode, st):
     """Determine whether to copy a store file during upgrade.
@@ -787,6 +936,7 @@
 
     return True
 
+
 def _finishdatamigration(ui, srcrepo, dstrepo, requirements):
     """Hook point for extensions to perform additional actions during upgrade.
 
@@ -794,8 +944,10 @@
     before the new store is swapped into the original location.
     """
 
-def _upgraderepo(ui, srcrepo, dstrepo, requirements, actions,
-                 revlogs=UPGRADE_ALL_REVLOGS):
+
+def _upgraderepo(
+    ui, srcrepo, dstrepo, requirements, actions, revlogs=UPGRADE_ALL_REVLOGS
+):
     """Do the low-level work of upgrading a repository.
 
     The upgrade is effectively performed as a copy between a source
@@ -808,8 +960,12 @@
     assert srcrepo.currentwlock()
     assert dstrepo.currentwlock()
 
-    ui.write(_('(it is safe to interrupt this process any time before '
-               'data migration completes)\n'))
+    ui.write(
+        _(
+            '(it is safe to interrupt this process any time before '
+            'data migration completes)\n'
+        )
+    )
 
     if 're-delta-all' in actions:
         deltareuse = revlog.revlog.DELTAREUSENEVER
@@ -823,14 +979,20 @@
         deltareuse = revlog.revlog.DELTAREUSEALWAYS
 
     with dstrepo.transaction('upgrade') as tr:
-        _clonerevlogs(ui, srcrepo, dstrepo, tr, deltareuse,
-                      're-delta-multibase' in actions, revlogs=revlogs)
+        _clonerevlogs(
+            ui,
+            srcrepo,
+            dstrepo,
+            tr,
+            deltareuse,
+            're-delta-multibase' in actions,
+            revlogs=revlogs,
+        )
 
     # Now copy other files in the store directory.
     # The sorted() makes execution deterministic.
     for p, kind, st in sorted(srcrepo.store.vfs.readdir('', stat=True)):
-        if not _filterstorefile(srcrepo, dstrepo, requirements,
-                                       p, kind, st):
+        if not _filterstorefile(srcrepo, dstrepo, requirements, p, kind, st):
             continue
 
         srcrepo.ui.write(_('copying %s\n') % p)
@@ -852,14 +1014,18 @@
     # as a mechanism to lock out new clients during the data swap. This is
     # better than allowing a client to continue while the repository is in
     # an inconsistent state.
-    ui.write(_('marking source repository as being upgraded; clients will be '
-               'unable to read from repository\n'))
-    scmutil.writerequires(srcrepo.vfs,
-                          srcrepo.requirements | {'upgradeinprogress'})
+    ui.write(
+        _(
+            'marking source repository as being upgraded; clients will be '
+            'unable to read from repository\n'
+        )
+    )
+    scmutil.writerequires(
+        srcrepo.vfs, srcrepo.requirements | {'upgradeinprogress'}
+    )
 
     ui.write(_('starting in-place swap of repository data\n'))
-    ui.write(_('replaced files will be backed up at %s\n') %
-             backuppath)
+    ui.write(_('replaced files will be backed up at %s\n') % backuppath)
 
     # Now swap in the new store directory. Doing it as a rename should make
     # the operation nearly instantaneous and atomic (at least in well-behaved
@@ -869,13 +1035,22 @@
     util.rename(srcrepo.spath, backupvfs.join('store'))
     util.rename(dstrepo.spath, srcrepo.spath)
     elapsed = util.timer() - tstart
-    ui.write(_('store replacement complete; repository was inconsistent for '
-               '%0.1fs\n') % elapsed)
+    ui.write(
+        _(
+            'store replacement complete; repository was inconsistent for '
+            '%0.1fs\n'
+        )
+        % elapsed
+    )
 
     # We first write the requirements file. Any new requirements will lock
     # out legacy clients.
-    ui.write(_('finalizing requirements file and making repository readable '
-               'again\n'))
+    ui.write(
+        _(
+            'finalizing requirements file and making repository readable '
+            'again\n'
+        )
+    )
     scmutil.writerequires(srcrepo.vfs, requirements)
 
     # The lock file from the old store won't be removed because nothing has a
@@ -886,8 +1061,16 @@
 
     return backuppath
 
-def upgraderepo(ui, repo, run=False, optimize=None, backup=True,
-                manifest=None, changelog=None):
+
+def upgraderepo(
+    ui,
+    repo,
+    run=False,
+    optimize=None,
+    backup=True,
+    manifest=None,
+    changelog=None,
+):
     """Upgrade a repository in place."""
     if optimize is None:
         optimize = []
@@ -918,39 +1101,56 @@
     # Ensure the repository can be upgraded.
     missingreqs = requiredsourcerequirements(repo) - repo.requirements
     if missingreqs:
-        raise error.Abort(_('cannot upgrade repository; requirement '
-                            'missing: %s') % _(', ').join(sorted(missingreqs)))
+        raise error.Abort(
+            _('cannot upgrade repository; requirement ' 'missing: %s')
+            % _(', ').join(sorted(missingreqs))
+        )
 
     blockedreqs = blocksourcerequirements(repo) & repo.requirements
     if blockedreqs:
-        raise error.Abort(_('cannot upgrade repository; unsupported source '
-                            'requirement: %s') %
-                          _(', ').join(sorted(blockedreqs)))
+        raise error.Abort(
+            _(
+                'cannot upgrade repository; unsupported source '
+                'requirement: %s'
+            )
+            % _(', ').join(sorted(blockedreqs))
+        )
 
     # FUTURE there is potentially a need to control the wanted requirements via
     # command arguments or via an extension hook point.
     newreqs = localrepo.newreporequirements(
-        repo.ui, localrepo.defaultcreateopts(repo.ui))
+        repo.ui, localrepo.defaultcreateopts(repo.ui)
+    )
     newreqs.update(preservedrequirements(repo))
 
-    noremovereqs = (repo.requirements - newreqs -
-                   supportremovedrequirements(repo))
+    noremovereqs = (
+        repo.requirements - newreqs - supportremovedrequirements(repo)
+    )
     if noremovereqs:
-        raise error.Abort(_('cannot upgrade repository; requirement would be '
-                            'removed: %s') % _(', ').join(sorted(noremovereqs)))
+        raise error.Abort(
+            _('cannot upgrade repository; requirement would be ' 'removed: %s')
+            % _(', ').join(sorted(noremovereqs))
+        )
 
-    noaddreqs = (newreqs - repo.requirements -
-                 allowednewrequirements(repo))
+    noaddreqs = newreqs - repo.requirements - allowednewrequirements(repo)
     if noaddreqs:
-        raise error.Abort(_('cannot upgrade repository; do not support adding '
-                            'requirement: %s') %
-                          _(', ').join(sorted(noaddreqs)))
+        raise error.Abort(
+            _(
+                'cannot upgrade repository; do not support adding '
+                'requirement: %s'
+            )
+            % _(', ').join(sorted(noaddreqs))
+        )
 
     unsupportedreqs = newreqs - supporteddestrequirements(repo)
     if unsupportedreqs:
-        raise error.Abort(_('cannot upgrade repository; do not support '
-                            'destination requirement: %s') %
-                          _(', ').join(sorted(unsupportedreqs)))
+        raise error.Abort(
+            _(
+                'cannot upgrade repository; do not support '
+                'destination requirement: %s'
+            )
+            % _(', ').join(sorted(unsupportedreqs))
+        )
 
     # Find and validate all improvements that can be made.
     alloptimizations = findoptimizations(repo)
@@ -962,17 +1162,21 @@
             optimizations.append(o)
             optimize.discard(o.name)
 
-    if optimize: # anything left is unknown
-        raise error.Abort(_('unknown optimization action requested: %s') %
-                          ', '.join(sorted(optimize)),
-                          hint=_('run without arguments to see valid '
-                                 'optimizations'))
+    if optimize:  # anything left is unknown
+        raise error.Abort(
+            _('unknown optimization action requested: %s')
+            % ', '.join(sorted(optimize)),
+            hint=_('run without arguments to see valid ' 'optimizations'),
+        )
 
     deficiencies = finddeficiencies(repo)
     actions = determineactions(repo, deficiencies, repo.requirements, newreqs)
-    actions.extend(o for o in sorted(optimizations)
-                   # determineactions could have added optimisation
-                   if o not in actions)
+    actions.extend(
+        o
+        for o in sorted(optimizations)
+        # determineactions could have added optimisation
+        if o not in actions
+    )
 
     removedreqs = repo.requirements - newreqs
     addedreqs = newreqs - repo.requirements
@@ -980,23 +1184,31 @@
     if revlogs != UPGRADE_ALL_REVLOGS:
         incompatible = RECLONES_REQUIREMENTS & (removedreqs | addedreqs)
         if incompatible:
-            msg = _('ignoring revlogs selection flags, format requirements '
-                    'change: %s\n')
+            msg = _(
+                'ignoring revlogs selection flags, format requirements '
+                'change: %s\n'
+            )
             ui.warn(msg % ', '.join(sorted(incompatible)))
             revlogs = UPGRADE_ALL_REVLOGS
 
     def printrequirements():
         ui.write(_('requirements\n'))
-        ui.write(_('   preserved: %s\n') %
-                 _(', ').join(sorted(newreqs & repo.requirements)))
+        ui.write(
+            _('   preserved: %s\n')
+            % _(', ').join(sorted(newreqs & repo.requirements))
+        )
 
         if repo.requirements - newreqs:
-            ui.write(_('   removed: %s\n') %
-                     _(', ').join(sorted(repo.requirements - newreqs)))
+            ui.write(
+                _('   removed: %s\n')
+                % _(', ').join(sorted(repo.requirements - newreqs))
+            )
 
         if newreqs - repo.requirements:
-            ui.write(_('   added: %s\n') %
-                     _(', ').join(sorted(newreqs - repo.requirements)))
+            ui.write(
+                _('   added: %s\n')
+                % _(', ').join(sorted(newreqs - repo.requirements))
+            )
 
         ui.write('\n')
 
@@ -1017,24 +1229,37 @@
         if fromconfig or onlydefault:
 
             if fromconfig:
-                ui.write(_('repository lacks features recommended by '
-                           'current config options:\n\n'))
+                ui.write(
+                    _(
+                        'repository lacks features recommended by '
+                        'current config options:\n\n'
+                    )
+                )
                 for i in fromconfig:
                     ui.write('%s\n   %s\n\n' % (i.name, i.description))
 
             if onlydefault:
-                ui.write(_('repository lacks features used by the default '
-                           'config options:\n\n'))
+                ui.write(
+                    _(
+                        'repository lacks features used by the default '
+                        'config options:\n\n'
+                    )
+                )
                 for i in onlydefault:
                     ui.write('%s\n   %s\n\n' % (i.name, i.description))
 
             ui.write('\n')
         else:
-            ui.write(_('(no feature deficiencies found in existing '
-                       'repository)\n'))
+            ui.write(
+                _('(no feature deficiencies found in existing ' 'repository)\n')
+            )
 
-        ui.write(_('performing an upgrade with "--run" will make the following '
-                   'changes:\n\n'))
+        ui.write(
+            _(
+                'performing an upgrade with "--run" will make the following '
+                'changes:\n\n'
+            )
+        )
 
         printrequirements()
         printupgradeactions()
@@ -1042,8 +1267,12 @@
         unusedoptimize = [i for i in alloptimizations if i not in actions]
 
         if unusedoptimize:
-            ui.write(_('additional optimizations are available by specifying '
-                     '"--optimize <name>":\n\n'))
+            ui.write(
+                _(
+                    'additional optimizations are available by specifying '
+                    '"--optimize <name>":\n\n'
+                )
+            )
             for i in unusedoptimize:
                 ui.write(_('%s\n   %s\n\n') % (i.name, i.description))
         return
@@ -1066,16 +1295,22 @@
         tmppath = pycompat.mkdtemp(prefix='upgrade.', dir=repo.path)
         backuppath = None
         try:
-            ui.write(_('creating temporary repository to stage migrated '
-                       'data: %s\n') % tmppath)
+            ui.write(
+                _(
+                    'creating temporary repository to stage migrated '
+                    'data: %s\n'
+                )
+                % tmppath
+            )
 
             # clone ui without using ui.copy because repo.ui is protected
             repoui = repo.ui.__class__(repo.ui)
             dstrepo = hg.repository(repoui, path=tmppath, create=True)
 
             with dstrepo.wlock(), dstrepo.lock():
-                backuppath = _upgraderepo(ui, repo, dstrepo, newreqs,
-                                          upgradeactions, revlogs=revlogs)
+                backuppath = _upgraderepo(
+                    ui, repo, dstrepo, newreqs, upgradeactions, revlogs=revlogs
+                )
             if not (backup or backuppath is None):
                 ui.write(_('removing old repository content%s\n') % backuppath)
                 repo.vfs.rmtree(backuppath, forcibly=True)
@@ -1086,8 +1321,13 @@
             repo.vfs.rmtree(tmppath, forcibly=True)
 
             if backuppath:
-                ui.warn(_('copy of old repository backed up at %s\n') %
-                        backuppath)
-                ui.warn(_('the old repository will not be deleted; remove '
-                          'it to free up disk space once the upgraded '
-                          'repository is verified\n'))
+                ui.warn(
+                    _('copy of old repository backed up at %s\n') % backuppath
+                )
+                ui.warn(
+                    _(
+                        'the old repository will not be deleted; remove '
+                        'it to free up disk space once the upgraded '
+                        'repository is verified\n'
+                    )
+                )
--- a/mercurial/url.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/url.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,15 +25,14 @@
     urllibcompat,
     util,
 )
-from .utils import (
-    stringutil,
-)
+from .utils import stringutil
 
 httplib = util.httplib
 stringio = util.stringio
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 def escape(s, quote=None):
     '''Replace special characters "&", "<" and ">" with HTML-safe sequences.
     If the optional flag quote is true, the quotation mark character (")
@@ -49,6 +48,7 @@
         s = s.replace(b'"', b"&quot;")
     return s
 
+
 class passwordmgr(object):
     def __init__(self, ui, passwddb):
         self.ui = ui
@@ -77,11 +77,15 @@
             u = util.url(pycompat.bytesurl(authuri))
             u.query = None
             if not self.ui.interactive():
-                raise error.Abort(_('http authorization required for %s') %
-                                  util.hidepassword(bytes(u)))
+                raise error.Abort(
+                    _('http authorization required for %s')
+                    % util.hidepassword(bytes(u))
+                )
 
-            self.ui.write(_("http authorization required for %s\n") %
-                          util.hidepassword(bytes(u)))
+            self.ui.write(
+                _("http authorization required for %s\n")
+                % util.hidepassword(bytes(u))
+            )
             self.ui.write(_("realm: %s\n") % pycompat.bytesurl(realm))
             if user:
                 self.ui.write(_("user: %s\n") % user)
@@ -102,16 +106,19 @@
     def find_stored_password(self, authuri):
         return self.passwddb.find_user_password(None, authuri)
 
+
 class proxyhandler(urlreq.proxyhandler):
     def __init__(self, ui):
-        proxyurl = (ui.config("http_proxy", "host") or
-                        encoding.environ.get('http_proxy'))
+        proxyurl = ui.config("http_proxy", "host") or encoding.environ.get(
+            'http_proxy'
+        )
         # XXX proxyauthinfo = None
 
         if proxyurl:
             # proxy can be proper url or host[:port]
-            if not (proxyurl.startswith('http:') or
-                    proxyurl.startswith('https:')):
+            if not (
+                proxyurl.startswith('http:') or proxyurl.startswith('https:')
+            ):
                 proxyurl = 'http://' + proxyurl + '/'
             proxy = util.url(proxyurl)
             if not proxy.user:
@@ -120,11 +127,16 @@
 
             # see if we should use a proxy for this url
             no_list = ["localhost", "127.0.0.1"]
-            no_list.extend([p.lower() for
-                            p in ui.configlist("http_proxy", "no")])
-            no_list.extend([p.strip().lower() for
-                            p in encoding.environ.get("no_proxy", '').split(',')
-                            if p.strip()])
+            no_list.extend(
+                [p.lower() for p in ui.configlist("http_proxy", "no")]
+            )
+            no_list.extend(
+                [
+                    p.strip().lower()
+                    for p in encoding.environ.get("no_proxy", '').split(',')
+                    if p.strip()
+                ]
+            )
             # "http_proxy.always" config is for running tests on localhost
             if ui.configbool("http_proxy", "always"):
                 self.no_list = []
@@ -154,6 +166,7 @@
 
         return urlreq.proxyhandler.proxy_open(self, req, proxy, type_)
 
+
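+# Build a send() wrapper that streams file-like request bodies to the
+# socket chunk by chunk; other payloads pass straight through.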
 def _gen_sendfile(orgsend):
     def _sendfile(self, data):
         # send a file
@@ -164,10 +177,13 @@
                 orgsend(self, chunk)
         else:
             orgsend(self, data)
+
     return _sendfile
 
+
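+# urllib defines an HTTPS handler only when Python was built with SSL
+# support, so probe for it instead of assuming it exists.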
 has_https = util.safehasattr(urlreq, 'httpshandler')
 
+
 class httpconnection(keepalive.HTTPConnection):
     # must be able to send big bundle as stream.
     send = _gen_sendfile(keepalive.HTTPConnection.send)
@@ -181,6 +197,7 @@
             return proxyres
         return keepalive.HTTPConnection.getresponse(self)
 
+
 # Large parts of this function date from before Python 2.6 and could
 # potentially be removed.
 def _generic_start_transaction(handler, h, req):
@@ -193,9 +210,9 @@
         tunnel_host = urllibcompat.getselector(req)
         new_tunnel = False
 
-    if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy
+    if new_tunnel or tunnel_host == urllibcompat.getfullurl(req):  # has proxy
         u = util.url(pycompat.bytesurl(tunnel_host))
-        if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS
+        if new_tunnel or u.scheme == 'https':  # only use CONNECT for HTTPS
             h.realhostport = ':'.join([u.host, (u.port or '443')])
             h.headers = req.headers.copy()
             h.headers.update(handler.parent.addheaders)
@@ -204,10 +221,15 @@
     h.realhostport = None
     h.headers = None
 
+
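+# Issue a CONNECT request through the proxy (forwarding any Proxy-*
+# headers) and parse the proxy's reply before the tunnel is used.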
 def _generic_proxytunnel(self):
     proxyheaders = dict(
-            [(x, self.headers[x]) for x in self.headers
-             if x.lower().startswith(r'proxy-')])
+        [
+            (x, self.headers[x])
+            for x in self.headers
+            if x.lower().startswith(r'proxy-')
+        ]
+    )
     self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
     for header in proxyheaders.iteritems():
         self.send('%s: %s\r\n' % header)
@@ -221,9 +243,7 @@
     if not pycompat.ispy3:
         kwargs['strict'] = self.strict
 
-    res = self.response_class(self.sock,
-                              method=self._method,
-                              **kwargs)
+    res = self.response_class(self.sock, method=self._method, **kwargs)
 
     while True:
         version, status, reason = res._read_status()
@@ -285,23 +305,25 @@
         res.length = None
 
     # does the body have a fixed length? (of zero)
-    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
-        100 <= status < 200 or # 1xx codes
-        res._method == 'HEAD'):
+    if (
+        status == httplib.NO_CONTENT
+        or status == httplib.NOT_MODIFIED
+        or 100 <= status < 200  # 1xx codes
+        or res._method == 'HEAD'
+    ):
         res.length = 0
 
     # if the connection remains open, and we aren't using chunked, and
     # a content-length was not provided, then assume that the connection
     # WILL close.
-    if (not res.will_close and
-       not res.chunked and
-       res.length is None):
+    if not res.will_close and not res.chunked and res.length is None:
         res.will_close = 1
 
     self.proxyres = res
 
     return False
 
+
 class httphandler(keepalive.HTTPHandler):
     def http_open(self, req):
         return self.do_open(httpconnection, req)
@@ -310,6 +332,7 @@
         _generic_start_transaction(self, h, req)
         return keepalive.HTTPHandler._start_transaction(self, h, req)
 
+
 class logginghttpconnection(keepalive.HTTPConnection):
     def __init__(self, createconn, *args, **kwargs):
         keepalive.HTTPConnection.__init__(self, *args, **kwargs)
@@ -319,14 +342,16 @@
         # copied from 2.7.14, since old implementations directly call
         # socket.create_connection()
         def connect(self):
-            self.sock = self._create_connection((self.host, self.port),
-                                                self.timeout,
-                                                self.source_address)
+            self.sock = self._create_connection(
+                (self.host, self.port), self.timeout, self.source_address
+            )
             if self._tunnel_host:
                 self._tunnel()
 
+
 class logginghttphandler(httphandler):
     """HTTP handler that logs socket I/O."""
+
     def __init__(self, logfh, name, observeropts, timeout=None):
         super(logginghttphandler, self).__init__(timeout=timeout)
 
@@ -343,12 +368,15 @@
     def _makeconnection(self, *args, **kwargs):
         def createconnection(*args, **kwargs):
             sock = socket.create_connection(*args, **kwargs)
-            return util.makeloggingsocket(self._logfh, sock, self._logname,
-                                          **self._observeropts)
+            return util.makeloggingsocket(
+                self._logfh, sock, self._logname, **self._observeropts
+            )
 
         return logginghttpconnection(createconnection, *args, **kwargs)
 
+
 if has_https:
+
     class httpsconnection(keepalive.HTTPConnection):
         response_class = keepalive.HTTPResponse
         default_port = httplib.HTTPS_PORT
@@ -356,8 +384,15 @@
         send = _gen_sendfile(keepalive.safesend)
         getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection)
 
-        def __init__(self, host, port=None, key_file=None, cert_file=None,
-                     *args, **kwargs):
+        def __init__(
+            self,
+            host,
+            port=None,
+            key_file=None,
+            cert_file=None,
+            *args,
+            **kwargs
+        ):
             keepalive.HTTPConnection.__init__(self, host, port, *args, **kwargs)
             self.key_file = key_file
             self.cert_file = cert_file
@@ -366,12 +401,16 @@
             self.sock = socket.create_connection((self.host, self.port))
 
             host = self.host
-            if self.realhostport: # use CONNECT proxy
+            if self.realhostport:  # use CONNECT proxy
                 _generic_proxytunnel(self)
                 host = self.realhostport.rsplit(':', 1)[0]
             self.sock = sslutil.wrapsocket(
-                self.sock, self.key_file, self.cert_file, ui=self.ui,
-                serverhostname=host)
+                self.sock,
+                self.key_file,
+                self.cert_file,
+                ui=self.ui,
+                serverhostname=host,
+            )
             sslutil.validatesocket(self.sock)
 
     class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
@@ -379,8 +418,7 @@
             keepalive.KeepAliveHandler.__init__(self, timeout=timeout)
             urlreq.httpshandler.__init__(self)
             self.ui = ui
-            self.pwmgr = passwordmgr(self.ui,
-                                     self.ui.httppasswordmgrdb)
+            self.pwmgr = passwordmgr(self.ui, self.ui.httppasswordmgrdb)
 
         def _start_transaction(self, h, req):
             _generic_start_transaction(self, h, req)
@@ -404,9 +442,9 @@
             keyfile = None
             certfile = None
 
-            if len(args) >= 1: # key_file
+            if len(args) >= 1:  # key_file
                 keyfile = args[0]
-            if len(args) >= 2: # cert_file
+            if len(args) >= 2:  # cert_file
                 certfile = args[1]
             args = args[2:]
 
@@ -416,11 +454,13 @@
                 keyfile = self.auth['key']
                 certfile = self.auth['cert']
 
-            conn = httpsconnection(host, port, keyfile, certfile, *args,
-                                   **kwargs)
+            conn = httpsconnection(
+                host, port, keyfile, certfile, *args, **kwargs
+            )
             conn.ui = self.ui
             return conn
 
+
 class httpdigestauthhandler(urlreq.httpdigestauthhandler):
     def __init__(self, *args, **kwargs):
         urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
@@ -438,7 +478,9 @@
             self.retried_req = req
             self.retried = 0
         return urlreq.httpdigestauthhandler.http_error_auth_reqed(
-                    self, auth_header, host, req, headers)
+            self, auth_header, host, req, headers
+        )
+
 
 class httpbasicauthhandler(urlreq.httpbasicauthhandler):
     def __init__(self, *args, **kwargs):
@@ -470,11 +512,13 @@
             self.retried_req = req
             self.retried = 0
         return urlreq.httpbasicauthhandler.http_error_auth_reqed(
-                        self, auth_header, host, req, headers)
+            self, auth_header, host, req, headers
+        )
 
     def retry_http_basic_auth(self, host, req, realm):
         user, pw = self.passwd.find_user_password(
-            realm, urllibcompat.getfullurl(req))
+            realm, urllibcompat.getfullurl(req)
+        )
         if pw is not None:
             raw = "%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
             auth = r'Basic %s' % pycompat.strurl(base64.b64encode(raw).strip())
@@ -486,6 +530,7 @@
         else:
             return None
 
+
 class cookiehandler(urlreq.basehandler):
     def __init__(self, ui):
         self.cookiejar = None
@@ -497,12 +542,18 @@
         cookiefile = util.expandpath(cookiefile)
         try:
             cookiejar = util.cookielib.MozillaCookieJar(
-                pycompat.fsdecode(cookiefile))
+                pycompat.fsdecode(cookiefile)
+            )
             cookiejar.load()
             self.cookiejar = cookiejar
         except util.cookielib.LoadError as e:
-            ui.warn(_('(error loading cookie file %s: %s; continuing without '
-                      'cookies)\n') % (cookiefile, stringutil.forcebytestr(e)))
+            ui.warn(
+                _(
+                    '(error loading cookie file %s: %s; continuing without '
+                    'cookies)\n'
+                )
+                % (cookiefile, stringutil.forcebytestr(e))
+            )
 
     def http_request(self, request):
         if self.cookiejar:
@@ -516,10 +567,19 @@
 
         return request
 
+
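+# extra opener handler factories; each is called as h(ui, passmgr) below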
 handlerfuncs = []
 
-def opener(ui, authinfo=None, useragent=None, loggingfh=None,
-           loggingname=b's', loggingopts=None, sendaccept=True):
+
+def opener(
+    ui,
+    authinfo=None,
+    useragent=None,
+    loggingfh=None,
+    loggingname=b's',
+    loggingopts=None,
+    sendaccept=True,
+):
     '''
     construct an opener suitable for urllib2
     authinfo will be added to the password manager
@@ -539,8 +599,11 @@
     handlers = []
 
     if loggingfh:
-        handlers.append(logginghttphandler(loggingfh, loggingname,
-                                           loggingopts or {}, timeout=timeout))
+        handlers.append(
+            logginghttphandler(
+                loggingfh, loggingname, loggingopts or {}, timeout=timeout
+            )
+        )
         # We don't yet support HTTPS when logging I/O. If we attempt to open
         # an HTTPS URL, we'll likely fail due to unknown protocol.
 
@@ -557,11 +620,14 @@
         saveduser, savedpass = passmgr.find_stored_password(uris[0])
         if user != saveduser or passwd:
             passmgr.add_password(realm, uris, user, passwd)
-        ui.debug('http auth: user %s, password %s\n' %
-                 (user, passwd and '*' * len(passwd) or 'not set'))
+        ui.debug(
+            'http auth: user %s, password %s\n'
+            % (user, passwd and '*' * len(passwd) or 'not set')
+        )
 
-    handlers.extend((httpbasicauthhandler(passmgr),
-                     httpdigestauthhandler(passmgr)))
+    handlers.extend(
+        (httpbasicauthhandler(passmgr), httpdigestauthhandler(passmgr))
+    )
     handlers.extend([h(ui, passmgr) for h in handlerfuncs])
     handlers.append(cookiehandler(ui))
     opener = urlreq.buildopener(*handlers)
@@ -601,6 +667,7 @@
 
     return opener
 
+
 def open(ui, url_, data=None, sendaccept=True):
     u = util.url(url_)
     if u.scheme:
@@ -610,9 +677,10 @@
         path = util.normpath(os.path.abspath(url_))
         url_ = 'file://' + pycompat.bytesurl(urlreq.pathname2url(path))
         authinfo = None
-    return opener(ui, authinfo,
-                  sendaccept=sendaccept).open(pycompat.strurl(url_),
-                                              data)
+    return opener(ui, authinfo, sendaccept=sendaccept).open(
+        pycompat.strurl(url_), data
+    )
+
 
 def wrapresponse(resp):
     """Wrap a response object with common error handlers.
@@ -631,21 +699,29 @@
                 if e.expected:
                     got = len(e.partial)
                     total = e.expected + got
-                    msg = _('HTTP request error (incomplete response; '
-                            'expected %d bytes got %d)') % (total, got)
+                    msg = _(
+                        'HTTP request error (incomplete response; '
+                        'expected %d bytes got %d)'
+                    ) % (total, got)
                 else:
                     msg = _('HTTP request error (incomplete response)')
 
                 raise error.PeerTransportError(
                     msg,
-                    hint=_('this may be an intermittent network failure; '
-                           'if the error persists, consider contacting the '
-                           'network or server operator'))
+                    hint=_(
+                        'this may be an intermittent network failure; '
+                        'if the error persists, consider contacting the '
+                        'network or server operator'
+                    ),
+                )
             except httplib.HTTPException as e:
                 raise error.PeerTransportError(
                     _('HTTP request error (%s)') % e,
-                    hint=_('this may be an intermittent network failure; '
-                           'if the error persists, consider contacting the '
-                           'network or server operator'))
+                    hint=_(
+                        'this may be an intermittent network failure; '
+                        'if the error persists, consider contacting the '
+                        'network or server operator'
+                    ),
+                )
 
     resp.__class__ = readerproxy
--- a/mercurial/urllibcompat.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/urllibcompat.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,6 +10,7 @@
 
 _sysstr = pycompat.sysstr
 
+
 class _pycompatstub(object):
     def __init__(self):
         self._aliases = {}
@@ -18,8 +19,8 @@
         """Add items that will be populated at the first access"""
         items = map(_sysstr, items)
         self._aliases.update(
-            (item.replace(r'_', r'').lower(), (origin, item))
-            for item in items)
+            (item.replace(r'_', r'').lower(), (origin, item)) for item in items
+        )
 
     def _registeralias(self, origin, attr, name):
         """Alias ``origin``.``attr`` as ``name``"""
@@ -33,60 +34,69 @@
         self.__dict__[name] = obj = getattr(origin, item)
         return obj
 
+
 httpserver = _pycompatstub()
 urlreq = _pycompatstub()
 urlerr = _pycompatstub()
 
 if pycompat.ispy3:
     import urllib.parse
-    urlreq._registeraliases(urllib.parse, (
-        "splitattr",
-        "splitpasswd",
-        "splitport",
-        "splituser",
-        "urlparse",
-        "urlunparse",
-    ))
+
+    urlreq._registeraliases(
+        urllib.parse,
+        (
+            "splitattr",
+            "splitpasswd",
+            "splitport",
+            "splituser",
+            "urlparse",
+            "urlunparse",
+        ),
+    )
     urlreq._registeralias(urllib.parse, "parse_qs", "parseqs")
     urlreq._registeralias(urllib.parse, "parse_qsl", "parseqsl")
     urlreq._registeralias(urllib.parse, "unquote_to_bytes", "unquote")
     import urllib.request
-    urlreq._registeraliases(urllib.request, (
-        "AbstractHTTPHandler",
-        "BaseHandler",
-        "build_opener",
-        "FileHandler",
-        "FTPHandler",
-        "ftpwrapper",
-        "HTTPHandler",
-        "HTTPSHandler",
-        "install_opener",
-        "pathname2url",
-        "HTTPBasicAuthHandler",
-        "HTTPDigestAuthHandler",
-        "HTTPPasswordMgrWithDefaultRealm",
-        "ProxyHandler",
-        "Request",
-        "url2pathname",
-        "urlopen",
-    ))
+
+    urlreq._registeraliases(
+        urllib.request,
+        (
+            "AbstractHTTPHandler",
+            "BaseHandler",
+            "build_opener",
+            "FileHandler",
+            "FTPHandler",
+            "ftpwrapper",
+            "HTTPHandler",
+            "HTTPSHandler",
+            "install_opener",
+            "pathname2url",
+            "HTTPBasicAuthHandler",
+            "HTTPDigestAuthHandler",
+            "HTTPPasswordMgrWithDefaultRealm",
+            "ProxyHandler",
+            "Request",
+            "url2pathname",
+            "urlopen",
+        ),
+    )
     import urllib.response
-    urlreq._registeraliases(urllib.response, (
-        "addclosehook",
-        "addinfourl",
-    ))
+
+    urlreq._registeraliases(urllib.response, ("addclosehook", "addinfourl",))
     import urllib.error
-    urlerr._registeraliases(urllib.error, (
-        "HTTPError",
-        "URLError",
-    ))
+
+    urlerr._registeraliases(urllib.error, ("HTTPError", "URLError",))
     import http.server
-    httpserver._registeraliases(http.server, (
-        "HTTPServer",
-        "BaseHTTPRequestHandler",
-        "SimpleHTTPRequestHandler",
-        "CGIHTTPRequestHandler",
-    ))
+
+    httpserver._registeraliases(
+        http.server,
+        (
+            "HTTPServer",
+            "BaseHTTPRequestHandler",
+            "SimpleHTTPRequestHandler",
+            "CGIHTTPRequestHandler",
+        ),
+    )
 
     # urllib.parse.quote() accepts both str and bytes, decodes bytes
     # (if necessary), and returns str. This is wonky. We provide a custom
@@ -102,8 +112,8 @@
     # urllib.parse.urlencode() returns str. We use this function to make
     # sure we return bytes.
     def urlencode(query, doseq=False):
-            s = urllib.parse.urlencode(query, doseq=doseq)
-            return s.encode('ascii')
+        s = urllib.parse.urlencode(query, doseq=doseq)
+        return s.encode('ascii')
 
     urlreq.quote = quote
     urlreq.urlencode = urlencode
@@ -122,6 +132,8 @@
 
     def hasdata(req):
         return req.data is not None
+
+
 else:
     import BaseHTTPServer
     import CGIHTTPServer
@@ -129,56 +141,52 @@
     import urllib2
     import urllib
     import urlparse
-    urlreq._registeraliases(urllib, (
-        "addclosehook",
-        "addinfourl",
-        "ftpwrapper",
-        "pathname2url",
-        "quote",
-        "splitattr",
-        "splitpasswd",
-        "splitport",
-        "splituser",
-        "unquote",
-        "url2pathname",
-        "urlencode",
-    ))
-    urlreq._registeraliases(urllib2, (
-        "AbstractHTTPHandler",
-        "BaseHandler",
-        "build_opener",
-        "FileHandler",
-        "FTPHandler",
-        "HTTPBasicAuthHandler",
-        "HTTPDigestAuthHandler",
-        "HTTPHandler",
-        "HTTPPasswordMgrWithDefaultRealm",
-        "HTTPSHandler",
-        "install_opener",
-        "ProxyHandler",
-        "Request",
-        "urlopen",
-    ))
-    urlreq._registeraliases(urlparse, (
-        "urlparse",
-        "urlunparse",
-    ))
+
+    urlreq._registeraliases(
+        urllib,
+        (
+            "addclosehook",
+            "addinfourl",
+            "ftpwrapper",
+            "pathname2url",
+            "quote",
+            "splitattr",
+            "splitpasswd",
+            "splitport",
+            "splituser",
+            "unquote",
+            "url2pathname",
+            "urlencode",
+        ),
+    )
+    urlreq._registeraliases(
+        urllib2,
+        (
+            "AbstractHTTPHandler",
+            "BaseHandler",
+            "build_opener",
+            "FileHandler",
+            "FTPHandler",
+            "HTTPBasicAuthHandler",
+            "HTTPDigestAuthHandler",
+            "HTTPHandler",
+            "HTTPPasswordMgrWithDefaultRealm",
+            "HTTPSHandler",
+            "install_opener",
+            "ProxyHandler",
+            "Request",
+            "urlopen",
+        ),
+    )
+    urlreq._registeraliases(urlparse, ("urlparse", "urlunparse",))
     urlreq._registeralias(urlparse, "parse_qs", "parseqs")
     urlreq._registeralias(urlparse, "parse_qsl", "parseqsl")
-    urlerr._registeraliases(urllib2, (
-        "HTTPError",
-        "URLError",
-    ))
-    httpserver._registeraliases(BaseHTTPServer, (
-        "HTTPServer",
-        "BaseHTTPRequestHandler",
-    ))
-    httpserver._registeraliases(SimpleHTTPServer, (
-        "SimpleHTTPRequestHandler",
-    ))
-    httpserver._registeraliases(CGIHTTPServer, (
-        "CGIHTTPRequestHandler",
-    ))
+    urlerr._registeraliases(urllib2, ("HTTPError", "URLError",))
+    httpserver._registeraliases(
+        BaseHTTPServer, ("HTTPServer", "BaseHTTPRequestHandler",)
+    )
+    httpserver._registeraliases(SimpleHTTPServer, ("SimpleHTTPRequestHandler",))
+    httpserver._registeraliases(CGIHTTPServer, ("CGIHTTPRequestHandler",))
 
     def gethost(req):
         return req.get_host()
--- a/mercurial/util.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/util.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,9 +34,7 @@
 import traceback
 import warnings
 
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from hgdemandimport import tracing
 from . import (
     encoding,
@@ -142,12 +140,14 @@
 
 _notset = object()
 
+
 def bitsfrom(container):
     bits = 0
     for bit in container:
         bits |= bit
     return bits
 
+
 # python 2.6 still have deprecation warning enabled by default. We do not want
 # to display anything to standard user so detect if we are running test and
 # only use python deprecation warning in this case.
@@ -164,13 +164,20 @@
     warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
 if _dowarn and pycompat.ispy3:
     # silence warning emitted by passing user string to re.sub()
-    warnings.filterwarnings(r'ignore', r'bad escape', DeprecationWarning,
-                            r'mercurial')
-    warnings.filterwarnings(r'ignore', r'invalid escape sequence',
-                            DeprecationWarning, r'mercurial')
+    warnings.filterwarnings(
+        r'ignore', r'bad escape', DeprecationWarning, r'mercurial'
+    )
+    warnings.filterwarnings(
+        r'ignore', r'invalid escape sequence', DeprecationWarning, r'mercurial'
+    )
     # TODO: reinvent imp.is_frozen()
-    warnings.filterwarnings(r'ignore', r'the imp module is deprecated',
-                            DeprecationWarning, r'mercurial')
+    warnings.filterwarnings(
+        r'ignore',
+        r'the imp module is deprecated',
+        DeprecationWarning,
+        r'mercurial',
+    )
+
 
 def nouideprecwarn(msg, version, stacklevel=1):
     """Issue an python native deprecation warning
@@ -178,10 +185,13 @@
     This is a noop outside of tests, use 'ui.deprecwarn' when possible.
     """
     if _dowarn:
-        msg += ("\n(compatibility will be dropped after Mercurial-%s,"
-                " update your code.)") % version
+        msg += (
+            "\n(compatibility will be dropped after Mercurial-%s,"
+            " update your code.)"
+        ) % version
         warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
 
+
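+# digest algorithms understood by digester, keyed by name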
 DIGESTS = {
     'md5': hashlib.md5,
     'sha1': hashlib.sha1,
@@ -193,6 +203,7 @@
 for k in DIGESTS_BY_STRENGTH:
     assert k in DIGESTS
 
+
 class digester(object):
     """helper to compute digests.
 
@@ -240,6 +251,7 @@
                 return k
         return None
 
+
 class digestchecker(object):
     """file handle wrapper that additionally checks content against a given
     size and digests.
@@ -264,24 +276,32 @@
 
     def validate(self):
         if self._size != self._got:
-            raise error.Abort(_('size mismatch: expected %d, got %d') %
-                              (self._size, self._got))
+            raise error.Abort(
+                _('size mismatch: expected %d, got %d')
+                % (self._size, self._got)
+            )
         for k, v in self._digests.items():
             if v != self._digester[k]:
                 # i18n: first parameter is a digest name
-                raise error.Abort(_('%s mismatch: expected %s, got %s') %
-                                  (k, v, self._digester[k]))
+                raise error.Abort(
+                    _('%s mismatch: expected %s, got %s')
+                    % (k, v, self._digester[k])
+                )
+
 
 try:
     buffer = buffer
 except NameError:
+
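+    # Python 3 removed the buffer() builtin; emulate it with zero-copy
+    # memoryview slices.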
     def buffer(sliceable, offset=0, length=None):
         if length is not None:
-            return memoryview(sliceable)[offset:offset + length]
+            return memoryview(sliceable)[offset : offset + length]
         return memoryview(sliceable)[offset:]
 
+
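+# number of bytes bufferedinputpipe requests from the OS per read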
 _chunksize = 4096
 
+
 class bufferedinputpipe(object):
     """a manually buffered input pipe
 
@@ -296,6 +316,7 @@
     This class lives in the 'util' module because it makes use of the 'os'
     module from the python stdlib.
     """
+
     def __new__(cls, fh):
         # If we receive a fileobjectproxy, we need to use a variation of this
         # class that notifies observers about activity.
@@ -352,7 +373,7 @@
             if self._buffer:
                 lfi = self._buffer[-1].find('\n')
         size = lfi + 1
-        if lfi < 0: # end of file
+        if lfi < 0:  # end of file
             size = self._lenbuf
         elif len(self._buffer) > 1:
             # we need to take previous chunks into account
@@ -370,7 +391,7 @@
             buf = ''.join(self._buffer)
 
         data = buf[:size]
-        buf = buf[len(data):]
+        buf = buf[len(data) :]
         if buf:
             self._buffer = [buf]
             self._lenbuf = len(buf)
@@ -390,6 +411,7 @@
 
         return data
 
+
 def mmapread(fp):
     try:
         fd = getattr(fp, 'fileno', lambda: fp)()
@@ -401,12 +423,14 @@
             return ''
         raise
 
+
 class fileobjectproxy(object):
     """A proxy around file objects that tells a watcher when events occur.
 
     This type is intended to only be used for testing purposes. Think hard
     before using it in important code.
     """
+
     __slots__ = (
         r'_orig',
         r'_observer',
@@ -419,7 +443,6 @@
     def __getattribute__(self, name):
         ours = {
             r'_observer',
-
             # IOBase
             r'close',
             # closed if a property
@@ -485,79 +508,99 @@
 
     def close(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'close', *args, **kwargs)
+            r'close', *args, **kwargs
+        )
 
     def fileno(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'fileno', *args, **kwargs)
+            r'fileno', *args, **kwargs
+        )
 
     def flush(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'flush', *args, **kwargs)
+            r'flush', *args, **kwargs
+        )
 
     def isatty(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'isatty', *args, **kwargs)
+            r'isatty', *args, **kwargs
+        )
 
     def readable(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'readable', *args, **kwargs)
+            r'readable', *args, **kwargs
+        )
 
     def readline(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'readline', *args, **kwargs)
+            r'readline', *args, **kwargs
+        )
 
     def readlines(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'readlines', *args, **kwargs)
+            r'readlines', *args, **kwargs
+        )
 
     def seek(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'seek', *args, **kwargs)
+            r'seek', *args, **kwargs
+        )
 
     def seekable(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'seekable', *args, **kwargs)
+            r'seekable', *args, **kwargs
+        )
 
     def tell(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'tell', *args, **kwargs)
+            r'tell', *args, **kwargs
+        )
 
     def truncate(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'truncate', *args, **kwargs)
+            r'truncate', *args, **kwargs
+        )
 
     def writable(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'writable', *args, **kwargs)
+            r'writable', *args, **kwargs
+        )
 
     def writelines(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'writelines', *args, **kwargs)
+            r'writelines', *args, **kwargs
+        )
 
     def read(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'read', *args, **kwargs)
+            r'read', *args, **kwargs
+        )
 
     def readall(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'readall', *args, **kwargs)
+            r'readall', *args, **kwargs
+        )
 
     def readinto(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'readinto', *args, **kwargs)
+            r'readinto', *args, **kwargs
+        )
 
     def write(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'write', *args, **kwargs)
+            r'write', *args, **kwargs
+        )
 
     def detach(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'detach', *args, **kwargs)
+            r'detach', *args, **kwargs
+        )
 
     def read1(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'read1', *args, **kwargs)
+            r'read1', *args, **kwargs
+        )
+
 
 class observedbufferedinputpipe(bufferedinputpipe):
     """A variation of bufferedinputpipe that is aware of fileobjectproxy.
@@ -570,6 +613,7 @@
     ``os.read()`` events. It also re-publishes other events, such as
     ``read()`` and ``readline()``.
     """
+
     def _fillbuffer(self):
         res = super(observedbufferedinputpipe, self)._fillbuffer()
 
@@ -599,6 +643,7 @@
 
         return res
 
+
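+# socket methods that socketproxy implements itself so the observer can
+# be notified; any other attribute is delegated to the wrapped socket.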
 PROXIED_SOCKET_METHODS = {
     r'makefile',
     r'recv',
@@ -614,6 +659,7 @@
     r'setsockopt',
 }
 
+
 class socketproxy(object):
     """A proxy around a socket that tells a watcher when events occur.
 
@@ -622,6 +668,7 @@
     This type is intended to only be used for testing purposes. Think hard
     before using it in important code.
     """
+
     __slots__ = (
         r'_orig',
         r'_observer',
@@ -664,60 +711,77 @@
 
     def makefile(self, *args, **kwargs):
         res = object.__getattribute__(self, r'_observedcall')(
-            r'makefile', *args, **kwargs)
+            r'makefile', *args, **kwargs
+        )
 
         # The file object may be used for I/O. So we turn it into a
         # proxy using our observer.
         observer = object.__getattribute__(self, r'_observer')
-        return makeloggingfileobject(observer.fh, res, observer.name,
-                                     reads=observer.reads,
-                                     writes=observer.writes,
-                                     logdata=observer.logdata,
-                                     logdataapis=observer.logdataapis)
+        return makeloggingfileobject(
+            observer.fh,
+            res,
+            observer.name,
+            reads=observer.reads,
+            writes=observer.writes,
+            logdata=observer.logdata,
+            logdataapis=observer.logdataapis,
+        )
 
     def recv(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'recv', *args, **kwargs)
+            r'recv', *args, **kwargs
+        )
 
     def recvfrom(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'recvfrom', *args, **kwargs)
+            r'recvfrom', *args, **kwargs
+        )
 
     def recvfrom_into(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'recvfrom_into', *args, **kwargs)
+            r'recvfrom_into', *args, **kwargs
+        )
 
     def recv_into(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'recv_info', *args, **kwargs)
+            r'recv_into', *args, **kwargs
+        )
 
     def send(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'send', *args, **kwargs)
+            r'send', *args, **kwargs
+        )
 
     def sendall(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'sendall', *args, **kwargs)
+            r'sendall', *args, **kwargs
+        )
 
     def sendto(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'sendto', *args, **kwargs)
+            r'sendto', *args, **kwargs
+        )
 
     def setblocking(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'setblocking', *args, **kwargs)
+            r'setblocking', *args, **kwargs
+        )
 
     def settimeout(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'settimeout', *args, **kwargs)
+            r'settimeout', *args, **kwargs
+        )
 
     def gettimeout(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'gettimeout', *args, **kwargs)
+            r'gettimeout', *args, **kwargs
+        )
 
     def setsockopt(self, *args, **kwargs):
         return object.__getattribute__(self, r'_observedcall')(
-            r'setsockopt', *args, **kwargs)
+            r'setsockopt', *args, **kwargs
+        )
+
 
 class baseproxyobserver(object):
     def _writedata(self, data):
@@ -732,8 +796,9 @@
             if self.logdataapis:
                 self.fh.write(': %s\n' % stringutil.escapestr(data))
             else:
-                self.fh.write('%s>     %s\n'
-                              % (self.name, stringutil.escapestr(data)))
+                self.fh.write(
+                    '%s>     %s\n' % (self.name, stringutil.escapestr(data))
+                )
             self.fh.flush()
             return
 
@@ -743,14 +808,18 @@
 
         lines = data.splitlines(True)
         for line in lines:
-            self.fh.write('%s>     %s\n'
-                          % (self.name, stringutil.escapestr(line)))
+            self.fh.write(
+                '%s>     %s\n' % (self.name, stringutil.escapestr(line))
+            )
         self.fh.flush()
 
+
 class fileobjectobserver(baseproxyobserver):
     """Logs file object activity."""
-    def __init__(self, fh, name, reads=True, writes=True, logdata=False,
-                 logdataapis=True):
+
+    def __init__(
+        self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
+    ):
         self.fh = fh
         self.name = name
         self.logdata = logdata
@@ -791,8 +860,9 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> readinto(%d) -> %r' % (self.name, len(dest),
-                                                      res))
+            self.fh.write(
+                '%s> readinto(%d) -> %r' % (self.name, len(dest), res)
+            )
 
         data = dest[0:res] if res is not None else b''
 
@@ -829,8 +899,9 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> bufferedread(%d) -> %d' % (
-                self.name, size, len(res)))
+            self.fh.write(
+                '%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
+            )
 
         self._writedata(res)
 
@@ -839,23 +910,42 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> bufferedreadline() -> %d' % (
-                self.name, len(res)))
+            self.fh.write(
+                '%s> bufferedreadline() -> %d' % (self.name, len(res))
+            )
 
         self._writedata(res)
 
-def makeloggingfileobject(logh, fh, name, reads=True, writes=True,
-                          logdata=False, logdataapis=True):
+
+def makeloggingfileobject(
+    logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
+):
     """Turn a file object into a logging file object."""
 
-    observer = fileobjectobserver(logh, name, reads=reads, writes=writes,
-                                  logdata=logdata, logdataapis=logdataapis)
+    observer = fileobjectobserver(
+        logh,
+        name,
+        reads=reads,
+        writes=writes,
+        logdata=logdata,
+        logdataapis=logdataapis,
+    )
     return fileobjectproxy(fh, observer)
 
+
 class socketobserver(baseproxyobserver):
     """Logs socket activity."""
-    def __init__(self, fh, name, reads=True, writes=True, states=True,
-                 logdata=False, logdataapis=True):
+
+    def __init__(
+        self,
+        fh,
+        name,
+        reads=True,
+        writes=True,
+        states=True,
+        logdata=False,
+        logdataapis=True,
+    ):
         self.fh = fh
         self.name = name
         self.reads = reads
@@ -868,16 +958,16 @@
         if not self.states:
             return
 
-        self.fh.write('%s> makefile(%r, %r)\n' % (
-            self.name, mode, bufsize))
+        self.fh.write('%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
 
     def recv(self, res, size, flags=0):
         if not self.reads:
             return
 
         if self.logdataapis:
-            self.fh.write('%s> recv(%d, %d) -> %d' % (
-                self.name, size, flags, len(res)))
+            self.fh.write(
+                '%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
+            )
         self._writedata(res)
 
     def recvfrom(self, res, size, flags=0):
@@ -885,8 +975,10 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> recvfrom(%d, %d) -> %d' % (
-                self.name, size, flags, len(res[0])))
+            self.fh.write(
+                '%s> recvfrom(%d, %d) -> %d'
+                % (self.name, size, flags, len(res[0]))
+            )
 
         self._writedata(res[0])
 
@@ -895,18 +987,21 @@
             return
 
         if self.logdataapis:
-            self.fh.write('%s> recvfrom_into(%d, %d) -> %d' % (
-                self.name, size, flags, res[0]))
-
-        self._writedata(buf[0:res[0]])
+            self.fh.write(
+                '%s> recvfrom_into(%d, %d) -> %d'
+                % (self.name, size, flags, res[0])
+            )
+
+        self._writedata(buf[0 : res[0]])
 
     def recv_into(self, res, buf, size=0, flags=0):
         if not self.reads:
             return
 
         if self.logdataapis:
-            self.fh.write('%s> recv_into(%d, %d) -> %d' % (
-                self.name, size, flags, res))
+            self.fh.write(
+                '%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
+            )
 
         self._writedata(buf[0:res])
 
@@ -914,8 +1009,9 @@
         if not self.writes:
             return
 
-        self.fh.write('%s> send(%d, %d) -> %d' % (
-            self.name, len(data), flags, len(res)))
+        self.fh.write(
+            '%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
+        )
         self._writedata(data)
 
     def sendall(self, res, data, flags=0):
@@ -924,8 +1020,7 @@
 
         if self.logdataapis:
             # Returns None on success. So don't bother reporting return value.
-            self.fh.write('%s> sendall(%d, %d)' % (
-                self.name, len(data), flags))
+            self.fh.write('%s> sendall(%d, %d)' % (self.name, len(data), flags))
 
         self._writedata(data)
 
@@ -939,8 +1034,10 @@
             flags = 0
 
         if self.logdataapis:
-            self.fh.write('%s> sendto(%d, %d, %r) -> %d' % (
-                self.name, len(data), flags, address, res))
+            self.fh.write(
+                '%s> sendto(%d, %d, %r) -> %d'
+                % (self.name, len(data), flags, address, res)
+            )
 
         self._writedata(data)
 
@@ -966,26 +1063,46 @@
         if not self.states:
             return
 
-        self.fh.write('%s> setsockopt(%r, %r, %r) -> %r\n' % (
-            self.name, level, optname, value, res))
-
-def makeloggingsocket(logh, fh, name, reads=True, writes=True, states=True,
-                      logdata=False, logdataapis=True):
+        self.fh.write(
+            '%s> setsockopt(%r, %r, %r) -> %r\n'
+            % (self.name, level, optname, value, res)
+        )
+
+
+def makeloggingsocket(
+    logh,
+    fh,
+    name,
+    reads=True,
+    writes=True,
+    states=True,
+    logdata=False,
+    logdataapis=True,
+):
     """Turn a socket into a logging socket."""
 
-    observer = socketobserver(logh, name, reads=reads, writes=writes,
-                              states=states, logdata=logdata,
-                              logdataapis=logdataapis)
+    observer = socketobserver(
+        logh,
+        name,
+        reads=reads,
+        writes=writes,
+        states=states,
+        logdata=logdata,
+        logdataapis=logdataapis,
+    )
     return socketproxy(fh, observer)
 
+
 def version():
     """Return version information if available."""
     try:
         from . import __version__
+
         return __version__.version
     except ImportError:
         return 'unknown'
 
+
 def versiontuple(v=None, n=4):
     """Parses a Mercurial version string into an N-tuple.
 
@@ -1068,15 +1185,18 @@
     if n == 4:
         return (vints[0], vints[1], vints[2], extra)
 
+
 def cachefunc(func):
     '''cache the result of function calls'''
     # XXX doesn't handle keywords args
     if func.__code__.co_argcount == 0:
         cache = []
+
         def f():
             if len(cache) == 0:
                 cache.append(func())
             return cache[0]
+
         return f
     cache = {}
     if func.__code__.co_argcount == 1:
@@ -1086,7 +1206,9 @@
             if arg not in cache:
                 cache[arg] = func(arg)
             return cache[arg]
+
     else:
+
         def f(*args):
             if args not in cache:
                 cache[args] = func(*args)
@@ -1094,6 +1216,7 @@
 
     return f
 
+
 class cow(object):
     """helper class to make copy-on-write easier
 
@@ -1112,6 +1235,7 @@
         self._copied = getattr(self, '_copied', 0) + 1
         return self
 
+
 class sortdict(collections.OrderedDict):
     '''a simple sorted dictionary
 
@@ -1137,6 +1261,7 @@
             for k, v in src:
                 self[k] = v
 
+
 class cowdict(cow, dict):
     """copy-on-write dict
 
@@ -1163,14 +1288,17 @@
     True
     """
 
+
 class cowsortdict(cow, sortdict):
     """copy-on-write sortdict
 
     Be sure to call d = d.preparewrite() before writing to d.
     """
 
+
 class transactional(object):
     """Base class for making a transactional type into a context manager."""
+
     __metaclass__ = abc.ABCMeta
 
     @abc.abstractmethod
@@ -1194,6 +1322,7 @@
         finally:
             self.release()
 
+
 @contextlib.contextmanager
 def acceptintervention(tr=None):
     """A context manager that closes the transaction on InterventionRequired
@@ -1212,16 +1341,19 @@
     finally:
         tr.release()
 
+
 @contextlib.contextmanager
 def nullcontextmanager():
     yield
 
+
 class _lrucachenode(object):
     """A node in a doubly linked list.
 
     Holds a reference to nodes on either side as well as a key-value
     pair for the dictionary entry.
     """
+
     __slots__ = (r'next', r'prev', r'key', r'value', r'cost')
 
     def __init__(self):
@@ -1238,6 +1370,7 @@
         self.value = None
         self.cost = 0
 
+
 class lrucachedict(object):
     """Dict that caches most recent accesses and sets.
 
@@ -1260,6 +1393,7 @@
     to e.g. set a max memory limit and associate an estimated bytes size
     cost to each item in the cache. By default, no maximum cost is enforced.
     """
+
     def __init__(self, max, maxcost=0):
         self._cache = {}
 
@@ -1530,11 +1664,13 @@
             n.markempty()
             n = n.prev
 
+
 def lrucachefunc(func):
     '''cache most recent results of function calls'''
     cache = {}
     order = collections.deque()
     if func.__code__.co_argcount == 1:
+
         def f(arg):
             if arg not in cache:
                 if len(cache) > 20:
@@ -1544,7 +1680,9 @@
                 order.remove(arg)
             order.append(arg)
             return cache[arg]
+
     else:
+
         def f(*args):
             if args not in cache:
                 if len(cache) > 20:
@@ -1557,10 +1695,12 @@
 
     return f
 
+
 class propertycache(object):
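+    # A non-data descriptor: the first access computes the value and
+    # caches it in the instance __dict__, which shadows this descriptor
+    # on subsequent lookups.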
     def __init__(self, func):
         self.func = func
         self.name = func.__name__
+
     def __get__(self, obj, type=None):
         result = self.func(obj)
         self.cachevalue(obj, result)
@@ -1570,15 +1710,18 @@
         # __dict__ assignment required to bypass __setattr__ (eg: repoview)
         obj.__dict__[self.name] = value
 
+
 def clearcachedproperty(obj, prop):
     '''clear a cached property value, if one has been set'''
     prop = pycompat.sysstr(prop)
     if prop in obj.__dict__:
         del obj.__dict__[prop]
 
+
 def increasingchunks(source, min=1024, max=65536):
     '''return no less than min bytes per chunk while data remains,
     doubling min after each chunk until it reaches max'''
+
     def log2(x):
         if not x:
             return 0
@@ -1607,12 +1750,15 @@
     if buf:
         yield ''.join(buf)
 
+
 def always(fn):
     return True
 
+
 def never(fn):
     return False
 
+
 def nogc(func):
     """disable garbage collector
 
@@ -1626,6 +1772,7 @@
     This garbage collector issue has been fixed in 2.7, but it still
     affects CPython's performance.
     """
+
     def wrapper(*args, **kwargs):
         gcenabled = gc.isenabled()
         gc.disable()
@@ -1634,12 +1781,15 @@
         finally:
             if gcenabled:
                 gc.enable()
+
     return wrapper
 
+
 if pycompat.ispypy:
     # PyPy runs slower with gc disabled
     nogc = lambda x: x
 
+
 def pathto(root, n1, n2):
     '''return the relative path from one place to another.
     root should use os.sep to separate directories
@@ -1666,6 +1816,7 @@
     b.reverse()
     return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
 
+
 # the location of data files matching the source code
 if procutil.mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
     # executable version (py2exe) doesn't support __file__
@@ -1675,8 +1826,10 @@
 
 i18n.setdatapath(datapath)
 
+
 def checksignature(func):
     '''wrap a function with code to check for calling errors'''
+
     def check(*args, **kwargs):
         try:
             return func(*args, **kwargs)
@@ -1687,6 +1840,7 @@
 
     return check
 
+
 # a whitelist of known filesystems where hardlink works reliably
 _hardlinkfswhitelist = {
     'apfs',
@@ -1704,6 +1858,7 @@
     'zfs',
 }
 
+
 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
     '''copy a file, preserving mode and optionally other stat info like
     atime/mtime
@@ -1734,7 +1889,7 @@
             oslink(src, dest)
             return
         except (IOError, OSError):
-            pass # fall back to normal copy
+            pass  # fall back to normal copy
     if os.path.islink(src):
         os.symlink(os.readlink(src), dest)
         # copytime is ignored for symlinks, but in general copytime isn't needed
@@ -1752,11 +1907,13 @@
                     if newstat.isambig(oldstat):
                         # stat of copied file is ambiguous to original one
                         advanced = (
-                            oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
+                            oldstat.stat[stat.ST_MTIME] + 1
+                        ) & 0x7FFFFFFF
                         os.utime(dest, (advanced, advanced))
         except shutil.Error as inst:
             raise error.Abort(str(inst))
 
+
 def copyfiles(src, dst, hardlink=None, progress=None):
     """Copy a directory tree using hardlinks if possible."""
     num = 0
@@ -1767,8 +1924,9 @@
 
     if os.path.isdir(src):
         if hardlink is None:
-            hardlink = (os.stat(src).st_dev ==
-                        os.stat(os.path.dirname(dst)).st_dev)
+            hardlink = (
+                os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
+            )
         settopic()
         os.mkdir(dst)
         for name, kind in listdir(src):
@@ -1778,8 +1936,10 @@
             num += n
     else:
         if hardlink is None:
-            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
-                        os.stat(os.path.dirname(dst)).st_dev)
+            hardlink = (
+                os.stat(os.path.dirname(src)).st_dev
+                == os.stat(os.path.dirname(dst)).st_dev
+            )
         settopic()
 
         if hardlink:
@@ -1796,12 +1956,34 @@
 
     return hardlink, num
 
+
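+# file names Windows reserves for legacy DOS devices, in any directory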
 _winreservednames = {
-    'con', 'prn', 'aux', 'nul',
-    'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
-    'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
+    'con',
+    'prn',
+    'aux',
+    'nul',
+    'com1',
+    'com2',
+    'com3',
+    'com4',
+    'com5',
+    'com6',
+    'com7',
+    'com8',
+    'com9',
+    'lpt1',
+    'lpt2',
+    'lpt3',
+    'lpt4',
+    'lpt5',
+    'lpt6',
+    'lpt7',
+    'lpt8',
+    'lpt9',
 }
 _winreservedchars = ':*?"<>|'
+
+
 def checkwinfilename(path):
     r'''Check that the base-relative path is a valid filename on Windows.
     Returns None if the path is ok, or a UI string describing the problem.
@@ -1835,19 +2017,27 @@
             continue
         for c in _filenamebytestr(n):
             if c in _winreservedchars:
-                return _("filename contains '%s', which is reserved "
-                         "on Windows") % c
+                return (
+                    _("filename contains '%s', which is reserved " "on Windows")
+                    % c
+                )
             if ord(c) <= 31:
-                return _("filename contains '%s', which is invalid "
-                         "on Windows") % stringutil.escapestr(c)
+                return _(
+                    "filename contains '%s', which is invalid " "on Windows"
+                ) % stringutil.escapestr(c)
         base = n.split('.')[0]
         if base and base.lower() in _winreservednames:
-            return _("filename contains '%s', which is reserved "
-                     "on Windows") % base
+            return (
+                _("filename contains '%s', which is reserved " "on Windows")
+                % base
+            )
         t = n[-1:]
         if t in '. ' and n not in '..':
-            return _("filename ends with '%s', which is not allowed "
-                     "on Windows") % t
+            return (
+                _("filename ends with '%s', which is not allowed " "on Windows")
+                % t
+            )
+
 
 if pycompat.iswindows:
     checkosfilename = checkwinfilename
@@ -1859,6 +2049,7 @@
 if safehasattr(time, "perf_counter"):
     timer = time.perf_counter
 
+
 def makelock(info, pathname):
     """Create a lock file atomically if possible
 
@@ -1870,7 +2061,7 @@
     except OSError as why:
         if why.errno == errno.EEXIST:
             raise
-    except AttributeError: # no symlink in os
+    except AttributeError:  # no symlink in os
         pass
 
     flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
@@ -1878,17 +2069,19 @@
     os.write(ld, info)
     os.close(ld)
 
+
 def readlock(pathname):
     try:
         return readlink(pathname)
     except OSError as why:
         if why.errno not in (errno.EINVAL, errno.ENOSYS):
             raise
-    except AttributeError: # no symlink in os
+    except AttributeError:  # no symlink in os
         pass
     with posixfile(pathname, 'rb') as fp:
         return fp.read()
 
+
 def fstat(fp):
     '''stat file object that may not have fileno method.'''
     try:
@@ -1896,8 +2089,10 @@
     except AttributeError:
         return os.stat(fp.name)
 
+
 # File system features
 
+
 def fscasesensitive(path):
     """
     Return true if the given path is on a case-sensitive filesystem
@@ -1911,7 +2106,7 @@
     if b == b2:
         b2 = b.lower()
         if b == b2:
-            return True # no evidence against case sensitivity
+            return True  # no evidence against case sensitivity
     p2 = os.path.join(d, b2)
     try:
         s2 = os.lstat(p2)
@@ -1921,12 +2116,15 @@
     except OSError:
         return True
 
+
 try:
     import re2
+
     _re2 = None
 except ImportError:
     _re2 = False
 
+
 class _re(object):
     def _checkre2(self):
         global _re2
@@ -1970,9 +2168,12 @@
         else:
             return remod.escape
 
+
 re = _re()
 
 _fspathcache = {}
+
+
 def fspath(name, root):
     '''Get name in the case stored in the filesystem
 
@@ -1983,6 +2184,7 @@
 
     The root should be normcase-ed, too.
     '''
+
     def _makefspathcacheentry(dir):
         return dict((normcase(n), n) for n in os.listdir(dir))
 
@@ -1990,7 +2192,7 @@
     if pycompat.osaltsep:
         seps = seps + pycompat.osaltsep
     # Protect backslashes. This gets silly very quickly.
-    seps.replace('\\','\\\\')
+    seps.replace('\\', '\\\\')
     pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
     dir = os.path.normpath(root)
     result = []
@@ -2015,6 +2217,7 @@
 
     return ''.join(result)
 
+
 def checknlink(testfile):
     '''check whether hardlink count reporting works properly'''
 
@@ -2022,8 +2225,11 @@
     # work around issue2543 (or testfile may get lost on Samba shares)
     f1, f2, fp = None, None, None
     try:
-        fd, f1 = pycompat.mkstemp(prefix='.%s-' % os.path.basename(testfile),
-                                  suffix='1~', dir=os.path.dirname(testfile))
+        fd, f1 = pycompat.mkstemp(
+            prefix='.%s-' % os.path.basename(testfile),
+            suffix='1~',
+            dir=os.path.dirname(testfile),
+        )
         os.close(fd)
         f2 = '%s2~' % f1[:-2]
 
@@ -2044,10 +2250,15 @@
             except OSError:
                 pass
 
+
 def endswithsep(path):
     '''Check path ends with os.sep or os.altsep.'''
-    return (path.endswith(pycompat.ossep)
-            or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
+    return (
+        path.endswith(pycompat.ossep)
+        or pycompat.osaltsep
+        and path.endswith(pycompat.osaltsep)
+    )
+
 
 def splitpath(path):
     '''Split path by os.sep.
@@ -2057,6 +2268,7 @@
     function if need.'''
     return path.split(pycompat.ossep)
 
+
 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
     """Create a temporary file with the same contents from name
 
@@ -2091,7 +2303,7 @@
             ofp.write(chunk)
         ifp.close()
         ofp.close()
-    except: # re-raises
+    except:  # re-raises
         try:
             os.unlink(temp)
         except OSError:
@@ -2099,6 +2311,7 @@
         raise
     return temp
 
+
 class filestat(object):
     """help to exactly detect change of a file
 
@@ -2106,6 +2319,7 @@
     exists. Otherwise, it is None. This can avoid preparative
     'exists()' examination on client side of this class.
     """
+
     def __init__(self, stat):
         self.stat = stat
 
@@ -2131,9 +2345,11 @@
             # if ambiguity between stat of new and old file is
             # avoided, comparison of size, ctime and mtime is enough
             # to exactly detect change of a file regardless of platform
-            return (self.stat.st_size == old.stat.st_size and
-                    self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME] and
-                    self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME])
+            return (
+                self.stat.st_size == old.stat.st_size
+                and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
+                and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
+            )
         except AttributeError:
             pass
         try:
@@ -2172,7 +2388,7 @@
         S[n].mtime", even if size of a file isn't changed.
         """
         try:
-            return (self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME])
+            return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
         except AttributeError:
             return False
 
@@ -2187,7 +2403,7 @@
 
         Otherwise, this returns True, as "ambiguity is avoided".
         """
-        advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7fffffff
+        advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
         try:
             os.utime(path, (advanced, advanced))
         except OSError as inst:
@@ -2201,6 +2417,7 @@
     def __ne__(self, other):
         return not self == other
 
+
 class atomictempfile(object):
     '''writable file object that atomically updates a file
 
@@ -2214,11 +2431,15 @@
     useful only if target file is guarded by any lock (e.g. repo.lock
     or repo.wlock).
     '''
+
     def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
-        self.__name = name      # permanent name
-        self._tempname = mktempcopy(name, emptyok=('w' in mode),
-                                    createmode=createmode,
-                                    enforcewritable=('w' in mode))
+        self.__name = name  # permanent name
+        self._tempname = mktempcopy(
+            name,
+            emptyok=('w' in mode),
+            createmode=createmode,
+            enforcewritable=('w' in mode),
+        )
 
         self._fp = posixfile(self._tempname, mode)
         self._checkambig = checkambig
@@ -2240,7 +2461,7 @@
                 newstat = filestat.frompath(filename)
                 if newstat.isambig(oldstat):
                     # stat of changed file is ambiguous to original one
-                    advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7fffffff
+                    advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
                     os.utime(filename, (advanced, advanced))
             else:
                 rename(self._tempname, filename)
@@ -2254,7 +2475,7 @@
             self._fp.close()
 
     def __del__(self):
-        if safehasattr(self, '_fp'): # constructor actually did something
+        if safehasattr(self, '_fp'):  # constructor actually did something
             self.discard()
 
     def __enter__(self):
@@ -2266,6 +2487,7 @@
         else:
             self.close()
 
+
 def unlinkpath(f, ignoremissing=False, rmdir=True):
     """unlink and remove the directory if it is empty"""
     if ignoremissing:
@@ -2279,6 +2501,7 @@
         except OSError:
             pass
 
+
 def tryunlink(f):
     """Attempt to remove a file, ignoring ENOENT errors."""
     try:
@@ -2287,6 +2510,7 @@
         if e.errno != errno.ENOENT:
             raise
 
+
 def makedirs(name, mode=None, notindexed=False):
     """recursive directory creation with parent mode inheritance
 
@@ -2315,27 +2539,32 @@
     if mode is not None:
         os.chmod(name, mode)
 
+
 def readfile(path):
     with open(path, 'rb') as fp:
         return fp.read()
 
+
 def writefile(path, text):
     with open(path, 'wb') as fp:
         fp.write(text)
 
+
 def appendfile(path, text):
     with open(path, 'ab') as fp:
         fp.write(text)
 
+
 class chunkbuffer(object):
     """Allow arbitrary sized chunks of data to be efficiently read from an
     iterator over chunks of arbitrary size."""
 
     def __init__(self, in_iter):
         """in_iter is the iterator that's iterating over the input chunks."""
+
         def splitbig(chunks):
             for chunk in chunks:
-                if len(chunk) > 2**20:
+                if len(chunk) > 2 ** 20:
                     pos = 0
                     while pos < len(chunk):
                         end = pos + 2 ** 18
@@ -2343,6 +2572,7 @@
                         pos = end
                 else:
                     yield chunk
+
         self.iter = splitbig(in_iter)
         self._queue = collections.deque()
         self._chunkoffset = 0
@@ -2361,7 +2591,7 @@
         while left > 0:
             # refill the queue
             if not queue:
-                target = 2**18
+                target = 2 ** 18
                 for chunk in self.iter:
                     queue.append(chunk)
                     target -= len(chunk)
@@ -2401,12 +2631,13 @@
 
             # Partial chunk needed.
             else:
-                buf.append(chunk[offset:offset + left])
+                buf.append(chunk[offset : offset + left])
                 self._chunkoffset += left
                 left -= chunkremaining
 
         return ''.join(buf)
 
+
 def filechunkiter(f, size=131072, limit=None):
     """Create a generator that produces the data in the file size
     (default 131072) bytes at a time, up to optional limit (default is
@@ -2428,6 +2659,7 @@
             limit -= len(s)
         yield s
 
+
 class cappedreader(object):
     """A file object proxy that allows reading up to N bytes.
 
@@ -2439,6 +2671,7 @@
     in addition to I/O that is performed by this instance. If there is,
     state tracking will get out of sync and unexpected results will ensue.
     """
+
     def __init__(self, fh, limit):
         """Allow reading up to <limit> bytes from <fh>."""
         self._fh = fh
@@ -2462,9 +2695,10 @@
         if res is None:
             return None
 
-        b[0:len(res)] = res
+        b[0 : len(res)] = res
         return len(res)
 
+
 def unitcountfn(*unittable):
     '''return a function that renders a readable count of some quantity'''
 
@@ -2476,6 +2710,7 @@
 
     return go
 
+
 def processlinerange(fromline, toline):
     """Check that linerange <fromline>:<toline> makes sense and return a
     0-based range.
@@ -2497,6 +2732,7 @@
         raise error.ParseError(_("fromline must be strictly positive"))
     return fromline - 1, toline
 
+
 bytecount = unitcountfn(
     (100, 1 << 30, _('%.0f GB')),
     (10, 1 << 30, _('%.1f GB')),
@@ -2508,7 +2744,8 @@
     (10, 1 << 10, _('%.1f KB')),
     (1, 1 << 10, _('%.2f KB')),
     (1, 1, _('%.0f bytes')),
-    )
+)
+
 
 class transformingwriter(object):
     """Writable file wrapper to transform data by function"""
@@ -2526,20 +2763,25 @@
     def write(self, data):
         return self._fp.write(self._encode(data))
 
+
 # Matches a single EOL which can either be a CRLF where repeated CR
 # are removed or a LF. We do not care about old Macintosh files, so a
 # stray CR is an error.
 _eolre = remod.compile(br'\r*\n')
 
+
 def tolf(s):
     return _eolre.sub('\n', s)
 
+
 def tocrlf(s):
     return _eolre.sub('\r\n', s)
 
+
 def _crlfwriter(fp):
     return transformingwriter(fp, tocrlf)
 
+
 if pycompat.oslinesep == '\r\n':
     tonativeeol = tocrlf
     fromnativeeol = tolf
@@ -2549,8 +2791,10 @@
     fromnativeeol = pycompat.identity
     nativeeolwriter = pycompat.identity
 
-if (pyplatform.python_implementation() == 'CPython' and
-    sys.version_info < (3, 0)):
+if pyplatform.python_implementation() == 'CPython' and sys.version_info < (
+    3,
+    0,
+):
     # There is an issue in CPython that some IO methods do not handle EINTR
     # correctly. The following table shows what CPython version (and functions)
     # are affected (buggy: has the EINTR bug, okay: otherwise):
@@ -2579,6 +2823,7 @@
         # fp.readline deals with EINTR correctly, use it as a workaround.
         def _safeiterfile(fp):
             return iter(fp.readline, '')
+
     else:
         # fp.read* are broken too, manually deal with EINTR in a stupid way.
         # note: this may block longer than necessary because of bufsize.
@@ -2616,19 +2861,24 @@
             return fp
         else:
             return _safeiterfile(fp)
+
+
 else:
     # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
     def iterfile(fp):
         return fp
 
+
 def iterlines(iterator):
     for chunk in iterator:
         for line in chunk.splitlines():
             yield line
 
+
 def expandpath(path):
     return os.path.expanduser(os.path.expandvars(path))
 
+
 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
     """Return the result of interpolating items in the mapping into string s.
 
@@ -2654,6 +2904,7 @@
     r = remod.compile(br'%s(%s)' % (prefix, patterns))
     return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
 
+
 def getport(port):
     """Return the port for a given network service.
 
@@ -2669,8 +2920,10 @@
     try:
         return socket.getservbyname(pycompat.sysstr(port))
     except socket.error:
-        raise error.Abort(_("no port number associated with service '%s'")
-                          % port)
+        raise error.Abort(
+            _("no port number associated with service '%s'") % port
+        )
+
 
 class url(object):
     r"""Reliable URL parser.
@@ -2822,22 +3075,27 @@
                     self.host = None
 
             # Don't split on colons in IPv6 addresses without ports
-            if (self.host and ':' in self.host and
-                not (self.host.startswith('[') and self.host.endswith(']'))):
+            if (
+                self.host
+                and ':' in self.host
+                and not (self.host.startswith('[') and self.host.endswith(']'))
+            ):
                 self._hostport = self.host
                 self.host, self.port = self.host.rsplit(':', 1)
                 if not self.host:
                     self.host = None
 
-            if (self.host and self.scheme == 'file' and
-                self.host not in ('localhost', '127.0.0.1', '[::1]')):
+            if (
+                self.host
+                and self.scheme == 'file'
+                and self.host not in ('localhost', '127.0.0.1', '[::1]')
+            ):
                 raise error.Abort(_('file:// URLs can only refer to localhost'))
 
         self.path = path
 
         # leave the query string escaped
-        for a in ('user', 'passwd', 'host', 'port',
-                  'path', 'fragment'):
+        for a in ('user', 'passwd', 'host', 'port', 'path', 'fragment'):
             v = getattr(self, a)
             if v is not None:
                 setattr(self, a, urlreq.unquote(v))
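
The two multi-line conditionals black rewrapped above are the subtle part of url.__init__: a ':' in the host only acts as a port separator when the host is not a bracketed IPv6 literal, and file:// URLs must point at localhost. A sketch of the resulting behavior, assuming the rest of the parser as in this module (values are bytes; reprs shown unprefixed as elsewhere in this file):

    >>> u = url(b'https://[::1]:3000/file')
    >>> u.host, u.port
    ('[::1]', '3000')
    >>> url(b'file://example.com/tmp/foo')
    Traceback (most recent call last):
        ...
    Abort: file:// URLs can only refer to localhost
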
@@ -2845,8 +3103,16 @@
     @encoding.strmethod
     def __repr__(self):
         attrs = []
-        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
-                  'query', 'fragment'):
+        for a in (
+            'scheme',
+            'user',
+            'passwd',
+            'host',
+            'port',
+            'path',
+            'query',
+            'fragment',
+        ):
             v = getattr(self, a)
             if v is not None:
                 attrs.append('%s: %r' % (a, pycompat.bytestr(v)))
@@ -2897,8 +3163,11 @@
         s = self.scheme + ':'
         if self.user or self.passwd or self.host:
             s += '//'
-        elif self.scheme and (not self.path or self.path.startswith('/')
-                              or hasdriveletter(self.path)):
+        elif self.scheme and (
+            not self.path
+            or self.path.startswith('/')
+            or hasdriveletter(self.path)
+        ):
             s += '//'
             if hasdriveletter(self.path):
                 s += '/'
@@ -2944,18 +3213,17 @@
         # URIs must not contain credentials. The host is passed in the
         # URIs list because Python < 2.4.3 uses only that to search for
         # a password.
-        return (s, (None, (s, self.host),
-                    self.user, self.passwd or ''))
+        return (s, (None, (s, self.host), self.user, self.passwd or ''))
 
     def isabs(self):
         if self.scheme and self.scheme != 'file':
-            return True # remote URL
+            return True  # remote URL
         if hasdriveletter(self.path):
-            return True # absolute for our purposes - can't be joined()
+            return True  # absolute for our purposes - can't be joined()
         if self.path.startswith(br'\\'):
-            return True # Windows UNC path
+            return True  # Windows UNC path
         if self.path.startswith('/'):
-            return True # POSIX-style
+            return True  # POSIX-style
         return False
 
     def localpath(self):
@@ -2965,26 +3233,32 @@
             # letters to paths with drive letters.
             if hasdriveletter(self._hostport):
                 path = self._hostport + '/' + self.path
-            elif (self.host is not None and self.path
-                  and not hasdriveletter(path)):
+            elif (
+                self.host is not None and self.path and not hasdriveletter(path)
+            ):
                 path = '/' + path
             return path
         return self._origpath
 
     def islocal(self):
         '''whether localpath will return something that posixfile can open'''
-        return (not self.scheme or self.scheme == 'file'
-                or self.scheme == 'bundle')
+        return (
+            not self.scheme or self.scheme == 'file' or self.scheme == 'bundle'
+        )
+
 
 def hasscheme(path):
     return bool(url(path).scheme)
 
+
 def hasdriveletter(path):
     return path and path[1:2] == ':' and path[0:1].isalpha()
 
+
 def urllocalpath(path):
     return url(path, parsequery=False, parsefragment=False).localpath()
 
+
 def checksafessh(path):
     """check if a path / url is a potentially unsafe ssh exploit (SEC)
 
@@ -2997,8 +3271,10 @@
     """
     path = urlreq.unquote(path)
     if path.startswith('ssh://-') or path.startswith('svn+ssh://-'):
-        raise error.Abort(_('potentially unsafe url: %r') %
-                          (pycompat.bytestr(path),))
+        raise error.Abort(
+            _('potentially unsafe url: %r') % (pycompat.bytestr(path),)
+        )
+
 
 def hidepassword(u):
     '''hide user credential in a url string'''
@@ -3007,12 +3283,14 @@
         u.passwd = '***'
     return bytes(u)
 
+
 def removeauth(u):
     '''remove all authentication information from a url string'''
     u = url(u)
     u.user = u.passwd = None
     return bytes(u)
 
+
 timecount = unitcountfn(
     (1, 1e3, _('%.0f s')),
     (100, 1, _('%.1f s')),
@@ -3027,7 +3305,8 @@
     (100, 0.000000001, _('%.1f ns')),
     (10, 0.000000001, _('%.2f ns')),
     (1, 0.000000001, _('%.3f ns')),
-    )
+)
+
 
 @attr.s
 class timedcmstats(object):
@@ -3047,6 +3326,7 @@
 
     __str__ = encoding.strmethod(__bytes__)
 
+
 @contextlib.contextmanager
 def timedcm(whencefmt, *whenceargs):
     """A context manager that produces timing information for a given context.
@@ -3066,8 +3346,10 @@
         timing_stats.elapsed = timer() - timing_stats.start
         timedcm._nested -= 1
 
+
 timedcm._nested = 0
 
+
 def timed(func):
     '''Report the execution time of a function call to stderr.
 
@@ -3083,14 +3365,29 @@
         with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
             result = func(*args, **kwargs)
         stderr = procutil.stderr
-        stderr.write('%s%s: %s\n' % (
-            ' ' * time_stats.level * 2, pycompat.bytestr(func.__name__),
-            time_stats))
+        stderr.write(
+            '%s%s: %s\n'
+            % (
+                ' ' * time_stats.level * 2,
+                pycompat.bytestr(func.__name__),
+                time_stats,
+            )
+        )
         return result
+
     return wrapper
 
-_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
-              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
+
+_sizeunits = (
+    ('m', 2 ** 20),
+    ('k', 2 ** 10),
+    ('g', 2 ** 30),
+    ('kb', 2 ** 10),
+    ('mb', 2 ** 20),
+    ('gb', 2 ** 30),
+    ('b', 1),
+)
+
 
 def sizetoint(s):
     '''Convert a space specifier to a byte count.
@@ -3106,11 +3403,12 @@
     try:
         for k, u in _sizeunits:
             if t.endswith(k):
-                return int(float(t[:-len(k)]) * u)
+                return int(float(t[: -len(k)]) * u)
         return int(t)
     except ValueError:
         raise error.ParseError(_("couldn't parse size: %s") % s)
 
+
 class hooks(object):
     '''A collection of hook functions that can be used to extend a
     function's behavior. Hooks are called in lexicographic order,
@@ -3129,6 +3427,7 @@
             results.append(hook(*args))
         return results
 
+
 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
     '''Yields lines for a nicely formatted stacktrace.
     Skips the 'skip' last entries, then return the last 'depth' entries.
@@ -3141,9 +3440,10 @@
 
     Not be used in production code but very convenient while developing.
     '''
-    entries = [(fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
-        for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
-        ][-depth:]
+    entries = [
+        (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
+        for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
+    ][-depth:]
     if entries:
         fnmax = max(len(entry[0]) for entry in entries)
         for fnln, func in entries:
@@ -3152,8 +3452,10 @@
             else:
                 yield line % (fnmax, fnln, func)
 
-def debugstacktrace(msg='stacktrace', skip=0,
-                    f=procutil.stderr, otherf=procutil.stdout, depth=0):
+
+def debugstacktrace(
+    msg='stacktrace', skip=0, f=procutil.stderr, otherf=procutil.stdout, depth=0
+):
     '''Writes a message to f (stderr) with a nicely formatted stacktrace.
     Skips the 'skip' entries closest to the call, then show 'depth' entries.
     By default it will flush stdout first.
@@ -3167,6 +3469,7 @@
         f.write(line)
     f.flush()
 
+
 class dirs(object):
     '''a multiset of directory names from a dirstate or manifest'''
 
@@ -3178,8 +3481,9 @@
                 if s[0] != skip:
                     addpath(f)
         elif skip is not None:
-            raise error.ProgrammingError("skip character is only supported "
-                                         "with a dict source")
+            raise error.ProgrammingError(
+                "skip character is only supported " "with a dict source"
+            )
         else:
             for f in map:
                 addpath(f)
@@ -3206,12 +3510,14 @@
     def __contains__(self, d):
         return d in self._dirs
 
+
 if safehasattr(parsers, 'dirs'):
     dirs = parsers.dirs
 
 if rustdirs is not None:
     dirs = rustdirs
 
+
 def finddirs(path):
     pos = path.rfind('/')
     while pos != -1:
@@ -3223,6 +3529,7 @@
 # convenient shortcut
 dst = debugstacktrace
 
+
 def safename(f, tag, ctx, others=None):
     """
     Generate a name that it is safe to rename f to in the given context.
@@ -3246,15 +3553,18 @@
         if fn not in ctx and fn not in others:
             return fn
 
+
 def readexactly(stream, n):
     '''read n bytes from stream.read and abort if less was available'''
     s = stream.read(n)
     if len(s) < n:
-        raise error.Abort(_("stream ended unexpectedly"
-                           " (got %d bytes, expected %d)")
-                          % (len(s), n))
+        raise error.Abort(
+            _("stream ended unexpectedly" " (got %d bytes, expected %d)")
+            % (len(s), n)
+        )
     return s
 
+
 def uvarintencode(value):
     """Encode an unsigned integer value to a varint.
 
@@ -3279,19 +3589,19 @@
     ProgrammingError: negative value for uvarint: -1
     """
     if value < 0:
-        raise error.ProgrammingError('negative value for uvarint: %d'
-                                     % value)
-    bits = value & 0x7f
+        raise error.ProgrammingError('negative value for uvarint: %d' % value)
+    bits = value & 0x7F
     value >>= 7
     bytes = []
     while value:
         bytes.append(pycompat.bytechr(0x80 | bits))
-        bits = value & 0x7f
+        bits = value & 0x7F
         value >>= 7
     bytes.append(pycompat.bytechr(bits))
 
     return ''.join(bytes)
 
+
 def uvarintdecodestream(fh):
     """Decode an unsigned variable length integer from a stream.
 
@@ -3320,7 +3630,7 @@
     shift = 0
     while True:
         byte = ord(readexactly(fh, 1))
-        result |= ((byte & 0x7f) << shift)
+        result |= (byte & 0x7F) << shift
         if not (byte & 0x80):
             return result
         shift += 7
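
That is the end of the util.py hunks. The uvarint pair they close with is a plain LEB128-style encoding: seven payload bits per byte, least-significant group first, with 0x80 set on every byte except the last. A round-trip sketch consistent with the two functions' doctests (io.BytesIO stands in for the stream argument):

    >>> import io
    >>> from mercurial import util
    >>> util.uvarintencode(511)
    '\xff\x03'
    >>> util.uvarintdecodestream(io.BytesIO(b'\xff\x03'))
    511
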
--- a/mercurial/utils/cborutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/cborutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -46,11 +46,14 @@
 
 # Indefinite types begin with their major type ORd with information value 31.
 BEGIN_INDEFINITE_BYTESTRING = struct.pack(
-    r'>B', MAJOR_TYPE_BYTESTRING << 5 | SUBTYPE_INDEFINITE)
+    r'>B', MAJOR_TYPE_BYTESTRING << 5 | SUBTYPE_INDEFINITE
+)
 BEGIN_INDEFINITE_ARRAY = struct.pack(
-    r'>B', MAJOR_TYPE_ARRAY << 5 | SUBTYPE_INDEFINITE)
+    r'>B', MAJOR_TYPE_ARRAY << 5 | SUBTYPE_INDEFINITE
+)
 BEGIN_INDEFINITE_MAP = struct.pack(
-    r'>B', MAJOR_TYPE_MAP << 5 | SUBTYPE_INDEFINITE)
+    r'>B', MAJOR_TYPE_MAP << 5 | SUBTYPE_INDEFINITE
+)
 
 ENCODED_LENGTH_1 = struct.Struct(r'>B')
 ENCODED_LENGTH_2 = struct.Struct(r'>BB')
@@ -62,6 +65,7 @@
 BREAK = b'\xff'
 BREAK_INT = 255
 
+
 def encodelength(majortype, length):
     """Obtain a value encoding the major type and its length."""
     if length < 24:
@@ -75,10 +79,12 @@
     else:
         return ENCODED_LENGTH_5.pack(majortype << 5 | 27, length)
 
+
 def streamencodebytestring(v):
     yield encodelength(MAJOR_TYPE_BYTESTRING, len(v))
     yield v
 
+
 def streamencodebytestringfromiter(it):
     """Convert an iterator of chunks to an indefinite bytestring.
 
@@ -93,6 +99,7 @@
 
     yield BREAK
 
+
 def streamencodeindefinitebytestring(source, chunksize=65536):
     """Given a large source buffer, emit as an indefinite length bytestring.
 
@@ -104,7 +111,7 @@
     l = len(source)
 
     while True:
-        chunk = source[i:i + chunksize]
+        chunk = source[i : i + chunksize]
         i += len(chunk)
 
         yield encodelength(MAJOR_TYPE_BYTESTRING, len(chunk))
@@ -115,6 +122,7 @@
 
     yield BREAK
 
+
 def streamencodeint(v):
     if v >= 18446744073709551616 or v < -18446744073709551616:
         raise ValueError('big integers not supported')
@@ -124,6 +132,7 @@
     else:
         yield encodelength(MAJOR_TYPE_NEGINT, abs(v) - 1)
 
+
 def streamencodearray(l):
     """Encode a known size iterable to an array."""
 
@@ -133,6 +142,7 @@
         for chunk in streamencode(i):
             yield chunk
 
+
 def streamencodearrayfromiter(it):
     """Encode an iterator of items to an indefinite length array."""
 
@@ -144,9 +154,11 @@
 
     yield BREAK
 
+
 def _mixedtypesortkey(v):
     return type(v).__name__, v
 
+
 def streamencodeset(s):
     # https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml defines
     # semantic tag 258 for finite sets.
@@ -155,6 +167,7 @@
     for chunk in streamencodearray(sorted(s, key=_mixedtypesortkey)):
         yield chunk
 
+
 def streamencodemap(d):
     """Encode dictionary to a generator.
 
@@ -162,13 +175,15 @@
     """
     yield encodelength(MAJOR_TYPE_MAP, len(d))
 
-    for key, value in sorted(d.iteritems(),
-                             key=lambda x: _mixedtypesortkey(x[0])):
+    for key, value in sorted(
+        d.iteritems(), key=lambda x: _mixedtypesortkey(x[0])
+    ):
         for chunk in streamencode(key):
             yield chunk
         for chunk in streamencode(value):
             yield chunk
 
+
 def streamencodemapfromiter(it):
     """Given an iterable of (key, value), encode to an indefinite length map."""
     yield BEGIN_INDEFINITE_MAP
@@ -181,14 +196,17 @@
 
     yield BREAK
 
+
 def streamencodebool(b):
     # major type 7, simple value 20 and 21.
     yield b'\xf5' if b else b'\xf4'
 
+
 def streamencodenone(v):
     # major type 7, simple value 22.
     yield b'\xf6'
 
+
 STREAM_ENCODERS = {
     bytes: streamencodebytestring,
     int: streamencodeint,
@@ -201,6 +219,7 @@
     type(None): streamencodenone,
 }
 
+
 def streamencode(v):
     """Encode a value in a streaming manner.
 
@@ -226,16 +245,23 @@
 
     return fn(v)
 
+
 class CBORDecodeError(Exception):
     """Represents an error decoding CBOR."""
 
+
 if sys.version_info.major >= 3:
+
     def _elementtointeger(b, i):
         return b[i]
+
+
 else:
+
     def _elementtointeger(b, i):
         return ord(b[i])
 
+
 STRUCT_BIG_UBYTE = struct.Struct(r'>B')
 STRUCT_BIG_USHORT = struct.Struct('>H')
 STRUCT_BIG_ULONG = struct.Struct('>L')
@@ -248,6 +274,7 @@
 SPECIAL_START_SET = 4
 SPECIAL_INDEFINITE_BREAK = 5
 
+
 def decodeitem(b, offset=0):
     """Decode a new CBOR value from a buffer at offset.
 
@@ -301,8 +328,9 @@
     elif majortype == MAJOR_TYPE_BYTESTRING:
         # Beginning of bytestrings are treated as uints in order to
         # decode their length, which may be indefinite.
-        complete, size, readcount = decodeuint(subtype, b, offset,
-                                               allowindefinite=True)
+        complete, size, readcount = decodeuint(
+            subtype, b, offset, allowindefinite=True
+        )
 
         # We don't know the size of the bytestring. It must be a definitive
         # length since the indefinite subtype would be encoded in the initial
@@ -314,7 +342,7 @@
         if size is not None:
             # And the data is available in the buffer.
             if offset + readcount + size <= len(b):
-                value = b[offset + readcount:offset + readcount + size]
+                value = b[offset + readcount : offset + readcount + size]
                 return True, value, readcount + size + 1, SPECIAL_NONE
 
             # And we need more data in order to return the bytestring.
@@ -367,15 +395,17 @@
             if offset + readcount >= len(b):
                 return False, None, -1, SPECIAL_NONE
 
-            complete, size, readcount2, special = decodeitem(b,
-                                                             offset + readcount)
+            complete, size, readcount2, special = decodeitem(
+                b, offset + readcount
+            )
 
             if not complete:
                 return False, None, readcount2, SPECIAL_NONE
 
             if special != SPECIAL_START_ARRAY:
-                raise CBORDecodeError('expected array after finite set '
-                                      'semantic tag')
+                raise CBORDecodeError(
+                    'expected array after finite set ' 'semantic tag'
+                )
 
             return True, size, readcount + readcount2 + 1, SPECIAL_START_SET
 
@@ -398,6 +428,7 @@
     else:
         assert False
 
+
 def decodeuint(subtype, b, offset=0, allowindefinite=False):
     """Decode an unsigned integer.
 
@@ -428,8 +459,9 @@
         else:
             raise CBORDecodeError('indefinite length uint not allowed here')
     elif subtype >= 28:
-        raise CBORDecodeError('unsupported subtype on integer type: %d' %
-                              subtype)
+        raise CBORDecodeError(
+            'unsupported subtype on integer type: %d' % subtype
+        )
 
     if subtype == 24:
         s = STRUCT_BIG_UBYTE
@@ -447,6 +479,7 @@
     else:
         return False, None, len(b) - offset - s.size
 
+
 class bytestringchunk(bytes):
     """Represents a chunk/segment in an indefinite length bytestring.
 
@@ -462,6 +495,7 @@
 
         return self
 
+
 class sansiodecoder(object):
     """A CBOR decoder that doesn't perform its own I/O.
 
@@ -606,32 +640,30 @@
                     self._decodedvalues.append(value)
 
                 elif special == SPECIAL_START_ARRAY:
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': [],
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': [],}
+                    )
                     self._state = self._STATE_WANT_ARRAY_VALUE
 
                 elif special == SPECIAL_START_MAP:
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': {},
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': {},}
+                    )
                     self._state = self._STATE_WANT_MAP_KEY
 
                 elif special == SPECIAL_START_SET:
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': set(),
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': set(),}
+                    )
                     self._state = self._STATE_WANT_SET_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
                     self._state = self._STATE_WANT_BYTESTRING_CHUNK_FIRST
 
                 else:
-                    raise CBORDecodeError('unhandled special state: %d' %
-                                          special)
+                    raise CBORDecodeError(
+                        'unhandled special state: %d' % special
+                    )
 
             # This value becomes an element of the current array.
             elif self._state == self._STATE_WANT_ARRAY_VALUE:
@@ -651,10 +683,9 @@
                     lastc['v'].append(newvalue)
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue,
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue,}
+                    )
 
                     # self._state doesn't need changed.
 
@@ -666,10 +697,9 @@
                     lastc['v'].append(newvalue)
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue}
+                    )
 
                     self._state = self._STATE_WANT_MAP_KEY
 
@@ -680,20 +710,23 @@
                     lastc['v'].append(newvalue)
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue,
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue,}
+                    )
 
                     self._state = self._STATE_WANT_SET_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
-                    raise CBORDecodeError('indefinite length bytestrings '
-                                          'not allowed as array values')
+                    raise CBORDecodeError(
+                        'indefinite length bytestrings '
+                        'not allowed as array values'
+                    )
 
                 else:
-                    raise CBORDecodeError('unhandled special item when '
-                                          'expecting array value: %d' % special)
+                    raise CBORDecodeError(
+                        'unhandled special item when '
+                        'expecting array value: %d' % special
+                    )
 
             # This value becomes the key of the current map instance.
             elif self._state == self._STATE_WANT_MAP_KEY:
@@ -702,18 +735,26 @@
                     self._state = self._STATE_WANT_MAP_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
-                    raise CBORDecodeError('indefinite length bytestrings '
-                                          'not allowed as map keys')
+                    raise CBORDecodeError(
+                        'indefinite length bytestrings '
+                        'not allowed as map keys'
+                    )
 
-                elif special in (SPECIAL_START_ARRAY, SPECIAL_START_MAP,
-                                 SPECIAL_START_SET):
-                    raise CBORDecodeError('collections not supported as map '
-                                          'keys')
+                elif special in (
+                    SPECIAL_START_ARRAY,
+                    SPECIAL_START_MAP,
+                    SPECIAL_START_SET,
+                ):
+                    raise CBORDecodeError(
+                        'collections not supported as map ' 'keys'
+                    )
 
                 # We do not allow special values to be used as map keys.
                 else:
-                    raise CBORDecodeError('unhandled special item when '
-                                          'expecting map key: %d' % special)
+                    raise CBORDecodeError(
+                        'unhandled special item when '
+                        'expecting map key: %d' % special
+                    )
 
             # This value becomes the value of the current map key.
             elif self._state == self._STATE_WANT_MAP_VALUE:
@@ -733,10 +774,9 @@
                     lastc['v'][self._currentmapkey] = newvalue
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue,
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue,}
+                    )
 
                     self._state = self._STATE_WANT_ARRAY_VALUE
 
@@ -748,10 +788,9 @@
                     lastc['v'][self._currentmapkey] = newvalue
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue,
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue,}
+                    )
 
                     self._state = self._STATE_WANT_MAP_KEY
 
@@ -763,20 +802,23 @@
                     lastc['v'][self._currentmapkey] = newvalue
                     lastc['remaining'] -= 1
 
-                    self._collectionstack.append({
-                        'remaining': value,
-                        'v': newvalue,
-                    })
+                    self._collectionstack.append(
+                        {'remaining': value, 'v': newvalue,}
+                    )
 
                     self._state = self._STATE_WANT_SET_VALUE
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
-                    raise CBORDecodeError('indefinite length bytestrings not '
-                                          'allowed as map values')
+                    raise CBORDecodeError(
+                        'indefinite length bytestrings not '
+                        'allowed as map values'
+                    )
 
                 else:
-                    raise CBORDecodeError('unhandled special item when '
-                                          'expecting map value: %d' % special)
+                    raise CBORDecodeError(
+                        'unhandled special item when '
+                        'expecting map value: %d' % special
+                    )
 
                 self._currentmapkey = None
 
@@ -788,27 +830,35 @@
                     lastc['remaining'] -= 1
 
                 elif special == SPECIAL_START_INDEFINITE_BYTESTRING:
-                    raise CBORDecodeError('indefinite length bytestrings not '
-                                          'allowed as set values')
+                    raise CBORDecodeError(
+                        'indefinite length bytestrings not '
+                        'allowed as set values'
+                    )
 
-                elif special in (SPECIAL_START_ARRAY,
-                                 SPECIAL_START_MAP,
-                                 SPECIAL_START_SET):
-                    raise CBORDecodeError('collections not allowed as set '
-                                          'values')
+                elif special in (
+                    SPECIAL_START_ARRAY,
+                    SPECIAL_START_MAP,
+                    SPECIAL_START_SET,
+                ):
+                    raise CBORDecodeError(
+                        'collections not allowed as set ' 'values'
+                    )
 
                 # We don't allow non-trivial types to exist as set values.
                 else:
-                    raise CBORDecodeError('unhandled special item when '
-                                          'expecting set value: %d' % special)
+                    raise CBORDecodeError(
+                        'unhandled special item when '
+                        'expecting set value: %d' % special
+                    )
 
             # This value represents the first chunk in an indefinite length
             # bytestring.
             elif self._state == self._STATE_WANT_BYTESTRING_CHUNK_FIRST:
                 # We received a full chunk.
                 if special == SPECIAL_NONE:
-                    self._decodedvalues.append(bytestringchunk(value,
-                                                               first=True))
+                    self._decodedvalues.append(
+                        bytestringchunk(value, first=True)
+                    )
 
                     self._state = self._STATE_WANT_BYTESTRING_CHUNK_SUBSEQUENT
 
@@ -818,9 +868,9 @@
                     # We /could/ convert this to a b''. But we want to preserve
                     # the nature of the underlying data so consumers expecting
                     # an indefinite length bytestring get one.
-                    self._decodedvalues.append(bytestringchunk(b'',
-                                                               first=True,
-                                                               last=True))
+                    self._decodedvalues.append(
+                        bytestringchunk(b'', first=True, last=True)
+                    )
 
                     # Since indefinite length bytestrings can't be used in
                     # collections, we must be at the root level.
@@ -828,9 +878,10 @@
                     self._state = self._STATE_NONE
 
                 else:
-                    raise CBORDecodeError('unexpected special value when '
-                                          'expecting bytestring chunk: %d' %
-                                          special)
+                    raise CBORDecodeError(
+                        'unexpected special value when '
+                        'expecting bytestring chunk: %d' % special
+                    )
 
             # This value represents the non-initial chunk in an indefinite
             # length bytestring.
@@ -849,21 +900,25 @@
                     self._state = self._STATE_NONE
 
                 else:
-                    raise CBORDecodeError('unexpected special value when '
-                                          'expecting bytestring chunk: %d' %
-                                          special)
+                    raise CBORDecodeError(
+                        'unexpected special value when '
+                        'expecting bytestring chunk: %d' % special
+                    )
 
             else:
-                raise CBORDecodeError('unhandled decoder state: %d' %
-                                      self._state)
+                raise CBORDecodeError(
+                    'unhandled decoder state: %d' % self._state
+                )
 
             # We could have just added the final value in a collection. End
             # all complete collections at the top of the stack.
             while True:
                 # Bail if we're not waiting on a new collection item.
-                if self._state not in (self._STATE_WANT_ARRAY_VALUE,
-                                       self._STATE_WANT_MAP_KEY,
-                                       self._STATE_WANT_SET_VALUE):
+                if self._state not in (
+                    self._STATE_WANT_ARRAY_VALUE,
+                    self._STATE_WANT_MAP_KEY,
+                    self._STATE_WANT_SET_VALUE,
+                ):
                     break
 
                 # Or we are expecting more items for this collection.
@@ -909,6 +964,7 @@
         self._decodedvalues = []
         return l
 
+
 class bufferingdecoder(object):
     """A CBOR decoder that buffers undecoded input.
 
@@ -919,6 +975,7 @@
     TODO consider adding limits as to the maximum amount of data that can
     be buffered.
     """
+
     def __init__(self):
         self._decoder = sansiodecoder()
         self._chunks = []
@@ -978,6 +1035,7 @@
     def getavailable(self):
         return self._decoder.getavailable()
 
+
 def decodeall(b):
     """Decode all CBOR items present in an iterable of bytes.
 
--- a/mercurial/utils/compression.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/compression.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,7 @@
     i18n,
     pycompat,
 )
-from . import (
-    stringutil,
-)
+from . import stringutil
 
 safehasattr = pycompat.safehasattr
 
@@ -29,14 +27,17 @@
 SERVERROLE = 'server'
 CLIENTROLE = 'client'
 
-compewireprotosupport = collections.namedtuple(r'compenginewireprotosupport',
-                                               (r'name', r'serverpriority',
-                                                r'clientpriority'))
+compewireprotosupport = collections.namedtuple(
+    r'compenginewireprotosupport',
+    (r'name', r'serverpriority', r'clientpriority'),
+)
+
 
 class propertycache(object):
     def __init__(self, func):
         self.func = func
         self.name = func.__name__
+
     def __get__(self, obj, type=None):
         result = self.func(obj)
         self.cachevalue(obj, result)
@@ -46,6 +47,7 @@
         # __dict__ assignment required to bypass __setattr__ (eg: repoview)
         obj.__dict__[self.name] = value
 
+
 class compressormanager(object):
     """Holds registrations of various compression engines.
 
@@ -56,6 +58,7 @@
     Compressors are registered against the global instance by calling its
     ``register()`` method.
     """
+
     def __init__(self):
         self._engines = {}
         # Bundle spec human name to engine name.
@@ -87,19 +90,23 @@
         name = engine.name()
 
         if name in self._engines:
-            raise error.Abort(_('compression engine %s already registered') %
-                              name)
+            raise error.Abort(
+                _('compression engine %s already registered') % name
+            )
 
         bundleinfo = engine.bundletype()
         if bundleinfo:
             bundlename, bundletype = bundleinfo
 
             if bundlename in self._bundlenames:
-                raise error.Abort(_('bundle name %s already registered') %
-                                  bundlename)
+                raise error.Abort(
+                    _('bundle name %s already registered') % bundlename
+                )
             if bundletype in self._bundletypes:
-                raise error.Abort(_('bundle type %s already registered by %s') %
-                                  (bundletype, self._bundletypes[bundletype]))
+                raise error.Abort(
+                    _('bundle type %s already registered by %s')
+                    % (bundletype, self._bundletypes[bundletype])
+                )
 
             # No external facing name declared.
             if bundlename:
@@ -111,16 +118,22 @@
         if wiresupport:
             wiretype = wiresupport.name
             if wiretype in self._wiretypes:
-                raise error.Abort(_('wire protocol compression %s already '
-                                    'registered by %s') %
-                                  (wiretype, self._wiretypes[wiretype]))
+                raise error.Abort(
+                    _(
+                        'wire protocol compression %s already '
+                        'registered by %s'
+                    )
+                    % (wiretype, self._wiretypes[wiretype])
+                )
 
             self._wiretypes[wiretype] = name
 
         revlogheader = engine.revlogheader()
         if revlogheader and revlogheader in self._revlogheaders:
-            raise error.Abort(_('revlog header %s already registered by %s') %
-                              (revlogheader, self._revlogheaders[revlogheader]))
+            raise error.Abort(
+                _('revlog header %s already registered by %s')
+                % (revlogheader, self._revlogheaders[revlogheader])
+            )
 
         if revlogheader:
             self._revlogheaders[revlogheader] = name
@@ -144,8 +157,9 @@
         """
         engine = self._engines[self._bundlenames[bundlename]]
         if not engine.available():
-            raise error.Abort(_('compression engine %s could not be loaded') %
-                              engine.name())
+            raise error.Abort(
+                _('compression engine %s could not be loaded') % engine.name()
+            )
         return engine
 
     def forbundletype(self, bundletype):
@@ -157,8 +171,9 @@
         """
         engine = self._engines[self._bundletypes[bundletype]]
         if not engine.available():
-            raise error.Abort(_('compression engine %s could not be loaded') %
-                              engine.name())
+            raise error.Abort(
+                _('compression engine %s could not be loaded') % engine.name()
+            )
         return engine
 
     def supportedwireengines(self, role, onlyavailable=True):
@@ -189,8 +204,9 @@
     def forwiretype(self, wiretype):
         engine = self._engines[self._wiretypes[wiretype]]
         if not engine.available():
-            raise error.Abort(_('compression engine %s could not be loaded') %
-                              engine.name())
+            raise error.Abort(
+                _('compression engine %s could not be loaded') % engine.name()
+            )
         return engine
 
     def forrevlogheader(self, header):
@@ -200,13 +216,16 @@
         """
         return self._engines[self._revlogheaders[header]]
 
+
 compengines = compressormanager()
 
+
 class compressionengine(object):
     """Base class for compression engines.
 
     Compression engines must implement the interface defined by this class.
     """
+
     def name(self):
         """Returns the name of the compression engine.
 
@@ -319,6 +338,7 @@
         """
         raise NotImplementedError()
 
+
 class _CompressedStreamReader(object):
     def __init__(self, fh):
         if safehasattr(fh, 'unbufferedread'):
@@ -338,13 +358,13 @@
             while self._pending:
                 if len(self._pending[0]) > l + self._pos:
                     newbuf = self._pending[0]
-                    buf.append(newbuf[self._pos:self._pos + l])
+                    buf.append(newbuf[self._pos : self._pos + l])
                     self._pos += l
                     return ''.join(buf)
 
                 newbuf = self._pending.pop(0)
                 if self._pos:
-                    buf.append(newbuf[self._pos:])
+                    buf.append(newbuf[self._pos :])
                     l -= len(newbuf) - self._pos
                 else:
                     buf.append(newbuf)
@@ -359,10 +379,12 @@
                 # No progress and no new data, bail out
                 return ''.join(buf)
 
+
 class _GzipCompressedStreamReader(_CompressedStreamReader):
     def __init__(self, fh):
         super(_GzipCompressedStreamReader, self).__init__(fh)
         self._decompobj = zlib.decompressobj()
+
     def _decompress(self, chunk):
         newbuf = self._decompobj.decompress(chunk)
         if newbuf:
@@ -376,10 +398,12 @@
         except zlib.error:
             pass
 
+
 class _BZ2CompressedStreamReader(_CompressedStreamReader):
     def __init__(self, fh):
         super(_BZ2CompressedStreamReader, self).__init__(fh)
         self._decompobj = bz2.BZ2Decompressor()
+
     def _decompress(self, chunk):
         newbuf = self._decompobj.decompress(chunk)
         if newbuf:
@@ -394,6 +418,7 @@
         except EOFError:
             self._eof = True
 
+
 class _TruncatedBZ2CompressedStreamReader(_BZ2CompressedStreamReader):
     def __init__(self, fh):
         super(_TruncatedBZ2CompressedStreamReader, self).__init__(fh)
@@ -401,11 +426,13 @@
         if newbuf:
             self._pending.append(newbuf)
 
+
 class _ZstdCompressedStreamReader(_CompressedStreamReader):
     def __init__(self, fh, zstd):
         super(_ZstdCompressedStreamReader, self).__init__(fh)
         self._zstd = zstd
         self._decompobj = zstd.ZstdDecompressor().decompressobj()
+
     def _decompress(self, chunk):
         newbuf = self._decompobj.decompress(chunk)
         if newbuf:
@@ -420,6 +447,7 @@
         except self._zstd.ZstdError:
             self._eof = True
 
+
 class _zlibengine(compressionengine):
     def name(self):
         return 'zlib'
@@ -456,7 +484,6 @@
         return _GzipCompressedStreamReader(fh)
 
     class zlibrevlogcompressor(object):
-
         def __init__(self, level=None):
             self._level = level
 
@@ -488,7 +515,7 @@
                 parts = []
                 pos = 0
                 while pos < insize:
-                    pos2 = pos + 2**20
+                    pos2 = pos + 2 ** 20
                     parts.append(z.compress(data[pos:pos2]))
                     pos = pos2
                 parts.append(z.flush())
@@ -501,8 +528,10 @@
             try:
                 return zlib.decompress(data)
             except zlib.error as e:
-                raise error.StorageError(_('revlog decompress error: %s') %
-                                         stringutil.forcebytestr(e))
+                raise error.StorageError(
+                    _('revlog decompress error: %s')
+                    % stringutil.forcebytestr(e)
+                )
 
     def revlogcompressor(self, opts=None):
         level = None
@@ -510,8 +539,10 @@
             level = opts.get('zlib.level')
         return self.zlibrevlogcompressor(level)
 
+
 compengines.register(_zlibengine())
 
+
 class _bz2engine(compressionengine):
     def name(self):
         return 'bz2'
@@ -548,8 +579,10 @@
     def decompressorreader(self, fh):
         return _BZ2CompressedStreamReader(fh)
 
+
 compengines.register(_bz2engine())
 
+
 class _truncatedbz2engine(compressionengine):
     def name(self):
         return 'bz2truncated'
@@ -562,8 +595,10 @@
     def decompressorreader(self, fh):
         return _TruncatedBZ2CompressedStreamReader(fh)
 
+
 compengines.register(_truncatedbz2engine())
 
+
 class _noopengine(compressionengine):
     def name(self):
         return 'none'
@@ -597,8 +632,10 @@
     def revlogcompressor(self, opts=None):
         return self.nooprevlogcompressor()
 
+
 compengines.register(_noopengine())
 
+
 class _zstdengine(compressionengine):
     def name(self):
         return 'zstd'
@@ -609,6 +646,7 @@
         # until first access.
         try:
             from .. import zstd
+
             # Force delayed import.
             zstd.__version__
             return zstd
@@ -716,8 +754,10 @@
 
                 return ''.join(chunks)
             except Exception as e:
-                raise error.StorageError(_('revlog decompress error: %s') %
-                                         stringutil.forcebytestr(e))
+                raise error.StorageError(
+                    _('revlog decompress error: %s')
+                    % stringutil.forcebytestr(e)
+                )
 
     def revlogcompressor(self, opts=None):
         opts = opts or {}
@@ -728,8 +768,10 @@
             level = 3
         return self.zstdrevlogcompressor(self._module, level=level)
 
+
 compengines.register(_zstdengine())
 
+
 def bundlecompressiontopics():
     """Obtains a list of available bundle compressions for use in help."""
     # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
@@ -761,4 +803,5 @@
 
     return items
 
+
 i18nfunctions = bundlecompressiontopics().values()
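
To read the compression hunks above: black breaks a long expression before
a binary operator, which is why the '%' that formats the StorageError
message starts its own continuation line. A minimal illustrative sketch
(the names below are invented for this note, not taken from Mercurial):

    template = 'revlog decompress error: %s'
    detail = 'a deliberately long description of an inflate failure'
    formatted = (
        template
        % detail
    )
    # the parenthesized form and the one-line form build the same string
    assert formatted == template % detail
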
--- a/mercurial/utils/dateutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/dateutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,14 +20,14 @@
 
 # used by parsedate
 defaultdateformats = (
-    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
-    '%Y-%m-%dT%H:%M',    #   without seconds
-    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
-    '%Y-%m-%dT%H%M',     #   without seconds
-    '%Y-%m-%d %H:%M:%S', # our common legal variant
-    '%Y-%m-%d %H:%M',    #   without seconds
-    '%Y-%m-%d %H%M%S',   # without :
-    '%Y-%m-%d %H%M',     #   without seconds
+    '%Y-%m-%dT%H:%M:%S',  # the 'real' ISO8601
+    '%Y-%m-%dT%H:%M',  #   without seconds
+    '%Y-%m-%dT%H%M%S',  # another awful but legal variant without :
+    '%Y-%m-%dT%H%M',  #   without seconds
+    '%Y-%m-%d %H:%M:%S',  # our common legal variant
+    '%Y-%m-%d %H:%M',  #   without seconds
+    '%Y-%m-%d %H%M%S',  # without :
+    '%Y-%m-%d %H%M',  #   without seconds
     '%Y-%m-%d %I:%M:%S%p',
     '%Y-%m-%d %H:%M',
     '%Y-%m-%d %I:%M%p',
@@ -38,7 +38,7 @@
     '%m/%d/%Y',
     '%a %b %d %H:%M:%S %Y',
     '%a %b %d %I:%M:%S%p %Y',
-    '%a, %d %b %Y %H:%M:%S',        #  GNU coreutils "/bin/date --rfc-2822"
+    '%a, %d %b %Y %H:%M:%S',  #  GNU coreutils "/bin/date --rfc-2822"
     '%b %d %H:%M:%S %Y',
     '%b %d %I:%M:%S%p %Y',
     '%b %d %H:%M:%S',
@@ -53,12 +53,8 @@
     '%I:%M%p',
 )
 
-extendeddateformats = defaultdateformats + (
-    "%Y",
-    "%Y-%m",
-    "%b",
-    "%b %Y",
-)
+extendeddateformats = defaultdateformats + ("%Y", "%Y-%m", "%b", "%b %Y",)
+
 
 def makedate(timestamp=None):
     '''Return a unix timestamp (or the current time) as a (unixtime,
@@ -68,11 +64,13 @@
     if timestamp < 0:
         hint = _("check your clock")
         raise error.Abort(_("negative timestamp: %d") % timestamp, hint=hint)
-    delta = (datetime.datetime.utcfromtimestamp(timestamp) -
-             datetime.datetime.fromtimestamp(timestamp))
+    delta = datetime.datetime.utcfromtimestamp(
+        timestamp
+    ) - datetime.datetime.fromtimestamp(timestamp)
     tz = delta.days * 86400 + delta.seconds
     return timestamp, tz
 
+
 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
     """represent a (unixtime, offset) tuple as a localized time.
     unixtime is seconds since the epoch, and offset is the time zone's
@@ -98,8 +96,8 @@
         format = format.replace("%1", "%c%02d" % (sign, q))
         format = format.replace("%2", "%02d" % r)
     d = t - tz
-    if d > 0x7fffffff:
-        d = 0x7fffffff
+    if d > 0x7FFFFFFF:
+        d = 0x7FFFFFFF
     elif d < -0x80000000:
         d = -0x80000000
     # Never use time.gmtime() and datetime.datetime.fromtimestamp()
@@ -109,10 +107,12 @@
     s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
     return s
 
+
 def shortdate(date=None):
     """turn (timestamp, tzoff) tuple into iso 8631 date."""
     return datestr(date, format='%Y-%m-%d')
 
+
 def parsetimezone(s):
     """find a trailing timezone, if any, in string, and return a
        (offset, remainder) pair"""
@@ -133,8 +133,13 @@
         return 0, s[:-1]
 
     # ISO8601-style [+-]hh:mm
-    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
-        s[-5:-3].isdigit() and s[-2:].isdigit()):
+    if (
+        len(s) >= 6
+        and s[-6] in "+-"
+        and s[-3] == ":"
+        and s[-5:-3].isdigit()
+        and s[-2:].isdigit()
+    ):
         sign = (s[-6] == "+") and 1 or -1
         hours = int(s[-5:-3])
         minutes = int(s[-2:])
@@ -142,6 +147,7 @@
 
     return None, s
 
+
 def strdate(string, format, defaults=None):
     """parse a localized time string and return a (unixtime, offset) tuple.
     if the string cannot be parsed, ValueError is raised."""
@@ -152,10 +158,10 @@
     offset, date = parsetimezone(string)
 
     # add missing elements from defaults
-    usenow = False # default to using biased defaults
-    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
+    usenow = False  # default to using biased defaults
+    for part in ("S", "M", "HI", "d", "mb", "yY"):  # decreasing specificity
         part = pycompat.bytestr(part)
-        found = [True for p in part if ("%"+p) in format]
+        found = [True for p in part if ("%" + p) in format]
         if not found:
             date += "@" + defaults[part][usenow]
             format += "@%" + part[0]
@@ -164,8 +170,9 @@
             # elements are relative to today
             usenow = True
 
-    timetuple = time.strptime(encoding.strfromlocal(date),
-                              encoding.strfromlocal(format))
+    timetuple = time.strptime(
+        encoding.strfromlocal(date), encoding.strfromlocal(format)
+    )
     localunixtime = int(calendar.timegm(timetuple))
     if offset is None:
         # local timezone
@@ -175,6 +182,7 @@
         unixtime = localunixtime + offset
     return unixtime, offset
 
+
 def parsedate(date, formats=None, bias=None):
     """parse a localized date/time and return a (unixtime, offset) tuple.
 
@@ -211,8 +219,9 @@
         date = datetime.date.today().strftime(r'%b %d')
         date = encoding.strtolocal(date)
     elif date == 'yesterday' or date == _('yesterday'):
-        date = (datetime.date.today() -
-                datetime.timedelta(days=1)).strftime(r'%b %d')
+        date = (datetime.date.today() - datetime.timedelta(days=1)).strftime(
+            r'%b %d'
+        )
         date = encoding.strtolocal(date)
 
     try:
@@ -244,17 +253,19 @@
                 break
         else:
             raise error.ParseError(
-                _('invalid date: %r') % pycompat.bytestr(date))
+                _('invalid date: %r') % pycompat.bytestr(date)
+            )
     # validate explicit (probably user-specified) date and
     # time zone offset. values must fit in signed 32 bits for
     # current 32-bit linux runtimes. timezones go from UTC-12
     # to UTC+14
-    if when < -0x80000000 or when > 0x7fffffff:
+    if when < -0x80000000 or when > 0x7FFFFFFF:
         raise error.ParseError(_('date exceeds 32 bits: %d') % when)
     if offset < -50400 or offset > 43200:
         raise error.ParseError(_('impossible time zone offset: %d') % offset)
     return when, offset
 
+
 def matchdate(date):
     """Return a function that matches a given date match specifier
 
@@ -319,8 +330,9 @@
         except ValueError:
             raise error.Abort(_("invalid day spec: %s") % date[1:])
         if days < 0:
-            raise error.Abort(_("%s must be nonnegative (see 'hg help dates')")
-                % date[1:])
+            raise error.Abort(
+                _("%s must be nonnegative (see 'hg help dates')") % date[1:]
+            )
         when = makedate()[0] - days * 3600 * 24
         return lambda x: x >= when
     elif b" to " in date:
--- a/mercurial/utils/procutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/procutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,12 +34,14 @@
 stdin = pycompat.stdin
 stdout = pycompat.stdout
 
+
 def isatty(fp):
     try:
         return fp.isatty()
     except AttributeError:
         return False
 
+
 # glibc determines buffering on first write to stdout - if we replace a TTY
 # destined stdout with a pipe destined stdout (e.g. pager), we want line
 # buffering (or unbuffered, on Windows)
@@ -52,6 +54,7 @@
 
 if pycompat.iswindows:
     from .. import windows as platform
+
     stdout = platform.winstdout(stdout)
 else:
     from .. import posix as platform
@@ -83,6 +86,7 @@
 
 closefds = pycompat.isposix
 
+
 def explainexit(code):
     """return a message describing a subprocess status
     (codes from kill are negative - not os.system/wait encoding)"""
@@ -90,6 +94,7 @@
         return _("exited with status %d") % code
     return _("killed by signal %d") % -code
 
+
 class _pfile(object):
     """File-like wrapper for a stream opened by subprocess.Popen()"""
 
@@ -114,6 +119,7 @@
     def __exit__(self, exc_type, exc_value, exc_tb):
         self.close()
 
+
 def popen(cmd, mode='rb', bufsize=-1):
     if mode == 'rb':
         return _popenreader(cmd, bufsize)
@@ -121,52 +127,77 @@
         return _popenwriter(cmd, bufsize)
     raise error.ProgrammingError('unsupported mode: %r' % mode)
 
+
 def _popenreader(cmd, bufsize):
-    p = subprocess.Popen(tonativestr(quotecommand(cmd)),
-                         shell=True, bufsize=bufsize,
-                         close_fds=closefds,
-                         stdout=subprocess.PIPE)
+    p = subprocess.Popen(
+        tonativestr(quotecommand(cmd)),
+        shell=True,
+        bufsize=bufsize,
+        close_fds=closefds,
+        stdout=subprocess.PIPE,
+    )
     return _pfile(p, p.stdout)
 
+
 def _popenwriter(cmd, bufsize):
-    p = subprocess.Popen(tonativestr(quotecommand(cmd)),
-                         shell=True, bufsize=bufsize,
-                         close_fds=closefds,
-                         stdin=subprocess.PIPE)
+    p = subprocess.Popen(
+        tonativestr(quotecommand(cmd)),
+        shell=True,
+        bufsize=bufsize,
+        close_fds=closefds,
+        stdin=subprocess.PIPE,
+    )
     return _pfile(p, p.stdin)
 
+
 def popen2(cmd, env=None):
     # Setting bufsize to -1 lets the system decide the buffer size.
     # The default for bufsize is 0, meaning unbuffered. This leads to
     # poor performance on Mac OS X: http://bugs.python.org/issue4194
-    p = subprocess.Popen(tonativestr(cmd),
-                         shell=True, bufsize=-1,
-                         close_fds=closefds,
-                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                         env=tonativeenv(env))
+    p = subprocess.Popen(
+        tonativestr(cmd),
+        shell=True,
+        bufsize=-1,
+        close_fds=closefds,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        env=tonativeenv(env),
+    )
     return p.stdin, p.stdout
 
+
 def popen3(cmd, env=None):
     stdin, stdout, stderr, p = popen4(cmd, env)
     return stdin, stdout, stderr
 
+
 def popen4(cmd, env=None, bufsize=-1):
-    p = subprocess.Popen(tonativestr(cmd),
-                         shell=True, bufsize=bufsize,
-                         close_fds=closefds,
-                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE,
-                         env=tonativeenv(env))
+    p = subprocess.Popen(
+        tonativestr(cmd),
+        shell=True,
+        bufsize=bufsize,
+        close_fds=closefds,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        env=tonativeenv(env),
+    )
     return p.stdin, p.stdout, p.stderr, p
 
+
 def pipefilter(s, cmd):
     '''filter string S through command CMD, returning its output'''
-    p = subprocess.Popen(tonativestr(cmd),
-                         shell=True, close_fds=closefds,
-                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+    p = subprocess.Popen(
+        tonativestr(cmd),
+        shell=True,
+        close_fds=closefds,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+    )
     pout, perr = p.communicate(s)
     return pout
 
+
 def tempfilter(s, cmd):
     '''filter string S through a pair of temporary files with CMD.
     CMD is used as a template to create the real command to be run,
@@ -186,8 +217,9 @@
         if pycompat.sysplatform == 'OpenVMS' and code & 1:
             code = 0
         if code:
-            raise error.Abort(_("command '%s' failed: %s") %
-                              (cmd, explainexit(code)))
+            raise error.Abort(
+                _("command '%s' failed: %s") % (cmd, explainexit(code))
+            )
         with open(outname, 'rb') as fp:
             return fp.read()
     finally:
@@ -202,30 +234,37 @@
         except OSError:
             pass
 
+
 _filtertable = {
     'tempfile:': tempfilter,
     'pipe:': pipefilter,
 }
 
+
 def filter(s, cmd):
     "filter a string through a command that transforms its input to its output"
     for name, fn in _filtertable.iteritems():
         if cmd.startswith(name):
-            return fn(s, cmd[len(name):].lstrip())
+            return fn(s, cmd[len(name) :].lstrip())
     return pipefilter(s, cmd)
 
+
 def mainfrozen():
     """return True if we are a frozen executable.
 
     The code supports py2exe (most common, Windows only) and tools/freeze
     (portable, not much used).
     """
-    return (pycompat.safehasattr(sys, "frozen") or # new py2exe
-            pycompat.safehasattr(sys, "importers") or # old py2exe
-            imp.is_frozen(r"__main__")) # tools/freeze
+    return (
+        pycompat.safehasattr(sys, "frozen")  # new py2exe
+        or pycompat.safehasattr(sys, "importers")  # old py2exe
+        or imp.is_frozen(r"__main__")  # tools/freeze
+    )
+
 
 _hgexecutable = None
 
+
 def hgexecutable():
     """return location of the 'hg' executable.
 
@@ -242,32 +281,43 @@
                 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
             else:
                 _sethgexecutable(pycompat.sysexecutable)
-        elif (not pycompat.iswindows and os.path.basename(
-            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
+        elif (
+            not pycompat.iswindows
+            and os.path.basename(
+                pycompat.fsencode(getattr(mainmod, '__file__', ''))
+            )
+            == 'hg'
+        ):
             _sethgexecutable(pycompat.fsencode(mainmod.__file__))
         else:
-            _sethgexecutable(findexe('hg') or
-                             os.path.basename(pycompat.sysargv[0]))
+            _sethgexecutable(
+                findexe('hg') or os.path.basename(pycompat.sysargv[0])
+            )
     return _hgexecutable
 
+
 def _sethgexecutable(path):
     """set location of the 'hg' executable"""
     global _hgexecutable
     _hgexecutable = path
 
+
 def _testfileno(f, stdf):
     fileno = getattr(f, 'fileno', None)
     try:
         return fileno and fileno() == stdf.fileno()
     except io.UnsupportedOperation:
-        return False # fileno() raised UnsupportedOperation
+        return False  # fileno() raised UnsupportedOperation
+
 
 def isstdin(f):
     return _testfileno(f, sys.__stdin__)
 
+
 def isstdout(f):
     return _testfileno(f, sys.__stdout__)
 
+
 def protectstdio(uin, uout):
     """Duplicate streams and redirect original if (uin, uout) are stdio
 
@@ -292,6 +342,7 @@
         fout = os.fdopen(newfd, r'wb')
     return fin, fout
 
+
 def restorestdio(uin, uout, fin, fout):
     """Restore (uin, uout) streams from possibly duplicated (fin, fout)"""
     uout.flush()
@@ -300,8 +351,10 @@
             os.dup2(f.fileno(), uif.fileno())
             f.close()
 
+
 def shellenviron(environ=None):
     """return environ with optional override, useful for shelling out"""
+
     def py2shell(val):
         'convert python object into string that is useful to shell'
         if val is None or val is False:
@@ -309,28 +362,34 @@
         if val is True:
             return '1'
         return pycompat.bytestr(val)
+
     env = dict(encoding.environ)
     if environ:
         env.update((k, py2shell(v)) for k, v in environ.iteritems())
     env['HG'] = hgexecutable()
     return env
 
+
 if pycompat.iswindows:
+
     def shelltonative(cmd, env):
         return platform.shelltocmdexe(cmd, shellenviron(env))
 
     tonativestr = encoding.strfromlocal
 else:
+
     def shelltonative(cmd, env):
         return cmd
 
     tonativestr = pycompat.identity
 
+
 def tonativeenv(env):
     '''convert the environment from bytes to strings suitable for Popen(), etc.
     '''
     return pycompat.rapply(tonativestr, env)
 
+
 def system(cmd, environ=None, cwd=None, out=None):
     '''enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
@@ -344,17 +403,23 @@
     cmd = quotecommand(cmd)
     env = shellenviron(environ)
     if out is None or isstdout(out):
-        rc = subprocess.call(tonativestr(cmd),
-                             shell=True, close_fds=closefds,
-                             env=tonativeenv(env),
-                             cwd=pycompat.rapply(tonativestr, cwd))
+        rc = subprocess.call(
+            tonativestr(cmd),
+            shell=True,
+            close_fds=closefds,
+            env=tonativeenv(env),
+            cwd=pycompat.rapply(tonativestr, cwd),
+        )
     else:
-        proc = subprocess.Popen(tonativestr(cmd),
-                                shell=True, close_fds=closefds,
-                                env=tonativeenv(env),
-                                cwd=pycompat.rapply(tonativestr, cwd),
-                                stdout=subprocess.PIPE,
-                                stderr=subprocess.STDOUT)
+        proc = subprocess.Popen(
+            tonativestr(cmd),
+            shell=True,
+            close_fds=closefds,
+            env=tonativeenv(env),
+            cwd=pycompat.rapply(tonativestr, cwd),
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        )
         for line in iter(proc.stdout.readline, ''):
             out.write(line)
         proc.wait()
@@ -363,6 +428,7 @@
         rc = 0
     return rc
 
+
 def gui():
     '''Are we running in a GUI?'''
     if pycompat.isdarwin:
@@ -378,6 +444,7 @@
     else:
         return pycompat.iswindows or encoding.environ.get("DISPLAY")
 
+
 def hgcmd():
     """Return the command used to execute current hg
 
@@ -393,6 +460,7 @@
             return [pycompat.sysexecutable]
     return _gethgcmd()
 
+
 def rundetached(args, condfn):
     """Execute the argument list in a detached process.
 
@@ -410,8 +478,10 @@
     # running process on success. Instead we listen for SIGCHLD telling
     # us our child process terminated.
     terminated = set()
+
     def handler(signum, frame):
         terminated.add(os.wait())
+
     prevhandler = None
     SIGCHLD = getattr(signal, 'SIGCHLD', None)
     if SIGCHLD is not None:
@@ -419,8 +489,7 @@
     try:
         pid = spawndetached(args)
         while not condfn():
-            if ((pid in terminated or not testpid(pid))
-                and not condfn()):
+            if (pid in terminated or not testpid(pid)) and not condfn():
                 return -1
             time.sleep(0.1)
         return pid
@@ -428,6 +497,7 @@
         if prevhandler is not None:
             signal.signal(signal.SIGCHLD, prevhandler)
 
+
 @contextlib.contextmanager
 def uninterruptible(warn):
     """Inhibit SIGINT handling on a region of code.
@@ -461,6 +531,7 @@
         if shouldbail:
             raise KeyboardInterrupt
 
+
 if pycompat.iswindows:
     # no fork on Windows, but we can create a detached process
     # https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863.aspx
@@ -472,18 +543,27 @@
     _creationflags = DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP
 
     def runbgcommand(
-      script, env, shell=False, stdout=None, stderr=None, ensurestart=True):
+        script, env, shell=False, stdout=None, stderr=None, ensurestart=True
+    ):
         '''Spawn a command without waiting for it to finish.'''
         # we can't use close_fds *and* redirect stdin. I'm not sure that we
         # need to because the detached process has no console connection.
         subprocess.Popen(
             tonativestr(script),
-            shell=shell, env=tonativeenv(env), close_fds=True,
-            creationflags=_creationflags, stdout=stdout,
-            stderr=stderr)
+            shell=shell,
+            env=tonativeenv(env),
+            close_fds=True,
+            creationflags=_creationflags,
+            stdout=stdout,
+            stderr=stderr,
+        )
+
+
 else:
+
     def runbgcommand(
-      cmd, env, shell=False, stdout=None, stderr=None, ensurestart=True):
+        cmd, env, shell=False, stdout=None, stderr=None, ensurestart=True
+    ):
         '''Spawn a command without waiting for it to finish.'''
         # double-fork to completely detach from the parent process
         # based on http://code.activestate.com/recipes/278731
@@ -496,7 +576,7 @@
             if os.WIFEXITED(status):
                 returncode = os.WEXITSTATUS(status)
             else:
-                returncode = -os.WTERMSIG(status)
+                returncode = -(os.WTERMSIG(status))
             if returncode != 0:
                 # The child process's return code is 0 on success, an errno
                 # value on failure, or 255 if we don't have a valid errno
@@ -507,8 +587,10 @@
                 # doesn't seem worth adding that complexity here, though.)
                 if returncode == 255:
                     returncode = errno.EINVAL
-                raise OSError(returncode, 'error running %r: %s' %
-                              (cmd, os.strerror(returncode)))
+                raise OSError(
+                    returncode,
+                    'error running %r: %s' % (cmd, os.strerror(returncode)),
+                )
             return
 
         returncode = 255
@@ -525,11 +607,17 @@
             # connect stdin to devnull to make sure the subprocess can't
             # muck up that stream for mercurial.
             subprocess.Popen(
-                cmd, shell=shell, env=env, close_fds=True,
-                stdin=stdin, stdout=stdout, stderr=stderr)
+                cmd,
+                shell=shell,
+                env=env,
+                close_fds=True,
+                stdin=stdin,
+                stdout=stdout,
+                stderr=stderr,
+            )
             returncode = 0
         except EnvironmentError as ex:
-            returncode = (ex.errno & 0xff)
+            returncode = ex.errno & 0xFF
             if returncode == 0:
                 # This shouldn't happen, but just in case make sure the
                 # return code is never 0 here.
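
The dominant change in procutil.py is the explosion of subprocess.Popen
calls into one keyword argument per line with a trailing comma, so a later
change to any single argument touches only its own line in a diff. A
minimal runnable sketch of the same style (assumes a POSIX system where
'echo' exists; not taken from the patch):

    import subprocess

    # one keyword argument per line, trailing comma included
    proc = subprocess.Popen(
        ['echo', 'hello'],
        shell=False,
        close_fds=True,
        stdout=subprocess.PIPE,
    )
    out, _ = proc.communicate()
    assert out.strip() == b'hello'
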
--- a/mercurial/utils/repoviewutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/repoviewutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,9 +14,11 @@
 # * X - Y is as small as possible.
 # This creates an ordering used for branchmap purposes.
 # The ordering may be partial.
-subsettable = {None: 'visible',
-               'visible-hidden': 'visible',
-               'visible': 'served',
-               'served.hidden': 'served',
-               'served': 'immutable',
-               'immutable': 'base'}
+subsettable = {
+    None: 'visible',
+    'visible-hidden': 'visible',
+    'visible': 'served',
+    'served.hidden': 'served',
+    'served': 'immutable',
+    'immutable': 'base',
+}
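
The exploded dict literal above is equivalent at runtime to the old
hanging-indent form; only the source layout changed. Illustrative check:

    flat = {None: 'visible', 'visible': 'served'}
    exploded = {
        None: 'visible',
        'visible': 'served',
    }
    assert flat == exploded
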
--- a/mercurial/utils/storageutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/storageutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,6 +27,7 @@
 
 _nullhash = hashlib.sha1(nullid)
 
+
 def hashrevisionsha1(text, p1, p2):
     """Compute the SHA-1 for revision data and its parents.
 
@@ -52,8 +53,10 @@
     s.update(text)
     return s.digest()
 
+
 METADATA_RE = re.compile(b'\x01\n')
 
+
 def parsemeta(text):
     """Parse metadata header from revision data.
 
@@ -71,16 +74,19 @@
         meta[k] = v
     return meta, s + 2
 
+
 def packmeta(meta, text):
     """Add metadata to fulltext to produce revision text."""
     keys = sorted(meta)
     metatext = b''.join(b'%s: %s\n' % (k, meta[k]) for k in keys)
     return b'\x01\n%s\x01\n%s' % (metatext, text)
 
+
 def iscensoredtext(text):
     meta = parsemeta(text)[0]
     return meta and b'censored' in meta
 
+
 def filtermetadata(text):
     """Extract just the revision data from source text.
 
@@ -91,7 +97,8 @@
         return text
 
     offset = text.index(b'\x01\n', 2)
-    return text[offset + 2:]
+    return text[offset + 2 :]
+
 
 def filerevisioncopied(store, node):
     """Resolve file revision copy metadata.
@@ -112,6 +119,7 @@
 
     return False
 
+
 def filedataequivalent(store, node, filedata):
     """Determines whether file data is equivalent to a stored node.
 
@@ -148,6 +156,7 @@
 
     return False
 
+
 def iterrevs(storelen, start=0, stop=None):
     """Iterate over revision numbers in a store."""
     step = 1
@@ -163,6 +172,7 @@
 
     return pycompat.xrange(start, stop, step)
 
+
 def fileidlookup(store, fileid, identifier):
     """Resolve the file node for a value.
 
@@ -184,8 +194,9 @@
         try:
             return store.node(fileid)
         except IndexError:
-            raise error.LookupError('%d' % fileid, identifier,
-                                    _('no match found'))
+            raise error.LookupError(
+                '%d' % fileid, identifier, _('no match found')
+            )
 
     if len(fileid) == 20:
         try:
@@ -217,6 +228,7 @@
 
     raise error.LookupError(fileid, identifier, _('no match found'))
 
+
 def resolvestripinfo(minlinkrev, tiprev, headrevs, linkrevfn, parentrevsfn):
     """Resolve information needed to strip revisions.
 
@@ -268,10 +280,21 @@
 
     return strippoint, brokenrevs
 
-def emitrevisions(store, nodes, nodesorder, resultcls, deltaparentfn=None,
-                  candeltafn=None, rawsizefn=None, revdifffn=None, flagsfn=None,
-                  deltamode=repository.CG_DELTAMODE_STD,
-                  revisiondata=False, assumehaveparentrevisions=False):
+
+def emitrevisions(
+    store,
+    nodes,
+    nodesorder,
+    resultcls,
+    deltaparentfn=None,
+    candeltafn=None,
+    rawsizefn=None,
+    revdifffn=None,
+    flagsfn=None,
+    deltamode=repository.CG_DELTAMODE_STD,
+    revisiondata=False,
+    assumehaveparentrevisions=False,
+):
     """Generic implementation of ifiledata.emitrevisions().
 
     Emitting revision data is subtly complex. This function attempts to
@@ -343,7 +366,7 @@
     elif nodesorder == 'linear':
         revs = set(frev(n) for n in nodes)
         revs = dagop.linearize(revs, store.parentrevs)
-    else: # storage and default
+    else:  # storage and default
         revs = sorted(frev(n) for n in nodes)
 
     prevrev = None
@@ -388,8 +411,7 @@
 
             # Base revision is a parent that hasn't been emitted already.
             # Use it if we can assume the receiver has the parent revision.
-            elif (assumehaveparentrevisions
-                  and deltaparentrev in (p1rev, p2rev)):
+            elif assumehaveparentrevisions and deltaparentrev in (p1rev, p2rev):
                 baserev = deltaparentrev
 
             # No guarantee the receiver has the delta parent. Send delta
@@ -432,16 +454,18 @@
                     else:
                         baserevisionsize = len(store.rawdata(baserev))
 
-            elif (baserev == nullrev
-                    and deltamode != repository.CG_DELTAMODE_PREV):
+            elif (
+                baserev == nullrev and deltamode != repository.CG_DELTAMODE_PREV
+            ):
                 revision = store.rawdata(node)
                 available.add(rev)
             else:
                 if revdifffn:
                     delta = revdifffn(baserev, rev)
                 else:
-                    delta = mdiff.textdiff(store.rawdata(baserev),
-                                           store.rawdata(rev))
+                    delta = mdiff.textdiff(
+                        store.rawdata(baserev), store.rawdata(rev)
+                    )
 
                 available.add(rev)
 
@@ -453,10 +477,12 @@
             flags=flagsfn(rev) if flagsfn else 0,
             baserevisionsize=baserevisionsize,
             revision=revision,
-            delta=delta)
+            delta=delta,
+        )
 
         prevrev = rev
 
+
 def deltaiscensored(delta, baserev, baselenfn):
     """Determine if a delta represents censored revision data.
 
@@ -483,4 +509,4 @@
 
     add = "\1\ncensored:"
     addlen = len(add)
-    return newlen >= addlen and delta[hlen:hlen + addlen] == add
+    return newlen >= addlen and delta[hlen : hlen + addlen] == add
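
The `delta[hlen : hlen + addlen]` spelling above is black applying PEP 8's
rule that a slice colon with complex operands is spaced like a binary
operator. Both spellings select the same bytes (illustrative values):

    data = b'0123456789'
    hlen, addlen = 2, 3
    assert data[hlen:hlen + addlen] == data[hlen : hlen + addlen] == b'234'
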
--- a/mercurial/utils/stringutil.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/utils/stringutil.py	Sun Oct 06 09:45:02 2019 -0400
@@ -30,6 +30,7 @@
 _regexescapemap = {ord(i): (b'\\' + i).decode('latin1') for i in _respecial}
 regexbytesescapemap = {i: (b'\\' + i) for i in _respecial}
 
+
 def reescape(pat):
     """Drop-in replacement for re.escape."""
     # NOTE: it is intentional that this works on unicodes and not
@@ -44,10 +45,12 @@
         return pat
     return pat.encode('latin1')
 
+
 def pprint(o, bprefix=False, indent=0, level=0):
     """Pretty print an object."""
     return b''.join(pprintgen(o, bprefix=bprefix, indent=indent, level=level))
 
+
 def pprintgen(o, bprefix=False, indent=0, level=0):
     """Pretty print an object to a generator of atoms.
 
@@ -83,8 +86,9 @@
             yield ' ' * (level * indent)
 
         for i, a in enumerate(o):
-            for chunk in pprintgen(a, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                a, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             if i + 1 < len(o):
@@ -113,14 +117,16 @@
             yield ' ' * (level * indent)
 
         for i, (k, v) in enumerate(sorted(o.items())):
-            for chunk in pprintgen(k, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                k, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             yield ': '
 
-            for chunk in pprintgen(v, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                v, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             if i + 1 < len(o):
@@ -149,8 +155,9 @@
             yield ' ' * (level * indent)
 
         for i, k in enumerate(sorted(o)):
-            for chunk in pprintgen(k, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                k, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             if i + 1 < len(o):
@@ -179,8 +186,9 @@
             yield ' ' * (level * indent)
 
         for i, a in enumerate(o):
-            for chunk in pprintgen(a, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                a, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             if i + 1 < len(o):
@@ -221,8 +229,9 @@
             except StopIteration:
                 last = True
 
-            for chunk in pprintgen(current, bprefix=bprefix, indent=indent,
-                                   level=level):
+            for chunk in pprintgen(
+                current, bprefix=bprefix, indent=indent, level=level
+            ):
                 yield chunk
 
             if not last:
@@ -241,6 +250,7 @@
     else:
         yield pycompat.byterepr(o)
 
+
 def prettyrepr(o):
     """Pretty print a representation of a possibly-nested object"""
     lines = []
@@ -267,6 +277,7 @@
         p0, p1 = q0, q1
     return '\n'.join('  ' * l + s for l, s in lines)
 
+
 def buildrepr(r):
     """Format an optional printable representation from unexpanded bits
 
@@ -290,10 +301,12 @@
     else:
         return pprint(r)
 
+
 def binary(s):
     """return true if a string is binary data"""
     return bool(s and '\0' in s)
 
+
 def stringmatcher(pattern, casesensitive=True):
     """
     accepts a string, possibly starting with 're:' or 'literal:' prefix.
@@ -340,8 +353,7 @@
                 flags = remod.I
             regex = remod.compile(pattern, flags)
         except remod.error as e:
-            raise error.ParseError(_('invalid regular expression: %s')
-                                   % e)
+            raise error.ParseError(_('invalid regular expression: %s') % e)
         return 're', pattern, regex.search
     elif pattern.startswith('literal:'):
         pattern = pattern[8:]
@@ -353,6 +365,7 @@
         match = lambda s: ipat == encoding.lower(s)
     return 'literal', pattern, match
 
+
 def shortuser(user):
     """Return a short representation of a user name or email address."""
     f = user.find('@')
@@ -360,7 +373,7 @@
         user = user[:f]
     f = user.find('<')
     if f >= 0:
-        user = user[f + 1:]
+        user = user[f + 1 :]
     f = user.find(' ')
     if f >= 0:
         user = user[:f]
@@ -369,6 +382,7 @@
         user = user[:f]
     return user
 
+
 def emailuser(user):
     """Return the user portion of an email address."""
     f = user.find('@')
@@ -376,15 +390,17 @@
         user = user[:f]
     f = user.find('<')
     if f >= 0:
-        user = user[f + 1:]
+        user = user[f + 1 :]
     return user
 
+
 def email(author):
     '''get email of author.'''
     r = author.find('>')
     if r == -1:
         r = None
-    return author[author.find('<') + 1:r]
+    return author[author.find('<') + 1 : r]
+
 
 def person(author):
     """Returns the name before an email address,
@@ -413,13 +429,16 @@
     f = author.find('@')
     return author[:f].replace('.', ' ')
 
+
 @attr.s(hash=True)
 class mailmapping(object):
     '''Represents a username/email key or value in
     a mailmap file'''
+
     email = attr.ib()
     name = attr.ib(default=None)
 
+
 def _ismailmaplineinvalid(names, emails):
     '''Returns True if the parsed names and emails
     in a mailmap entry are invalid.
@@ -444,6 +463,7 @@
     '''
     return not emails or not names and len(emails) < 2
 
+
 def parsemailmap(mailmapcontent):
     """Parses data in the .mailmap format
 
@@ -515,17 +535,16 @@
             continue
 
         mailmapkey = mailmapping(
-            email=emails[-1],
-            name=names[-1] if len(names) == 2 else None,
+            email=emails[-1], name=names[-1] if len(names) == 2 else None,
         )
 
         mailmap[mailmapkey] = mailmapping(
-            email=emails[0],
-            name=names[0] if names else None,
+            email=emails[0], name=names[0] if names else None,
         )
 
     return mailmap
 
+
 def mapname(mailmap, author):
     """Returns the author field according to the mailmap cache, or
     the original author field.
@@ -573,8 +592,10 @@
         proper.email if proper.email else commit.email,
     )
 
+
 _correctauthorformat = remod.compile(br'^[^<]+\s\<[^<>]+@[^<>]+\>$')
 
+
 def isauthorwellformed(author):
     '''Return True if the author field is well formed
     (ie "Contributor Name <contrib@email.dom>")
@@ -596,10 +617,12 @@
     '''
     return _correctauthorformat.match(author) is not None
 
+
 def ellipsis(text, maxlength=400):
     """Trim string to at most maxlength (default: 400) columns in display."""
     return encoding.trim(text, maxlength, ellipsis='...')
 
+
 def escapestr(s):
     if isinstance(s, memoryview):
         s = bytes(s)
@@ -607,9 +630,11 @@
     # Python 3 compatibility
     return codecs.escape_encode(s)[0]
 
+
 def unescapestr(s):
     return codecs.escape_decode(s)[0]
 
+
 def forcebytestr(obj):
     """Portably format an arbitrary object (e.g. exception) into a byte
     string."""
@@ -619,10 +644,12 @@
         # non-ascii string, may be lossy
         return pycompat.bytestr(encoding.strtolocal(str(obj)))
 
+
 def uirepr(s):
     # Avoid double backslash in Windows path repr()
     return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
 
+
 # delay import of textwrap
 def _MBTextWrapper(**kwargs):
     class tw(textwrap.TextWrapper):
@@ -640,6 +667,7 @@
 
         This requires a user decision to determine the width of such characters.
         """
+
         def _cutdown(self, ucstr, space_left):
             l = 0
             colwidth = encoding.ucolwidth
@@ -712,8 +740,11 @@
                     self._handle_long_word(chunks, cur_line, cur_len, width)
 
                 # If the last chunk on this line is all whitespace, drop it.
-                if (self.drop_whitespace and
-                    cur_line and cur_line[-1].strip() == r''):
+                if (
+                    self.drop_whitespace
+                    and cur_line
+                    and cur_line[-1].strip() == r''
+                ):
                     del cur_line[-1]
 
                 # Convert current line back to a string and store it in list
@@ -727,25 +758,43 @@
     _MBTextWrapper = tw
     return tw(**kwargs)
 
+
 def wrap(line, width, initindent='', hangindent=''):
     maxindent = max(len(hangindent), len(initindent))
     if width <= maxindent:
         # adjust for weird terminal size
         width = max(78, maxindent + 1)
-    line = line.decode(pycompat.sysstr(encoding.encoding),
-                       pycompat.sysstr(encoding.encodingmode))
-    initindent = initindent.decode(pycompat.sysstr(encoding.encoding),
-                                   pycompat.sysstr(encoding.encodingmode))
-    hangindent = hangindent.decode(pycompat.sysstr(encoding.encoding),
-                                   pycompat.sysstr(encoding.encodingmode))
-    wrapper = _MBTextWrapper(width=width,
-                             initial_indent=initindent,
-                             subsequent_indent=hangindent)
+    line = line.decode(
+        pycompat.sysstr(encoding.encoding),
+        pycompat.sysstr(encoding.encodingmode),
+    )
+    initindent = initindent.decode(
+        pycompat.sysstr(encoding.encoding),
+        pycompat.sysstr(encoding.encodingmode),
+    )
+    hangindent = hangindent.decode(
+        pycompat.sysstr(encoding.encoding),
+        pycompat.sysstr(encoding.encodingmode),
+    )
+    wrapper = _MBTextWrapper(
+        width=width, initial_indent=initindent, subsequent_indent=hangindent
+    )
     return wrapper.fill(line).encode(pycompat.sysstr(encoding.encoding))
 
-_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
-             '0': False, 'no': False, 'false': False, 'off': False,
-             'never': False}
+
+_booleans = {
+    '1': True,
+    'yes': True,
+    'true': True,
+    'on': True,
+    'always': True,
+    '0': False,
+    'no': False,
+    'false': False,
+    'off': False,
+    'never': False,
+}
+
 
 def parsebool(s):
     """Parse s into a boolean.
@@ -754,6 +803,7 @@
     """
     return _booleans.get(s.lower(), None)
 
+
 def evalpythonliteral(s):
     """Evaluate a string containing a Python literal expression"""
     # We could backport our tokenizer hack to rewrite '' to u'' if we want
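
Note that quote style is untouched throughout: the reformat ran black with
-S (skip string normalization), so single-quoted literals stay
single-quoted. Either spelling yields the same value (illustrative):

    a = 'literal'
    b = "literal"  # black's default output, had -S not been passed
    assert a == b
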
--- a/mercurial/verify.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/verify.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,11 +25,13 @@
 VERIFY_DEFAULT = 0
 VERIFY_FULL = 1
 
+
 def verify(repo, level=None):
     with repo.lock():
         v = verifier(repo, level)
         return v.verify()
 
+
 def _normpath(f):
     # under hg < 2.4, convert didn't sanitize paths properly, so a
     # converted repo may contain repeated slashes
@@ -37,6 +39,7 @@
         f = f.replace('//', '/')
     return f
 
+
 class verifier(object):
     def __init__(self, repo, level=None):
         self.repo = repo.unfiltered()
@@ -138,22 +141,33 @@
                 if f and len(linkrevs) > 1:
                     try:
                         # attempt to filter down to real linkrevs
-                        linkrevs = [l for l in linkrevs
-                                    if self.lrugetctx(l)[f].filenode() == node]
+                        linkrevs = [
+                            l
+                            for l in linkrevs
+                            if self.lrugetctx(l)[f].filenode() == node
+                        ]
                     except Exception:
                         pass
-                self._warn(_(" (expected %s)") % " ".join
-                           (map(pycompat.bytestr, linkrevs)))
-            lr = None # can't be trusted
+                self._warn(
+                    _(" (expected %s)")
+                    % " ".join(map(pycompat.bytestr, linkrevs))
+                )
+            lr = None  # can't be trusted
 
         try:
             p1, p2 = obj.parents(node)
             if p1 not in seen and p1 != nullid:
-                self._err(lr, _("unknown parent 1 %s of %s") %
-                    (short(p1), short(node)), f)
+                self._err(
+                    lr,
+                    _("unknown parent 1 %s of %s") % (short(p1), short(node)),
+                    f,
+                )
             if p2 not in seen and p2 != nullid:
-                self._err(lr, _("unknown parent 2 %s of %s") %
-                    (short(p2), short(node)), f)
+                self._err(
+                    lr,
+                    _("unknown parent 2 %s of %s") % (short(p2), short(node)),
+                    f,
+                )
         except Exception as inst:
             self._exc(lr, _("checking parents of %s") % short(node), inst, f)
 
@@ -178,8 +192,10 @@
             ui.warn(_("abandoned transaction found - run hg recover\n"))
 
         if ui.verbose or not self.revlogv1:
-            ui.status(_("repository uses revlog format %d\n") %
-                           (self.revlogv1 and 1 or 0))
+            ui.status(
+                _("repository uses revlog format %d\n")
+                % (self.revlogv1 and 1 or 0)
+            )
 
         # data verification
         mflinkrevs, filelinkrevs = self._verifychangelog()
@@ -189,18 +205,26 @@
         totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
 
         # final report
-        ui.status(_("checked %d changesets with %d changes to %d files\n") %
-                       (len(repo.changelog), filerevisions, totalfiles))
+        ui.status(
+            _("checked %d changesets with %d changes to %d files\n")
+            % (len(repo.changelog), filerevisions, totalfiles)
+        )
         if self.warnings:
             ui.warn(_("%d warnings encountered!\n") % self.warnings)
         if self.fncachewarned:
-            ui.warn(_('hint: run "hg debugrebuildfncache" to recover from '
-                      'corrupt fncache\n'))
+            ui.warn(
+                _(
+                    'hint: run "hg debugrebuildfncache" to recover from '
+                    'corrupt fncache\n'
+                )
+            )
         if self.errors:
             ui.warn(_("%d integrity errors encountered!\n") % self.errors)
             if self.badrevs:
-                ui.warn(_("(first damaged changeset appears to be %d)\n")
-                        % min(self.badrevs))
+                ui.warn(
+                    _("(first damaged changeset appears to be %d)\n")
+                    % min(self.badrevs)
+                )
             return 1
         return 0
 
@@ -230,8 +254,9 @@
         filelinkrevs = {}
         seen = {}
         self._checkrevlog(cl, "changelog", 0)
-        progress = ui.makeprogress(_('checking'), unit=_('changesets'),
-                                   total=len(repo))
+        progress = ui.makeprogress(
+            _('checking'), unit=_('changesets'), total=len(repo)
+        )
         for i in repo:
             progress.update(i)
             n = cl.node(i)
@@ -251,8 +276,9 @@
         progress.complete()
         return mflinkrevs, filelinkrevs
 
-    def _verifymanifest(self, mflinkrevs, dir="", storefiles=None,
-                        subdirprogress=None):
+    def _verifymanifest(
+        self, mflinkrevs, dir="", storefiles=None, subdirprogress=None
+    ):
         """verify the manifestlog content
 
         Inputs:
@@ -297,14 +323,15 @@
             label = dir
             revlogfiles = mf.files()
             storefiles.difference_update(revlogfiles)
-            if subdirprogress: # should be true since we're in a subdirectory
+            if subdirprogress:  # should be true since we're in a subdirectory
                 subdirprogress.increment()
         if self.refersmf:
             # Do not check manifest if there are only changelog entries with
             # null manifests.
             self._checkrevlog(mf, label, 0)
-        progress = ui.makeprogress(_('checking'), unit=_('manifests'),
-                                   total=len(mf))
+        progress = ui.makeprogress(
+            _('checking'), unit=_('manifests'), total=len(mf)
+        )
         for i in mf:
             if not dir:
                 progress.update(i)
@@ -313,8 +340,11 @@
             if n in mflinkrevs:
                 del mflinkrevs[n]
             elif dir:
-                self._err(lr, _("%s not in parent-directory manifest") %
-                         short(n), label)
+                self._err(
+                    lr,
+                    _("%s not in parent-directory manifest") % short(n),
+                    label,
+                )
             else:
                 self._err(lr, _("%s not in changesets") % short(n), label)
 
@@ -330,7 +360,8 @@
                         if not match.visitdir(fullpath):
                             continue
                         subdirnodes.setdefault(fullpath + '/', {}).setdefault(
-                            fn, []).append(lr)
+                            fn, []
+                        ).append(lr)
                     else:
                         if not match(fullpath):
                             continue
@@ -344,8 +375,12 @@
                     # code (eg: hash verification, filename are ordered, etc.)
                     mfdelta = mfl.get(dir, n).read()
                 except Exception as inst:
-                    self._exc(lr, _("reading full manifest %s") % short(n),
-                              inst, label)
+                    self._exc(
+                        lr,
+                        _("reading full manifest %s") % short(n),
+                        inst,
+                        label,
+                    )
 
         if not dir:
             progress.complete()
@@ -356,11 +391,21 @@
             changesetpairs = [(c, m) for m in mflinkrevs for c in mflinkrevs[m]]
             for c, m in sorted(changesetpairs):
                 if dir:
-                    self._err(c, _("parent-directory manifest refers to unknown"
-                                   " revision %s") % short(m), label)
+                    self._err(
+                        c,
+                        _(
+                            "parent-directory manifest refers to unknown"
+                            " revision %s"
+                        )
+                        % short(m),
+                        label,
+                    )
                 else:
-                    self._err(c, _("changeset refers to unknown revision %s") %
-                              short(m), label)
+                    self._err(
+                        c,
+                        _("changeset refers to unknown revision %s") % short(m),
+                        label,
+                    )
 
         if not dir and subdirnodes:
             self.ui.status(_("checking directory manifests\n"))
@@ -373,12 +418,14 @@
                 elif (size > 0 or not revlogv1) and f.startswith('meta/'):
                     storefiles.add(_normpath(f))
                     subdirs.add(os.path.dirname(f))
-            subdirprogress = ui.makeprogress(_('checking'), unit=_('manifests'),
-                                             total=len(subdirs))
+            subdirprogress = ui.makeprogress(
+                _('checking'), unit=_('manifests'), total=len(subdirs)
+            )
 
         for subdir, linkrevs in subdirnodes.iteritems():
-            subdirfilenodes = self._verifymanifest(linkrevs, subdir, storefiles,
-                                                   subdirprogress)
+            subdirfilenodes = self._verifymanifest(
+                linkrevs, subdir, storefiles, subdirprogress
+            )
             for f, onefilenodes in subdirfilenodes.iteritems():
                 filenodes.setdefault(f, {}).update(onefilenodes)
 
@@ -396,8 +443,9 @@
         ui.status(_("crosschecking files in changesets and manifests\n"))
 
         total = len(filelinkrevs) + len(filenodes)
-        progress = ui.makeprogress(_('crosschecking'), unit=_('files'),
-                                   total=total)
+        progress = ui.makeprogress(
+            _('crosschecking'), unit=_('files'), total=total
+        )
         if self.havemf:
             for f in sorted(filelinkrevs):
                 progress.increment()
@@ -443,8 +491,9 @@
 
         files = sorted(set(filenodes) | set(filelinkrevs))
         revisions = 0
-        progress = ui.makeprogress(_('checking'), unit=_('files'),
-                                   total=len(files))
+        progress = ui.makeprogress(
+            _('checking'), unit=_('files'), total=len(files)
+        )
         for i, f in enumerate(files):
             progress.update(i, item=f)
             try:
@@ -469,8 +518,9 @@
                     storefiles.remove(ff)
                 except KeyError:
                     if self.warnorphanstorefiles:
-                        self._warn(_(" warning: revlog '%s' not in fncache!") %
-                                  ff)
+                        self._warn(
+                            _(" warning: revlog '%s' not in fncache!") % ff
+                        )
                         self.fncachewarned = True
 
             if not len(fl) and (self.havecl or self.havemf):
@@ -487,12 +537,16 @@
                     if problem.warning:
                         self._warn(problem.warning)
                     elif problem.error:
-                        self._err(linkrev if linkrev is not None else lr,
-                                  problem.error, f)
+                        self._err(
+                            linkrev if linkrev is not None else lr,
+                            problem.error,
+                            f,
+                        )
                     else:
                         raise error.ProgrammingError(
                             'problem instance does not set warning or error '
-                            'attribute: %s' % problem.msg)
+                            'attribute: %s' % problem.msg
+                        )
 
             seen = {}
             for i in fl:
@@ -518,31 +572,49 @@
                         if lr is not None and ui.verbose:
                             ctx = lrugetctx(lr)
                             if not any(rp[0] in pctx for pctx in ctx.parents()):
-                                self._warn(_("warning: copy source of '%s' not"
-                                            " in parents of %s") % (f, ctx))
+                                self._warn(
+                                    _(
+                                        "warning: copy source of '%s' not"
+                                        " in parents of %s"
+                                    )
+                                    % (f, ctx)
+                                )
                         fl2 = repo.file(rp[0])
                         if not len(fl2):
-                            self._err(lr,
-                                      _("empty or missing copy source revlog "
-                                        "%s:%s") % (rp[0],
-                                      short(rp[1])),
-                                      f)
+                            self._err(
+                                lr,
+                                _(
+                                    "empty or missing copy source revlog "
+                                    "%s:%s"
+                                )
+                                % (rp[0], short(rp[1])),
+                                f,
+                            )
                         elif rp[1] == nullid:
-                            ui.note(_("warning: %s@%s: copy source"
-                                      " revision is nullid %s:%s\n")
-                                % (f, lr, rp[0], short(rp[1])))
+                            ui.note(
+                                _(
+                                    "warning: %s@%s: copy source"
+                                    " revision is nullid %s:%s\n"
+                                )
+                                % (f, lr, rp[0], short(rp[1]))
+                            )
                         else:
                             fl2.rev(rp[1])
                 except Exception as inst:
-                    self._exc(lr, _("checking rename of %s") % short(n),
-                              inst, f)
+                    self._exc(
+                        lr, _("checking rename of %s") % short(n), inst, f
+                    )
 
             # cross-check
             if f in filenodes:
                 fns = [(v, k) for k, v in filenodes[f].iteritems()]
                 for lr, node in sorted(fns):
-                    self._err(lr, _("manifest refers to unknown revision %s") %
-                              short(node), f)
+                    self._err(
+                        lr,
+                        _("manifest refers to unknown revision %s")
+                        % short(node),
+                        f,
+                    )
         progress.complete()
 
         if self.warnorphanstorefiles:
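
Where a translated message no longer fits on one line, black stacks the
pieces as adjacent string literals inside the _() parentheses, relying on
Python's implicit concatenation of neighbouring string constants. A sketch
reusing the message text from the hunk above:

    msg = (
        "empty or missing copy source revlog "
        "%s:%s"
    )
    assert msg % ('path', '1234') == 'empty or missing copy source revlog path:1234'
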
--- a/mercurial/vfs.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/vfs.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,23 +22,26 @@
     util,
 )
 
+
 def _avoidambig(path, oldstat):
     """Avoid file stat ambiguity forcibly
 
     This function copies the ``path`` file if it is owned by
     another user (see issue5418 and issue5584 for details).
     """
+
     def checkandavoid():
         newstat = util.filestat.frompath(path)
         # return whether file stat ambiguity is (already) avoided
-        return (not newstat.isambig(oldstat) or
-                newstat.avoidambig(path, oldstat))
+        return not newstat.isambig(oldstat) or newstat.avoidambig(path, oldstat)
+
     if not checkandavoid():
         # simply copy to change owner of path to get privilege to
         # advance mtime (see issue5418)
         util.rename(util.mktempcopy(path), path)
         checkandavoid()
 
+
 class abstractvfs(object):
     """Abstract base class; cannot be instantiated"""
 
@@ -173,8 +176,9 @@
         return os.mkdir(self.join(path))
 
     def mkstemp(self, suffix='', prefix='tmp', dir=None):
-        fd, name = pycompat.mkstemp(suffix=suffix, prefix=prefix,
-                                    dir=self.join(dir))
+        fd, name = pycompat.mkstemp(
+            suffix=suffix, prefix=prefix, dir=self.join(dir)
+        )
         dname, fname = util.split(name)
         if dir:
             return fd, os.path.join(dir, fname)
@@ -227,6 +231,7 @@
         If ``forcibly``, this tries to remove READ-ONLY files, too.
         """
         if forcibly:
+
             def onerror(function, path, excinfo):
                 if function is not os.remove:
                     raise
@@ -236,10 +241,12 @@
                     raise
                 os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                 os.remove(path)
+
         else:
             onerror = None
-        return shutil.rmtree(self.join(path),
-                             ignore_errors=ignore_errors, onerror=onerror)
+        return shutil.rmtree(
+            self.join(path), ignore_errors=ignore_errors, onerror=onerror
+        )
 
     def setflags(self, path, l, x):
         return util.setflags(self.join(path), l, x)
@@ -255,8 +262,9 @@
         util.tryunlink(self.join(path))
 
     def unlinkpath(self, path=None, ignoremissing=False, rmdir=True):
-        return util.unlinkpath(self.join(path), ignoremissing=ignoremissing,
-                               rmdir=rmdir)
+        return util.unlinkpath(
+            self.join(path), ignoremissing=ignoremissing, rmdir=rmdir
+        )
 
     def utime(self, path=None, t=None):
         return os.utime(self.join(path), t)
@@ -294,7 +302,8 @@
         vfs = getattr(self, 'vfs', self)
         if getattr(vfs, '_backgroundfilecloser', None):
             raise error.Abort(
-                _('can only have 1 active background file closer'))
+                _('can only have 1 active background file closer')
+            )
 
         with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc:
             try:
@@ -303,6 +312,7 @@
             finally:
                 vfs._backgroundfilecloser = None
 
+
 class vfs(abstractvfs):
     '''Operate files relative to a base directory
 
@@ -313,8 +323,15 @@
     (b) the base directory is managed by hg and considered sort-of append-only.
     See pathutil.pathauditor() for details.
     '''
-    def __init__(self, base, audit=True, cacheaudited=False, expandpath=False,
-                 realpath=False):
+
+    def __init__(
+        self,
+        base,
+        audit=True,
+        cacheaudited=False,
+        expandpath=False,
+        realpath=False,
+    ):
         if expandpath:
             base = util.expandpath(base)
         if realpath:
@@ -324,7 +341,7 @@
         if audit:
             self.audit = pathutil.pathauditor(self.base, cached=cacheaudited)
         else:
-            self.audit = (lambda path, mode=None: True)
+            self.audit = lambda path, mode=None: True
         self.createmode = None
         self._trustnlink = None
         self.options = {}
@@ -351,9 +368,17 @@
                 raise error.Abort("%s: %r" % (r, path))
             self.audit(path, mode=mode)
 
-    def __call__(self, path, mode="r", atomictemp=False, notindexed=False,
-                 backgroundclose=False, checkambig=False, auditpath=True,
-                 makeparentdirs=True):
+    def __call__(
+        self,
+        path,
+        mode="r",
+        atomictemp=False,
+        notindexed=False,
+        backgroundclose=False,
+        checkambig=False,
+        auditpath=True,
+        makeparentdirs=True,
+    ):
         '''Open ``path`` file, which is relative to vfs root.
 
         By default, parent directories are created as needed. Newly created
@@ -389,7 +414,7 @@
         f = self.join(path)
 
         if "b" not in mode:
-            mode += "b" # for that other OS
+            mode += "b"  # for that other OS
 
         nlink = -1
         if mode not in ('r', 'rb'):
@@ -400,8 +425,9 @@
                 if atomictemp:
                     if makeparentdirs:
                         util.makedirs(dirname, self.createmode, notindexed)
-                    return util.atomictempfile(f, mode, self.createmode,
-                                               checkambig=checkambig)
+                    return util.atomictempfile(
+                        f, mode, self.createmode, checkambig=checkambig
+                    )
                 try:
                     if 'w' in mode:
                         util.unlink(f)
@@ -412,7 +438,7 @@
                         with util.posixfile(f):
                             nlink = util.nlinks(f)
                             if nlink < 1:
-                                nlink = 2 # force mktempcopy (issue1922)
+                                nlink = 2  # force mktempcopy (issue1922)
                 except (OSError, IOError) as e:
                     if e.errno != errno.ENOENT:
                         raise
@@ -430,16 +456,25 @@
 
         if checkambig:
             if mode in ('r', 'rb'):
-                raise error.Abort(_('implementation error: mode %s is not'
-                                    ' valid for checkambig=True') % mode)
+                raise error.Abort(
+                    _(
+                        'implementation error: mode %s is not'
+                        ' valid for checkambig=True'
+                    )
+                    % mode
+                )
             fp = checkambigatclosing(fp)
 
-        if (backgroundclose and
-                isinstance(threading.currentThread(), threading._MainThread)):
+        if backgroundclose and isinstance(
+            threading.currentThread(), threading._MainThread
+        ):
             if not self._backgroundfilecloser:
-                raise error.Abort(_('backgroundclose can only be used when a '
-                                  'backgroundclosing context manager is active')
-                                  )
+                raise error.Abort(
+                    _(
+                        'backgroundclose can only be used when a '
+                        'backgroundclosing context manager is active'
+                    )
+                )
 
             fp = delayclosedfile(fp, self._backgroundfilecloser)
 
@@ -456,9 +491,12 @@
             try:
                 os.symlink(src, linkname)
             except OSError as err:
-                raise OSError(err.errno, _('could not symlink to %r: %s') %
-                              (src, encoding.strtolocal(err.strerror)),
-                              linkname)
+                raise OSError(
+                    err.errno,
+                    _('could not symlink to %r: %s')
+                    % (src, encoding.strtolocal(err.strerror)),
+                    linkname,
+                )
         else:
             self.write(dst, src)
 
@@ -468,8 +506,10 @@
         else:
             return self.base
 
+
 opener = vfs
 
+
 class proxyvfs(abstractvfs):
     def __init__(self, vfs):
         self.vfs = vfs
@@ -485,6 +525,7 @@
     def options(self, value):
         self.vfs.options = value
 
+
 class filtervfs(proxyvfs, abstractvfs):
     '''Wrapper vfs for filtering filenames with a function.'''
 
@@ -501,8 +542,10 @@
         else:
             return self.vfs.join(path)
 
+
 filteropener = filtervfs
 
+
 class readonlyvfs(proxyvfs):
     '''Wrapper vfs preventing any writing.'''
 
@@ -517,11 +560,13 @@
     def join(self, path, *insidef):
         return self.vfs.join(path, *insidef)
 
+
 class closewrapbase(object):
     """Base class of wrapper, which hooks closing
 
     Do not instantiate outside of the vfs layer.
     """
+
     def __init__(self, fh):
         object.__setattr__(self, r'_origfh', fh)
 
@@ -544,11 +589,13 @@
     def close(self):
         raise NotImplementedError('attempted instantiating ' + str(type(self)))
 
+
 class delayclosedfile(closewrapbase):
     """Proxy for a file object whose close is delayed.
 
     Do not instantiate outside of the vfs layer.
     """
+
     def __init__(self, fh, closer):
         super(delayclosedfile, self).__init__(fh)
         object.__setattr__(self, r'_closer', closer)
@@ -559,8 +606,10 @@
     def close(self):
         self._closer.close(self._origfh)
 
+
 class backgroundfilecloser(object):
     """Coordinates background closing of file handles on multiple threads."""
+
     def __init__(self, ui, expectedcount=-1):
         self._running = False
         self._entered = False
@@ -587,8 +636,9 @@
         maxqueue = ui.configint('worker', 'backgroundclosemaxqueue')
         threadcount = ui.configint('worker', 'backgroundclosethreadcount')
 
-        ui.debug('starting %d threads for background file closing\n' %
-                 threadcount)
+        ui.debug(
+            'starting %d threads for background file closing\n' % threadcount
+        )
 
         self._queue = pycompat.queue.Queue(maxsize=maxqueue)
         self._running = True
@@ -629,8 +679,9 @@
     def close(self, fh):
         """Schedule a file for closing."""
         if not self._entered:
-            raise error.Abort(_('can only call close() when context manager '
-                              'active'))
+            raise error.Abort(
+                _('can only call close() when context manager active')
+            )
 
         # If a background thread encountered an exception, raise now so we fail
         # fast. Otherwise we may potentially go on for minutes until the error
@@ -647,6 +698,7 @@
 
         self._queue.put(fh, block=True, timeout=None)
 
+
 class checkambigatclosing(closewrapbase):
     """Proxy for a file object, to avoid ambiguity of file stat
 
@@ -657,6 +709,7 @@
 
     Do not instantiate outside of the vfs layer.
     """
+
     def __init__(self, fh):
         super(checkambigatclosing, self).__init__(fh)
         object.__setattr__(self, r'_oldstat', util.filestat.frompath(fh.name))
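
The checkambig plumbing above (checkambigatclosing, _avoidambig) guards against file stat ambiguity: a file rewritten in place within the same clock second can keep an identical size and mtime, so a cached reader has no way to notice the change. A minimal standalone sketch of the avoidance step, assuming a simplified helper in place of Mercurial's util.filestat API:

import os

def avoid_mtime_ambiguity(path, old_mtime):
    # If the rewritten file still reports the old mtime (same second),
    # advance mtime by one second so readers observe a change; mask to
    # 31 bits to keep the timestamp in signed 32-bit range.
    st = os.stat(path)
    if int(st.st_mtime) == int(old_mtime):
        advanced = (int(old_mtime) + 1) & 0x7FFFFFFF
        os.utime(path, (advanced, advanced))

When utime is not permitted, the real _avoidambig above falls back to copying the file over itself to take ownership first (issue5418).
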
--- a/mercurial/win32.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/win32.py	Sun Oct 06 09:45:02 2019 -0400
@@ -55,21 +55,25 @@
     _WPARAM = ctypes.c_ulonglong
     _LPARAM = ctypes.c_longlong
 
+
 class _FILETIME(ctypes.Structure):
-    _fields_ = [(r'dwLowDateTime', _DWORD),
-                (r'dwHighDateTime', _DWORD)]
+    _fields_ = [(r'dwLowDateTime', _DWORD), (r'dwHighDateTime', _DWORD)]
+
 
 class _BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
-    _fields_ = [(r'dwFileAttributes', _DWORD),
-                (r'ftCreationTime', _FILETIME),
-                (r'ftLastAccessTime', _FILETIME),
-                (r'ftLastWriteTime', _FILETIME),
-                (r'dwVolumeSerialNumber', _DWORD),
-                (r'nFileSizeHigh', _DWORD),
-                (r'nFileSizeLow', _DWORD),
-                (r'nNumberOfLinks', _DWORD),
-                (r'nFileIndexHigh', _DWORD),
-                (r'nFileIndexLow', _DWORD)]
+    _fields_ = [
+        (r'dwFileAttributes', _DWORD),
+        (r'ftCreationTime', _FILETIME),
+        (r'ftLastAccessTime', _FILETIME),
+        (r'ftLastWriteTime', _FILETIME),
+        (r'dwVolumeSerialNumber', _DWORD),
+        (r'nFileSizeHigh', _DWORD),
+        (r'nFileSizeLow', _DWORD),
+        (r'nNumberOfLinks', _DWORD),
+        (r'nFileIndexHigh', _DWORD),
+        (r'nFileIndexLow', _DWORD),
+    ]
+
 
 # CreateFile
 _FILE_SHARE_READ = 0x00000001
@@ -90,51 +94,65 @@
 # GetExitCodeProcess
 _STILL_ACTIVE = 259
 
+
 class _STARTUPINFO(ctypes.Structure):
-    _fields_ = [(r'cb', _DWORD),
-                (r'lpReserved', _LPSTR),
-                (r'lpDesktop', _LPSTR),
-                (r'lpTitle', _LPSTR),
-                (r'dwX', _DWORD),
-                (r'dwY', _DWORD),
-                (r'dwXSize', _DWORD),
-                (r'dwYSize', _DWORD),
-                (r'dwXCountChars', _DWORD),
-                (r'dwYCountChars', _DWORD),
-                (r'dwFillAttribute', _DWORD),
-                (r'dwFlags', _DWORD),
-                (r'wShowWindow', _WORD),
-                (r'cbReserved2', _WORD),
-                (r'lpReserved2', ctypes.c_char_p),
-                (r'hStdInput', _HANDLE),
-                (r'hStdOutput', _HANDLE),
-                (r'hStdError', _HANDLE)]
+    _fields_ = [
+        (r'cb', _DWORD),
+        (r'lpReserved', _LPSTR),
+        (r'lpDesktop', _LPSTR),
+        (r'lpTitle', _LPSTR),
+        (r'dwX', _DWORD),
+        (r'dwY', _DWORD),
+        (r'dwXSize', _DWORD),
+        (r'dwYSize', _DWORD),
+        (r'dwXCountChars', _DWORD),
+        (r'dwYCountChars', _DWORD),
+        (r'dwFillAttribute', _DWORD),
+        (r'dwFlags', _DWORD),
+        (r'wShowWindow', _WORD),
+        (r'cbReserved2', _WORD),
+        (r'lpReserved2', ctypes.c_char_p),
+        (r'hStdInput', _HANDLE),
+        (r'hStdOutput', _HANDLE),
+        (r'hStdError', _HANDLE),
+    ]
+
 
 class _PROCESS_INFORMATION(ctypes.Structure):
-    _fields_ = [(r'hProcess', _HANDLE),
-                (r'hThread', _HANDLE),
-                (r'dwProcessId', _DWORD),
-                (r'dwThreadId', _DWORD)]
+    _fields_ = [
+        (r'hProcess', _HANDLE),
+        (r'hThread', _HANDLE),
+        (r'dwProcessId', _DWORD),
+        (r'dwThreadId', _DWORD),
+    ]
+
 
 _CREATE_NO_WINDOW = 0x08000000
 _SW_HIDE = 0
 
+
 class _COORD(ctypes.Structure):
-    _fields_ = [(r'X', ctypes.c_short),
-                (r'Y', ctypes.c_short)]
+    _fields_ = [(r'X', ctypes.c_short), (r'Y', ctypes.c_short)]
+
 
 class _SMALL_RECT(ctypes.Structure):
-    _fields_ = [(r'Left', ctypes.c_short),
-                (r'Top', ctypes.c_short),
-                (r'Right', ctypes.c_short),
-                (r'Bottom', ctypes.c_short)]
+    _fields_ = [
+        (r'Left', ctypes.c_short),
+        (r'Top', ctypes.c_short),
+        (r'Right', ctypes.c_short),
+        (r'Bottom', ctypes.c_short),
+    ]
+
 
 class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
-    _fields_ = [(r'dwSize', _COORD),
-                (r'dwCursorPosition', _COORD),
-                (r'wAttributes', _WORD),
-                (r'srWindow', _SMALL_RECT),
-                (r'dwMaximumWindowSize', _COORD)]
+    _fields_ = [
+        (r'dwSize', _COORD),
+        (r'dwCursorPosition', _COORD),
+        (r'wAttributes', _WORD),
+        (r'srWindow', _SMALL_RECT),
+        (r'dwMaximumWindowSize', _COORD),
+    ]
+
 
 _STD_OUTPUT_HANDLE = _DWORD(-11).value
 _STD_ERROR_HANDLE = _DWORD(-12).value
@@ -150,11 +168,9 @@
 class CERT_CHAIN_CONTEXT(ctypes.Structure):
     _fields_ = (
         (r"cbSize", _DWORD),
-
         # CERT_TRUST_STATUS struct
         (r"dwErrorStatus", _DWORD),
         (r"dwInfoStatus", _DWORD),
-
         (r"cChain", _DWORD),
         (r"rgpChain", ctypes.c_void_p),
         (r"cLowerQualityChainContext", _DWORD),
@@ -163,15 +179,16 @@
         (r"dwRevocationFreshnessTime", _DWORD),
     )
 
+
 class CERT_USAGE_MATCH(ctypes.Structure):
     _fields_ = (
         (r"dwType", _DWORD),
-
-         # CERT_ENHKEY_USAGE struct
+        # CERT_ENHKEY_USAGE struct
         (r"cUsageIdentifier", _DWORD),
-        (r"rgpszUsageIdentifier", ctypes.c_void_p), # LPSTR *
+        (r"rgpszUsageIdentifier", ctypes.c_void_p),  # LPSTR *
     )
 
+
 class CERT_CHAIN_PARA(ctypes.Structure):
     _fields_ = (
         (r"cbSize", _DWORD),
@@ -180,35 +197,45 @@
         (r"dwUrlRetrievalTimeout", _DWORD),
         (r"fCheckRevocationFreshnessTime", _BOOL),
         (r"dwRevocationFreshnessTime", _DWORD),
-        (r"pftCacheResync", ctypes.c_void_p), # LPFILETIME
-        (r"pStrongSignPara", ctypes.c_void_p), # PCCERT_STRONG_SIGN_PARA
+        (r"pftCacheResync", ctypes.c_void_p),  # LPFILETIME
+        (r"pStrongSignPara", ctypes.c_void_p),  # PCCERT_STRONG_SIGN_PARA
         (r"dwStrongSignFlags", _DWORD),
     )
 
+
 # types of parameters of C functions used (required by pypy)
 
-_crypt32.CertCreateCertificateContext.argtypes = [_DWORD, # cert encoding
-                                                  ctypes.c_char_p, # cert
-                                                  _DWORD] # cert size
+_crypt32.CertCreateCertificateContext.argtypes = [
+    _DWORD,  # cert encoding
+    ctypes.c_char_p,  # cert
+    _DWORD,  # cert size
+]
 _crypt32.CertCreateCertificateContext.restype = _PCCERT_CONTEXT
 
 _crypt32.CertGetCertificateChain.argtypes = [
-        ctypes.c_void_p, # HCERTCHAINENGINE
-        _PCCERT_CONTEXT,
-        ctypes.c_void_p, # LPFILETIME
-        ctypes.c_void_p, # HCERTSTORE
-        ctypes.c_void_p, # PCERT_CHAIN_PARA
-        _DWORD,
-        ctypes.c_void_p, # LPVOID
-        ctypes.c_void_p  # PCCERT_CHAIN_CONTEXT *
-    ]
+    ctypes.c_void_p,  # HCERTCHAINENGINE
+    _PCCERT_CONTEXT,
+    ctypes.c_void_p,  # LPFILETIME
+    ctypes.c_void_p,  # HCERTSTORE
+    ctypes.c_void_p,  # PCERT_CHAIN_PARA
+    _DWORD,
+    ctypes.c_void_p,  # LPVOID
+    ctypes.c_void_p,  # PCCERT_CHAIN_CONTEXT *
+]
 _crypt32.CertGetCertificateChain.restype = _BOOL
 
 _crypt32.CertFreeCertificateContext.argtypes = [_PCCERT_CONTEXT]
 _crypt32.CertFreeCertificateContext.restype = _BOOL
 
-_kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p,
-    _DWORD, _DWORD, _HANDLE]
+_kernel32.CreateFileA.argtypes = [
+    _LPCSTR,
+    _DWORD,
+    _DWORD,
+    ctypes.c_void_p,
+    _DWORD,
+    _DWORD,
+    _HANDLE,
+]
 _kernel32.CreateFileA.restype = _HANDLE
 
 _kernel32.GetFileInformationByHandle.argtypes = [_HANDLE, ctypes.c_void_p]
@@ -237,8 +264,16 @@
 _kernel32.GetDriveTypeA.argtypes = [_LPCSTR]
 _kernel32.GetDriveTypeA.restype = _UINT
 
-_kernel32.GetVolumeInformationA.argtypes = [_LPCSTR, ctypes.c_void_p, _DWORD,
-    ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, _DWORD]
+_kernel32.GetVolumeInformationA.argtypes = [
+    _LPCSTR,
+    ctypes.c_void_p,
+    _DWORD,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+    _DWORD,
+]
 _kernel32.GetVolumeInformationA.restype = _BOOL
 
 _kernel32.GetVolumePathNameA.argtypes = [_LPCSTR, ctypes.c_void_p, _DWORD]
@@ -256,9 +291,18 @@
 _kernel32.GetModuleFileNameA.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD]
 _kernel32.GetModuleFileNameA.restype = _DWORD
 
-_kernel32.CreateProcessA.argtypes = [_LPCSTR, _LPCSTR, ctypes.c_void_p,
-    ctypes.c_void_p, _BOOL, _DWORD, ctypes.c_void_p, _LPCSTR, ctypes.c_void_p,
-    ctypes.c_void_p]
+_kernel32.CreateProcessA.argtypes = [
+    _LPCSTR,
+    _LPCSTR,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+    _BOOL,
+    _DWORD,
+    ctypes.c_void_p,
+    _LPCSTR,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+]
 _kernel32.CreateProcessA.restype = _BOOL
 
 _kernel32.ExitProcess.argtypes = [_UINT]
@@ -296,24 +340,39 @@
 _user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM]
 _user32.EnumWindows.restype = _BOOL
 
-_kernel32.PeekNamedPipe.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD,
-    ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+_kernel32.PeekNamedPipe.argtypes = [
+    _HANDLE,
+    ctypes.c_void_p,
+    _DWORD,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+    ctypes.c_void_p,
+]
 _kernel32.PeekNamedPipe.restype = _BOOL
 
+
 def _raiseoserror(name):
     # Force the code to a signed int to avoid an 'int too large' error.
     # See https://bugs.python.org/issue28474
     code = _kernel32.GetLastError()
-    if code > 0x7fffffff:
-        code -= 2**32
+    if code > 0x7FFFFFFF:
+        code -= 2 ** 32
     err = ctypes.WinError(code=code)
-    raise OSError(err.errno, r'%s: %s' % (encoding.strfromlocal(name),
-                                          err.strerror))
+    raise OSError(
+        err.errno, r'%s: %s' % (encoding.strfromlocal(name), err.strerror)
+    )
+
 
 def _getfileinfo(name):
-    fh = _kernel32.CreateFileA(name, 0,
-            _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
-            None, _OPEN_EXISTING, _FILE_FLAG_BACKUP_SEMANTICS, None)
+    fh = _kernel32.CreateFileA(
+        name,
+        0,
+        _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
+        None,
+        _OPEN_EXISTING,
+        _FILE_FLAG_BACKUP_SEMANTICS,
+        None,
+    )
     if fh == _INVALID_HANDLE_VALUE:
         _raiseoserror(name)
     try:
@@ -324,6 +383,7 @@
     finally:
         _kernel32.CloseHandle(fh)
 
+
 def checkcertificatechain(cert, build=True):
     '''Tests the given certificate to see if there is a complete chain to a
        trusted root certificate.  As a side effect, missing certificates are
@@ -336,11 +396,13 @@
     chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
 
     pchainctx = chainctxptr()
-    chainpara = CERT_CHAIN_PARA(cbSize=ctypes.sizeof(CERT_CHAIN_PARA),
-                                RequestedUsage=CERT_USAGE_MATCH())
+    chainpara = CERT_CHAIN_PARA(
+        cbSize=ctypes.sizeof(CERT_CHAIN_PARA), RequestedUsage=CERT_USAGE_MATCH()
+    )
 
-    certctx = _crypt32.CertCreateCertificateContext(X509_ASN_ENCODING, cert,
-                                                    len(cert))
+    certctx = _crypt32.CertCreateCertificateContext(
+        X509_ASN_ENCODING, cert, len(cert)
+    )
     if certctx is None:
         _raiseoserror('CertCreateCertificateContext')
 
@@ -351,14 +413,16 @@
 
     try:
         # Building the certificate chain will update root certs as necessary.
-        if not _crypt32.CertGetCertificateChain(None,      # hChainEngine
-                                                certctx,   # pCertContext
-                                                None,      # pTime
-                                                None,      # hAdditionalStore
-                                                ctypes.byref(chainpara),
-                                                flags,
-                                                None,      # pvReserved
-                                                ctypes.byref(pchainctx)):
+        if not _crypt32.CertGetCertificateChain(
+            None,  # hChainEngine
+            certctx,  # pCertContext
+            None,  # pTime
+            None,  # hAdditionalStore
+            ctypes.byref(chainpara),
+            flags,
+            None,  # pvReserved
+            ctypes.byref(pchainctx),
+        ):
             _raiseoserror('CertGetCertificateChain')
 
         chainctx = pchainctx.contents
@@ -369,24 +433,30 @@
             _crypt32.CertFreeCertificateChain(pchainctx)
         _crypt32.CertFreeCertificateContext(certctx)
 
+
 def oslink(src, dst):
     try:
         if not _kernel32.CreateHardLinkA(dst, src, None):
             _raiseoserror(src)
-    except AttributeError: # Wine doesn't support this function
+    except AttributeError:  # Wine doesn't support this function
         _raiseoserror(src)
 
+
 def nlinks(name):
     '''return number of hardlinks for the given file'''
     return _getfileinfo(name).nNumberOfLinks
 
+
 def samefile(path1, path2):
     '''Returns whether path1 and path2 refer to the same file or directory.'''
     res1 = _getfileinfo(path1)
     res2 = _getfileinfo(path2)
-    return (res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
+    return (
+        res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
         and res1.nFileIndexHigh == res2.nFileIndexHigh
-        and res1.nFileIndexLow == res2.nFileIndexLow)
+        and res1.nFileIndexLow == res2.nFileIndexLow
+    )
+
 
 def samedevice(path1, path2):
     '''Returns whether path1 and path2 are on the same device.'''
@@ -394,12 +464,14 @@
     res2 = _getfileinfo(path2)
     return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
 
+
 def peekpipe(pipe):
     handle = msvcrt.get_osfhandle(pipe.fileno())
     avail = _DWORD()
 
-    if not _kernel32.PeekNamedPipe(handle, None, 0, None, ctypes.byref(avail),
-                                   None):
+    if not _kernel32.PeekNamedPipe(
+        handle, None, 0, None, ctypes.byref(avail), None
+    ):
         err = _kernel32.GetLastError()
         if err == _ERROR_BROKEN_PIPE:
             return 0
@@ -407,12 +479,14 @@
 
     return avail.value
 
+
 def lasterrorwaspipeerror(err):
     if err.errno != errno.EINVAL:
         return False
     err = _kernel32.GetLastError()
     return err == _ERROR_BROKEN_PIPE or err == _ERROR_NO_DATA
 
+
 def testpid(pid):
     '''return True if pid is still running or unable to
     determine, False otherwise'''
@@ -426,17 +500,19 @@
             _kernel32.CloseHandle(h)
     return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER
 
+
 def executablepath():
     '''return full path of hg.exe'''
     size = 600
     buf = ctypes.create_string_buffer(size + 1)
     len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size)
     if len == 0:
-        raise ctypes.WinError() # Note: WinError is a function
+        raise ctypes.WinError()  # Note: WinError is a function
     elif len == size:
         raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
     return buf.value
 
+
 def getvolumename(path):
     """Get the mount point of the filesystem from a directory or file
     (best-effort)
@@ -452,10 +528,11 @@
     buf = ctypes.create_string_buffer(size)
 
     if not _kernel32.GetVolumePathNameA(realpath, ctypes.byref(buf), size):
-        raise ctypes.WinError() # Note: WinError is a function
+        raise ctypes.WinError()  # Note: WinError is a function
 
     return buf.value
 
+
 def getfstype(path):
     """Get the filesystem type name from a directory or file (best-effort)
 
@@ -467,19 +544,25 @@
 
     if t == _DRIVE_REMOTE:
         return 'cifs'
-    elif t not in (_DRIVE_REMOVABLE, _DRIVE_FIXED, _DRIVE_CDROM,
-                   _DRIVE_RAMDISK):
+    elif t not in (
+        _DRIVE_REMOVABLE,
+        _DRIVE_FIXED,
+        _DRIVE_CDROM,
+        _DRIVE_RAMDISK,
+    ):
         return None
 
     size = _MAX_PATH + 1
     name = ctypes.create_string_buffer(size)
 
-    if not _kernel32.GetVolumeInformationA(volume, None, 0, None, None, None,
-                                           ctypes.byref(name), size):
-        raise ctypes.WinError() # Note: WinError is a function
+    if not _kernel32.GetVolumeInformationA(
+        volume, None, 0, None, None, None, ctypes.byref(name), size
+    ):
+        raise ctypes.WinError()  # Note: WinError is a function
 
     return name.value
 
+
 def getuser():
     '''return name of current user'''
     size = _DWORD(300)
@@ -488,36 +571,40 @@
         raise ctypes.WinError()
     return buf.value
 
+
 _signalhandler = []
 
+
 def setsignalhandler():
     '''Register a termination handler for console events including
     CTRL+C. python signal handlers do not work well with socket
     operations.
     '''
+
     def handler(event):
         _kernel32.ExitProcess(1)
 
     if _signalhandler:
-        return # already registered
+        return  # already registered
     h = _SIGNAL_HANDLER(handler)
-    _signalhandler.append(h) # needed to prevent garbage collection
+    _signalhandler.append(h)  # needed to prevent garbage collection
     if not _kernel32.SetConsoleCtrlHandler(h, True):
         raise ctypes.WinError()
 
+
 def hidewindow():
-
     def callback(hwnd, pid):
         wpid = _DWORD()
         _user32.GetWindowThreadProcessId(hwnd, ctypes.byref(wpid))
         if pid == wpid.value:
             _user32.ShowWindow(hwnd, _SW_HIDE)
-            return False # stop enumerating windows
+            return False  # stop enumerating windows
         return True
 
     pid = _kernel32.GetCurrentProcessId()
     _user32.EnumWindows(_WNDENUMPROC(callback), pid)
 
+
 def termsize():
     # cmd.exe does not handle CR like a unix console, the CR is
     # counted in the line length. On 80 columns consoles, if 80
@@ -527,24 +614,27 @@
     height = 25
     # Query stderr to avoid problems with redirections
     screenbuf = _kernel32.GetStdHandle(
-                  _STD_ERROR_HANDLE) # don't close the handle returned
+        _STD_ERROR_HANDLE
+    )  # don't close the handle returned
     if screenbuf is None or screenbuf == _INVALID_HANDLE_VALUE:
         return width, height
     csbi = _CONSOLE_SCREEN_BUFFER_INFO()
-    if not _kernel32.GetConsoleScreenBufferInfo(
-                        screenbuf, ctypes.byref(csbi)):
+    if not _kernel32.GetConsoleScreenBufferInfo(screenbuf, ctypes.byref(csbi)):
         return width, height
     width = csbi.srWindow.Right - csbi.srWindow.Left  # don't '+ 1'
     height = csbi.srWindow.Bottom - csbi.srWindow.Top + 1
     return width, height
 
+
 def enablevtmode():
     '''Enable virtual terminal mode for the associated console.  Return True if
     enabled, else False.'''
 
     ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
 
-    handle = _kernel32.GetStdHandle(_STD_OUTPUT_HANDLE) # don't close the handle
+    handle = _kernel32.GetStdHandle(
+        _STD_OUTPUT_HANDLE
+    )  # don't close the handle
     if handle == _INVALID_HANDLE_VALUE:
         return False
 
@@ -561,6 +651,7 @@
 
     return True
 
+
 def spawndetached(args):
     # No standard library function really spawns a fully detached
     # process under win32 because they allocate pipes or other objects
@@ -583,8 +674,17 @@
 
     # TODO: CreateProcessW on py3?
     res = _kernel32.CreateProcessA(
-        None, encoding.strtolocal(args), None, None, False, _CREATE_NO_WINDOW,
-        env, encoding.getcwd(), ctypes.byref(si), ctypes.byref(pi))
+        None,
+        encoding.strtolocal(args),
+        None,
+        None,
+        False,
+        _CREATE_NO_WINDOW,
+        env,
+        encoding.getcwd(),
+        ctypes.byref(si),
+        ctypes.byref(pi),
+    )
     if not res:
         raise ctypes.WinError()
 
@@ -593,15 +693,18 @@
 
     return pi.dwProcessId
 
+
 def unlink(f):
     '''try to implement POSIX' unlink semantics on Windows'''
 
     if os.path.isdir(f):
         # use EPERM because it is POSIX prescribed value, even though
         # unlink(2) on directories returns EISDIR on Linux
-        raise IOError(errno.EPERM,
-                      r"Unlinking directory not permitted: '%s'"
-                      % encoding.strfromlocal(f))
+        raise IOError(
+            errno.EPERM,
+            r"Unlinking directory not permitted: '%s'"
+            % encoding.strfromlocal(f),
+        )
 
     # POSIX allows to unlink and rename open files. Windows has serious
     # problems with doing that:
@@ -621,7 +724,7 @@
     # implicit zombie filename blocking on a temporary name.
 
     for tries in pycompat.xrange(10):
-        temp = '%s-%08x' % (f, random.randint(0, 0xffffffff))
+        temp = '%s-%08x' % (f, random.randint(0, 0xFFFFFFFF))
         try:
             os.rename(f, temp)  # raises OSError EEXIST if temp exists
             break
@@ -646,6 +749,7 @@
             # leaving some potentially serious inconsistencies.
             pass
 
+
 def makedir(path, notindexed):
     os.mkdir(path)
     if notindexed:
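
The _raiseoserror hunk above preserves a subtle conversion: GetLastError() returns an unsigned DWORD, while ctypes.WinError() only accepts values that fit a signed 32-bit int (https://bugs.python.org/issue28474). A self-contained sketch of the fold, with a worked value; the helper name is illustrative:

def to_signed_int32(code):
    # Values above 0x7FFFFFFF wrap into the negative signed range.
    if code > 0x7FFFFFFF:
        code -= 2 ** 32
    return code

# 0xC0000005 (STATUS_ACCESS_VIOLATION) folds to -1073741819.
assert to_signed_int32(0xC0000005) == -1073741819
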
--- a/mercurial/windows.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/windows.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,6 +26,7 @@
 
 try:
     import _winreg as winreg
+
     winreg.CloseKey
 except ImportError:
     import winreg
@@ -49,6 +50,7 @@
 
 umask = 0o022
 
+
 class mixedfilemodewrapper(object):
     """Wraps a file handle when it is opened in read/write mode.
 
@@ -61,6 +63,7 @@
     mode and automatically adds checks or inserts appropriate file positioning
     calls when necessary.
     """
+
     OPNONE = 0
     OPREAD = 1
     OPWRITE = 2
@@ -124,10 +127,12 @@
         object.__setattr__(self, r'_lastop', self.OPREAD)
         return self._fp.readlines(*args, **kwargs)
 
+
 class fdproxy(object):
     """Wraps osutil.posixfile() to override the name attribute to reflect the
     underlying file name.
     """
+
     def __init__(self, name, fp):
         self.name = name
         self._fp = fp
@@ -147,10 +152,11 @@
     def __getattr__(self, name):
         return getattr(self._fp, name)
 
+
 def posixfile(name, mode='r', buffering=-1):
     '''Open a file with even more POSIX-like semantics'''
     try:
-        fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError
+        fp = osutil.posixfile(name, mode, buffering)  # may raise WindowsError
 
         # PyFile_FromFd() ignores the name, and seems to report fp.name as the
         # underlying file descriptor.
@@ -168,12 +174,15 @@
         return fp
     except WindowsError as err:
         # convert to a friendlier exception
-        raise IOError(err.errno, r'%s: %s' % (
-            encoding.strfromlocal(name), err.strerror))
+        raise IOError(
+            err.errno, r'%s: %s' % (encoding.strfromlocal(name), err.strerror)
+        )
+
 
 # may be wrapped by win32mbcs extension
 listdir = osutil.listdir
 
+
 class winstdout(object):
     '''stdout on windows misbehaves if sent through a pipe'''
 
@@ -215,6 +224,7 @@
                 raise
             raise IOError(errno.EPIPE, r'Broken pipe')
 
+
 def _is_win_9x():
     '''return true if run on windows 95, 98 or me.'''
     try:
@@ -222,41 +232,50 @@
     except AttributeError:
         return 'command' in encoding.environ.get('comspec', '')
 
+
 def openhardlinks():
     return not _is_win_9x()
 
+
 def parsepatchoutput(output_line):
     """parses the output produced by patch and returns the filename"""
     pf = output_line[14:]
     if pf[0] == '`':
-        pf = pf[1:-1] # Remove the quotes
+        pf = pf[1:-1]  # Remove the quotes
     return pf
 
+
 def sshargs(sshcmd, host, user, port):
     '''Build argument list for ssh or Plink'''
     pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
     args = user and ("%s@%s" % (user, host)) or host
     if args.startswith('-') or args.startswith('/'):
         raise error.Abort(
-            _('illegal ssh hostname or username starting with - or /: %s') %
-            args)
+            _('illegal ssh hostname or username starting with - or /: %s')
+            % args
+        )
     args = shellquote(args)
     if port:
         args = '%s %s %s' % (pflag, shellquote(port), args)
     return args
 
+
 def setflags(f, l, x):
     pass
 
+
 def copymode(src, dst, mode=None, enforcewritable=False):
     pass
 
+
 def checkexec(path):
     return False
 
+
 def checklink(path):
     return False
 
+
 def setbinary(fd):
     # When run without console, pipes may expose invalid
     # fileno(), usually set to -1.
@@ -264,25 +283,32 @@
     if fno is not None and fno() >= 0:
         msvcrt.setmode(fno(), os.O_BINARY)
 
+
 def pconvert(path):
     return path.replace(pycompat.ossep, '/')
 
+
 def localpath(path):
     return path.replace('/', '\\')
 
+
 def normpath(path):
     return pconvert(os.path.normpath(path))
 
+
 def normcase(path):
-    return encoding.upper(path) # NTFS compares via upper()
+    return encoding.upper(path)  # NTFS compares via upper()
+
 
 # see posix.py for definitions
 normcasespec = encoding.normcasespecs.upper
 normcasefallback = encoding.upperfallback
 
+
 def samestat(s1, s2):
     return False
 
+
 def shelltocmdexe(path, env):
     r"""Convert shell variables in the form $var and ${var} inside ``path``
     to %var% form.  Existing Windows style variables are left unchanged.
@@ -318,9 +344,9 @@
     index = 0
     pathlen = len(path)
     while index < pathlen:
-        c = path[index:index + 1]
-        if c == b'\'':   # no expansion within single quotes
-            path = path[index + 1:]
+        c = path[index : index + 1]
+        if c == b'\'':  # no expansion within single quotes
+            path = path[index + 1 :]
             pathlen = len(path)
             try:
                 index = path.index(b'\'')
@@ -329,7 +355,7 @@
                 res += c + path
                 index = pathlen - 1
         elif c == b'%':  # variable
-            path = path[index + 1:]
+            path = path[index + 1 :]
             pathlen = len(path)
             try:
                 index = path.index(b'%')
@@ -340,8 +366,8 @@
                 var = path[:index]
                 res += b'%' + var + b'%'
         elif c == b'$':  # variable
-            if path[index + 1:index + 2] == b'{':
-                path = path[index + 2:]
+            if path[index + 1 : index + 2] == b'{':
+                path = path[index + 2 :]
                 pathlen = len(path)
                 try:
                     index = path.index(b'}')
@@ -358,11 +384,11 @@
             else:
                 var = b''
                 index += 1
-                c = path[index:index + 1]
+                c = path[index : index + 1]
                 while c != b'' and c in varchars:
                     var += c
                     index += 1
-                    c = path[index:index + 1]
+                    c = path[index : index + 1]
                 # Some variables (like HG_OLDNODE) may be defined, but have an
                 # empty value.  Those need to be skipped because when spawning
                 # cmd.exe to run the hook, it doesn't replace %VAR% for an empty
@@ -376,13 +402,19 @@
 
                 if c != b'':
                     index -= 1
-        elif (c == b'~' and index + 1 < pathlen
-              and path[index + 1:index + 2] in (b'\\', b'/')):
+        elif (
+            c == b'~'
+            and index + 1 < pathlen
+            and path[index + 1 : index + 2] in (b'\\', b'/')
+        ):
             res += "%USERPROFILE%"
-        elif (c == b'\\' and index + 1 < pathlen
-              and path[index + 1:index + 2] in (b'$', b'~')):
+        elif (
+            c == b'\\'
+            and index + 1 < pathlen
+            and path[index + 1 : index + 2] in (b'$', b'~')
+        ):
             # Skip '\', but only if it is escaping $ or ~
-            res += path[index + 1:index + 2]
+            res += path[index + 1 : index + 2]
             index += 1
         else:
             res += c
@@ -390,6 +422,7 @@
         index += 1
     return res
 
+
 # A sequence of backslashes is special iff it precedes a double quote:
 # - if there's an even number of backslashes, the double quote is not
 #   quoted (i.e. it ends the quoted region)
@@ -403,6 +436,8 @@
 # quote we've appended to the end)
 _quotere = None
 _needsshellquote = None
+
+
 def shellquote(s):
     r"""
     >>> shellquote(br'C:\Users\xyz')
@@ -432,15 +467,18 @@
         return s
     return b'"%s"' % _quotere.sub(br'\1\1\\\2', s)
 
+
 def _unquote(s):
     if s.startswith(b'"') and s.endswith(b'"'):
         return s[1:-1]
     return s
 
+
 def shellsplit(s):
     """Parse a command string in cmd.exe way (best-effort)"""
     return pycompat.maplist(_unquote, pycompat.shlexsplit(s, posix=False))
 
+
 def quotecommand(cmd):
     """Build a command string suitable for os.popen* calls."""
     if sys.version_info < (2, 7, 1):
@@ -448,11 +486,13 @@
         return '"' + cmd + '"'
     return cmd
 
+
 # if you change this stub into a real check, please try to implement the
 # username and groupname functions above, too.
 def isowner(st):
     return True
 
+
 def findexe(command):
     '''Find executable for command searching like cmd.exe does.
     If command is a basename then PATH is searched for command.
@@ -481,51 +521,60 @@
             return executable
     return findexisting(os.path.expanduser(os.path.expandvars(command)))
 
+
 _wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
 
+
 def statfiles(files):
     '''Stat each file in files. Yield each stat, or None if a file
     does not exist or has a type we don't care about.
 
     Cluster and cache stat per directory to minimize number of OS stat calls.'''
-    dircache = {} # dirname -> filename -> status | None if file does not exist
+    dircache = {}  # dirname -> filename -> status | None if file does not exist
     getkind = stat.S_IFMT
     for nf in files:
-        nf  = normcase(nf)
+        nf = normcase(nf)
         dir, base = os.path.split(nf)
         if not dir:
             dir = '.'
         cache = dircache.get(dir, None)
         if cache is None:
             try:
-                dmap = dict([(normcase(n), s)
-                             for n, k, s in listdir(dir, True)
-                             if getkind(s.st_mode) in _wantedkinds])
+                dmap = dict(
+                    [
+                        (normcase(n), s)
+                        for n, k, s in listdir(dir, True)
+                        if getkind(s.st_mode) in _wantedkinds
+                    ]
+                )
             except OSError as err:
                 # Python >= 2.5 returns ENOENT and adds winerror field
                 # EINVAL is raised if dir is not a directory.
-                if err.errno not in (errno.ENOENT, errno.EINVAL,
-                                     errno.ENOTDIR):
+                if err.errno not in (errno.ENOENT, errno.EINVAL, errno.ENOTDIR):
                     raise
                 dmap = {}
             cache = dircache.setdefault(dir, dmap)
         yield cache.get(base, None)
 
+
 def username(uid=None):
     """Return the name of the user with the given uid.
 
     If uid is None, return the name of the current user."""
     return None
 
+
 def groupname(gid=None):
     """Return the name of the group with the given gid.
 
     If gid is None, return the name of the current group."""
     return None
 
+
 def readlink(pathname):
     return pycompat.fsencode(os.readlink(pycompat.fsdecode(pathname)))
 
+
 def removedirs(name):
     """special version of os.removedirs that does not remove symlinked
     directories or junction points if they actually contain files"""
@@ -544,6 +593,7 @@
             break
         head, tail = os.path.split(head)
 
+
 def rename(src, dst):
     '''atomically rename file src to dst, replacing dst if it exists'''
     try:
@@ -554,16 +604,20 @@
         unlink(dst)
         os.rename(src, dst)
 
+
 def gethgcmd():
     return [encoding.strtolocal(arg) for arg in [sys.executable] + sys.argv[:1]]
 
+
 def groupmembers(name):
     # Don't support groups on Windows for now
     raise KeyError
 
+
 def isexec(f):
     return False
 
+
 class cachestat(object):
     def __init__(self, path):
         pass
@@ -571,6 +625,7 @@
     def cacheable(self):
         return False
 
+
 def lookupreg(key, valname=None, scope=None):
     ''' Look up a key/value name in the Windows registry.
 
@@ -594,20 +649,25 @@
         except EnvironmentError:
             pass
 
+
 expandglobs = True
 
+
 def statislink(st):
     '''check whether a stat result is a symlink'''
     return False
 
+
 def statisexec(st):
     '''check whether a stat result is an executable file'''
     return False
 
+
 def poll(fds):
     # see posix.py for description
     raise NotImplementedError()
 
+
 def readpipe(pipe):
     """Read all available data from a pipe."""
     chunks = []
@@ -623,5 +683,6 @@
 
     return ''.join(chunks)
 
+
 def bindunixsocket(sock, path):
     raise NotImplementedError(r'unsupported platform')
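
The backslash-counting comment above shellquote() describes the cmd.exe/MSVCRT rule that a run of backslashes is special only when it immediately precedes a double quote. A standalone sketch of just the doubling-and-escaping step, using a plain re substitution rather than the module's lazily compiled _quotere (which additionally wraps the result in quotes and handles a trailing backslash run):

import re

def double_backslashes_before_quotes(s):
    # Double each backslash run that precedes a double quote, then
    # escape the quote itself; other backslashes are left alone.
    return re.sub(r'(\\*)"', lambda m: m.group(1) * 2 + '\\"', s)

assert double_backslashes_before_quotes(r'say "hi"') == r'say \"hi\"'
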
--- a/mercurial/wireprotoframing.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotoframing.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,9 +15,7 @@
 import struct
 
 from .i18n import _
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from . import (
     encoding,
     error,
@@ -121,6 +119,7 @@
 
 ARGUMENT_RECORD_HEADER = struct.Struct(r'<HH')
 
+
 def humanflags(mapping, value):
     """Convert a numeric flags value to a human value, using a mapping table."""
     namemap = {v: k for k, v in mapping.iteritems()}
@@ -133,6 +132,7 @@
 
     return b'|'.join(flags)
 
+
 @attr.s(slots=True)
 class frameheader(object):
     """Represents the data in a frame header."""
@@ -144,6 +144,7 @@
     typeid = attr.ib()
     flags = attr.ib()
 
+
 @attr.s(slots=True, repr=False)
 class frame(object):
     """Represents a parsed frame."""
@@ -163,11 +164,19 @@
                 typename = name
                 break
 
-        return ('frame(size=%d; request=%d; stream=%d; streamflags=%s; '
-                'type=%s; flags=%s)' % (
-            len(self.payload), self.requestid, self.streamid,
-            humanflags(STREAM_FLAGS, self.streamflags), typename,
-            humanflags(FRAME_TYPE_FLAGS.get(self.typeid, {}), self.flags)))
+        return (
+            'frame(size=%d; request=%d; stream=%d; streamflags=%s; '
+            'type=%s; flags=%s)'
+            % (
+                len(self.payload),
+                self.requestid,
+                self.streamid,
+                humanflags(STREAM_FLAGS, self.streamflags),
+                typename,
+                humanflags(FRAME_TYPE_FLAGS.get(self.typeid, {}), self.flags),
+            )
+        )
+
 
 def makeframe(requestid, streamid, streamflags, typeid, flags, payload):
     """Assemble a frame into a byte array."""
@@ -189,6 +198,7 @@
 
     return frame
 
+
 def makeframefromhumanstring(s):
     """Create a frame from a human readable string
 
@@ -238,15 +248,22 @@
             finalflags |= int(flag)
 
     if payload.startswith(b'cbor:'):
-        payload = b''.join(cborutil.streamencode(
-            stringutil.evalpythonliteral(payload[5:])))
+        payload = b''.join(
+            cborutil.streamencode(stringutil.evalpythonliteral(payload[5:]))
+        )
 
     else:
         payload = stringutil.unescapestr(payload)
 
-    return makeframe(requestid=requestid, streamid=streamid,
-                     streamflags=finalstreamflags, typeid=frametype,
-                     flags=finalflags, payload=payload)
+    return makeframe(
+        requestid=requestid,
+        streamid=streamid,
+        streamflags=finalstreamflags,
+        typeid=frametype,
+        flags=finalflags,
+        payload=payload,
+    )
+
 
 def parseheader(data):
     """Parse a unified framing protocol frame header from a buffer.
@@ -265,11 +282,13 @@
     requestid, streamid, streamflags = struct.unpack_from(r'<HBB', data, 3)
     typeflags = data[7]
 
-    frametype = (typeflags & 0xf0) >> 4
-    frameflags = typeflags & 0x0f
+    frametype = (typeflags & 0xF0) >> 4
+    frameflags = typeflags & 0x0F
 
-    return frameheader(framelength, requestid, streamid, streamflags,
-                       frametype, frameflags)
+    return frameheader(
+        framelength, requestid, streamid, streamflags, frametype, frameflags
+    )
+
 
 def readframe(fh):
     """Read a unified framing protocol frame from a file object.
@@ -286,22 +305,34 @@
         return None
 
     if readcount != FRAME_HEADER_SIZE:
-        raise error.Abort(_('received incomplete frame: got %d bytes: %s') %
-                          (readcount, header))
+        raise error.Abort(
+            _('received incomplete frame: got %d bytes: %s')
+            % (readcount, header)
+        )
 
     h = parseheader(header)
 
     payload = fh.read(h.length)
     if len(payload) != h.length:
-        raise error.Abort(_('frame length error: expected %d; got %d') %
-                          (h.length, len(payload)))
+        raise error.Abort(
+            _('frame length error: expected %d; got %d')
+            % (h.length, len(payload))
+        )
+
+    return frame(
+        h.requestid, h.streamid, h.streamflags, h.typeid, h.flags, payload
+    )
+
 
-    return frame(h.requestid, h.streamid, h.streamflags, h.typeid, h.flags,
-                 payload)
-
-def createcommandframes(stream, requestid, cmd, args, datafh=None,
-                        maxframesize=DEFAULT_MAX_FRAME_SIZE,
-                        redirect=None):
+def createcommandframes(
+    stream,
+    requestid,
+    cmd,
+    args,
+    datafh=None,
+    maxframesize=DEFAULT_MAX_FRAME_SIZE,
+    redirect=None,
+):
     """Create frames necessary to transmit a request to run a command.
 
     This is a generator of bytearrays. Each item represents a frame
@@ -331,16 +362,18 @@
         if datafh:
             flags |= FLAG_COMMAND_REQUEST_EXPECT_DATA
 
-        payload = data[offset:offset + maxframesize]
+        payload = data[offset : offset + maxframesize]
         offset += len(payload)
 
         if len(payload) == maxframesize and offset < len(data):
             flags |= FLAG_COMMAND_REQUEST_MORE_FRAMES
 
-        yield stream.makeframe(requestid=requestid,
-                               typeid=FRAME_TYPE_COMMAND_REQUEST,
-                               flags=flags,
-                               payload=payload)
+        yield stream.makeframe(
+            requestid=requestid,
+            typeid=FRAME_TYPE_COMMAND_REQUEST,
+            flags=flags,
+            payload=payload,
+        )
 
         if not (flags & FLAG_COMMAND_REQUEST_MORE_FRAMES):
             break
@@ -357,14 +390,17 @@
                 assert datafh.read(1) == b''
                 done = True
 
-            yield stream.makeframe(requestid=requestid,
-                                   typeid=FRAME_TYPE_COMMAND_DATA,
-                                   flags=flags,
-                                   payload=data)
+            yield stream.makeframe(
+                requestid=requestid,
+                typeid=FRAME_TYPE_COMMAND_DATA,
+                flags=flags,
+                payload=data,
+            )
 
             if done:
                 break
 
+
 def createcommandresponseokframe(stream, requestid):
     overall = b''.join(cborutil.streamencode({b'status': b'ok'}))
 
@@ -377,20 +413,24 @@
     else:
         encoded = False
 
-    return stream.makeframe(requestid=requestid,
-                            typeid=FRAME_TYPE_COMMAND_RESPONSE,
-                            flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
-                            payload=overall,
-                            encoded=encoded)
+    return stream.makeframe(
+        requestid=requestid,
+        typeid=FRAME_TYPE_COMMAND_RESPONSE,
+        flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
+        payload=overall,
+        encoded=encoded,
+    )
 
-def createcommandresponseeosframes(stream, requestid,
-                                   maxframesize=DEFAULT_MAX_FRAME_SIZE):
+
+def createcommandresponseeosframes(
+    stream, requestid, maxframesize=DEFAULT_MAX_FRAME_SIZE
+):
     """Create an empty payload frame representing command end-of-stream."""
     payload = stream.flush()
 
     offset = 0
     while True:
-        chunk = payload[offset:offset + maxframesize]
+        chunk = payload[offset : offset + maxframesize]
         offset += len(chunk)
 
         done = offset == len(payload)
@@ -400,26 +440,31 @@
         else:
             flags = FLAG_COMMAND_RESPONSE_CONTINUATION
 
-        yield stream.makeframe(requestid=requestid,
-                               typeid=FRAME_TYPE_COMMAND_RESPONSE,
-                               flags=flags,
-                               payload=chunk,
-                               encoded=payload != b'')
+        yield stream.makeframe(
+            requestid=requestid,
+            typeid=FRAME_TYPE_COMMAND_RESPONSE,
+            flags=flags,
+            payload=chunk,
+            encoded=payload != b'',
+        )
 
         if done:
             break
 
+
 def createalternatelocationresponseframe(stream, requestid, location):
     data = {
         b'status': b'redirect',
-        b'location': {
-            b'url': location.url,
-            b'mediatype': location.mediatype,
-        }
+        b'location': {b'url': location.url, b'mediatype': location.mediatype},
     }
 
-    for a in (r'size', r'fullhashes', r'fullhashseed', r'serverdercerts',
-              r'servercadercerts'):
+    for a in (
+        r'size',
+        r'fullhashes',
+        r'fullhashseed',
+        r'serverdercerts',
+        r'servercadercerts',
+    ):
         value = getattr(location, a)
         if value is not None:
             data[b'location'][pycompat.bytestr(a)] = value
@@ -432,48 +477,52 @@
     else:
         encoded = False
 
-    return stream.makeframe(requestid=requestid,
-                            typeid=FRAME_TYPE_COMMAND_RESPONSE,
-                            flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
-                            payload=payload,
-                            encoded=encoded)
+    return stream.makeframe(
+        requestid=requestid,
+        typeid=FRAME_TYPE_COMMAND_RESPONSE,
+        flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
+        payload=payload,
+        encoded=encoded,
+    )
+
 
 def createcommanderrorresponse(stream, requestid, message, args=None):
     # TODO should this be using a list of {'msg': ..., 'args': {}} so atom
     # formatting works consistently?
-    m = {
-        b'status': b'error',
-        b'error': {
-            b'message': message,
-        }
-    }
+    m = {b'status': b'error', b'error': {b'message': message}}
 
     if args:
         m[b'error'][b'args'] = args
 
     overall = b''.join(cborutil.streamencode(m))
 
-    yield stream.makeframe(requestid=requestid,
-                           typeid=FRAME_TYPE_COMMAND_RESPONSE,
-                           flags=FLAG_COMMAND_RESPONSE_EOS,
-                           payload=overall)
+    yield stream.makeframe(
+        requestid=requestid,
+        typeid=FRAME_TYPE_COMMAND_RESPONSE,
+        flags=FLAG_COMMAND_RESPONSE_EOS,
+        payload=overall,
+    )
+
 
 def createerrorframe(stream, requestid, msg, errtype):
     # TODO properly handle frame size limits.
     assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
 
-    payload = b''.join(cborutil.streamencode({
-        b'type': errtype,
-        b'message': [{b'msg': msg}],
-    }))
+    payload = b''.join(
+        cborutil.streamencode({b'type': errtype, b'message': [{b'msg': msg}]})
+    )
 
-    yield stream.makeframe(requestid=requestid,
-                           typeid=FRAME_TYPE_ERROR_RESPONSE,
-                           flags=0,
-                           payload=payload)
+    yield stream.makeframe(
+        requestid=requestid,
+        typeid=FRAME_TYPE_ERROR_RESPONSE,
+        flags=0,
+        payload=payload,
+    )
 
-def createtextoutputframe(stream, requestid, atoms,
-                          maxframesize=DEFAULT_MAX_FRAME_SIZE):
+
+def createtextoutputframe(
+    stream, requestid, atoms, maxframesize=DEFAULT_MAX_FRAME_SIZE
+):
     """Create a text output frame to render text to people.
 
     ``atoms`` is a 3-tuple of (formatting string, args, labels).
@@ -504,8 +553,9 @@
         args = [a.decode(r'utf-8', r'replace').encode(r'utf-8') for a in args]
 
         # Labels must be ASCII.
-        labels = [l.decode(r'ascii', r'strict').encode(r'ascii')
-                  for l in labels]
+        labels = [
+            l.decode(r'ascii', r'strict').encode(r'ascii') for l in labels
+        ]
 
         atom = {b'msg': formatting}
         if args:
@@ -520,10 +570,13 @@
     if len(payload) > maxframesize:
         raise ValueError('cannot encode data in a single frame')
 
-    yield stream.makeframe(requestid=requestid,
-                           typeid=FRAME_TYPE_TEXT_OUTPUT,
-                           flags=0,
-                           payload=payload)
+    yield stream.makeframe(
+        requestid=requestid,
+        typeid=FRAME_TYPE_TEXT_OUTPUT,
+        flags=0,
+        payload=payload,
+    )
+
 
 class bufferingcommandresponseemitter(object):
     """Helper object to emit command response frames intelligently.
@@ -536,6 +589,7 @@
     So it might make sense to implement this functionality at the stream
     level.
     """
+
     def __init__(self, stream, requestid, maxframesize=DEFAULT_MAX_FRAME_SIZE):
         self._stream = stream
         self._requestid = requestid
@@ -581,7 +635,7 @@
             # Now emit frames for the big chunk.
             offset = 0
             while True:
-                chunk = data[offset:offset + self._maxsize]
+                chunk = data[offset : offset + self._maxsize]
                 offset += len(chunk)
 
                 yield self._stream.makeframe(
@@ -589,7 +643,8 @@
                     typeid=FRAME_TYPE_COMMAND_RESPONSE,
                     flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
                     payload=chunk,
-                    encoded=True)
+                    encoded=True,
+                )
 
                 if offset == len(data):
                     return
@@ -625,13 +680,17 @@
             typeid=FRAME_TYPE_COMMAND_RESPONSE,
             flags=FLAG_COMMAND_RESPONSE_CONTINUATION,
             payload=payload,
-            encoded=True)
+            encoded=True,
+        )
+
 
 # TODO consider defining encoders/decoders using the util.compressionengine
 # mechanism.
 
+
 class identityencoder(object):
     """Encoder for the "identity" stream encoding profile."""
+
     def __init__(self, ui):
         pass
 
@@ -644,20 +703,24 @@
     def finish(self):
         return b''
 
+
 class identitydecoder(object):
     """Decoder for the "identity" stream encoding profile."""
 
     def __init__(self, ui, extraobjs):
         if extraobjs:
-            raise error.Abort(_('identity decoder received unexpected '
-                                'additional values'))
+            raise error.Abort(
+                _('identity decoder received unexpected additional values')
+            )
 
     def decode(self, data):
         return data
 
+
 class zlibencoder(object):
     def __init__(self, ui):
         import zlib
+
         self._zlib = zlib
         self._compressor = zlib.compressobj()
 
@@ -674,13 +737,15 @@
         self._compressor = None
         return res
 
+
 class zlibdecoder(object):
     def __init__(self, ui, extraobjs):
         import zlib
 
         if extraobjs:
-            raise error.Abort(_('zlib decoder received unexpected '
-                                'additional values'))
+            raise error.Abort(
+                _('zlib decoder received unexpected additional values')
+            )
 
         self._decompressor = zlib.decompressobj()
 
@@ -692,6 +757,7 @@
 
         return self._decompressor.decompress(data)
 
+
 class zstdbaseencoder(object):
     def __init__(self, level):
         from . import zstd
@@ -714,38 +780,46 @@
         self._compressor = None
         return res
 
+
 class zstd8mbencoder(zstdbaseencoder):
     def __init__(self, ui):
         super(zstd8mbencoder, self).__init__(3)
 
+
 class zstdbasedecoder(object):
     def __init__(self, maxwindowsize):
         from . import zstd
+
         dctx = zstd.ZstdDecompressor(max_window_size=maxwindowsize)
         self._decompressor = dctx.decompressobj()
 
     def decode(self, data):
         return self._decompressor.decompress(data)
 
+
 class zstd8mbdecoder(zstdbasedecoder):
     def __init__(self, ui, extraobjs):
         if extraobjs:
-            raise error.Abort(_('zstd8mb decoder received unexpected '
-                                'additional values'))
+            raise error.Abort(
+                _('zstd8mb decoder received unexpected additional values')
+            )
 
         super(zstd8mbdecoder, self).__init__(maxwindowsize=8 * 1048576)
 
+
 # We lazily populate this to avoid excessive module imports when importing
 # this module.
 STREAM_ENCODERS = {}
 STREAM_ENCODERS_ORDER = []
 
+
 def populatestreamencoders():
     if STREAM_ENCODERS:
         return
 
     try:
         from . import zstd
+
         zstd.__version__
     except ImportError:
         zstd = None
@@ -761,6 +835,7 @@
     STREAM_ENCODERS[b'identity'] = (identityencoder, identitydecoder)
     STREAM_ENCODERS_ORDER.append(b'identity')
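
The encoder/decoder pairs registered here share a small contract:
``encode()`` may buffer, ``flush()`` forces buffered data out at a frame
boundary, and ``finish()`` terminates the stream. A minimal round-trip
sketch of that contract using plain zlib calls (stdlib only, not the
wrapper classes above):

    import zlib

    # encode() compresses; flush() emits a sync point so the receiver can
    # decode everything sent so far; finish() ends the compressed stream
    comp = zlib.compressobj()
    payload = comp.compress(b'frame payload') + comp.flush(zlib.Z_SYNC_FLUSH)
    payload += comp.flush()  # the finish() step

    decomp = zlib.decompressobj()
    assert decomp.decompress(payload) == b'frame payload'
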
 
+
 class stream(object):
     """Represents a logical unidirectional series of frames."""
 
@@ -778,8 +853,10 @@
             streamflags |= STREAM_FLAG_BEGIN_STREAM
             self._active = True
 
-        return makeframe(requestid, self.streamid, streamflags, typeid, flags,
-                         payload)
+        return makeframe(
+            requestid, self.streamid, streamflags, typeid, flags, payload
+        )
+
 
 class inputstream(stream):
     """Represents a stream used for receiving data."""
@@ -813,6 +890,7 @@
 
         return self._decoder.flush()
 
+
 class outputstream(stream):
     """Represents a stream used for sending data."""
 
@@ -851,8 +929,7 @@
 
         self._encoder.finish()
 
-    def makeframe(self, requestid, typeid, flags, payload,
-                  encoded=False):
+    def makeframe(self, requestid, typeid, flags, payload, encoded=False):
         """Create a frame to be sent out over this stream.
 
         Only returns the frame instance. Does not actually send it.
@@ -866,16 +943,20 @@
             if not self.streamsettingssent:
                 raise error.ProgrammingError(
                     b'attempting to send encoded frame without sending stream '
-                    b'settings')
+                    b'settings'
+                )
 
             streamflags |= STREAM_FLAG_ENCODING_APPLIED
 
-        if (typeid == FRAME_TYPE_STREAM_SETTINGS
-            and flags & FLAG_STREAM_ENCODING_SETTINGS_EOS):
+        if (
+            typeid == FRAME_TYPE_STREAM_SETTINGS
+            and flags & FLAG_STREAM_ENCODING_SETTINGS_EOS
+        ):
             self.streamsettingssent = True
 
-        return makeframe(requestid, self.streamid, streamflags, typeid, flags,
-                         payload)
+        return makeframe(
+            requestid, self.streamid, streamflags, typeid, flags, payload
+        )
 
     def makestreamsettingsframe(self, requestid):
         """Create a stream settings frame for this stream.
@@ -887,19 +968,27 @@
             return None
 
         payload = b''.join(cborutil.streamencode(self._encodername))
-        return self.makeframe(requestid, FRAME_TYPE_STREAM_SETTINGS,
-                              FLAG_STREAM_ENCODING_SETTINGS_EOS, payload)
+        return self.makeframe(
+            requestid,
+            FRAME_TYPE_STREAM_SETTINGS,
+            FLAG_STREAM_ENCODING_SETTINGS_EOS,
+            payload,
+        )
+
 
 def ensureserverstream(stream):
     if stream.streamid % 2:
-        raise error.ProgrammingError('server should only write to even '
-                                     'numbered streams; %d is not even' %
-                                     stream.streamid)
+        raise error.ProgrammingError(
+            'server should only write to even '
+            'numbered streams; %d is not even' % stream.streamid
+        )
+
 
 DEFAULT_PROTOCOL_SETTINGS = {
     'contentencodings': [b'identity'],
 }
 
+
 class serverreactor(object):
     """Holds state of a server handling frame-based protocol requests.
 
@@ -1006,23 +1095,28 @@
         if not frame.streamid % 2:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received frame with even numbered stream ID: %d') %
-                  frame.streamid)
+                _('received frame with even numbered stream ID: %d')
+                % frame.streamid
+            )
 
         if frame.streamid not in self._incomingstreams:
             if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
                 self._state = 'errored'
                 return self._makeerrorresult(
-                    _('received frame on unknown inactive stream without '
-                      'beginning of stream flag set'))
+                    _(
+                        'received frame on unknown inactive stream without '
+                        'beginning of stream flag set'
+                    )
+                )
 
             self._incomingstreams[frame.streamid] = inputstream(frame.streamid)
 
         if frame.streamflags & STREAM_FLAG_ENCODING_APPLIED:
             # TODO handle decoding frames
             self._state = 'errored'
-            raise error.ProgrammingError('support for decoding stream payloads '
-                                         'not yet implemented')
+            raise error.ProgrammingError(
+                'support for decoding stream payloads not yet implemented'
+            )
 
         if frame.streamflags & STREAM_FLAG_END_STREAM:
             del self._incomingstreams[frame.streamid]
@@ -1080,20 +1174,25 @@
 
                     if emitted:
                         for frame in createcommandresponseeosframes(
-                            stream, requestid):
+                            stream, requestid
+                        ):
                             yield frame
                     break
 
                 except error.WireprotoCommandError as e:
                     for frame in createcommanderrorresponse(
-                        stream, requestid, e.message, e.messageargs):
+                        stream, requestid, e.message, e.messageargs
+                    ):
                         yield frame
                     break
 
                 except Exception as e:
                     for frame in createerrorframe(
-                        stream, requestid, '%s' % stringutil.forcebytestr(e),
-                        errtype='server'):
+                        stream,
+                        requestid,
+                        '%s' % stringutil.forcebytestr(e),
+                        errtype='server',
+                    ):
 
                         yield frame
 
@@ -1106,14 +1205,16 @@
                         if emitted:
                             raise error.ProgrammingError(
                                 'alternatelocationresponse seen after initial '
-                                'output object')
+                                'output object'
+                            )
 
                         frame = stream.makestreamsettingsframe(requestid)
                         if frame:
                             yield frame
 
                         yield createalternatelocationresponseframe(
-                            stream, requestid, o)
+                            stream, requestid, o
+                        )
 
                         alternatelocationsent = True
                         emitted = True
@@ -1121,7 +1222,8 @@
 
                     if alternatelocationsent:
                         raise error.ProgrammingError(
-                            'object follows alternatelocationresponse')
+                            'object follows alternatelocationresponse'
+                        )
 
                     if not emitted:
                         # Frame is optional.
@@ -1147,9 +1249,11 @@
                             yield frame
 
                     elif isinstance(
-                        o, wireprototypes.indefinitebytestringresponse):
+                        o, wireprototypes.indefinitebytestringresponse
+                    ):
                         for chunk in cborutil.streamencodebytestringfromiter(
-                            o.chunks):
+                            o.chunks
+                        ):
 
                             for frame in emitter.send(chunk):
                                 yield frame
@@ -1161,9 +1265,9 @@
                                 yield frame
 
                 except Exception as e:
-                    for frame in createerrorframe(stream, requestid,
-                                                  '%s' % e,
-                                                  errtype='server'):
+                    for frame in createerrorframe(
+                        stream, requestid, '%s' % e, errtype='server'
+                    ):
                         yield frame
 
                     break
@@ -1189,25 +1293,22 @@
                 for frame in gen:
                     yield frame
 
-        return 'sendframes', {
-            'framegen': makegen(),
-        }
+        return 'sendframes', {'framegen': makegen()}
 
     def _handlesendframes(self, framegen):
         if self._deferoutput:
             self._bufferedframegens.append(framegen)
             return 'noop', {}
         else:
-            return 'sendframes', {
-                'framegen': framegen,
-            }
+            return 'sendframes', {'framegen': framegen}
 
     def onservererror(self, stream, requestid, msg):
         ensureserverstream(stream)
 
         def sendframes():
-            for frame in createerrorframe(stream, requestid, msg,
-                                          errtype='server'):
+            for frame in createerrorframe(
+                stream, requestid, msg, errtype='server'
+            ):
                 yield frame
 
             self._activecommands.remove(requestid)
@@ -1219,8 +1320,9 @@
         ensureserverstream(stream)
 
         def sendframes():
-            for frame in createcommanderrorresponse(stream, requestid, message,
-                                                    args):
+            for frame in createcommanderrorresponse(
+                stream, requestid, message, args
+            ):
                 yield frame
 
             self._activecommands.remove(requestid)
@@ -1250,17 +1352,16 @@
         return s
 
     def _makeerrorresult(self, msg):
-        return 'error', {
-            'message': msg,
-        }
+        return 'error', {'message': msg}
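
Reactor methods hand results back as ``(action, metadata)`` pairs like the
one above, and the caller dispatches on the action name. A runnable sketch
of that dispatch (the handler and messages are illustrative, not Mercurial
API):

    # mimics pairs produced by _makeerrorresult / _makewantframeresult
    def handleaction(res):
        action, meta = res
        if action == 'error':
            return 'protocol error: %s' % meta['message']
        elif action == 'wantframe':
            return 'need more data (state: %s)' % meta['state']
        raise ValueError('unknown action: %s' % action)

    assert 'protocol error' in handleaction(('error', {'message': 'bad frame'}))
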
 
     def _makeruncommandresult(self, requestid):
         entry = self._receivingcommands[requestid]
 
         if not entry['requestdone']:
             self._state = 'errored'
-            raise error.ProgrammingError('should not be called without '
-                                         'requestdone set')
+            raise error.ProgrammingError(
+                'should not be called without requestdone set'
+            )
 
         del self._receivingcommands[requestid]
 
@@ -1276,7 +1377,8 @@
         if b'name' not in request:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('command request missing "name" field'))
+                _('command request missing "name" field')
+            )
 
         if b'args' not in request:
             request[b'args'] = {}
@@ -1284,18 +1386,19 @@
         assert requestid not in self._activecommands
         self._activecommands.add(requestid)
 
-        return 'runcommand', {
-            'requestid': requestid,
-            'command': request[b'name'],
-            'args': request[b'args'],
-            'redirect': request.get(b'redirect'),
-            'data': entry['data'].getvalue() if entry['data'] else None,
-        }
+        return (
+            'runcommand',
+            {
+                'requestid': requestid,
+                'command': request[b'name'],
+                'args': request[b'args'],
+                'redirect': request.get(b'redirect'),
+                'data': entry['data'].getvalue() if entry['data'] else None,
+            },
+        )
 
     def _makewantframeresult(self):
-        return 'wantframe', {
-            'state': self._state,
-        }
+        return 'wantframe', {'state': self._state}
 
     def _validatecommandrequestframe(self, frame):
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1304,14 +1407,20 @@
         if new and continuation:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received command request frame with both new and '
-                  'continuation flags set'))
+                _(
+                    'received command request frame with both new and '
+                    'continuation flags set'
+                )
+            )
 
         if not new and not continuation:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received command request frame with neither new nor '
-                  'continuation flags set'))
+                _(
+                    'received command request frame with neither new nor '
+                    'continuation flags set'
+                )
+            )
 
     def _onframeinitial(self, frame):
         # Called when we receive a frame when in the "initial" state.
@@ -1327,8 +1436,12 @@
         else:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('expected sender protocol settings or command request '
-                  'frame; got %d') % frame.typeid)
+                _(
+                    'expected sender protocol settings or command request '
+                    'frame; got %d'
+                )
+                % frame.typeid
+            )
 
     def _onframeprotocolsettings(self, frame):
         assert self._state == 'protocol-settings-receiving'
@@ -1337,8 +1450,9 @@
         if frame.typeid != FRAME_TYPE_SENDER_PROTOCOL_SETTINGS:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('expected sender protocol settings frame; got %d') %
-                frame.typeid)
+                _('expected sender protocol settings frame; got %d')
+                % frame.typeid
+            )
 
         more = frame.flags & FLAG_SENDER_PROTOCOL_SETTINGS_CONTINUATION
         eos = frame.flags & FLAG_SENDER_PROTOCOL_SETTINGS_EOS
@@ -1346,14 +1460,20 @@
         if more and eos:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('sender protocol settings frame cannot have both '
-                  'continuation and end of stream flags set'))
+                _(
+                    'sender protocol settings frame cannot have both '
+                    'continuation and end of stream flags set'
+                )
+            )
 
         if not more and not eos:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('sender protocol settings frame must have continuation or '
-                  'end of stream flag set'))
+                _(
+                    'sender protocol settings frame must have continuation or '
+                    'end of stream flag set'
+                )
+            )
 
         # TODO establish limits for maximum amount of data that can be
         # buffered.
@@ -1363,7 +1483,8 @@
             self._state = 'errored'
             return self._makeerrorresult(
                 _('error decoding CBOR from sender protocol settings frame: %s')
-                % stringutil.forcebytestr(e))
+                % stringutil.forcebytestr(e)
+            )
 
         if more:
             return self._makewantframeresult()
@@ -1376,12 +1497,16 @@
         if not decoded:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('sender protocol settings frame did not contain CBOR data'))
+                _('sender protocol settings frame did not contain CBOR data')
+            )
         elif len(decoded) > 1:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('sender protocol settings frame contained multiple CBOR '
-                  'values'))
+                _(
+                    'sender protocol settings frame contained multiple CBOR '
+                    'values'
+                )
+            )
 
         d = decoded[0]
 
@@ -1398,7 +1523,8 @@
         if frame.typeid != FRAME_TYPE_COMMAND_REQUEST:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('expected command request frame; got %d') % frame.typeid)
+                _('expected command request frame; got %d') % frame.typeid
+            )
 
         res = self._validatecommandrequestframe(frame)
         if res:
@@ -1407,12 +1533,14 @@
         if frame.requestid in self._receivingcommands:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('request with ID %d already received') % frame.requestid)
+                _('request with ID %d already received') % frame.requestid
+            )
 
         if frame.requestid in self._activecommands:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('request with ID %d is already active') % frame.requestid)
+                _('request with ID %d is already active') % frame.requestid
+            )
 
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
         moreframes = frame.flags & FLAG_COMMAND_REQUEST_MORE_FRAMES
@@ -1421,7 +1549,8 @@
         if not new:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received command request frame without new flag set'))
+                _('received command request frame without new flag set')
+            )
 
         payload = util.bytesio()
         payload.write(frame.payload)
@@ -1456,14 +1585,16 @@
         if frame.requestid in self._activecommands:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received frame for request that is still active: %d') %
-                frame.requestid)
+                _('received frame for request that is still active: %d')
+                % frame.requestid
+            )
 
         if frame.requestid not in self._receivingcommands:
             self._state = 'errored'
             return self._makeerrorresult(
-                _('received frame for request that is not receiving: %d') %
-                  frame.requestid)
+                _('received frame for request that is not receiving: %d')
+                % frame.requestid
+            )
 
         entry = self._receivingcommands[frame.requestid]
 
@@ -1474,13 +1605,17 @@
             if entry['requestdone']:
                 self._state = 'errored'
                 return self._makeerrorresult(
-                    _('received command request frame when request frames '
-                      'were supposedly done'))
+                    _(
+                        'received command request frame when request frames '
+                        'were supposedly done'
+                    )
+                )
 
             if expectingdata != entry['expectingdata']:
                 self._state = 'errored'
                 return self._makeerrorresult(
-                    _('mismatch between expect data flag and previous frame'))
+                    _('mismatch between expect data flag and previous frame')
+                )
 
             entry['payload'].write(frame.payload)
 
@@ -1495,9 +1630,13 @@
         elif frame.typeid == FRAME_TYPE_COMMAND_DATA:
             if not entry['expectingdata']:
                 self._state = 'errored'
-                return self._makeerrorresult(_(
-                    'received command data frame for request that is not '
-                    'expecting data: %d') % frame.requestid)
+                return self._makeerrorresult(
+                    _(
+                        'received command data frame for request that is not '
+                        'expecting data: %d'
+                    )
+                    % frame.requestid
+                )
 
             if entry['data'] is None:
                 entry['data'] = util.bytesio()
@@ -1505,8 +1644,9 @@
             return self._handlecommanddataframe(frame, entry)
         else:
             self._state = 'errored'
-            return self._makeerrorresult(_(
-                'received unexpected frame type: %d') % frame.typeid)
+            return self._makeerrorresult(
+                _('received unexpected frame type: %d') % frame.typeid
+            )
 
     def _handlecommanddataframe(self, frame, entry):
         assert frame.typeid == FRAME_TYPE_COMMAND_DATA
@@ -1521,12 +1661,14 @@
             return self._makeruncommandresult(frame.requestid)
         else:
             self._state = 'errored'
-            return self._makeerrorresult(_('command data frame without '
-                                           'flags'))
+            return self._makeerrorresult(
+                _('command data frame without flags')
+            )
 
     def _onframeerrored(self, frame):
         return self._makeerrorresult(_('server already errored'))
 
+
 class commandrequest(object):
     """Represents a request to run a command."""
 
@@ -1538,6 +1680,7 @@
         self.redirect = redirect
         self.state = 'pending'
 
+
 class clientreactor(object):
     """Holds state of a client issuing frame-based protocol requests.
 
@@ -1584,8 +1727,14 @@
        is expected to follow or we're at the end of the response stream,
        respectively.
     """
-    def __init__(self, ui, hasmultiplesend=False, buffersends=True,
-                 clientcontentencoders=None):
+
+    def __init__(
+        self,
+        ui,
+        hasmultiplesend=False,
+        buffersends=True,
+        clientcontentencoders=None,
+    ):
         """Create a new instance.
 
         ``hasmultiplesend`` indicates whether multiple sends are supported
@@ -1634,24 +1783,28 @@
         requestid = self._nextrequestid
         self._nextrequestid += 2
 
-        request = commandrequest(requestid, name, args, datafh=datafh,
-                                 redirect=redirect)
+        request = commandrequest(
+            requestid, name, args, datafh=datafh, redirect=redirect
+        )
 
         if self._buffersends:
             self._pendingrequests.append(request)
             return request, 'noop', {}
         else:
             if not self._cansend:
-                raise error.ProgrammingError('sends cannot be performed on '
-                                             'this instance')
+                raise error.ProgrammingError(
+                    'sends cannot be performed on this instance'
+                )
 
             if not self._hasmultiplesend:
                 self._cansend = False
                 self._canissuecommands = False
 
-            return request, 'sendframes', {
-                'framegen': self._makecommandframes(request),
-            }
+            return (
+                request,
+                'sendframes',
+                {'framegen': self._makecommandframes(request)},
+            )
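
A hedged sketch of driving this method from a transport layer (not
runnable as-is: it assumes a ``ui`` object and a hypothetical
``sendoverwire`` helper):

    reactor = clientreactor(ui, buffersends=False)
    request, action, meta = reactor.callcommand(b'heads', {})
    assert action == 'sendframes'
    for frame in meta['framegen']:
        sendoverwire(frame)  # hypothetical transport write
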
 
     def flushcommands(self):
         """Request that all queued commands be sent.
@@ -1667,8 +1820,9 @@
             return 'noop', {}
 
         if not self._cansend:
-            raise error.ProgrammingError('sends cannot be performed on this '
-                                         'instance')
+            raise error.ProgrammingError(
+                'sends cannot be performed on this instance'
+            )
 
         # If the instance only allows sending once, mark that we have fired
         # our one shot.
@@ -1682,9 +1836,7 @@
                 for frame in self._makecommandframes(request):
                     yield frame
 
-        return 'sendframes', {
-            'framegen': makeframes(),
-        }
+        return 'sendframes', {'framegen': makeframes()}
 
     def _makecommandframes(self, request):
         """Emit frames to issue a command request.
@@ -1698,22 +1850,27 @@
         if not self._protocolsettingssent and self._clientcontentencoders:
             self._protocolsettingssent = True
 
-            payload = b''.join(cborutil.streamencode({
-                b'contentencodings': self._clientcontentencoders,
-            }))
+            payload = b''.join(
+                cborutil.streamencode(
+                    {b'contentencodings': self._clientcontentencoders}
+                )
+            )
 
             yield self._outgoingstream.makeframe(
                 requestid=request.requestid,
                 typeid=FRAME_TYPE_SENDER_PROTOCOL_SETTINGS,
                 flags=FLAG_SENDER_PROTOCOL_SETTINGS_EOS,
-                payload=payload)
+                payload=payload,
+            )
 
-        res = createcommandframes(self._outgoingstream,
-                                  request.requestid,
-                                  request.name,
-                                  request.args,
-                                  datafh=request.datafh,
-                                  redirect=request.redirect)
+        res = createcommandframes(
+            self._outgoingstream,
+            request.requestid,
+            request.name,
+            request.args,
+            datafh=request.datafh,
+            redirect=request.redirect,
+        )
 
         for frame in res:
             yield frame
@@ -1727,21 +1884,29 @@
         caller needs to take as a result of receiving this frame.
         """
         if frame.streamid % 2:
-            return 'error', {
-                'message': (
-                    _('received frame with odd numbered stream ID: %d') %
-                    frame.streamid),
-            }
+            return (
+                'error',
+                {
+                    'message': (
+                        _('received frame with odd numbered stream ID: %d')
+                        % frame.streamid
+                    ),
+                },
+            )
 
         if frame.streamid not in self._incomingstreams:
             if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
-                return 'error', {
-                    'message': _('received frame on unknown stream '
-                                 'without beginning of stream flag set'),
-                }
+                return (
+                    'error',
+                    {
+                        'message': _(
+                            'received frame on unknown stream '
+                            'without beginning of stream flag set'
+                        ),
+                    },
+                )
 
-            self._incomingstreams[frame.streamid] = inputstream(
-                frame.streamid)
+            self._incomingstreams[frame.streamid] = inputstream(frame.streamid)
 
         stream = self._incomingstreams[frame.streamid]
 
@@ -1758,10 +1923,15 @@
             return self._onstreamsettingsframe(frame)
 
         if frame.requestid not in self._activerequests:
-            return 'error', {
-                'message': (_('received frame for inactive request ID: %d') %
-                            frame.requestid),
-            }
+            return (
+                'error',
+                {
+                    'message': (
+                        _('received frame for inactive request ID: %d')
+                        % frame.requestid
+                    ),
+                },
+            )
 
         request = self._activerequests[frame.requestid]
         request.state = 'receiving'
@@ -1773,8 +1943,9 @@
 
         meth = handlers.get(frame.typeid)
         if not meth:
-            raise error.ProgrammingError('unhandled frame type: %d' %
-                                         frame.typeid)
+            raise error.ProgrammingError(
+                'unhandled frame type: %d' % frame.typeid
+            )
 
         return meth(request, frame)
 
@@ -1785,16 +1956,28 @@
         eos = frame.flags & FLAG_STREAM_ENCODING_SETTINGS_EOS
 
         if more and eos:
-            return 'error', {
-                'message': (_('stream encoding settings frame cannot have both '
-                              'continuation and end of stream flags set')),
-            }
+            return (
+                'error',
+                {
+                    'message': (
+                        _(
+                            'stream encoding settings frame cannot have both '
+                            'continuation and end of stream flags set'
+                        )
+                    ),
+                },
+            )
 
         if not more and not eos:
-            return 'error', {
-                'message': _('stream encoding settings frame must have '
-                             'continuation or end of stream flag set'),
-            }
+            return (
+                'error',
+                {
+                    'message': _(
+                        'stream encoding settings frame must have '
+                        'continuation or end of stream flag set'
+                    ),
+                },
+            )
 
         if frame.streamid not in self._streamsettingsdecoders:
             decoder = cborutil.bufferingdecoder()
@@ -1805,11 +1988,18 @@
         try:
             decoder.decode(frame.payload)
         except Exception as e:
-            return 'error', {
-                'message': (_('error decoding CBOR from stream encoding '
-                             'settings frame: %s') %
-                           stringutil.forcebytestr(e)),
-            }
+            return (
+                'error',
+                {
+                    'message': (
+                        _(
+                            'error decoding CBOR from stream encoding '
+                            'settings frame: %s'
+                        )
+                        % stringutil.forcebytestr(e)
+                    ),
+                },
+            )
 
         if more:
             return 'noop', {}
@@ -1820,20 +2010,30 @@
         del self._streamsettingsdecoders[frame.streamid]
 
         if not decoded:
-            return 'error', {
-                'message': _('stream encoding settings frame did not contain '
-                             'CBOR data'),
-            }
+            return (
+                'error',
+                {
+                    'message': _(
+                        'stream encoding settings frame did not contain '
+                        'CBOR data'
+                    ),
+                },
+            )
 
         try:
-            self._incomingstreams[frame.streamid].setdecoder(self._ui,
-                                                             decoded[0],
-                                                             decoded[1:])
+            self._incomingstreams[frame.streamid].setdecoder(
+                self._ui, decoded[0], decoded[1:]
+            )
         except Exception as e:
-            return 'error', {
-                'message': (_('error setting stream decoder: %s') %
-                            stringutil.forcebytestr(e)),
-            }
+            return (
+                'error',
+                {
+                    'message': (
+                        _('error setting stream decoder: %s')
+                        % stringutil.forcebytestr(e)
+                    ),
+                },
+            )
 
         return 'noop', {}
 
@@ -1842,12 +2042,15 @@
             request.state = 'received'
             del self._activerequests[request.requestid]
 
-        return 'responsedata', {
-            'request': request,
-            'expectmore': frame.flags & FLAG_COMMAND_RESPONSE_CONTINUATION,
-            'eos': frame.flags & FLAG_COMMAND_RESPONSE_EOS,
-            'data': frame.payload,
-        }
+        return (
+            'responsedata',
+            {
+                'request': request,
+                'expectmore': frame.flags & FLAG_COMMAND_RESPONSE_CONTINUATION,
+                'eos': frame.flags & FLAG_COMMAND_RESPONSE_EOS,
+                'data': frame.payload,
+            },
+        )
 
     def _onerrorresponseframe(self, request, frame):
         request.state = 'errored'
@@ -1856,8 +2059,7 @@
         # The payload should be a CBOR map.
         m = cborutil.decodeall(frame.payload)[0]
 
-        return 'error', {
-            'request': request,
-            'type': m['type'],
-            'message': m['message'],
-        }
+        return (
+            'error',
+            {'request': request, 'type': m['type'], 'message': m['message']},
+        )
--- a/mercurial/wireprotoserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotoserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,9 +21,7 @@
     wireprotov1server,
     wireprotov2server,
 )
-from .interfaces import (
-    util as interfaceutil,
-)
+from .interfaces import util as interfaceutil
 from .utils import (
     cborutil,
     compression,
@@ -43,6 +41,7 @@
 SSHV1 = wireprototypes.SSHV1
 SSHV2 = wireprototypes.SSHV2
 
+
 def decodevaluefromheaders(req, headerprefix):
     """Decode a long value from multiple HTTP request headers.
 
@@ -59,6 +58,7 @@
 
     return ''.join(chunks)
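
This reverses the client-side convention of spreading an oversized value
across numbered headers. An illustrative reassembly (header names follow
the ``X-HgArg-<N>`` pattern used below; the values are made up):

    headers = {'X-HgArg-1': 'cmds=heads+', 'X-HgArg-2': 'known nodes='}
    chunks = []
    i = 1
    while ('X-HgArg-%d' % i) in headers:
        chunks.append(headers['X-HgArg-%d' % i])
        i += 1
    assert ''.join(chunks) == 'cmds=heads+known nodes='
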
 
+
 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
 class httpv1protocolhandler(object):
     def __init__(self, req, ui, checkperm):
@@ -90,8 +90,11 @@
         args = self._req.qsparams.asdictoflists()
         postlen = int(self._req.headers.get(b'X-HgArgs-Post', 0))
         if postlen:
-            args.update(urlreq.parseqs(
-                self._req.bodyfh.read(postlen), keep_blank_values=True))
+            args.update(
+                urlreq.parseqs(
+                    self._req.bodyfh.read(postlen), keep_blank_values=True
+                )
+            )
             return args
 
         argvalue = decodevaluefromheaders(self._req, b'X-HgArg')
@@ -132,13 +135,15 @@
         return 'remote:%s:%s:%s' % (
             self._req.urlscheme,
             urlreq.quote(self._req.remotehost or ''),
-            urlreq.quote(self._req.remoteuser or ''))
+            urlreq.quote(self._req.remoteuser or ''),
+        )
 
     def addcapabilities(self, repo, caps):
         caps.append(b'batch')
 
-        caps.append('httpheader=%d' %
-                    repo.ui.configint('server', 'maxhttpheaderlen'))
+        caps.append(
+            'httpheader=%d' % repo.ui.configint('server', 'maxhttpheaderlen')
+        )
         if repo.ui.configbool('experimental', 'httppostargs'):
             caps.append('httppostargs')
 
@@ -146,11 +151,13 @@
         # FUTURE advertise minrx and mintx after consulting config option
         caps.append('httpmediatype=0.1rx,0.1tx,0.2tx')
 
-        compengines = wireprototypes.supportedcompengines(repo.ui,
-            compression.SERVERROLE)
+        compengines = wireprototypes.supportedcompengines(
+            repo.ui, compression.SERVERROLE
+        )
         if compengines:
-            comptypes = ','.join(urlreq.quote(e.wireprotosupport().name)
-                                 for e in compengines)
+            comptypes = ','.join(
+                urlreq.quote(e.wireprotosupport().name) for e in compengines
+            )
             caps.append('compression=%s' % comptypes)
 
         return caps
@@ -158,6 +165,7 @@
     def checkperm(self, perm):
         return self._checkperm(perm)
 
+
 # This method exists mostly so that extensions like remotefilelog can
 # disable a kludgey legacy method only over http. As of early 2018,
 # there are no other known users, so with any luck we can discard this
@@ -165,6 +173,7 @@
 def iscmd(cmd):
     return cmd in wireprotov1server.commands
 
+
 def handlewsgirequest(rctx, req, res, checkperm):
     """Possibly process a wire protocol request.
 
@@ -212,8 +221,9 @@
         res.setbodybytes('0\n%s\n' % b'Not Found')
         return True
 
-    proto = httpv1protocolhandler(req, repo.ui,
-                                  lambda perm: checkperm(rctx, req, perm))
+    proto = httpv1protocolhandler(
+        req, repo.ui, lambda perm: checkperm(rctx, req, perm)
+    )
 
     # The permissions checker should be the only thing that can raise an
     # ErrorResponse. It is kind of a layer violation to catch an hgweb
@@ -231,6 +241,7 @@
 
     return True
 
+
 def _availableapis(repo):
     apis = set()
 
@@ -243,6 +254,7 @@
 
     return apis
 
+
 def handlewsgiapirequest(rctx, req, res, checkperm):
     """Handle requests to /api/*."""
     assert req.dispatchparts[0] == b'api'
@@ -266,8 +278,12 @@
     if req.dispatchparts == [b'api']:
         res.status = b'200 OK'
         res.headers[b'Content-Type'] = b'text/plain'
-        lines = [_('APIs can be accessed at /api/<name>, where <name> can be '
-                   'one of the following:\n')]
+        lines = [
+            _(
+                'APIs can be accessed at /api/<name>, where <name> can be '
+                'one of the following:\n'
+            )
+        ]
         if availableapis:
             lines.extend(sorted(availableapis))
         else:
@@ -280,8 +296,10 @@
     if proto not in API_HANDLERS:
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('Unknown API: %s\nKnown APIs: %s') % (
-            proto, b', '.join(sorted(availableapis))))
+        res.setbodybytes(
+            _('Unknown API: %s\nKnown APIs: %s')
+            % (proto, b', '.join(sorted(availableapis)))
+        )
         return
 
     if proto not in availableapis:
@@ -290,8 +308,10 @@
         res.setbodybytes(_('API %s not enabled\n') % proto)
         return
 
-    API_HANDLERS[proto]['handler'](rctx, req, res, checkperm,
-                                   req.dispatchparts[2:])
+    API_HANDLERS[proto]['handler'](
+        rctx, req, res, checkperm, req.dispatchparts[2:]
+    )
+
 
 # Maps API name to metadata so custom API can be registered.
 # Keys are:
@@ -312,6 +332,7 @@
     },
 }
 
+
 def _httpresponsetype(ui, proto, prefer_uncompressed):
     """Determine the appropriate response type and compression settings.
 
@@ -327,8 +348,9 @@
 
         # Now find an agreed upon compression format.
         compformats = wireprotov1server.clientcompressionsupport(proto)
-        for engine in wireprototypes.supportedcompengines(ui,
-                compression.SERVERROLE):
+        for engine in wireprototypes.supportedcompengines(
+            ui, compression.SERVERROLE
+        ):
             if engine.wireprotosupport().name in compformats:
                 opts = {}
                 level = ui.configint('server', '%slevel' % engine.name())
@@ -346,6 +368,7 @@
     opts = {'level': ui.configint('server', 'zliblevel')}
     return HGTYPE, util.compengines['zlib'], opts
 
+
 def processcapabilitieshandshake(repo, req, res, proto):
     """Called during a ?cmd=capabilities request.
 
@@ -394,6 +417,7 @@
 
     return True
 
+
 def _callhttp(repo, req, res, proto, cmd):
     # Avoid cycle involving hg module.
     from .hgweb import common as hgwebcommon
@@ -423,16 +447,19 @@
             res.setbodygen(bodygen)
 
     if not wireprotov1server.commands.commandavailable(cmd, proto):
-        setresponse(HTTP_OK, HGERRTYPE,
-                    _('requested wire protocol command is not available over '
-                      'HTTP'))
+        setresponse(
+            HTTP_OK,
+            HGERRTYPE,
+            _('requested wire protocol command is not available over HTTP'),
+        )
         return
 
     proto.checkperm(wireprotov1server.commands[cmd].permission)
 
     # Possibly handle a modern client wanting to switch protocols.
-    if (cmd == 'capabilities' and
-        processcapabilitieshandshake(repo, req, res, proto)):
+    if cmd == 'capabilities' and processcapabilitieshandshake(
+        repo, req, res, proto
+    ):
 
         return
 
@@ -450,7 +477,8 @@
         # This code for compression should not be streamres specific. It
         # is here because we only compress streamres at the moment.
         mediatype, engine, engineopts = _httpresponsetype(
-            repo.ui, proto, rsp.prefer_uncompressed)
+            repo.ui, proto, rsp.prefer_uncompressed
+        )
         gen = engine.compressstream(gen, engineopts)
 
         if mediatype == HGTYPE2:
@@ -469,27 +497,32 @@
     else:
         raise error.ProgrammingError('hgweb.protocol internal failure', rsp)
 
+
 def _sshv1respondbytes(fout, value):
     """Send a bytes response for protocol version 1."""
     fout.write('%d\n' % len(value))
     fout.write(value)
     fout.flush()
 
+
 def _sshv1respondstream(fout, source):
     write = fout.write
     for chunk in source.gen:
         write(chunk)
     fout.flush()
 
+
 def _sshv1respondooberror(fout, ferr, rsp):
     ferr.write(b'%s\n-\n' % rsp)
     ferr.flush()
     fout.write(b'\n')
     fout.flush()
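
Together these helpers define the version 1 framing: a response is a
decimal byte count, a newline, then the value, while out-of-band errors go
to stderr followed by a ``-`` line. A runnable sketch of the happy path:

    import io

    # what _sshv1respondbytes puts on the wire for a 19-byte value
    fout = io.BytesIO()
    value = b'capabilities lookup'
    fout.write(b'%d\n' % len(value))
    fout.write(value)
    assert fout.getvalue() == b'19\ncapabilities lookup'
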
 
+
 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
 class sshv1protocolhandler(object):
     """Handler for requests services via version 1 of SSH protocol."""
+
     def __init__(self, ui, fin, fout):
         self._ui = ui
         self._fin = fin
@@ -557,6 +590,7 @@
     def checkperm(self, perm):
         pass
 
+
 class sshv2protocolhandler(sshv1protocolhandler):
     """Protocol handler for version 2 of the SSH protocol."""
 
@@ -567,6 +601,7 @@
     def addcapabilities(self, repo, caps):
         return caps
 
+
 def _runsshserver(ui, repo, fin, fout, ev):
     # This function operates like a state machine of sorts. The following
     # states are defined:
@@ -638,9 +673,11 @@
             # handle it.
             if request.startswith(b'upgrade '):
                 if protoswitched:
-                    _sshv1respondooberror(fout, ui.ferr,
-                                          b'cannot upgrade protocols multiple '
-                                          b'times')
+                    _sshv1respondooberror(
+                        fout,
+                        ui.ferr,
+                        b'cannot upgrade protocols multiple times',
+                    )
                     state = 'shutdown'
                     continue
 
@@ -648,7 +685,8 @@
                 continue
 
             available = wireprotov1server.commands.commandavailable(
-                request, proto)
+                request, proto
+            )
 
             # This command isn't available. Send an empty response and go
             # back to waiting for a new command.
@@ -676,8 +714,10 @@
             elif isinstance(rsp, wireprototypes.ooberror):
                 _sshv1respondooberror(fout, ui.ferr, rsp.message)
             else:
-                raise error.ProgrammingError('unhandled response type from '
-                                             'wire protocol command: %s' % rsp)
+                raise error.ProgrammingError(
+                    'unhandled response type from '
+                    'wire protocol command: %s' % rsp
+                )
 
         # For now, protocol version 2 serving just goes back to version 1.
         elif state == 'protov2-serving':
@@ -741,9 +781,11 @@
                 request = fin.readline()[:-1]
 
                 if request != line:
-                    _sshv1respondooberror(fout, ui.ferr,
-                                          b'malformed handshake protocol: '
-                                          b'missing %s' % line)
+                    _sshv1respondooberror(
+                        fout,
+                        ui.ferr,
+                        b'malformed handshake protocol: missing %s' % line,
+                    )
                     ok = False
                     state = 'shutdown'
                     break
@@ -753,9 +795,12 @@
 
             request = fin.read(81)
             if request != b'%s-%s' % (b'0' * 40, b'0' * 40):
-                _sshv1respondooberror(fout, ui.ferr,
-                                      b'malformed handshake protocol: '
-                                      b'missing between argument value')
+                _sshv1respondooberror(
+                    fout,
+                    ui.ferr,
+                    b'malformed handshake protocol: '
+                    b'missing between argument value',
+                )
                 state = 'shutdown'
                 continue
 
@@ -780,8 +825,10 @@
             break
 
         else:
-            raise error.ProgrammingError('unhandled ssh server state: %s' %
-                                         state)
+            raise error.ProgrammingError(
+                'unhandled ssh server state: %s' % state
+            )
+
 
 class sshserver(object):
     def __init__(self, ui, repo, logfh=None):
@@ -792,9 +839,11 @@
         # Log write I/O to stdout and stderr if configured.
         if logfh:
             self._fout = util.makeloggingfileobject(
-                logfh, self._fout, 'o', logdata=True)
+                logfh, self._fout, 'o', logdata=True
+            )
             ui.ferr = util.makeloggingfileobject(
-                logfh, ui.ferr, 'e', logdata=True)
+                logfh, ui.ferr, 'e', logdata=True
+            )
 
     def serve_forever(self):
         self.serveuntil(threading.Event())
--- a/mercurial/wireprototypes.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprototypes.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,19 +10,13 @@
     hex,
 )
 from .i18n import _
-from .thirdparty import (
-    attr,
-)
+from .thirdparty import attr
 from . import (
     error,
     util,
 )
-from .interfaces import (
-    util as interfaceutil,
-)
-from .utils import (
-    compression,
-)
+from .interfaces import util as interfaceutil
+from .utils import compression
 
 # Names of the SSH protocol implementations.
 SSHV1 = 'ssh-v1'
@@ -38,57 +32,57 @@
 
 # All available wire protocol transports.
 TRANSPORTS = {
-    SSHV1: {
-        'transport': 'ssh',
-        'version': 1,
-    },
+    SSHV1: {'transport': 'ssh', 'version': 1},
     SSHV2: {
         'transport': 'ssh',
         # TODO mark as version 2 once all commands are implemented.
         'version': 1,
     },
-    'http-v1': {
-        'transport': 'http',
-        'version': 1,
-    },
-    HTTP_WIREPROTO_V2: {
-        'transport': 'http',
-        'version': 2,
-    }
+    'http-v1': {'transport': 'http', 'version': 1},
+    HTTP_WIREPROTO_V2: {'transport': 'http', 'version': 2},
 }
 
+
 class bytesresponse(object):
     """A wire protocol response consisting of raw bytes."""
+
     def __init__(self, data):
         self.data = data
 
+
 class ooberror(object):
     """wireproto reply: failure of a batch of operation
 
     Something failed during a batch call. The error message is stored in
     `self.message`.
     """
+
     def __init__(self, message):
         self.message = message
 
+
 class pushres(object):
     """wireproto reply: success with simple integer return
 
     The call was successful and returned an integer contained in `self.res`.
     """
+
     def __init__(self, res, output):
         self.res = res
         self.output = output
 
+
 class pusherr(object):
     """wireproto reply: failure
 
     The call failed. The `self.res` attribute contains the error message.
     """
+
     def __init__(self, res, output):
         self.res = res
         self.output = output
 
+
 class streamres(object):
     """wireproto reply: binary stream
 
@@ -100,10 +94,12 @@
     uncompressable and that the stream should therefore use the ``none``
     engine.
     """
+
     def __init__(self, gen=None, prefer_uncompressed=False):
         self.gen = gen
         self.prefer_uncompressed = prefer_uncompressed
 
+
 class streamreslegacy(object):
     """wireproto reply: uncompressed binary stream
 
@@ -114,36 +110,45 @@
     Like ``streamres``, but sends uncompressed data for "version 1" clients
     using the application/mercurial-0.1 media type.
     """
+
     def __init__(self, gen=None):
         self.gen = gen
 
+
 # list of nodes encoding / decoding
 def decodelist(l, sep=' '):
     if l:
-        return [bin(v) for v in  l.split(sep)]
+        return [bin(v) for v in l.split(sep)]
     return []
 
+
 def encodelist(l, sep=' '):
     try:
         return sep.join(map(hex, l))
     except TypeError:
         raise
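
A runnable sketch of the wire form these two functions agree on: node ids
travel hex-encoded, joined by a separator (binascii stands in for the
``hex``/``bin`` helpers this module imports):

    import binascii

    nodes = [b'\x00' * 20, b'\xff' * 20]
    wire = b' '.join(binascii.hexlify(n) for n in nodes)  # as encodelist
    assert [binascii.unhexlify(v) for v in wire.split(b' ')] == nodes
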
 
+
 # batched call argument encoding
 
+
 def escapebatcharg(plain):
-    return (plain
-            .replace(':', ':c')
-            .replace(',', ':o')
-            .replace(';', ':s')
-            .replace('=', ':e'))
+    return (
+        plain.replace(':', ':c')
+        .replace(',', ':o')
+        .replace(';', ':s')
+        .replace('=', ':e')
+    )
+
 
 def unescapebatcharg(escaped):
-    return (escaped
-            .replace(':e', '=')
-            .replace(':s', ';')
-            .replace(':o', ',')
-            .replace(':c', ':'))
+    return (
+        escaped.replace(':e', '=')
+        .replace(':s', ';')
+        .replace(':o', ',')
+        .replace(':c', ':')
+    )
+
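
A worked round trip of the escaping above. Order matters: ``:`` is escaped
first (and unescaped last) so the escape sequences themselves are never
rewritten:

    def escape(plain):
        return (plain.replace(':', ':c').replace(',', ':o')
                .replace(';', ':s').replace('=', ':e'))

    def unescape(escaped):
        return (escaped.replace(':e', '=').replace(':s', ';')
                .replace(':o', ',').replace(':c', ':'))

    assert escape('key=a,b;c') == 'key:ea:ob:sc'
    assert unescape(escape('key=a,b;c')) == 'key=a,b;c'
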
 
 # mapping of options accepted by getbundle and their types
 #
@@ -157,7 +162,7 @@
 # :scsv:  set of values, transmitted as comma-separated values
 # :plain: string with no transformation needed.
 GETBUNDLE_ARGUMENTS = {
-    'heads':  'nodes',
+    'heads': 'nodes',
     'bookmarks': 'boolean',
     'common': 'nodes',
     'obsmarkers': 'boolean',
@@ -171,6 +176,7 @@
     'excludepats': 'csv',
 }
 
+
 class baseprotocolhandler(interfaceutil.Interface):
     """Abstract base class for wire protocol handlers.
 
@@ -184,7 +190,8 @@
         """The name of the protocol implementation.
 
         Used for uniquely identifying the transport type.
-        """)
+        """
+    )
 
     def getargs(args):
         """return the value for arguments in <args>
@@ -239,10 +246,19 @@
         in a protocol specific manner.
         """
 
+
 class commandentry(object):
     """Represents a declared wire protocol command."""
-    def __init__(self, func, args='', transports=None,
-                 permission='push', cachekeyfn=None, extracapabilitiesfn=None):
+
+    def __init__(
+        self,
+        func,
+        args='',
+        transports=None,
+        permission='push',
+        cachekeyfn=None,
+        extracapabilitiesfn=None,
+    ):
         self.func = func
         self.args = args
         self.transports = transports or set()
@@ -258,8 +274,12 @@
         data not captured by the 2-tuple and a new instance containing
         the union of the two objects is returned.
         """
-        return commandentry(func, args=args, transports=set(self.transports),
-                            permission=self.permission)
+        return commandentry(
+            func,
+            args=args,
+            transports=set(self.transports),
+            permission=self.permission,
+        )
 
     # Old code treats instances as 2-tuples. So expose that interface.
     def __iter__(self):
@@ -274,12 +294,14 @@
         else:
             raise IndexError('can only access elements 0 and 1')
 
+
 class commanddict(dict):
     """Container for registered wire protocol commands.
 
     It behaves like a dict. But __setitem__ is overwritten to allow silent
     coercion of values from 2-tuples for API compatibility.
     """
+
     def __setitem__(self, k, v):
         if isinstance(v, commandentry):
             pass
@@ -296,12 +318,16 @@
                 v = self[k]._merge(v[0], v[1])
             else:
                 # Use default values from @wireprotocommand.
-                v = commandentry(v[0], args=v[1],
-                                 transports=set(TRANSPORTS),
-                                 permission='push')
+                v = commandentry(
+                    v[0],
+                    args=v[1],
+                    transports=set(TRANSPORTS),
+                    permission='push',
+                )
         else:
-            raise ValueError('command entries must be commandentry instances '
-                             'or 2-tuples')
+            raise ValueError(
+                'command entries must be commandentry instances or 2-tuples'
+            )
 
         return super(commanddict, self).__setitem__(k, v)
 
@@ -319,6 +345,7 @@
 
         return True
 
+
 def supportedcompengines(ui, role):
     """Obtain the list of supported compression engines for a request."""
     assert role in (compression.CLIENTROLE, compression.SERVERROLE)
@@ -335,16 +362,18 @@
         # because a server has the most to lose from a sub-optimal choice. (e.g.
         # CPU DoS due to an expensive engine or a network DoS due to poor
         # compression ratio).
-        configengines = ui.configlist('experimental',
-                                      'clientcompressionengines')
+        configengines = ui.configlist(
+            'experimental', 'clientcompressionengines'
+        )
         config = 'experimental.clientcompressionengines'
 
     # No explicit config. Filter out the ones that aren't supposed to be
     # advertised and return default ordering.
     if not configengines:
         attr = 'serverpriority' if role == util.SERVERROLE else 'clientpriority'
-        return [e for e in compengines
-                if getattr(e.wireprotosupport(), attr) > 0]
+        return [
+            e for e in compengines if getattr(e.wireprotosupport(), attr) > 0
+        ]
 
     # If compression engines are listed in the config, assume there is a good
     # reason for it (like server operators wanting to achieve specific
@@ -353,21 +382,29 @@
     validnames = set(e.name() for e in compengines)
     invalidnames = set(e for e in configengines if e not in validnames)
     if invalidnames:
-        raise error.Abort(_('invalid compression engine defined in %s: %s') %
-                          (config, ', '.join(sorted(invalidnames))))
+        raise error.Abort(
+            _('invalid compression engine defined in %s: %s')
+            % (config, ', '.join(sorted(invalidnames)))
+        )
 
     compengines = [e for e in compengines if e.name() in configengines]
-    compengines = sorted(compengines,
-                         key=lambda e: configengines.index(e.name()))
+    compengines = sorted(
+        compengines, key=lambda e: configengines.index(e.name())
+    )
 
     if not compengines:
-        raise error.Abort(_('%s config option does not specify any known '
-                            'compression engines') % config,
-                          hint=_('usable compression engines: %s') %
-                          ', '.sorted(validnames))
+        raise error.Abort(
+            _(
+                '%s config option does not specify any known '
+                'compression engines'
+            )
+            % config,
+            hint=_('usable compression engines: %s') % ', '.join(sorted(validnames)),
+        )
 
     return compengines
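
The config-ordering step above boils down to filtering by the configured
names and then sorting by their position in the config list. A runnable
distillation (engine names made up):

    configengines = ['zstd', 'zlib']
    engines = ['zlib', 'zstd', 'none']
    engines = [e for e in engines if e in configengines]
    assert sorted(engines, key=configengines.index) == ['zstd', 'zlib']
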
 
+
 @attr.s
 class encodedresponse(object):
     """Represents response data that is already content encoded.
@@ -378,8 +415,10 @@
     wire. If commands emit an object of this type, the encoding step is bypassed
     and the content from this object is used instead.
     """
+
     data = attr.ib()
 
+
 @attr.s
 class alternatelocationresponse(object):
     """Represents a response available at an alternate location.
@@ -389,6 +428,7 @@
 
     Only compatible with wire protocol version 2.
     """
+
     url = attr.ib()
     mediatype = attr.ib()
     size = attr.ib(default=None)
@@ -397,6 +437,7 @@
     serverdercerts = attr.ib(default=None)
     servercadercerts = attr.ib(default=None)
 
+
 @attr.s
 class indefinitebytestringresponse(object):
     """Represents an object to be encoded to an indefinite length bytestring.
@@ -404,4 +445,5 @@
     Instances are initialized from an iterable of chunks, with each chunk being
     a bytes instance.
     """
+
     chunks = attr.ib()
--- a/mercurial/wireprotov1peer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotov1peer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,9 +12,7 @@
 import weakref
 
 from .i18n import _
-from .node import (
-    bin,
-)
+from .node import bin
 from . import (
     bundle2,
     changegroup as changegroupmod,
@@ -32,6 +30,7 @@
 
 urlreq = util.urlreq
 
+
 def batchable(f):
     '''annotation for batchable methods
 
@@ -54,26 +53,31 @@
     which is used by remotebatch to split the call into separate encoding and
     decoding phases.
     '''
+
     def plain(*args, **opts):
         batchable = f(*args, **opts)
         encargsorres, encresref = next(batchable)
         if not encresref:
-            return encargsorres # a local result in this case
+            return encargsorres  # a local result in this case
         self = args[0]
         cmd = pycompat.bytesurl(f.__name__)  # ensure cmd is ascii bytestr
         encresref.set(self._submitone(cmd, encargsorres))
         return next(batchable)
+
     setattr(plain, 'batchable', f)
     setattr(plain, '__name__', f.__name__)
     return plain
 
+
 class future(object):
     '''placeholder for a value to be set later'''
+
     def set(self, value):
         if util.safehasattr(self, 'value'):
             raise error.RepoError("future is already set")
         self.value = value
 
+
 def encodebatchcmds(req):
     """Return a ``cmds`` argument value for the ``batch`` command."""
     escapearg = wireprototypes.escapebatcharg
@@ -85,12 +89,15 @@
         # servers.
         assert all(escapearg(k) == k for k in argsdict)
 
-        args = ','.join('%s=%s' % (escapearg(k), escapearg(v))
-                        for k, v in argsdict.iteritems())
+        args = ','.join(
+            '%s=%s' % (escapearg(k), escapearg(v))
+            for k, v in argsdict.iteritems()
+        )
         cmds.append('%s %s' % (op, args))
 
     return ';'.join(cmds)
 
+
 class unsentfuture(pycompat.futures.Future):
     """A Future variation to represent an unsent command.
 
@@ -111,6 +118,7 @@
         # on that.
         return self.result(timeout)
 
+
 @interfaceutil.implementer(repository.ipeercommandexecutor)
 class peerexecutor(object):
     def __init__(self, peer):
@@ -130,12 +138,14 @@
 
     def callcommand(self, command, args):
         if self._sent:
-            raise error.ProgrammingError('callcommand() cannot be used '
-                                         'after commands are sent')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used ' 'after commands are sent'
+            )
 
         if self._closed:
-            raise error.ProgrammingError('callcommand() cannot be used '
-                                         'after close()')
+            raise error.ProgrammingError(
+                'callcommand() cannot be used ' 'after close()'
+            )
 
         # Commands are dispatched through methods on the peer.
         fn = getattr(self._peer, pycompat.sysstr(command), None)
@@ -143,7 +153,8 @@
         if not fn:
             raise error.ProgrammingError(
                 'cannot call command %s: method of same name not available '
-                'on peer' % command)
+                'on peer' % command
+            )
 
         # Commands are either batchable or they aren't. If a command
         # isn't batchable, we send it immediately because the executor
@@ -169,7 +180,8 @@
             if self._calls:
                 raise error.ProgrammingError(
                     '%s is not batchable and cannot be called on a command '
-                    'executor along with other commands' % command)
+                    'executor along with other commands' % command
+                )
 
             f = addcall()
 
@@ -232,8 +244,9 @@
                 continue
 
             try:
-                batchable = fn.batchable(fn.__self__,
-                                         **pycompat.strkwargs(args))
+                batchable = fn.batchable(
+                    fn.__self__, **pycompat.strkwargs(args)
+                )
             except Exception:
                 pycompat.future_set_exception_info(f, sys.exc_info()[1:])
                 return
@@ -263,8 +276,9 @@
         # concurrent.futures already solves these problems and its thread pool
         # executor has minimal overhead. So we use it.
         self._responseexecutor = pycompat.futures.ThreadPoolExecutor(1)
-        self._responsef = self._responseexecutor.submit(self._readbatchresponse,
-                                                        states, wireresults)
+        self._responsef = self._responseexecutor.submit(
+            self._readbatchresponse, states, wireresults
+        )
 
     def close(self):
         self.sendcommands()
@@ -290,8 +304,11 @@
             # errored. Otherwise a result() could wait indefinitely.
             for f in self._futures:
                 if not f.done():
-                    f.set_exception(error.ResponseError(
-                        _('unfulfilled batch command response')))
+                    f.set_exception(
+                        error.ResponseError(
+                            _('unfulfilled batch command response')
+                        )
+                    )
 
             self._futures = None
 
@@ -312,8 +329,10 @@
             else:
                 f.set_result(result)
 
-@interfaceutil.implementer(repository.ipeercommands,
-                           repository.ipeerlegacycommands)
+
+@interfaceutil.implementer(
+    repository.ipeercommands, repository.ipeerlegacycommands
+)
 class wirepeer(repository.peer):
     """Client-side interface for communicating with a peer repository.
 
@@ -322,6 +341,7 @@
     See also httppeer.py and sshpeer.py for protocol-specific
     implementations of this interface.
     """
+
     def commandexecutor(self):
         return peerexecutor(self)
 
@@ -387,8 +407,9 @@
         self.ui.debug('preparing listkeys for "%s"\n' % namespace)
         yield {'namespace': encoding.fromlocal(namespace)}, f
         d = f.value
-        self.ui.debug('received listkey for "%s": %i bytes\n'
-                      % (namespace, len(d)))
+        self.ui.debug(
+            'received listkey for "%s": %i bytes\n' % (namespace, len(d))
+        )
         yield pushkeymod.decodekeys(d)
 
     @batchable
@@ -397,17 +418,20 @@
             yield False, None
         f = future()
         self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
-        yield {'namespace': encoding.fromlocal(namespace),
-               'key': encoding.fromlocal(key),
-               'old': encoding.fromlocal(old),
-               'new': encoding.fromlocal(new)}, f
+        yield {
+            'namespace': encoding.fromlocal(namespace),
+            'key': encoding.fromlocal(key),
+            'old': encoding.fromlocal(old),
+            'new': encoding.fromlocal(new),
+        }, f
         d = f.value
         d, output = d.split('\n', 1)
         try:
             d = bool(int(d))
         except ValueError:
             raise error.ResponseError(
-                _('push failed (unexpected response):'), d)
+                _('push failed (unexpected response):'), d
+            )
         for l in output.splitlines(True):
             self.ui.status(_('remote: '), l)
         yield d
@@ -426,7 +450,8 @@
             keytype = wireprototypes.GETBUNDLE_ARGUMENTS.get(key)
             if keytype is None:
                 raise error.ProgrammingError(
-                    'Unexpectedly None keytype for key %s' % key)
+                    'Unexpectedly None keytype for key %s' % key
+                )
             elif keytype == 'nodes':
                 value = wireprototypes.encodelist(value)
             elif keytype == 'csv':
@@ -436,8 +461,7 @@
             elif keytype == 'boolean':
                 value = '%i' % bool(value)
             elif keytype != 'plain':
-                raise KeyError('unknown getbundle option type %s'
-                               % keytype)
+                raise KeyError('unknown getbundle option type %s' % keytype)
             opts[key] = value
         f = self._callcompressable("getbundle", **pycompat.strkwargs(opts))
         if any((cap.startswith('HG2') for cap in bundlecaps)):
@@ -461,7 +485,8 @@
 
         if heads != ['force'] and self.capable('unbundlehash'):
             heads = wireprototypes.encodelist(
-                ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()])
+                ['hashed', hashlib.sha1(''.join(sorted(heads))).digest()]
+            )
         else:
             heads = wireprototypes.encodelist(heads)
 
@@ -469,13 +494,13 @@
             # this a bundle10, do the old style call sequence
             ret, output = self._callpush("unbundle", bundle, heads=heads)
             if ret == "":
-                raise error.ResponseError(
-                    _('push failed:'), output)
+                raise error.ResponseError(_('push failed:'), output)
             try:
                 ret = int(ret)
             except ValueError:
                 raise error.ResponseError(
-                    _('push failed (unexpected response):'), ret)
+                    _('push failed (unexpected response):'), ret
+                )
 
             for l in output.splitlines(True):
                 self.ui.status(_('remote: '), l)
@@ -499,15 +524,21 @@
             self._abort(error.ResponseError(_("unexpected response:"), d))
 
     def between(self, pairs):
-        batch = 8 # avoid giant requests
+        batch = 8  # avoid giant requests
         r = []
         for i in pycompat.xrange(0, len(pairs), batch):
-            n = " ".join([wireprototypes.encodelist(p, '-')
-                          for p in pairs[i:i + batch]])
+            n = " ".join(
+                [
+                    wireprototypes.encodelist(p, '-')
+                    for p in pairs[i : i + batch]
+                ]
+            )
             d = self._call("between", pairs=n)
             try:
-                r.extend(l and wireprototypes.decodelist(l) or []
-                         for l in d.splitlines())
+                r.extend(
+                    l and wireprototypes.decodelist(l) or []
+                    for l in d.splitlines()
+                )
             except ValueError:
                 self._abort(error.ResponseError(_("unexpected response:"), d))
         return r
@@ -521,8 +552,9 @@
         self.requirecap('changegroupsubset', _('look up remote changes'))
         bases = wireprototypes.encodelist(bases)
         heads = wireprototypes.encodelist(heads)
-        f = self._callcompressable("changegroupsubset",
-                                   bases=bases, heads=heads)
+        f = self._callcompressable(
+            "changegroupsubset", bases=bases, heads=heads
+        )
         return changegroupmod.cg1unpacker(f, 'UN')
 
     # End of ipeerlegacycommands interface.
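
(Usage sketch, not part of this patch: the executor above is driven through
the peer API roughly as follows, where `peer` is any established wirepeer
instance. Futures resolve once the batched request has been sent and the
response read back.)

    # Batched dispatch of two commands over one wire round trip.
    with peer.commandexecutor() as e:
        fheads = e.callcommand('heads', {})
        fknown = e.callcommand('known', {'nodes': []})

    # Leaving the context manager sends any unsent commands; each future
    # then resolves to its decoded result.
    heads = fheads.result()
    known = fknown.result()
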
--- a/mercurial/wireprotov1server.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotov1server.py	Sun Oct 06 09:45:02 2019 -0400
@@ -39,10 +39,12 @@
 urlreq = util.urlreq
 
 bundle2requiredmain = _('incompatible Mercurial client; bundle2 required')
-bundle2requiredhint = _('see https://www.mercurial-scm.org/wiki/'
-                        'IncompatibleClient')
+bundle2requiredhint = _(
+    'see https://www.mercurial-scm.org/wiki/' 'IncompatibleClient'
+)
 bundle2required = '%s\n(%s)\n' % (bundle2requiredmain, bundle2requiredhint)
 
+
 def clientcompressionsupport(proto):
     """Returns a list of compression methods supported by the client.
 
@@ -55,8 +57,10 @@
             return cap[5:].split(',')
     return ['zlib', 'none']
 
+
 # A wire protocol command can either return a string or one of these classes.
 
+
 def getdispatchrepo(repo, proto, command):
     """Obtain the repo used for processing wire protocol commands.
 
@@ -67,6 +71,7 @@
     viewconfig = repo.ui.config('server', 'view')
     return repo.filtered(viewconfig)
 
+
 def dispatch(repo, proto, command):
     repo = getdispatchrepo(repo, proto, command)
 
@@ -75,6 +80,7 @@
 
     return func(repo, proto, *args)
 
+
 def options(cmd, keys, others):
     opts = {}
     for k in keys:
@@ -82,10 +88,13 @@
             opts[k] = others[k]
             del others[k]
     if others:
-        procutil.stderr.write("warning: %s ignored unexpected arguments %s\n"
-                              % (cmd, ",".join(others)))
+        procutil.stderr.write(
+            "warning: %s ignored unexpected arguments %s\n"
+            % (cmd, ",".join(others))
+        )
     return opts
 
+
 def bundle1allowed(repo, action):
     """Whether a bundle1 operation is allowed from the server.
 
@@ -115,8 +124,10 @@
 
     return ui.configbool('server', 'bundle1')
 
+
 commands = wireprototypes.commanddict()
 
+
 def wireprotocommand(name, args=None, permission='push'):
     """Decorator to declare a wire protocol command.
 
@@ -132,8 +143,9 @@
     because otherwise commands not declaring their permissions could modify
     a repository that is supposed to be read-only.
     """
-    transports = {k for k, v in wireprototypes.TRANSPORTS.items()
-                  if v['version'] == 1}
+    transports = {
+        k for k, v in wireprototypes.TRANSPORTS.items() if v['version'] == 1
+    }
 
     # Because SSHv2 is a mirror of SSHv1, we allow "batch" commands through to
     # SSHv2.
@@ -142,27 +154,33 @@
         transports.add(wireprototypes.SSHV2)
 
     if permission not in ('push', 'pull'):
-        raise error.ProgrammingError('invalid wire protocol permission; '
-                                     'got %s; expected "push" or "pull"' %
-                                     permission)
+        raise error.ProgrammingError(
+            'invalid wire protocol permission; '
+            'got %s; expected "push" or "pull"' % permission
+        )
 
     if args is None:
         args = ''
 
     if not isinstance(args, bytes):
-        raise error.ProgrammingError('arguments for version 1 commands '
-                                     'must be declared as bytes')
+        raise error.ProgrammingError(
+            'arguments for version 1 commands ' 'must be declared as bytes'
+        )
 
     def register(func):
         if name in commands:
-            raise error.ProgrammingError('%s command already registered '
-                                         'for version 1' % name)
+            raise error.ProgrammingError(
+                '%s command already registered ' 'for version 1' % name
+            )
         commands[name] = wireprototypes.commandentry(
-            func, args=args, transports=transports, permission=permission)
+            func, args=args, transports=transports, permission=permission
+        )
 
         return func
+
     return register
 
+
 # TODO define a more appropriate permissions type to use for this.
 @wireprotocommand('batch', 'cmds *', permission='pull')
 def batch(repo, proto, cmds, others):
@@ -209,6 +227,7 @@
 
     return wireprototypes.bytesresponse(';'.join(res))
 
+
 @wireprotocommand('between', 'pairs', permission='pull')
 def between(repo, proto, pairs):
     pairs = [wireprototypes.decodelist(p, '-') for p in pairs.split(" ")]
@@ -218,6 +237,7 @@
 
     return wireprototypes.bytesresponse(''.join(r))
 
+
 @wireprotocommand('branchmap', permission='pull')
 def branchmap(repo, proto):
     branchmap = repo.branchmap()
@@ -229,6 +249,7 @@
 
     return wireprototypes.bytesresponse('\n'.join(heads))
 
+
 @wireprotocommand('branches', 'nodes', permission='pull')
 def branches(repo, proto, nodes):
     nodes = wireprototypes.decodelist(nodes)
@@ -238,6 +259,7 @@
 
     return wireprototypes.bytesresponse(''.join(r))
 
+
 @wireprotocommand('clonebundles', '', permission='pull')
 def clonebundles(repo, proto):
     """Server command for returning info for available bundles to seed clones.
@@ -249,10 +271,19 @@
     data center given the client's IP address.
     """
     return wireprototypes.bytesresponse(
-        repo.vfs.tryread('clonebundles.manifest'))
+        repo.vfs.tryread('clonebundles.manifest')
+    )
+
 
-wireprotocaps = ['lookup', 'branchmap', 'pushkey',
-                 'known', 'getbundle', 'unbundlehash']
+wireprotocaps = [
+    'lookup',
+    'branchmap',
+    'pushkey',
+    'known',
+    'getbundle',
+    'unbundlehash',
+]
+
 
 def _capabilities(repo, proto):
     """return a list of capabilities for a repo
@@ -294,6 +325,7 @@
 
     return proto.addcapabilities(repo, caps)
 
+
 # If you are writing an extension and considering wrapping this function, wrap
 # `_capabilities` instead.
 @wireprotocommand('capabilities', permission='pull')
@@ -301,33 +333,36 @@
     caps = _capabilities(repo, proto)
     return wireprototypes.bytesresponse(' '.join(sorted(caps)))
 
+
 @wireprotocommand('changegroup', 'roots', permission='pull')
 def changegroup(repo, proto, roots):
     nodes = wireprototypes.decodelist(roots)
-    outgoing = discovery.outgoing(repo, missingroots=nodes,
-                                  missingheads=repo.heads())
+    outgoing = discovery.outgoing(
+        repo, missingroots=nodes, missingheads=repo.heads()
+    )
     cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
     gen = iter(lambda: cg.read(32768), '')
     return wireprototypes.streamres(gen=gen)
 
-@wireprotocommand('changegroupsubset', 'bases heads',
-                  permission='pull')
+
+@wireprotocommand('changegroupsubset', 'bases heads', permission='pull')
 def changegroupsubset(repo, proto, bases, heads):
     bases = wireprototypes.decodelist(bases)
     heads = wireprototypes.decodelist(heads)
-    outgoing = discovery.outgoing(repo, missingroots=bases,
-                                  missingheads=heads)
+    outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
     cg = changegroupmod.makechangegroup(repo, outgoing, '01', 'serve')
     gen = iter(lambda: cg.read(32768), '')
     return wireprototypes.streamres(gen=gen)
 
-@wireprotocommand('debugwireargs', 'one two *',
-                  permission='pull')
+
+@wireprotocommand('debugwireargs', 'one two *', permission='pull')
 def debugwireargs(repo, proto, one, two, others):
     # only accept optional args from the known set
     opts = options('debugwireargs', ['three', 'four'], others)
-    return wireprototypes.bytesresponse(repo.debugwireargs(
-        one, two, **pycompat.strkwargs(opts)))
+    return wireprototypes.bytesresponse(
+        repo.debugwireargs(one, two, **pycompat.strkwargs(opts))
+    )
+
 
 def find_pullbundle(repo, proto, opts, clheads, heads, common):
     """Return a file object for the first matching pullbundle.
@@ -344,6 +379,7 @@
       E.g. do not send a bundle of all changes if the client wants only
       one specific branch of many.
     """
+
     def decodehexstring(s):
         return {binascii.unhexlify(h) for h in s.split(';')}
 
@@ -372,11 +408,13 @@
                 # Bad heads entry
                 continue
             if bundle_heads.issubset(common):
-                continue # Nothing new
+                continue  # Nothing new
             if all(cl.rev(rev) in common_anc for rev in bundle_heads):
-                continue # Still nothing new
-            if any(cl.rev(rev) not in heads_anc and
-                   cl.rev(rev) not in common_anc for rev in bundle_heads):
+                continue  # Still nothing new
+            if any(
+                cl.rev(rev) not in heads_anc and cl.rev(rev) not in common_anc
+                for rev in bundle_heads
+            ):
                 continue
         if 'bases' in entry:
             try:
@@ -395,10 +433,12 @@
             continue
     return None
 
+
 @wireprotocommand('getbundle', '*', permission='pull')
 def getbundle(repo, proto, others):
-    opts = options('getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(),
-                   others)
+    opts = options(
+        'getbundle', wireprototypes.GETBUNDLE_ARGUMENTS.keys(), others
+    )
     for k, v in opts.iteritems():
         keytype = wireprototypes.GETBUNDLE_ARGUMENTS[k]
         if keytype == 'nodes':
@@ -415,28 +455,29 @@
             else:
                 opts[k] = bool(v)
         elif keytype != 'plain':
-            raise KeyError('unknown getbundle option type %s'
-                           % keytype)
+            raise KeyError('unknown getbundle option type %s' % keytype)
 
     if not bundle1allowed(repo, 'pull'):
         if not exchange.bundle2requested(opts.get('bundlecaps')):
             if proto.name == 'http-v1':
                 return wireprototypes.ooberror(bundle2required)
-            raise error.Abort(bundle2requiredmain,
-                              hint=bundle2requiredhint)
+            raise error.Abort(bundle2requiredmain, hint=bundle2requiredhint)
 
     try:
         clheads = set(repo.changelog.heads())
         heads = set(opts.get('heads', set()))
         common = set(opts.get('common', set()))
         common.discard(nullid)
-        if (repo.ui.configbool('server', 'pullbundle') and
-            'partial-pull' in proto.getprotocaps()):
+        if (
+            repo.ui.configbool('server', 'pullbundle')
+            and 'partial-pull' in proto.getprotocaps()
+        ):
             # Check if a pre-built bundle covers this request.
             bundle = find_pullbundle(repo, proto, opts, clheads, heads, common)
             if bundle:
-                return wireprototypes.streamres(gen=util.filechunkiter(bundle),
-                                                prefer_uncompressed=True)
+                return wireprototypes.streamres(
+                    gen=util.filechunkiter(bundle), prefer_uncompressed=True
+                )
 
         if repo.ui.configbool('server', 'disablefullbundle'):
             # Check to see if this is a full clone.
@@ -444,36 +485,40 @@
             if changegroup and not common and clheads == heads:
                 raise error.Abort(
                     _('server has pull-based clones disabled'),
-                    hint=_('remove --pull if specified or upgrade Mercurial'))
+                    hint=_('remove --pull if specified or upgrade Mercurial'),
+                )
 
-        info, chunks = exchange.getbundlechunks(repo, 'serve',
-                                                **pycompat.strkwargs(opts))
+        info, chunks = exchange.getbundlechunks(
+            repo, 'serve', **pycompat.strkwargs(opts)
+        )
         prefercompressed = info.get('prefercompressed', True)
     except error.Abort as exc:
         # cleanly forward Abort error to the client
         if not exchange.bundle2requested(opts.get('bundlecaps')):
             if proto.name == 'http-v1':
                 return wireprototypes.ooberror(pycompat.bytestr(exc) + '\n')
-            raise # cannot do better for bundle1 + ssh
+            raise  # cannot do better for bundle1 + ssh
         # bundle2 request expect a bundle2 reply
         bundler = bundle2.bundle20(repo.ui)
         manargs = [('message', pycompat.bytestr(exc))]
         advargs = []
         if exc.hint is not None:
             advargs.append(('hint', exc.hint))
-        bundler.addpart(bundle2.bundlepart('error:abort',
-                                           manargs, advargs))
+        bundler.addpart(bundle2.bundlepart('error:abort', manargs, advargs))
         chunks = bundler.getchunks()
         prefercompressed = False
 
     return wireprototypes.streamres(
-        gen=chunks, prefer_uncompressed=not prefercompressed)
+        gen=chunks, prefer_uncompressed=not prefercompressed
+    )
+
 
 @wireprotocommand('heads', permission='pull')
 def heads(repo, proto):
     h = repo.heads()
     return wireprototypes.bytesresponse(wireprototypes.encodelist(h) + '\n')
 
+
 @wireprotocommand('hello', permission='pull')
 def hello(repo, proto):
     """Called as part of SSH handshake to obtain server info.
@@ -489,11 +534,13 @@
     caps = capabilities(repo, proto).data
     return wireprototypes.bytesresponse('capabilities: %s\n' % caps)
 
+
 @wireprotocommand('listkeys', 'namespace', permission='pull')
 def listkeys(repo, proto, namespace):
     d = sorted(repo.listkeys(encoding.tolocal(namespace)).items())
     return wireprototypes.bytesresponse(pushkeymod.encodekeys(d))
 
+
 @wireprotocommand('lookup', 'key', permission='pull')
 def lookup(repo, proto, key):
     try:
@@ -506,18 +553,22 @@
         success = 0
     return wireprototypes.bytesresponse('%d %s\n' % (success, r))
 
+
 @wireprotocommand('known', 'nodes *', permission='pull')
 def known(repo, proto, nodes, others):
-    v = ''.join(b and '1' or '0'
-                for b in repo.known(wireprototypes.decodelist(nodes)))
+    v = ''.join(
+        b and '1' or '0' for b in repo.known(wireprototypes.decodelist(nodes))
+    )
     return wireprototypes.bytesresponse(v)
 
+
 @wireprotocommand('protocaps', 'caps', permission='pull')
 def protocaps(repo, proto, caps):
     if proto.name == wireprototypes.SSHV1:
         proto._protocaps = set(caps.split(' '))
     return wireprototypes.bytesresponse('OK')
 
+
 @wireprotocommand('pushkey', 'namespace key old new', permission='push')
 def pushkey(repo, proto, namespace, key, old, new):
     # compatibility with pre-1.8 clients which were accidentally
@@ -526,27 +577,35 @@
         # looks like it could be a binary node
         try:
             new.decode('utf-8')
-            new = encoding.tolocal(new) # but cleanly decodes as UTF-8
+            new = encoding.tolocal(new)  # but cleanly decodes as UTF-8
         except UnicodeDecodeError:
-            pass # binary, leave unmodified
+            pass  # binary, leave unmodified
     else:
-        new = encoding.tolocal(new) # normal path
+        new = encoding.tolocal(new)  # normal path
 
     with proto.mayberedirectstdio() as output:
-        r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
-                         encoding.tolocal(old), new) or False
+        r = (
+            repo.pushkey(
+                encoding.tolocal(namespace),
+                encoding.tolocal(key),
+                encoding.tolocal(old),
+                new,
+            )
+            or False
+        )
 
     output = output.getvalue() if output else ''
     return wireprototypes.bytesresponse('%d\n%s' % (int(r), output))
 
+
 @wireprotocommand('stream_out', permission='pull')
 def stream(repo, proto):
     '''If the server supports streaming clone, it advertises the "stream"
     capability with a value representing the version and flags of the repo
     it is serving. Client checks to see if it understands the format.
     '''
-    return wireprototypes.streamreslegacy(
-        streamclone.generatev1wireproto(repo))
+    return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
+
 
 @wireprotocommand('unbundle', 'heads', permission='push')
 def unbundle(repo, proto, heads):
@@ -559,48 +618,57 @@
             try:
                 payload = proto.getpayload()
                 if repo.ui.configbool('server', 'streamunbundle'):
+
                     def cleanup():
                         # Ensure that the full payload is consumed, so
                         # that the connection doesn't contain trailing garbage.
                         for p in payload:
                             pass
+
                     fp = util.chunkbuffer(payload)
                 else:
                     # write bundle data to temporary file as it can be big
                     fp, tempname = None, None
+
                     def cleanup():
                         if fp:
                             fp.close()
                         if tempname:
                             os.unlink(tempname)
+
                     fd, tempname = pycompat.mkstemp(prefix='hg-unbundle-')
-                    repo.ui.debug('redirecting incoming bundle to %s\n' %
-                        tempname)
+                    repo.ui.debug(
+                        'redirecting incoming bundle to %s\n' % tempname
+                    )
                     fp = os.fdopen(fd, pycompat.sysstr('wb+'))
                     for p in payload:
                         fp.write(p)
                     fp.seek(0)
 
                 gen = exchange.readbundle(repo.ui, fp, None)
-                if (isinstance(gen, changegroupmod.cg1unpacker)
-                    and not bundle1allowed(repo, 'push')):
+                if isinstance(
+                    gen, changegroupmod.cg1unpacker
+                ) and not bundle1allowed(repo, 'push'):
                     if proto.name == 'http-v1':
                         # need to special-case http because stderr does not
                         # get to the http client on a failed push, so we need
                         # to abuse some other error type to make sure the
                         # message gets to the user.
                         return wireprototypes.ooberror(bundle2required)
-                    raise error.Abort(bundle2requiredmain,
-                                      hint=bundle2requiredhint)
+                    raise error.Abort(
+                        bundle2requiredmain, hint=bundle2requiredhint
+                    )
 
-                r = exchange.unbundle(repo, gen, their_heads, 'serve',
-                                      proto.client())
+                r = exchange.unbundle(
+                    repo, gen, their_heads, 'serve', proto.client()
+                )
                 if util.safehasattr(r, 'addpart'):
                     # The return looks streamable, we are in the bundle2 case
                     # and should return a stream.
                     return wireprototypes.streamreslegacy(gen=r.getchunks())
                 return wireprototypes.pushres(
-                    r, output.getvalue() if output else '')
+                    r, output.getvalue() if output else ''
+                )
 
             finally:
                 cleanup()
@@ -620,11 +688,13 @@
                         procutil.stderr.write("(%s)\n" % exc.hint)
                     procutil.stderr.flush()
                     return wireprototypes.pushres(
-                        0, output.getvalue() if output else '')
+                        0, output.getvalue() if output else ''
+                    )
                 except error.PushRaced:
                     return wireprototypes.pusherr(
                         pycompat.bytestr(exc),
-                        output.getvalue() if output else '')
+                        output.getvalue() if output else '',
+                    )
 
             bundler = bundle2.bundle20(repo.ui)
             for out in getattr(exc, '_bundle2salvagedoutput', ()):
@@ -635,15 +705,18 @@
                 except error.PushkeyFailed as exc:
                     # check client caps
                     remotecaps = getattr(exc, '_replycaps', None)
-                    if (remotecaps is not None
-                            and 'pushkey' not in remotecaps.get('error', ())):
+                    if (
+                        remotecaps is not None
+                        and 'pushkey' not in remotecaps.get('error', ())
+                    ):
                         # not supported on the remote side; fall back to the
                         # Abort handler.
                         raise
                     part = bundler.newpart('error:pushkey')
                     part.addparam('in-reply-to', exc.partid)
                     if exc.namespace is not None:
-                        part.addparam('namespace', exc.namespace,
-                                      mandatory=False)
+                        part.addparam(
+                            'namespace', exc.namespace, mandatory=False
+                        )
                     if exc.key is not None:
                         part.addparam('key', exc.key, mandatory=False)
                     if exc.new is not None:
@@ -663,9 +736,12 @@
                 advargs = []
                 if exc.hint is not None:
                     advargs.append(('hint', exc.hint))
-                bundler.addpart(bundle2.bundlepart('error:abort',
-                                                   manargs, advargs))
+                bundler.addpart(
+                    bundle2.bundlepart('error:abort', manargs, advargs)
+                )
             except error.PushRaced as exc:
-                bundler.newpart('error:pushraced',
-                                [('message', stringutil.forcebytestr(exc))])
+                bundler.newpart(
+                    'error:pushraced',
+                    [('message', stringutil.forcebytestr(exc))],
+                )
             return wireprototypes.streamreslegacy(gen=bundler.getchunks())
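
(Illustration, not part of this patch: the version 1 registration decorator
above takes its argument specification as a bytes string of space-separated
names. A hypothetical extension command would be declared like this; the
command name is made up.)

    @wireprotocommand('echokey', 'key', permission='pull')
    def echokey(repo, proto, key):
        # v1 handlers return wireprototypes response objects.
        return wireprototypes.bytesresponse('%s\n' % key)
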
--- a/mercurial/wireprotov2peer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotov2peer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,9 +20,8 @@
     wireprotoframing,
     wireprototypes,
 )
-from .utils import (
-    cborutil,
-)
+from .utils import cborutil
+
 
 def formatrichmessage(atoms):
     """Format an encoded message from the framing protocol."""
@@ -39,6 +38,7 @@
 
     return b''.join(chunks)
 
+
 SUPPORTED_REDIRECT_PROTOCOLS = {
     b'http',
     b'https',
@@ -49,6 +49,7 @@
     b'sha256',
 }
 
+
 def redirecttargetsupported(ui, target):
     """Determine whether a redirect target entry is supported.
 
@@ -56,13 +57,17 @@
     the server.
     """
     if target.get(b'protocol') not in SUPPORTED_REDIRECT_PROTOCOLS:
-        ui.note(_('(remote redirect target %s uses unsupported protocol: %s)\n')
-                % (target[b'name'], target.get(b'protocol', b'')))
+        ui.note(
+            _('(remote redirect target %s uses unsupported protocol: %s)\n')
+            % (target[b'name'], target.get(b'protocol', b''))
+        )
         return False
 
     if target.get(b'snirequired') and not sslutil.hassni:
-        ui.note(_('(redirect target %s requires SNI, which is unsupported)\n') %
-                target[b'name'])
+        ui.note(
+            _('(redirect target %s requires SNI, which is unsupported)\n')
+            % target[b'name']
+        )
         return False
 
     if b'tlsversions' in target:
@@ -74,15 +79,20 @@
             supported.add(v[3:])
 
         if not tlsversions & supported:
-            ui.note(_('(remote redirect target %s requires unsupported TLS '
-                      'versions: %s)\n') % (
-                target[b'name'], b', '.join(sorted(tlsversions))))
+            ui.note(
+                _(
+                    '(remote redirect target %s requires unsupported TLS '
+                    'versions: %s)\n'
+                )
+                % (target[b'name'], b', '.join(sorted(tlsversions)))
+            )
             return False
 
     ui.note(_('(remote redirect target %s is compatible)\n') % target[b'name'])
 
     return True
 
+
 def supportedredirects(ui, apidescriptor):
     """Resolve the "redirect" command request key given an API descriptor.
 
@@ -96,17 +106,24 @@
     if not apidescriptor or b'redirect' not in apidescriptor:
         return None
 
-    targets = [t[b'name'] for t in apidescriptor[b'redirect'][b'targets']
-               if redirecttargetsupported(ui, t)]
+    targets = [
+        t[b'name']
+        for t in apidescriptor[b'redirect'][b'targets']
+        if redirecttargetsupported(ui, t)
+    ]
 
-    hashes = [h for h in apidescriptor[b'redirect'][b'hashes']
-              if h in SUPPORTED_CONTENT_HASHES]
+    hashes = [
+        h
+        for h in apidescriptor[b'redirect'][b'hashes']
+        if h in SUPPORTED_CONTENT_HASHES
+    ]
 
     return {
         b'targets': targets,
         b'hashes': hashes,
     }
 
+
 class commandresponse(object):
     """Represents the response to a command request.
 
@@ -162,9 +179,13 @@
                 # content redirect is the only object in the stream. Fail
                 # if we see a misbehaving server.
                 if self._redirect:
-                    raise error.Abort(_('received unexpected response data '
-                                        'after content redirect; the remote is '
-                                        'buggy'))
+                    raise error.Abort(
+                        _(
+                            'received unexpected response data '
+                            'after content redirect; the remote is '
+                            'buggy'
+                        )
+                    )
 
                 self._pendingevents.append(o)
 
@@ -190,7 +211,8 @@
                 fullhashes=l.get(b'fullhashes'),
                 fullhashseed=l.get(b'fullhashseed'),
                 serverdercerts=l.get(b'serverdercerts'),
-                servercadercerts=l.get(b'servercadercerts'))
+                servercadercerts=l.get(b'servercadercerts'),
+            )
             return
 
         atoms = [{'msg': o[b'error'][b'message']}]
@@ -237,6 +259,7 @@
             if stop:
                 break
 
+
 class clienthandler(object):
     """Object to handle higher-level client activities.
 
@@ -248,8 +271,9 @@
     with the higher-level peer API.
     """
 
-    def __init__(self, ui, clientreactor, opener=None,
-                 requestbuilder=util.urlreq.request):
+    def __init__(
+        self, ui, clientreactor, opener=None, requestbuilder=util.urlreq.request
+    ):
         self._ui = ui
         self._reactor = clientreactor
         self._requests = {}
@@ -265,8 +289,9 @@
 
         Returns an iterable of frames that should be sent over the wire.
         """
-        request, action, meta = self._reactor.callcommand(command, args,
-                                                          redirect=redirect)
+        request, action, meta = self._reactor.callcommand(
+            command, args, redirect=redirect
+        )
 
         if action != 'noop':
             raise error.ProgrammingError('%s not yet supported' % action)
@@ -347,7 +372,8 @@
             raise error.ProgrammingError(
                 'received frame for unknown request; this is either a bug in '
                 'the clientreactor not screening for this or this instance was '
-                'never told about this request: %r' % frame)
+                'never told about this request: %r' % frame
+            )
 
         response = self._responses[frame.requestid]
 
@@ -371,7 +397,8 @@
                     response._onerror(e)
         else:
             raise error.ProgrammingError(
-                'unhandled action from clientreactor: %s' % action)
+                'unhandled action from clientreactor: %s' % action
+            )
 
     def _processresponsedata(self, frame, meta, response):
         # This can raise. The caller can handle it.
@@ -423,16 +450,26 @@
 
         # TODO handle framed responses.
         if redirect.mediatype != b'application/mercurial-cbor':
-            raise error.Abort(_('cannot handle redirects for the %s media type')
-                              % redirect.mediatype)
+            raise error.Abort(
+                _('cannot handle redirects for the %s media type')
+                % redirect.mediatype
+            )
 
         if redirect.fullhashes:
-            self._ui.warn(_('(support for validating hashes on content '
-                            'redirects not supported)\n'))
+            self._ui.warn(
+                _(
+                    '(support for validating hashes on content '
+                    'redirects not supported)\n'
+                )
+            )
 
         if redirect.serverdercerts or redirect.servercadercerts:
-            self._ui.warn(_('(support for pinning server certificates on '
-                            'content redirects not supported)\n'))
+            self._ui.warn(
+                _(
+                    '(support for pinning server certificates on '
+                    'content redirects not supported)\n'
+                )
+            )
 
         headers = {
             r'Accept': redirect.mediatype,
@@ -456,9 +493,9 @@
         # The existing response object is associated with frame data. Rather
         # than try to normalize its state, just create a new object.
         oldresponse = self._responses[requestid]
-        self._responses[requestid] = commandresponse(requestid,
-                                                     oldresponse.command,
-                                                     fromredirect=True)
+        self._responses[requestid] = commandresponse(
+            requestid, oldresponse.command, fromredirect=True
+        )
 
         self._redirects.append((requestid, res))
 
@@ -496,32 +533,39 @@
             response._oninputcomplete()
             return False
 
+
 def decodebranchmap(objs):
     # Response should be a single CBOR map of branch name to array of nodes.
     bm = next(objs)
 
     return {encoding.tolocal(k): v for k, v in bm.items()}
 
+
 def decodeheads(objs):
     # Array of node bytestrings.
     return next(objs)
 
+
 def decodeknown(objs):
     # Bytestring where each byte is a 0 or 1.
     raw = next(objs)
 
-    return [True if raw[i:i + 1] == b'1' else False for i in range(len(raw))]
+    return [True if raw[i : i + 1] == b'1' else False for i in range(len(raw))]
+
 
 def decodelistkeys(objs):
     # Map with bytestring keys and values.
     return next(objs)
 
+
 def decodelookup(objs):
     return next(objs)
 
+
 def decodepushkey(objs):
     return next(objs)
 
+
 COMMAND_DECODERS = {
     'branchmap': decodebranchmap,
     'heads': decodeheads,
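
(Illustration, not part of this patch: COMMAND_DECODERS maps version 2
command names to functions that reduce the decoded CBOR objects to Python
values. Wiring up a new entry follows the pattern of the decoders above;
'examplecmd' is not a real command.)

    def decodeexamplecmd(objs):
        # The response is a single CBOR value; surface it unchanged.
        return next(objs)

    COMMAND_DECODERS['examplecmd'] = decodeexamplecmd
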
--- a/mercurial/wireprotov2server.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/wireprotov2server.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,9 +28,7 @@
     wireprotoframing,
     wireprototypes,
 )
-from .interfaces import (
-    util as interfaceutil,
-)
+from .interfaces import util as interfaceutil
 from .utils import (
     cborutil,
     stringutil,
@@ -47,6 +45,7 @@
 # there is a change to how caching works, etc.
 GLOBAL_CACHE_VERSION = 1
 
+
 def handlehttpv2request(rctx, req, res, checkperm, urlparts):
     from .hgweb import common as hgwebcommon
 
@@ -63,8 +62,9 @@
     if len(urlparts) == 1:
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('do not know how to process %s\n') %
-                         req.dispatchpath)
+        res.setbodybytes(
+            _('do not know how to process %s\n') % req.dispatchpath
+        )
         return
 
     permission, command = urlparts[0:2]
@@ -115,8 +115,10 @@
 
     proto = httpv2protocolhandler(req, ui)
 
-    if (not COMMANDS.commandavailable(command, proto)
-        and command not in extracommands):
+    if (
+        not COMMANDS.commandavailable(command, proto)
+        and command not in extracommands
+    ):
         res.status = b'404 Not Found'
         res.headers[b'Content-Type'] = b'text/plain'
         res.setbodybytes(_('invalid wire protocol command: %s') % command)
@@ -126,8 +128,10 @@
     if req.headers.get(b'Accept') != FRAMINGTYPE:
         res.status = b'406 Not Acceptable'
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('client MUST specify Accept header with value: %s\n')
-                           % FRAMINGTYPE)
+        res.setbodybytes(
+            _('client MUST specify Accept header with value: %s\n')
+            % FRAMINGTYPE
+        )
         return
 
     if req.headers.get(b'Content-Type') != FRAMINGTYPE:
@@ -135,12 +139,15 @@
         # TODO we should send a response with appropriate media type,
         # since client does Accept it.
         res.headers[b'Content-Type'] = b'text/plain'
-        res.setbodybytes(_('client MUST send Content-Type header with '
-                           'value: %s\n') % FRAMINGTYPE)
+        res.setbodybytes(
+            _('client MUST send Content-Type header with ' 'value: %s\n')
+            % FRAMINGTYPE
+        )
         return
 
     _processhttpv2request(ui, repo, req, res, permission, command, proto)
 
+
 def _processhttpv2reflectrequest(ui, repo, req, res):
     """Reads unified frame protocol request and dumps out state to client.
 
@@ -171,9 +178,10 @@
             states.append(b'received: <no frame>')
             break
 
-        states.append(b'received: %d %d %d %s' % (frame.typeid, frame.flags,
-                                                  frame.requestid,
-                                                  frame.payload))
+        states.append(
+            b'received: %d %d %d %s'
+            % (frame.typeid, frame.flags, frame.requestid, frame.payload)
+        )
 
         action, meta = reactor.onframerecv(frame)
         states.append(templatefilters.json((action, meta)))
@@ -186,6 +194,7 @@
     res.headers[b'Content-Type'] = b'text/plain'
     res.setbodybytes(b'\n'.join(states))
 
+
 def _processhttpv2request(ui, repo, req, res, authedperm, reqcommand, proto):
     """Post-validation handler for HTTPv2 requests.
 
@@ -216,9 +225,18 @@
             if not outstream:
                 outstream = reactor.makeoutputstream()
 
-            sentoutput = _httpv2runcommand(ui, repo, req, res, authedperm,
-                                           reqcommand, reactor, outstream,
-                                           meta, issubsequent=seencommand)
+            sentoutput = _httpv2runcommand(
+                ui,
+                repo,
+                req,
+                res,
+                authedperm,
+                reqcommand,
+                reactor,
+                outstream,
+                meta,
+                issubsequent=seencommand,
+            )
 
             if sentoutput:
                 return
@@ -233,7 +251,8 @@
             return
         else:
             raise error.ProgrammingError(
-                'unhandled action from frame processor: %s' % action)
+                'unhandled action from frame processor: %s' % action
+            )
 
     action, meta = reactor.oninputeof()
     if action == 'sendframes':
@@ -245,11 +264,23 @@
     elif action == 'noop':
         pass
     else:
-        raise error.ProgrammingError('unhandled action from frame processor: %s'
-                                     % action)
+        raise error.ProgrammingError(
+            'unhandled action from frame processor: %s' % action
+        )
+
 
-def _httpv2runcommand(ui, repo, req, res, authedperm, reqcommand, reactor,
-                      outstream, command, issubsequent):
+def _httpv2runcommand(
+    ui,
+    repo,
+    req,
+    res,
+    authedperm,
+    reqcommand,
+    reactor,
+    outstream,
+    command,
+    issubsequent,
+):
     """Dispatch a wire protocol command made from HTTPv2 requests.
 
     The authenticated permission (``authedperm``) along with the original
@@ -277,8 +308,10 @@
             # TODO proper error mechanism
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
-            res.setbodybytes(_('wire protocol command not available: %s') %
-                             command['command'])
+            res.setbodybytes(
+                _('wire protocol command not available: %s')
+                % command['command']
+            )
             return True
 
         # TODO don't use assert here, since it may be elided by -O.
@@ -290,8 +323,10 @@
             # TODO proper error mechanism
             res.status = b'403 Forbidden'
             res.headers[b'Content-Type'] = b'text/plain'
-            res.setbodybytes(_('insufficient permissions to execute '
-                               'command: %s') % command['command'])
+            res.setbodybytes(
+                _('insufficient permissions to execute ' 'command: %s')
+                % command['command']
+            )
             return True
 
         # TODO should we also call checkperm() here? Maybe not if we're going
@@ -304,8 +339,9 @@
             # TODO proper error mechanism
             res.status = b'200 OK'
             res.headers[b'Content-Type'] = b'text/plain'
-            res.setbodybytes(_('multiple commands cannot be issued to this '
-                               'URL'))
+            res.setbodybytes(
+                _('multiple commands cannot be issued to this ' 'URL')
+            )
             return True
 
         if reqcommand != command['command']:
@@ -322,17 +358,21 @@
         objs = dispatch(repo, proto, command['command'], command['redirect'])
 
         action, meta = reactor.oncommandresponsereadyobjects(
-            outstream, command['requestid'], objs)
+            outstream, command['requestid'], objs
+        )
 
     except error.WireprotoCommandError as e:
         action, meta = reactor.oncommanderror(
-            outstream, command['requestid'], e.message, e.messageargs)
+            outstream, command['requestid'], e.message, e.messageargs
+        )
 
     except Exception as e:
         action, meta = reactor.onservererror(
-            outstream, command['requestid'],
-            _('exception when invoking command: %s') %
-            stringutil.forcebytestr(e))
+            outstream,
+            command['requestid'],
+            _('exception when invoking command: %s')
+            % stringutil.forcebytestr(e),
+        )
 
     if action == 'sendframes':
         res.setbodygen(meta['framegen'])
@@ -340,13 +380,16 @@
     elif action == 'noop':
         return False
     else:
-        raise error.ProgrammingError('unhandled event from reactor: %s' %
-                                     action)
+        raise error.ProgrammingError(
+            'unhandled event from reactor: %s' % action
+        )
+
 
 def getdispatchrepo(repo, proto, command):
     viewconfig = repo.ui.config('server', 'view')
     return repo.filtered(viewconfig)
 
+
 def dispatch(repo, proto, command, redirect):
     """Run a wire protocol command.
 
@@ -379,10 +422,15 @@
         redirecttargets = []
         redirecthashes = []
 
-    cacher = makeresponsecacher(repo, proto, command, args,
-                                cborutil.streamencode,
-                                redirecttargets=redirecttargets,
-                                redirecthashes=redirecthashes)
+    cacher = makeresponsecacher(
+        repo,
+        proto,
+        command,
+        args,
+        cborutil.streamencode,
+        redirecttargets=redirecttargets,
+        redirecthashes=redirecthashes,
+    )
 
     # But we have no cacher. Do default handling.
     if not cacher:
@@ -391,8 +439,9 @@
         return
 
     with cacher:
-        cachekey = entry.cachekeyfn(repo, proto, cacher,
-                                    **pycompat.strkwargs(args))
+        cachekey = entry.cachekeyfn(
+            repo, proto, cacher, **pycompat.strkwargs(args)
+        )
 
         # No cache key or the cacher doesn't like it. Do default handling.
         if cachekey is None or not cacher.setcachekey(cachekey):
@@ -417,6 +466,7 @@
         for o in cacher.onfinished():
             yield o
 
+
 @interfaceutil.implementer(wireprototypes.baseprotocolhandler)
 class httpv2protocolhandler(object):
     def __init__(self, req, ui, args=None):
@@ -434,15 +484,16 @@
         extra = set(self._args) - set(args)
         if extra:
             raise error.WireprotoCommandError(
-                'unsupported argument to command: %s' %
-                ', '.join(sorted(extra)))
+                'unsupported argument to command: %s' % ', '.join(sorted(extra))
+            )
 
         # And look for required arguments that are missing.
         missing = {a for a in args if args[a]['required']} - set(self._args)
 
         if missing:
             raise error.WireprotoCommandError(
-                'missing required arguments: %s' % ', '.join(sorted(missing)))
+                'missing required arguments: %s' % ', '.join(sorted(missing))
+            )
 
         # Now derive the arguments to pass to the command, taking into
         # account the arguments specified by the client.
@@ -485,11 +536,13 @@
     def checkperm(self, perm):
         raise NotImplementedError
 
+
 def httpv2apidescriptor(req, repo):
     proto = httpv2protocolhandler(req, repo.ui)
 
     return _capabilitiesv2(repo, proto)
 
+
 def _capabilitiesv2(repo, proto):
     """Obtain the set of capabilities for version 2 transports.
 
@@ -520,8 +573,10 @@
                 args[arg][b'validvalues'] = meta['validvalues']
 
         # TODO this type of check should be defined in a per-command callback.
-        if (command == b'rawstorefiledata'
-            and not streamclone.allowservergeneration(repo)):
+        if (
+            command == b'rawstorefiledata'
+            and not streamclone.allowservergeneration(repo)
+        ):
             continue
 
         caps['commands'][command] = {
@@ -533,8 +588,7 @@
             extracaps = entry.extracapabilitiesfn(repo, proto)
             caps['commands'][command].update(extracaps)
 
-    caps['rawrepoformats'] = sorted(repo.requirements &
-                                    repo.supportedformats)
+    caps['rawrepoformats'] = sorted(repo.requirements & repo.supportedformats)
 
     targets = getadvertisedredirecttargets(repo, proto)
     if targets:
@@ -558,6 +612,7 @@
 
     return proto.addcapabilities(repo, caps)
 
+
 def getadvertisedredirecttargets(repo, proto):
     """Obtain a list of content redirect targets.
 
@@ -596,8 +651,14 @@
     """
     return []
 
-def wireprotocommand(name, args=None, permission='push', cachekeyfn=None,
-                     extracapabilitiesfn=None):
+
+def wireprotocommand(
+    name,
+    args=None,
+    permission='push',
+    cachekeyfn=None,
+    extracapabilitiesfn=None,
+):
     """Decorator to declare a wire protocol command.
 
     ``name`` is the name of the wire protocol command being provided.
@@ -648,42 +709,53 @@
     containing the key in a cache the response to this command may be cached
     under.
     """
-    transports = {k for k, v in wireprototypes.TRANSPORTS.items()
-                  if v['version'] == 2}
+    transports = {
+        k for k, v in wireprototypes.TRANSPORTS.items() if v['version'] == 2
+    }
 
     if permission not in ('push', 'pull'):
-        raise error.ProgrammingError('invalid wire protocol permission; '
-                                     'got %s; expected "push" or "pull"' %
-                                     permission)
+        raise error.ProgrammingError(
+            'invalid wire protocol permission; '
+            'got %s; expected "push" or "pull"' % permission
+        )
 
     if args is None:
         args = {}
 
     if not isinstance(args, dict):
-        raise error.ProgrammingError('arguments for version 2 commands '
-                                     'must be declared as dicts')
+        raise error.ProgrammingError(
+            'arguments for version 2 commands ' 'must be declared as dicts'
+        )
 
     for arg, meta in args.items():
         if arg == '*':
-            raise error.ProgrammingError('* argument name not allowed on '
-                                         'version 2 commands')
+            raise error.ProgrammingError(
+                '* argument name not allowed on ' 'version 2 commands'
+            )
 
         if not isinstance(meta, dict):
-            raise error.ProgrammingError('arguments for version 2 commands '
-                                         'must declare metadata as a dict')
+            raise error.ProgrammingError(
+                'arguments for version 2 commands '
+                'must declare metadata as a dict'
+            )
 
         if 'type' not in meta:
-            raise error.ProgrammingError('%s argument for command %s does not '
-                                         'declare type field' % (arg, name))
+            raise error.ProgrammingError(
+                '%s argument for command %s does not '
+                'declare type field' % (arg, name)
+            )
 
         if meta['type'] not in ('bytes', 'int', 'list', 'dict', 'set', 'bool'):
-            raise error.ProgrammingError('%s argument for command %s has '
-                                         'illegal type: %s' % (arg, name,
-                                                               meta['type']))
+            raise error.ProgrammingError(
+                '%s argument for command %s has '
+                'illegal type: %s' % (arg, name, meta['type'])
+            )
 
         if 'example' not in meta:
-            raise error.ProgrammingError('%s argument for command %s does not '
-                                         'declare example field' % (arg, name))
+            raise error.ProgrammingError(
+                '%s argument for command %s does not '
+                'declare example field' % (arg, name)
+            )
 
         meta['required'] = 'default' not in meta
 
@@ -692,17 +764,24 @@
 
     def register(func):
         if name in COMMANDS:
-            raise error.ProgrammingError('%s command already registered '
-                                         'for version 2' % name)
+            raise error.ProgrammingError(
+                '%s command already registered ' 'for version 2' % name
+            )
 
         COMMANDS[name] = wireprototypes.commandentry(
-            func, args=args, transports=transports, permission=permission,
-            cachekeyfn=cachekeyfn, extracapabilitiesfn=extracapabilitiesfn)
+            func,
+            args=args,
+            transports=transports,
+            permission=permission,
+            cachekeyfn=cachekeyfn,
+            extracapabilitiesfn=extracapabilitiesfn,
+        )
 
         return func
 
     return register
 
+
 def makecommandcachekeyfn(command, localversion=None, allargs=False):
     """Construct a cache key derivation function with common features.
 
@@ -777,8 +856,10 @@
 
     return cachekeyfn
 
-def makeresponsecacher(repo, proto, command, args, objencoderfn,
-                       redirecttargets, redirecthashes):
+
+def makeresponsecacher(
+    repo, proto, command, args, objencoderfn, redirecttargets, redirecthashes
+):
     """Construct a cacher for a cacheable command.
 
     Returns an ``iwireprotocolcommandcacher`` instance.
@@ -788,6 +869,7 @@
     """
     return None
 
+
 def resolvenodes(repo, revisions):
     """Resolve nodes from a revisions specifier data structure."""
     cl = repo.changelog
@@ -797,13 +879,15 @@
     nodes = []
 
     if not isinstance(revisions, list):
-        raise error.WireprotoCommandError('revisions must be defined as an '
-                                          'array')
+        raise error.WireprotoCommandError(
+            'revisions must be defined as an array'
+        )
 
     for spec in revisions:
         if b'type' not in spec:
             raise error.WireprotoCommandError(
-                'type key not present in revision specifier')
+                'type key not present in revision specifier'
+            )
 
         typ = spec[b'type']
 
@@ -811,7 +895,8 @@
             if b'nodes' not in spec:
                 raise error.WireprotoCommandError(
                     'nodes key not present in changesetexplicit revision '
-                    'specifier')
+                    'specifier'
+                )
 
             for node in spec[b'nodes']:
                 if node not in seen:
@@ -823,10 +908,13 @@
                 if key not in spec:
                     raise error.WireprotoCommandError(
                         '%s key not present in changesetexplicitdepth revision '
-                        'specifier', (key,))
+                        'specifier',
+                        (key,),
+                    )
 
-            for rev in repo.revs(b'ancestors(%ln, %s)', spec[b'nodes'],
-                                 spec[b'depth'] - 1):
+            for rev in repo.revs(
+                b'ancestors(%ln, %s)', spec[b'nodes'], spec[b'depth'] - 1
+            ):
                 node = cl.node(rev)
 
                 if node not in seen:
@@ -838,11 +926,14 @@
                 if key not in spec:
                     raise error.WireprotoCommandError(
                         '%s key not present in changesetdagrange revision '
-                        'specifier', (key,))
+                        'specifier',
+                        (key,),
+                    )
 
             if not spec[b'heads']:
                 raise error.WireprotoCommandError(
-                    'heads key in changesetdagrange cannot be empty')
+                    'heads key in changesetdagrange cannot be empty'
+                )
 
             if spec[b'roots']:
                 common = [n for n in spec[b'roots'] if clhasnode(n)]
@@ -856,28 +947,30 @@
 
         else:
             raise error.WireprotoCommandError(
-                'unknown revision specifier type: %s', (typ,))
+                'unknown revision specifier type: %s', (typ,)
+            )
 
     return nodes
 
+
 @wireprotocommand('branchmap', permission='pull')
 def branchmapv2(repo, proto):
-    yield {encoding.fromlocal(k): v
-           for k, v in repo.branchmap().iteritems()}
+    yield {encoding.fromlocal(k): v for k, v in repo.branchmap().iteritems()}
+
 
 @wireprotocommand('capabilities', permission='pull')
 def capabilitiesv2(repo, proto):
     yield _capabilitiesv2(repo, proto)
 
+
 @wireprotocommand(
     'changesetdata',
     args={
         'revisions': {
             'type': 'list',
-            'example': [{
-                b'type': b'changesetexplicit',
-                b'nodes': [b'abcdef...'],
-            }],
+            'example': [
+                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...']}
+            ],
         },
         'fields': {
             'type': 'set',
@@ -886,7 +979,8 @@
             'validvalues': {b'bookmarks', b'parents', b'phase', b'revision'},
         },
     },
-    permission='pull')
+    permission='pull',
+)
 def changesetdata(repo, proto, revisions, fields):
     # TODO look for unknown fields and abort when they can't be serviced.
     # This could probably be validated by dispatcher using validvalues.
@@ -963,6 +1057,7 @@
                 b'bookmarks': sorted(marks),
             }
 
+
 class FileAccessError(Exception):
     """Represents an error accessing a specific file."""
 
@@ -971,6 +1066,7 @@
         self.msg = msg
         self.args = args
 
+
 def getfilestore(repo, proto, path):
     """Obtain a file storage object for use with wire protocol.
 
@@ -986,6 +1082,7 @@
 
     return fl
 
+
 def emitfilerevisions(repo, path, revisions, linknodes, fields):
     for revision in revisions:
         d = {
@@ -1018,6 +1115,7 @@
         for extra in followingdata:
             yield extra
 
+
 def makefilematcher(repo, pathfilter):
     """Construct a matcher from a path filter dict."""
 
@@ -1028,12 +1126,17 @@
                 if not pattern.startswith((b'path:', b'rootfilesin:')):
                     raise error.WireprotoCommandError(
                         '%s pattern must begin with `path:` or `rootfilesin:`; '
-                        'got %s', (key, pattern))
+                        'got %s',
+                        (key, pattern),
+                    )
 
     if pathfilter:
-        matcher = matchmod.match(repo.root, b'',
-                                 include=pathfilter.get(b'include', []),
-                                 exclude=pathfilter.get(b'exclude', []))
+        matcher = matchmod.match(
+            repo.root,
+            b'',
+            include=pathfilter.get(b'include', []),
+            exclude=pathfilter.get(b'exclude', []),
+        )
     else:
         matcher = matchmod.match(repo.root, b'')
 
@@ -1041,6 +1144,7 @@
     # filter those out.
     return repo.narrowmatch(matcher)
 
+
 @wireprotocommand(
     'filedata',
     args={
@@ -1049,26 +1153,21 @@
             'default': lambda: False,
             'example': True,
         },
-        'nodes': {
-            'type': 'list',
-            'example': [b'0123456...'],
-        },
+        'nodes': {'type': 'list', 'example': [b'0123456...']},
         'fields': {
             'type': 'set',
             'default': set,
             'example': {b'parents', b'revision'},
             'validvalues': {b'parents', b'revision', b'linknode'},
         },
-        'path': {
-            'type': 'bytes',
-            'example': b'foo.txt',
-        }
+        'path': {'type': 'bytes', 'example': b'foo.txt'},
     },
     permission='pull',
     # TODO censoring a file revision won't invalidate the cache.
     # Figure out a way to take censoring into account when deriving
     # the cache key.
-    cachekeyfn=makecommandcachekeyfn('filedata', 1, allargs=True))
+    cachekeyfn=makecommandcachekeyfn('filedata', 1, allargs=True),
+)
 def filedata(repo, proto, haveparents, nodes, fields, path):
     # TODO this API allows access to file revisions that are attached to
     # secret changesets. filesdata does not have this problem. Maybe this
@@ -1088,8 +1187,9 @@
         try:
             store.rev(node)
         except error.LookupError:
-            raise error.WireprotoCommandError('unknown file node: %s',
-                                              (hex(node),))
+            raise error.WireprotoCommandError(
+                'unknown file node: %s', (hex(node),)
+            )
 
         # TODO by creating the filectx against a specific file revision
         # instead of changeset, linkrev() is always used. This is wrong for
@@ -1099,9 +1199,11 @@
         fctx = repo.filectx(path, fileid=node)
         linknodes[node] = clnode(fctx.introrev())
 
-    revisions = store.emitrevisions(nodes,
-                                    revisiondata=b'revision' in fields,
-                                    assumehaveparentrevisions=haveparents)
+    revisions = store.emitrevisions(
+        nodes,
+        revisiondata=b'revision' in fields,
+        assumehaveparentrevisions=haveparents,
+    )
 
     yield {
         b'totalitems': len(nodes),
@@ -1110,13 +1212,16 @@
     for o in emitfilerevisions(repo, path, revisions, linknodes, fields):
         yield o
 
+
 def filesdatacapabilities(repo, proto):
     batchsize = repo.ui.configint(
-        b'experimental', b'server.filesdata.recommended-batch-size')
+        b'experimental', b'server.filesdata.recommended-batch-size'
+    )
     return {
         b'recommendedbatchsize': batchsize,
     }
 
+
 @wireprotocommand(
     'filesdata',
     args={
@@ -1129,8 +1234,12 @@
             'type': 'set',
             'default': set,
             'example': {b'parents', b'revision'},
-            'validvalues': {b'firstchangeset', b'linknode', b'parents',
-                            b'revision'},
+            'validvalues': {
+                b'firstchangeset',
+                b'linknode',
+                b'parents',
+                b'revision',
+            },
         },
         'pathfilter': {
             'type': 'dict',
@@ -1139,10 +1248,9 @@
         },
         'revisions': {
             'type': 'list',
-            'example': [{
-                b'type': b'changesetexplicit',
-                b'nodes': [b'abcdef...'],
-            }],
+            'example': [
+                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...']}
+            ],
         },
     },
     permission='pull',
@@ -1150,7 +1258,8 @@
     # Figure out a way to take censoring into account when deriving
     # the cache key.
     cachekeyfn=makecommandcachekeyfn('filesdata', 1, allargs=True),
-    extracapabilitiesfn=filesdatacapabilities)
+    extracapabilitiesfn=filesdatacapabilities,
+)
 def filesdata(repo, proto, haveparents, fields, pathfilter, revisions):
     # TODO This should operate on a repo that exposes obsolete changesets. There
     # is a race between a client making a push that obsoletes a changeset and
@@ -1193,7 +1302,7 @@
 
     yield {
         b'totalpaths': len(fnodes),
-        b'totalitems': sum(len(v) for v in fnodes.values())
+        b'totalitems': sum(len(v) for v in fnodes.values()),
     }
 
     for path, filenodes in sorted(fnodes.items()):
@@ -1207,13 +1316,16 @@
             b'totalitems': len(filenodes),
         }
 
-        revisions = store.emitrevisions(filenodes.keys(),
-                                        revisiondata=b'revision' in fields,
-                                        assumehaveparentrevisions=haveparents)
+        revisions = store.emitrevisions(
+            filenodes.keys(),
+            revisiondata=b'revision' in fields,
+            assumehaveparentrevisions=haveparents,
+        )
 
         for o in emitfilerevisions(repo, path, revisions, filenodes, fields):
             yield o
 
+
 @wireprotocommand(
     'heads',
     args={
@@ -1223,52 +1335,47 @@
             'example': False,
         },
     },
-    permission='pull')
+    permission='pull',
+)
 def headsv2(repo, proto, publiconly):
     if publiconly:
         repo = repo.filtered('immutable')
 
     yield repo.heads()
 
+
 @wireprotocommand(
     'known',
     args={
-        'nodes': {
-            'type': 'list',
-            'default': list,
-            'example': [b'deadbeef'],
-        },
+        'nodes': {'type': 'list', 'default': list, 'example': [b'deadbeef']},
     },
-    permission='pull')
+    permission='pull',
+)
 def knownv2(repo, proto, nodes):
     result = b''.join(b'1' if n else b'0' for n in repo.known(nodes))
     yield result
 
+
 @wireprotocommand(
     'listkeys',
-    args={
-        'namespace': {
-            'type': 'bytes',
-            'example': b'ns',
-        },
-    },
-    permission='pull')
+    args={'namespace': {'type': 'bytes', 'example': b'ns'}},
+    permission='pull',
+)
 def listkeysv2(repo, proto, namespace):
     keys = repo.listkeys(encoding.tolocal(namespace))
-    keys = {encoding.fromlocal(k): encoding.fromlocal(v)
-            for k, v in keys.iteritems()}
+    keys = {
+        encoding.fromlocal(k): encoding.fromlocal(v)
+        for k, v in keys.iteritems()
+    }
 
     yield keys
 
+
 @wireprotocommand(
     'lookup',
-    args={
-        'key': {
-            'type': 'bytes',
-            'example': b'foo',
-        },
-    },
-    permission='pull')
+    args={'key': {'type': 'bytes', 'example': b'foo'}},
+    permission='pull',
+)
 def lookupv2(repo, proto, key):
     key = encoding.tolocal(key)
 
@@ -1277,21 +1384,21 @@
 
     yield node
 
+
 def manifestdatacapabilities(repo, proto):
     batchsize = repo.ui.configint(
-        b'experimental', b'server.manifestdata.recommended-batch-size')
+        b'experimental', b'server.manifestdata.recommended-batch-size'
+    )
 
     return {
         b'recommendedbatchsize': batchsize,
     }
 
+
 @wireprotocommand(
     'manifestdata',
     args={
-        'nodes': {
-            'type': 'list',
-            'example': [b'0123456...'],
-        },
+        'nodes': {'type': 'list', 'example': [b'0123456...']},
         'haveparents': {
             'type': 'bool',
             'default': lambda: False,
@@ -1303,14 +1410,12 @@
             'example': {b'parents', b'revision'},
             'validvalues': {b'parents', b'revision'},
         },
-        'tree': {
-            'type': 'bytes',
-            'example': b'',
-        },
+        'tree': {'type': 'bytes', 'example': b''},
     },
     permission='pull',
     cachekeyfn=makecommandcachekeyfn('manifestdata', 1, allargs=True),
-    extracapabilitiesfn=manifestdatacapabilities)
+    extracapabilitiesfn=manifestdatacapabilities,
+)
 def manifestdata(repo, proto, haveparents, nodes, fields, tree):
     store = repo.manifestlog.getstorage(tree)
 
@@ -1319,12 +1424,13 @@
         try:
             store.rev(node)
         except error.LookupError:
-            raise error.WireprotoCommandError(
-                'unknown node: %s', (node,))
+            raise error.WireprotoCommandError('unknown node: %s', (node,))
 
-    revisions = store.emitrevisions(nodes,
-                                    revisiondata=b'revision' in fields,
-                                    assumehaveparentrevisions=haveparents)
+    revisions = store.emitrevisions(
+        nodes,
+        revisiondata=b'revision' in fields,
+        assumehaveparentrevisions=haveparents,
+    )
 
     yield {
         b'totalitems': len(nodes),
@@ -1358,49 +1464,39 @@
         for extra in followingdata:
             yield extra
 
+
 @wireprotocommand(
     'pushkey',
     args={
-        'namespace': {
-            'type': 'bytes',
-            'example': b'ns',
-        },
-        'key': {
-            'type': 'bytes',
-            'example': b'key',
-        },
-        'old': {
-            'type': 'bytes',
-            'example': b'old',
-        },
-        'new': {
-            'type': 'bytes',
-            'example': 'new',
-        },
+        'namespace': {'type': 'bytes', 'example': b'ns'},
+        'key': {'type': 'bytes', 'example': b'key'},
+        'old': {'type': 'bytes', 'example': b'old'},
+        'new': {'type': 'bytes', 'example': b'new'},
     },
-    permission='push')
+    permission='push',
+)
 def pushkeyv2(repo, proto, namespace, key, old, new):
     # TODO handle ui output redirection
-    yield repo.pushkey(encoding.tolocal(namespace),
-                       encoding.tolocal(key),
-                       encoding.tolocal(old),
-                       encoding.tolocal(new))
+    yield repo.pushkey(
+        encoding.tolocal(namespace),
+        encoding.tolocal(key),
+        encoding.tolocal(old),
+        encoding.tolocal(new),
+    )
 
 
 @wireprotocommand(
     'rawstorefiledata',
     args={
-        'files': {
-            'type': 'list',
-            'example': [b'changelog', b'manifestlog'],
-        },
+        'files': {'type': 'list', 'example': [b'changelog', b'manifestlog']},
         'pathfilter': {
             'type': 'list',
             'default': lambda: None,
             'example': {b'include': [b'path:tests']},
         },
     },
-    permission='pull')
+    permission='pull',
+)
 def rawstorefiledata(repo, proto, files, pathfilter):
     if not streamclone.allowservergeneration(repo):
         raise error.WireprotoCommandError(b'stream clone is disabled')
@@ -1412,8 +1508,9 @@
 
     unsupported = files - allowedfiles
     if unsupported:
-        raise error.WireprotoCommandError(b'unknown file type: %s',
-                                          (b', '.join(sorted(unsupported)),))
+        raise error.WireprotoCommandError(
+            b'unknown file type: %s', (b', '.join(sorted(unsupported)),)
+        )
 
     with repo.lock():
         topfiles = list(repo.store.topfiles())
@@ -1453,5 +1550,4 @@
                 for chunk in util.filechunkiter(fh, limit=size):
                     yield chunk
 
-        yield wireprototypes.indefinitebytestringresponse(
-            getfiledata())
+        yield wireprototypes.indefinitebytestringresponse(getfiledata())
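
For reference, a minimal sketch (not part of this patch) of the registration
pattern the reformatted wireprotov2server.py code above enforces; the
'pingpong' command is hypothetical. Every argument must declare 'type' (one
of bytes, int, list, dict, set, bool) and 'example', and providing 'default'
marks it optional:

    # Hypothetical command, shown only to illustrate the decorator contract.
    @wireprotocommand(
        'pingpong',
        args={
            'payload': {'type': 'bytes', 'example': b'data'},
            'repeat': {'type': 'int', 'default': lambda: 1, 'example': 2},
        },
        permission='pull',
    )
    def pingpong(repo, proto, payload, repeat):
        # Handlers are generators; each yielded object becomes a response
        # frame on the wire.
        for _ in range(repeat):
            yield {b'payload': payload}
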
--- a/mercurial/worker.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/mercurial/worker.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,6 +16,7 @@
 
 try:
     import selectors
+
     selectors.BaseSelector
 except ImportError:
     from .thirdparty import selectors2 as selectors
@@ -29,6 +30,7 @@
     util,
 )
 
+
 def countcpus():
     '''try to count the number of CPUs on the system'''
 
@@ -50,6 +52,7 @@
 
     return 1
 
+
 def _numworkers(ui):
     s = ui.config('worker', 'numcpus')
     if s:
@@ -61,6 +64,7 @@
             raise error.Abort(_('number of cpus must be an integer'))
     return min(max(countcpus(), 4), 32)
 
+
 if pycompat.isposix or pycompat.iswindows:
     _STARTUP_COST = 0.01
     # The Windows worker is thread based. If tasks are CPU bound, threads
@@ -71,6 +75,7 @@
     _STARTUP_COST = 1e30
     _DISALLOW_THREAD_UNSAFE = False
 
+
 def worthwhile(ui, costperop, nops, threadsafe=True):
     '''try to determine whether the benefit of multiple processes can
     outweigh the cost of starting them'''
@@ -83,8 +88,10 @@
     benefit = linear - (_STARTUP_COST * workers + linear / workers)
     return benefit >= 0.15
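
To make the cost model above concrete: on POSIX, _STARTUP_COST is 0.01, so a
job whose linear cost costperop * nops is 1.0 spread over 4 workers gives
benefit = 1.0 - (0.01 * 4 + 1.0 / 4) = 0.71, which clears the 0.15 threshold
and enables parallelism; on platforms where _STARTUP_COST is 1e30 the benefit
is always negative and worker() falls back to a plain serial call.
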
 
-def worker(ui, costperarg, func, staticargs, args, hasretval=False,
-           threadsafe=True):
+
+def worker(
+    ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
+):
     '''run a function, possibly in parallel in multiple worker
     processes.
 
@@ -113,11 +120,13 @@
         return _platformworker(ui, func, staticargs, args, hasretval)
     return func(*staticargs + (args,))
 
+
 def _posixworker(ui, func, staticargs, args, hasretval):
     workers = _numworkers(ui)
     oldhandler = signal.getsignal(signal.SIGINT)
     signal.signal(signal.SIGINT, signal.SIG_IGN)
     pids, problem = set(), [0]
+
     def killworkers():
         # unregister SIGCHLD handler as all children will be killed. This
         # function shouldn't be interrupted by another SIGCHLD; otherwise pids
@@ -130,6 +139,7 @@
             except OSError as err:
                 if err.errno != errno.ESRCH:
                     raise
+
     def waitforworkers(blocking=True):
         for pid in pids.copy():
             p = st = 0
@@ -155,10 +165,12 @@
             st = _exitstatus(st)
             if st and not problem[0]:
                 problem[0] = st
+
     def sigchldhandler(signum, frame):
         waitforworkers(blocking=False)
         if problem[0]:
             killworkers()
+
     oldchldhandler = signal.signal(signal.SIGCHLD, sigchldhandler)
     ui.flush()
     parentpid = os.getpid()
@@ -196,7 +208,7 @@
                     return 0
 
                 ret = scmutil.callcatch(ui, workerfunc)
-        except: # parent re-raises, child never returns
+        except:  # parent re-raises, child never returns
             if os.getpid() == parentpid:
                 raise
             exctype = sys.exc_info()[0]
@@ -206,7 +218,7 @@
             if os.getpid() != parentpid:
                 try:
                     ui.flush()
-                except: # never returns, no re-raises
+                except:  # never returns, no re-raises
                     pass
                 finally:
                     os._exit(ret & 255)
@@ -215,12 +227,14 @@
     for rfd, wfd in pipes:
         os.close(wfd)
         selector.register(os.fdopen(rfd, r'rb', 0), selectors.EVENT_READ)
+
     def cleanup():
         signal.signal(signal.SIGINT, oldhandler)
         waitforworkers()
         signal.signal(signal.SIGCHLD, oldchldhandler)
         selector.close()
         return problem[0]
+
     try:
         openpipes = len(pipes)
         while openpipes > 0:
@@ -239,7 +253,7 @@
                     if e.errno == errno.EINTR:
                         continue
                     raise
-    except: # re-raises
+    except:  # re-raises
         killworkers()
         cleanup()
         raise
@@ -251,6 +265,7 @@
     if hasretval:
         yield True, retval
 
+
 def _posixexitstatus(code):
     '''convert a posix exit status into the same form returned by
     os.spawnv
@@ -259,12 +274,14 @@
     if os.WIFEXITED(code):
         return os.WEXITSTATUS(code)
     elif os.WIFSIGNALED(code):
-        return -os.WTERMSIG(code)
+        return -(os.WTERMSIG(code))
+
 
 def _windowsworker(ui, func, staticargs, args, hasretval):
     class Worker(threading.Thread):
-        def __init__(self, taskqueue, resultqueue, func, staticargs, *args,
-                     **kwargs):
+        def __init__(
+            self, taskqueue, resultqueue, func, staticargs, *args, **kwargs
+        ):
             threading.Thread.__init__(self, *args, **kwargs)
             self._taskqueue = taskqueue
             self._resultqueue = resultqueue
@@ -298,6 +315,7 @@
                 raise
 
     threads = []
+
     def trykillworkers():
         # Allow up to 1 second to clean worker threads nicely
         cleanupend = time.time() + 1
@@ -311,8 +329,12 @@
                 # important to surface the initial exception than the
                 # fact that one of the workers may be processing a large
                 # task and does not get to handle the interruption.
-                ui.warn(_("failed to kill worker threads while "
-                          "handling an exception\n"))
+                ui.warn(
+                    _(
+                        "failed to kill worker threads while "
+                        "handling an exception\n"
+                    )
+                )
                 return
 
     workers = _numworkers(ui)
@@ -341,7 +363,7 @@
                 if t.exception is not None:
                     raise t.exception
                 threads.remove(t)
-    except (Exception, KeyboardInterrupt): # re-raises
+    except (Exception, KeyboardInterrupt):  # re-raises
         trykillworkers()
         raise
     while not resultqueue.empty():
@@ -353,12 +375,14 @@
     if hasretval:
         yield True, retval
 
+
 if pycompat.iswindows:
     _platformworker = _windowsworker
 else:
     _platformworker = _posixworker
     _exitstatus = _posixexitstatus
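
A hypothetical caller (not part of this patch) to make the worker() calling
convention concrete: func is invoked as func(*staticargs + (chunk,)) with a
slice of args in each worker, or once with the full list when worthwhile()
declines to parallelize, and worker() returns an iterator over everything
func yields:

    def _lenchunk(ui, chunk):
        # Runs in each worker with a slice of the full argument list.
        for path in chunk:
            yield path, len(path)

    def alllens(ui, paths):
        # 0.05 is a guessed per-item cost fed into worthwhile().
        return dict(worker(ui, 0.05, _lenchunk, (ui,), paths))
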
 
+
 def partition(lst, nslices):
     '''partition a list into N slices of roughly equal size
 
--- a/setup.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/setup.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,50 +17,63 @@
     #
     # TODO: when we actually work on Python 3, use this string as the
     # actual supportedpy string.
-    supportedpy = ','.join([
-        '>=2.7',
-        '!=3.0.*',
-        '!=3.1.*',
-        '!=3.2.*',
-        '!=3.3.*',
-        '!=3.4.*',
-        '!=3.5.0',
-        '!=3.5.1',
-        '!=3.5.2',
-        '!=3.6.0',
-        '!=3.6.1',
-    ])
+    supportedpy = ','.join(
+        [
+            '>=2.7',
+            '!=3.0.*',
+            '!=3.1.*',
+            '!=3.2.*',
+            '!=3.3.*',
+            '!=3.4.*',
+            '!=3.5.0',
+            '!=3.5.1',
+            '!=3.5.2',
+            '!=3.6.0',
+            '!=3.6.1',
+        ]
+    )
 
 import sys, platform
 import sysconfig
+
 if sys.version_info[0] >= 3:
     printf = eval('print')
     libdir_escape = 'unicode_escape'
+
     def sysstr(s):
         return s.decode('latin-1')
+
+
 else:
     libdir_escape = 'string_escape'
+
     def printf(*args, **kwargs):
         f = kwargs.get('file', sys.stdout)
         end = kwargs.get('end', '\n')
         f.write(b' '.join(args) + end)
+
     def sysstr(s):
         return s
 
+
 # Attempt to guide users to a modern pip - this means that 2.6 users
 # should have a chance of getting a 4.2 release, and when we ratchet
 # the version requirement forward again hopefully everyone will get
 # something that works for them.
 if sys.version_info < (2, 7, 0, 'final'):
-    pip_message = ('This may be due to an out of date pip. '
-                   'Make sure you have pip >= 9.0.1.')
+    pip_message = (
+        'This may be due to an out of date pip. '
+        'Make sure you have pip >= 9.0.1.'
+    )
     try:
         import pip
+
         pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]])
-        if pip_version < (9, 0, 1) :
+        if pip_version < (9, 0, 1):
             pip_message = (
                 'Your pip version is out of date, please install '
-                'pip >= 9.0.1. pip {} detected.'.format(pip.__version__))
+                'pip >= 9.0.1. pip {} detected.'.format(pip.__version__)
+            )
         else:
             # pip is new enough - it must be something else
             pip_message = ''
@@ -70,7 +83,9 @@
 Mercurial does not support Python older than 2.7.
 Python {py} detected.
 {pip}
-""".format(py=sys.version_info, pip=pip_message)
+""".format(
+        py=sys.version_info, pip=pip_message
+    )
     printf(error, file=sys.stderr)
     sys.exit(1)
 
@@ -100,7 +115,9 @@
 
 See https://www.mercurial-scm.org/wiki/Python3 for more on Mercurial's
 Python 3 support.
-""".format(py='.'.join('%d' % x for x in sys.version_info[0:2]))
+""".format(
+            py='.'.join('%d' % x for x in sys.version_info[0:2])
+        )
 
         printf(error, file=sys.stderr)
         sys.exit(1)
@@ -114,27 +131,33 @@
 # Solaris Python packaging brain damage
 try:
     import hashlib
+
     sha = hashlib.sha1()
 except ImportError:
     try:
         import sha
-        sha.sha # silence unused import warning
+
+        sha.sha  # silence unused import warning
     except ImportError:
         raise SystemExit(
-            "Couldn't import standard hashlib (incomplete Python install).")
+            "Couldn't import standard hashlib (incomplete Python install)."
+        )
 
 try:
     import zlib
-    zlib.compressobj # silence unused import warning
+
+    zlib.compressobj  # silence unused import warning
 except ImportError:
     raise SystemExit(
-        "Couldn't import standard zlib (incomplete Python install).")
+        "Couldn't import standard zlib (incomplete Python install)."
+    )
 
 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
 isironpython = False
 try:
-    isironpython = (platform.python_implementation()
-                    .lower().find("ironpython") != -1)
+    isironpython = (
+        platform.python_implementation().lower().find("ironpython") != -1
+    )
 except AttributeError:
     pass
 
@@ -143,10 +166,12 @@
 else:
     try:
         import bz2
-        bz2.BZ2Compressor # silence unused import warning
+
+        bz2.BZ2Compressor  # silence unused import warning
     except ImportError:
         raise SystemExit(
-            "Couldn't import standard bz2 (incomplete Python install).")
+            "Couldn't import standard bz2 (incomplete Python install)."
+        )
 
 ispypy = "PyPy" in sys.version
 
@@ -163,10 +188,11 @@
 import shutil
 import tempfile
 from distutils import log
+
 # We have issues with setuptools on some platforms and builders. Until
 # those are resolved, setuptools is opt-in except for platforms where
 # we don't have issues.
-issetuptools = (os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ)
+issetuptools = os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ
 if issetuptools:
     from setuptools import setup
 else:
@@ -194,6 +220,7 @@
 # Explain to distutils.StrictVersion how our release candidates are versioned
 StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$')
 
+
 def write_if_changed(path, content):
     """Write content to a file iff the content hasn't changed."""
     if os.path.exists(path):
@@ -206,11 +233,13 @@
         with open(path, 'wb') as fh:
             fh.write(content)
 
+
 scripts = ['hg']
 if os.name == 'nt':
     # We remove hg.bat if we are able to build hg.exe.
     scripts.append('contrib/win32/hg.bat')
 
+
 def cancompile(cc, code):
     tmpdir = tempfile.mkdtemp(prefix='hg-install-')
     devnull = oldstderr = None
@@ -238,32 +267,39 @@
             devnull.close()
         shutil.rmtree(tmpdir)
 
+
 # simplified version of distutils.ccompiler.CCompiler.has_function
 # that actually removes its temporary files.
 def hasfunction(cc, funcname):
     code = 'int main(void) { %s(); }\n' % funcname
     return cancompile(cc, code)
 
+
 def hasheader(cc, headername):
     code = '#include <%s>\nint main(void) { return 0; }\n' % headername
     return cancompile(cc, code)
 
+
 # py2exe needs to be installed to work
 try:
     import py2exe
-    py2exe.Distribution # silence unused import warning
+
+    py2exe.Distribution  # silence unused import warning
     py2exeloaded = True
     # import py2exe's patched Distribution class
     from distutils.core import Distribution
 except ImportError:
     py2exeloaded = False
 
+
 def runcmd(cmd, env, cwd=None):
-    p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
-                         stderr=subprocess.PIPE, env=env, cwd=cwd)
+    p = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd
+    )
     out, err = p.communicate()
     return p.returncode, out, err
 
+
 class hgcommand(object):
     def __init__(self, cmd, env):
         self.cmd = cmd
@@ -279,22 +315,31 @@
             return ''
         return out
 
+
 def filterhgerr(err):
     # If root is executing setup.py, but the repository is owned by
     # another user (as in "sudo python setup.py install") we will get
     # trust warnings since the .hg/hgrc file is untrusted. That is
     # fine, we don't want to load it anyway.  Python may warn about
     # a missing __init__.py in mercurial/locale, we also ignore that.
-    err = [e for e in err.splitlines()
-           if (not e.startswith(b'not trusting file')
-               and not e.startswith(b'warning: Not importing')
-               and not e.startswith(b'obsolete feature not enabled')
-               and not e.startswith(b'*** failed to import extension')
-               and not e.startswith(b'devel-warn:')
-               and not (e.startswith(b'(third party extension')
-                        and e.endswith(b'or newer of Mercurial; disabling)')))]
+    err = [
+        e
+        for e in err.splitlines()
+        if (
+            not e.startswith(b'not trusting file')
+            and not e.startswith(b'warning: Not importing')
+            and not e.startswith(b'obsolete feature not enabled')
+            and not e.startswith(b'*** failed to import extension')
+            and not e.startswith(b'devel-warn:')
+            and not (
+                e.startswith(b'(third party extension')
+                and e.endswith(b'or newer of Mercurial; disabling)')
+            )
+        )
+    ]
     return b'\n'.join(b'  ' + e for e in err)
 
+
 def findhg():
     """Try to figure out how we should invoke hg for examining the local
     repository contents.
@@ -334,18 +379,23 @@
     if retcode == 0 and not filterhgerr(err):
         return hgcommand(hgcmd, hgenv)
 
-    raise SystemExit('Unable to find a working hg binary to extract the '
-                     'version from the repository tags')
+    raise SystemExit(
+        'Unable to find a working hg binary to extract the '
+        'version from the repository tags'
+    )
+
 
 def localhgenv():
     """Get an environment dictionary to use for invoking or importing
     mercurial from the local repository."""
     # Execute hg out of this directory with a custom environment which takes
     # care to not use any hgrc files and do no localization.
-    env = {'HGMODULEPOLICY': 'py',
-           'HGRCPATH': '',
-           'LANGUAGE': 'C',
-           'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
+    env = {
+        'HGMODULEPOLICY': 'py',
+        'HGRCPATH': '',
+        'LANGUAGE': 'C',
+        'PATH': '',
+    }  # make pypi modules that use os.environ['PATH'] happy
     if 'LD_LIBRARY_PATH' in os.environ:
         env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
     if 'SystemRoot' in os.environ:
@@ -354,6 +404,7 @@
         env['SystemRoot'] = os.environ['SystemRoot']
     return env
 
+
 version = ''
 
 if os.path.isdir('.hg'):
@@ -367,11 +418,11 @@
         # Continuing with an invalid version number will break extensions
         # that define minimumhgversion.
         raise SystemExit('Unable to determine hg version from local repository')
-    if numerictags: # tag(s) found
+    if numerictags:  # tag(s) found
         version = numerictags[-1]
-        if hgid.endswith('+'): # propagate the dirty status to the tag
+        if hgid.endswith('+'):  # propagate the dirty status to the tag
             version += '+'
-    else: # no tag found
+    else:  # no tag found
         ltagcmd = ['parents', '--template', '{latesttag}']
         ltag = sysstr(hg.run(ltagcmd))
         changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
@@ -380,8 +431,9 @@
     if version.endswith('+'):
         version += time.strftime('%Y%m%d')
 elif os.path.exists('.hg_archival.txt'):
-    kw = dict([[t.strip() for t in l.split(':', 1)]
-               for l in open('.hg_archival.txt')])
+    kw = dict(
+        [[t.strip() for t in l.split(':', 1)] for l in open('.hg_archival.txt')]
+    )
     if 'tag' in kw:
         version = kw['tag']
     elif 'latesttag' in kw:
@@ -397,15 +449,21 @@
     if not isinstance(versionb, bytes):
         versionb = versionb.encode('ascii')
 
-    write_if_changed('mercurial/__version__.py', b''.join([
-        b'# this file is autogenerated by setup.py\n'
-        b'version = b"%s"\n' % versionb,
-    ]))
+    write_if_changed(
+        'mercurial/__version__.py',
+        b''.join(
+            [
+                b'# this file is autogenerated by setup.py\n'
+                b'version = b"%s"\n' % versionb,
+            ]
+        ),
+    )
 
 try:
     oldpolicy = os.environ.get('HGMODULEPOLICY', None)
     os.environ['HGMODULEPOLICY'] = 'py'
     from mercurial import __version__
+
     version = __version__.version
 except ImportError:
     version = b'unknown'
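
A side note on the pattern above (an illustration, not part of this patch;
the 'generated.py' path is hypothetical): generated sources such as
mercurial/__version__.py are routed through write_if_changed() so an
unchanged file keeps its mtime and make-style tooling does not rebuild its
dependents:

    write_if_changed('generated.py', b'x = 1\n')  # first call writes the file
    write_if_changed('generated.py', b'x = 1\n')  # identical content: no-op
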
@@ -415,19 +473,23 @@
     else:
         os.environ['HGMODULEPOLICY'] = oldpolicy
 
+
 class hgbuild(build):
     # Insert hgbuildmo first so that files in mercurial/locale/ are found
     # when build_py is run next.
     sub_commands = [('build_mo', None)] + build.sub_commands
 
+
 class hgbuildmo(build):
 
     description = "build translations (.mo files)"
 
     def run(self):
         if not find_executable('msgfmt'):
-            self.warn("could not find msgfmt executable, no translations "
-                     "will be built")
+            self.warn(
+                "could not find msgfmt executable, no translations "
+                "will be built"
+            )
             return
 
         podir = 'i18n'
@@ -466,18 +528,23 @@
         # too late for some cases
         return not self.pure and Distribution.has_ext_modules(self)
 
+
 # This is ugly as a one-liner. So use a variable.
 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
 buildextnegops['no-zstd'] = 'zstd'
 buildextnegops['no-rust'] = 'rust'
 
+
 class hgbuildext(build_ext):
     user_options = build_ext.user_options + [
         ('zstd', None, 'compile zstd bindings [default]'),
         ('no-zstd', None, 'do not compile zstd bindings'),
-        ('rust', None,
-         'compile Rust extensions if they are in use '
-         '(requires Cargo) [default]'),
+        (
+            'rust',
+            None,
+            'compile Rust extensions if they are in use '
+            '(requires Cargo) [default]',
+        ),
         ('no-rust', None, 'do not compile Rust extensions'),
     ]
 
@@ -499,14 +566,17 @@
         return build_ext.finalize_options(self)
 
     def build_extensions(self):
-        ruststandalones = [e for e in self.extensions
-                           if isinstance(e, RustStandaloneExtension)]
-        self.extensions = [e for e in self.extensions
-                           if e not in ruststandalones]
+        ruststandalones = [
+            e for e in self.extensions if isinstance(e, RustStandaloneExtension)
+        ]
+        self.extensions = [
+            e for e in self.extensions if e not in ruststandalones
+        ]
         # Filter out zstd if disabled via argument.
         if not self.zstd:
-            self.extensions = [e for e in self.extensions
-                               if e.name != 'mercurial.zstd']
+            self.extensions = [
+                e for e in self.extensions if e.name != 'mercurial.zstd'
+            ]
 
         # Build Rust standalone extensions if they'll be used
         # and their build is not explicitly disabled (for external build
@@ -518,16 +588,21 @@
         return build_ext.build_extensions(self)
 
     def build_extension(self, ext):
-        if (self.distribution.rust and self.rust
-            and isinstance(ext, RustExtension)):
-                ext.rustbuild()
+        if (
+            self.distribution.rust
+            and self.rust
+            and isinstance(ext, RustExtension)
+        ):
+            ext.rustbuild()
         try:
             build_ext.build_extension(self, ext)
         except CCompilerError:
             if not getattr(ext, 'optional', False):
                 raise
-            log.warn("Failed to build optional extension '%s' (skipping)",
-                     ext.name)
+            log.warn(
+                "Failed to build optional extension '%s' (skipping)", ext.name
+            )
+
 
 class hgbuildscripts(build_scripts):
     def run(self):
@@ -554,6 +629,7 @@
 
         return build_scripts.run(self)
 
+
 class hgbuildpy(build_py):
     def finalize_options(self):
         build_py.finalize_options(self)
@@ -565,18 +641,24 @@
                 bdiffbuild,
                 mpatchbuild,
             )
-            exts = [mpatchbuild.ffi.distutils_extension(),
-                    bdiffbuild.ffi.distutils_extension()]
+
+            exts = [
+                mpatchbuild.ffi.distutils_extension(),
+                bdiffbuild.ffi.distutils_extension(),
+            ]
             # cffi modules go here
             if sys.platform == 'darwin':
                 from mercurial.cffi import osutilbuild
+
                 exts.append(osutilbuild.ffi.distutils_extension())
             self.distribution.ext_modules = exts
         else:
             h = os.path.join(get_python_inc(), 'Python.h')
             if not os.path.exists(h):
-                raise SystemExit('Python headers are required to build '
-                                 'Mercurial but weren\'t found in %s' % h)
+                raise SystemExit(
+                    'Python headers are required to build '
+                    'Mercurial but weren\'t found in %s' % h
+                )
 
     def run(self):
         basepath = os.path.join(self.build_lib, 'mercurial')
@@ -591,15 +673,17 @@
         else:
             modulepolicy = 'rust+c' if rust else 'c'
 
-        content = b''.join([
-            b'# this file is autogenerated by setup.py\n',
-            b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
-        ])
-        write_if_changed(os.path.join(basepath, '__modulepolicy__.py'),
-                         content)
+        content = b''.join(
+            [
+                b'# this file is autogenerated by setup.py\n',
+                b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
+            ]
+        )
+        write_if_changed(os.path.join(basepath, '__modulepolicy__.py'), content)
 
         build_py.run(self)
 
+
 class buildhgextindex(Command):
     description = 'generate prebuilt index of hgext (for frozen package)'
     user_options = []
@@ -617,10 +701,13 @@
                 f.write('# empty\n')
 
         # here no extension enabled, disabled() lists up everything
-        code = ('import pprint; from mercurial import extensions; '
-                'pprint.pprint(extensions.disabled())')
-        returncode, out, err = runcmd([sys.executable, '-c', code],
-                                      localhgenv())
+        code = (
+            'import pprint; from mercurial import extensions; '
+            'pprint.pprint(extensions.disabled())'
+        )
+        returncode, out, err = runcmd(
+            [sys.executable, '-c', code], localhgenv()
+        )
         if err or returncode != 0:
             raise DistutilsExecError(err)
 
@@ -629,12 +716,17 @@
             f.write(b'docs = ')
             f.write(out)
 
+
 class buildhgexe(build_ext):
     description = 'compile hg.exe from mercurial/exewrapper.c'
     user_options = build_ext.user_options + [
-        ('long-paths-support', None, 'enable support for long paths on '
-                                     'Windows (off by default and '
-                                     'experimental)'),
+        (
+            'long-paths-support',
+            None,
+            'enable support for long paths on '
+            'Windows (off by default and '
+            'experimental)',
+        ),
     ]
 
     LONG_PATHS_MANIFEST = """
@@ -656,34 +748,39 @@
         if os.name != 'nt':
             return
         if isinstance(self.compiler, HackedMingw32CCompiler):
-            self.compiler.compiler_so = self.compiler.compiler # no -mdll
-            self.compiler.dll_libraries = [] # no -lmsrvc90
+            self.compiler.compiler_so = self.compiler.compiler  # no -mdll
+            self.compiler.dll_libraries = []  # no -lmsrvc90
 
         # Different Python installs can have different Python library
         # names. e.g. the official CPython distribution uses pythonXY.dll
         # and MinGW uses libpythonX.Y.dll.
         _kernel32 = ctypes.windll.kernel32
-        _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
-                                                 ctypes.c_void_p,
-                                                 ctypes.c_ulong]
+        _kernel32.GetModuleFileNameA.argtypes = [
+            ctypes.c_void_p,
+            ctypes.c_void_p,
+            ctypes.c_ulong,
+        ]
         _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
         size = 1000
         buf = ctypes.create_string_buffer(size + 1)
-        filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
-                                               size)
+        filelen = _kernel32.GetModuleFileNameA(
+            sys.dllhandle, ctypes.byref(buf), size
+        )
 
         if filelen > 0 and filelen != size:
             dllbasename = os.path.basename(buf.value)
             if not dllbasename.lower().endswith(b'.dll'):
-                raise SystemExit('Python DLL does not end with .dll: %s' %
-                                 dllbasename)
+                raise SystemExit(
+                    'Python DLL does not end with .dll: %s' % dllbasename
+                )
             pythonlib = dllbasename[:-4]
         else:
-            log.warn('could not determine Python DLL filename; '
-                     'assuming pythonXY')
+            log.warn(
+                'could not determine Python DLL filename; assuming pythonXY'
+            )
 
             hv = sys.hexversion
-            pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
+            pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xFF)
 
         log.info('using %s as Python library name' % pythonlib)
         with open('mercurial/hgpythonlib.h', 'wb') as f:
@@ -694,14 +791,16 @@
         if sys.version_info[0] >= 3:
             macros = [('_UNICODE', None), ('UNICODE', None)]
 
-        objects = self.compiler.compile(['mercurial/exewrapper.c'],
-                                         output_dir=self.build_temp,
-                                         macros=macros)
+        objects = self.compiler.compile(
+            ['mercurial/exewrapper.c'],
+            output_dir=self.build_temp,
+            macros=macros,
+        )
         dir = os.path.dirname(self.get_ext_fullpath('dummy'))
         self.hgtarget = os.path.join(dir, 'hg')
-        self.compiler.link_executable(objects, self.hgtarget,
-                                      libraries=[],
-                                      output_dir=self.build_temp)
+        self.compiler.link_executable(
+            objects, self.hgtarget, libraries=[], output_dir=self.build_temp
+        )
         if self.long_paths_support:
             self.addlongpathsmanifest()
 
@@ -733,8 +832,16 @@
         log.info("running mt.exe to update hg.exe's manifest in-place")
         # supplying both -manifest and -inputresource to mt.exe makes
         # it merge the embedded and supplied manifests in the -outputresource
-        self.spawn(['mt.exe', '-nologo', '-manifest', manfname,
-                    inputresource, outputresource])
+        self.spawn(
+            [
+                'mt.exe',
+                '-nologo',
+                '-manifest',
+                manfname,
+                inputresource,
+                outputresource,
+            ]
+        )
         log.info("done updating hg.exe's manifest")
         os.remove(manfname)
 
@@ -743,6 +850,7 @@
         dir = os.path.dirname(self.get_ext_fullpath('dummy'))
         return os.path.join(self.build_temp, dir, 'hg.exe')
 
+
 class hgbuilddoc(Command):
     description = 'build documentation'
     user_options = [
@@ -782,12 +890,12 @@
             txt = 'doc/%s.txt' % root
             log.info('generating %s' % txt)
             res, out, err = runcmd(
-                [sys.executable, 'gendoc.py', root],
-                os.environ,
-                cwd='doc')
+                [sys.executable, 'gendoc.py', root], os.environ, cwd='doc'
+            )
             if res:
-                raise SystemExit('error running gendoc.py: %s' %
-                                 '\n'.join([out, err]))
+                raise SystemExit(
+                    'error running gendoc.py: %s' % '\n'.join([out, err])
+                )
 
             with open(txt, 'wb') as fh:
                 fh.write(out)
@@ -799,10 +907,12 @@
             res, out, err = runcmd(
                 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
                 os.environ,
-                cwd='doc')
+                cwd='doc',
+            )
             if res:
-                raise SystemExit('error running gendoc: %s' %
-                                 '\n'.join([out, err]))
+                raise SystemExit(
+                    'error running gendoc: %s' % '\n'.join([out, err])
+                )
 
             with open(gendoc, 'wb') as fh:
                 fh.write(out)
@@ -810,34 +920,58 @@
         def genman(root):
             log.info('generating doc/%s' % root)
             res, out, err = runcmd(
-                [sys.executable, 'runrst', 'hgmanpage', '--halt', 'warning',
-                 '--strip-elements-with-class', 'htmlonly',
-                 '%s.txt' % root, root],
+                [
+                    sys.executable,
+                    'runrst',
+                    'hgmanpage',
+                    '--halt',
+                    'warning',
+                    '--strip-elements-with-class',
+                    'htmlonly',
+                    '%s.txt' % root,
+                    root,
+                ],
                 os.environ,
-                cwd='doc')
+                cwd='doc',
+            )
             if res:
-                raise SystemExit('error running runrst: %s' %
-                                 '\n'.join([out, err]))
+                raise SystemExit(
+                    'error running runrst: %s' % '\n'.join([out, err])
+                )
 
             normalizecrlf('doc/%s' % root)
 
         def genhtml(root):
             log.info('generating doc/%s.html' % root)
             res, out, err = runcmd(
-                [sys.executable, 'runrst', 'html', '--halt', 'warning',
-                 '--link-stylesheet', '--stylesheet-path', 'style.css',
-                 '%s.txt' % root, '%s.html' % root],
+                [
+                    sys.executable,
+                    'runrst',
+                    'html',
+                    '--halt',
+                    'warning',
+                    '--link-stylesheet',
+                    '--stylesheet-path',
+                    'style.css',
+                    '%s.txt' % root,
+                    '%s.html' % root,
+                ],
                 os.environ,
-                cwd='doc')
+                cwd='doc',
+            )
             if res:
-                raise SystemExit('error running runrst: %s' %
-                                 '\n'.join([out, err]))
+                raise SystemExit(
+                    'error running runrst: %s' % '\n'.join([out, err])
+                )
 
             normalizecrlf('doc/%s.html' % root)
 
         # This logic is duplicated in doc/Makefile.
-        sources = set(f for f in os.listdir('mercurial/help')
-                      if re.search(r'[0-9]\.txt$', f))
+        sources = set(
+            f
+            for f in os.listdir('mercurial/help')
+            if re.search(r'[0-9]\.txt$', f)
+        )
 
         # common.txt is a one-off.
         gentxt('common')
@@ -854,13 +988,20 @@
             if self.html:
                 genhtml(root)
 
+
 class hginstall(install):
 
     user_options = install.user_options + [
-        ('old-and-unmanageable', None,
-         'noop, present for eggless setuptools compat'),
-        ('single-version-externally-managed', None,
-         'noop, present for eggless setuptools compat'),
+        (
+            'old-and-unmanageable',
+            None,
+            'noop, present for eggless setuptools compat',
+        ),
+        (
+            'single-version-externally-managed',
+            None,
+            'noop, present for eggless setuptools compat',
+        ),
     ]
 
     # Also helps setuptools not be sad while we refuse to create eggs.
@@ -873,6 +1014,7 @@
         excl = set(['bdist_egg'])
         return filter(lambda x: x not in excl, install.get_sub_commands(self))
 
+
 class hginstalllib(install_lib):
     '''
     This is a specialization of install_lib that replaces the copy_file used
@@ -887,6 +1029,7 @@
 
     def run(self):
         realcopyfile = file_util.copy_file
+
         def copyfileandsetmode(*args, **kwargs):
             src, dst = args[0], args[1]
             dst, copied = realcopyfile(*args, **kwargs)
@@ -901,12 +1044,14 @@
                 m = stat.S_IMODE(st[stat.ST_MODE])
                 m = (m & ~int('0777', 8)) | setmode
                 os.chmod(dst, m)
+
         file_util.copy_file = copyfileandsetmode
         try:
             install_lib.run(self)
         finally:
             file_util.copy_file = realcopyfile
 
+
 class hginstallscripts(install_scripts):
     '''
     This is a specialization of install_scripts that replaces the @LIBDIR@ with
@@ -921,8 +1066,7 @@
 
     def finalize_options(self):
         install_scripts.finalize_options(self)
-        self.set_undefined_options('install',
-                                   ('install_lib', 'install_lib'))
+        self.set_undefined_options('install', ('install_lib', 'install_lib'))
 
     def run(self):
         install_scripts.run(self)
@@ -946,17 +1090,19 @@
         # we can't reliably set the libdir in wheels: the default behavior
         # of looking in sys.path must do.
 
-        if (os.path.splitdrive(self.install_dir)[0] !=
-            os.path.splitdrive(self.install_lib)[0]):
+        if (
+            os.path.splitdrive(self.install_dir)[0]
+            != os.path.splitdrive(self.install_lib)[0]
+        ):
             # can't make relative paths from one drive to another, so use an
             # absolute path instead
             libdir = self.install_lib
         else:
             common = os.path.commonprefix((self.install_dir, self.install_lib))
-            rest = self.install_dir[len(common):]
+            rest = self.install_dir[len(common) :]
             uplevel = len([n for n in os.path.split(rest) if n])
 
-            libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]
+            libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common) :]
 
         for outfile in self.outfiles:
             with open(outfile, 'rb') as fp:
@@ -970,14 +1116,17 @@
             # install path. During wheel packaging, the shebang has a special
             # value.
             if data.startswith(b'#!python'):
-                log.info('not rewriting @LIBDIR@ in %s because install path '
-                         'not known' % outfile)
+                log.info(
+                    'not rewriting @LIBDIR@ in %s because install path '
+                    'not known' % outfile
+                )
                 continue
 
             data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
             with open(outfile, 'wb') as fp:
                 fp.write(data)
 
+
 # virtualenv installs custom distutils/__init__.py and
 # distutils/distutils.cfg files which essentially proxy back to the
 # "real" distutils in the main Python install. The presence of this
@@ -1020,8 +1169,10 @@
             res.modules = modules
 
             import opcode
-            distutilsreal = os.path.join(os.path.dirname(opcode.__file__),
-                                         'distutils')
+
+            distutilsreal = os.path.join(
+                os.path.dirname(opcode.__file__), 'distutils'
+            )
 
             for root, dirs, files in os.walk(distutilsreal):
                 for f in sorted(files):
@@ -1042,60 +1193,74 @@
                         continue
 
                     if modname.endswith('.__init__'):
-                        modname = modname[:-len('.__init__')]
+                        modname = modname[: -len('.__init__')]
                         path = os.path.dirname(full)
                     else:
                         path = None
 
-                    res.modules[modname] = py2exemodule(modname, full,
-                                                        path=path)
+                    res.modules[modname] = py2exemodule(
+                        modname, full, path=path
+                    )
 
             if 'distutils' not in res.modules:
                 raise SystemExit('could not find distutils modules')
 
             return res
 
-cmdclass = {'build': hgbuild,
-            'build_doc': hgbuilddoc,
-            'build_mo': hgbuildmo,
-            'build_ext': hgbuildext,
-            'build_py': hgbuildpy,
-            'build_scripts': hgbuildscripts,
-            'build_hgextindex': buildhgextindex,
-            'install': hginstall,
-            'install_lib': hginstalllib,
-            'install_scripts': hginstallscripts,
-            'build_hgexe': buildhgexe,
-            }
+
+cmdclass = {
+    'build': hgbuild,
+    'build_doc': hgbuilddoc,
+    'build_mo': hgbuildmo,
+    'build_ext': hgbuildext,
+    'build_py': hgbuildpy,
+    'build_scripts': hgbuildscripts,
+    'build_hgextindex': buildhgextindex,
+    'install': hginstall,
+    'install_lib': hginstalllib,
+    'install_scripts': hginstallscripts,
+    'build_hgexe': buildhgexe,
+}
 
 if py2exehacked:
     cmdclass['py2exe'] = hgbuildpy2exe
 
-packages = ['mercurial',
-            'mercurial.cext',
-            'mercurial.cffi',
-            'mercurial.hgweb',
-            'mercurial.interfaces',
-            'mercurial.pure',
-            'mercurial.thirdparty',
-            'mercurial.thirdparty.attr',
-            'mercurial.thirdparty.zope',
-            'mercurial.thirdparty.zope.interface',
-            'mercurial.utils',
-            'mercurial.revlogutils',
-            'mercurial.testing',
-            'hgext', 'hgext.convert', 'hgext.fsmonitor',
-            'hgext.fastannotate',
-            'hgext.fsmonitor.pywatchman',
-            'hgext.highlight',
-            'hgext.infinitepush',
-            'hgext.largefiles', 'hgext.lfs', 'hgext.narrow',
-            'hgext.remotefilelog',
-            'hgext.zeroconf', 'hgext3rd',
-            'hgdemandimport']
+packages = [
+    'mercurial',
+    'mercurial.cext',
+    'mercurial.cffi',
+    'mercurial.hgweb',
+    'mercurial.interfaces',
+    'mercurial.pure',
+    'mercurial.thirdparty',
+    'mercurial.thirdparty.attr',
+    'mercurial.thirdparty.zope',
+    'mercurial.thirdparty.zope.interface',
+    'mercurial.utils',
+    'mercurial.revlogutils',
+    'mercurial.testing',
+    'hgext',
+    'hgext.convert',
+    'hgext.fsmonitor',
+    'hgext.fastannotate',
+    'hgext.fsmonitor.pywatchman',
+    'hgext.highlight',
+    'hgext.infinitepush',
+    'hgext.largefiles',
+    'hgext.lfs',
+    'hgext.narrow',
+    'hgext.remotefilelog',
+    'hgext.zeroconf',
+    'hgext3rd',
+    'hgdemandimport',
+]
 if sys.version_info[0] == 2:
-    packages.extend(['mercurial.thirdparty.concurrent',
-                     'mercurial.thirdparty.concurrent.futures'])
+    packages.extend(
+        [
+            'mercurial.thirdparty.concurrent',
+            'mercurial.thirdparty.concurrent.futures',
+        ]
+    )
 
 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
     # py2exe can't cope with namespace packages very well, so we have to
@@ -1103,9 +1268,11 @@
     # image here. This is gross, but you gotta do what you gotta do.
     packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
 
-common_depends = ['mercurial/bitmanipulation.h',
-                  'mercurial/compat.h',
-                  'mercurial/cext/util.h']
+common_depends = [
+    'mercurial/bitmanipulation.h',
+    'mercurial/compat.h',
+    'mercurial/cext/util.h',
+]
 common_include_dirs = ['mercurial']
 
 osutil_cflags = []
@@ -1117,16 +1284,24 @@
         osutil_cflags.append('-DHAVE_%s' % func.upper())
 
 for plat, macro, code in [
-    ('bsd|darwin', 'BSD_STATFS', '''
+    (
+        'bsd|darwin',
+        'BSD_STATFS',
+        '''
      #include <sys/param.h>
      #include <sys/mount.h>
      int main() { struct statfs s; return sizeof(s.f_fstypename); }
-     '''),
-    ('linux', 'LINUX_STATFS', '''
+     ''',
+    ),
+    (
+        'linux',
+        'LINUX_STATFS',
+        '''
      #include <linux/magic.h>
      #include <sys/vfs.h>
      int main() { struct statfs s; return sizeof(s.f_type); }
-     '''),
+     ''',
+    ),
 ]:
     if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
         osutil_cflags.append('-DHAVE_%s' % macro)
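
The loop above depends on cancompile() (defined earlier in setup.py, outside this diff) to probe whether a feature-test program builds. A hedged sketch of what such a probe can look like with plain distutils; the real helper may differ in detail:

    import os
    import tempfile
    from distutils.ccompiler import new_compiler
    from distutils.errors import CCompilerError, DistutilsExecError

    def cancompile(compiler, code):
        # write the probe source to a scratch directory and try to build it
        tmpdir = tempfile.mkdtemp(prefix='hg-feature-probe-')
        src = os.path.join(tmpdir, 'probe.c')
        with open(src, 'w') as f:
            f.write(code)
        try:
            compiler.compile([src], output_dir=tmpdir)
            return True
        except (CCompilerError, DistutilsExecError):
            return False

    print(cancompile(new_compiler(), 'int main(void) { return 0; }'))
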
@@ -1150,17 +1325,20 @@
     'mercurial/thirdparty/xdiff/xutils.h',
 ]
 
+
 class RustCompilationError(CCompilerError):
     """Exception class for Rust compilation errors."""
 
+
 class RustExtension(Extension):
     """Base classes for concrete Rust Extension classes.
     """
 
     rusttargetdir = os.path.join('rust', 'target', 'release')
 
-    def __init__(self, mpath, sources, rustlibname, subcrate,
-                 py3_features=None, **kw):
+    def __init__(
+        self, mpath, sources, rustlibname, subcrate, py3_features=None, **kw
+    ):
         Extension.__init__(self, mpath, sources, **kw)
         srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
         self.py3_features = py3_features
@@ -1172,9 +1350,11 @@
         if os.path.exists(cargo_lock):
             self.depends.append(cargo_lock)
         for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
-            self.depends.extend(os.path.join(dirpath, fname)
-                                for fname in fnames
-                                if os.path.splitext(fname)[1] == '.rs')
+            self.depends.extend(
+                os.path.join(dirpath, fname)
+                for fname in fnames
+                if os.path.splitext(fname)[1] == '.rs'
+            )
 
     @staticmethod
     def rustdylibsuffix():
@@ -1202,16 +1382,19 @@
             # Unix only fix (os.path.expanduser not really reliable if
             # HOME is shadowed like this)
             import pwd
+
             env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
 
         cargocmd = ['cargo', 'rustc', '-vv', '--release']
         if sys.version_info[0] == 3 and self.py3_features is not None:
-            cargocmd.extend(('--features', self.py3_features,
-                             '--no-default-features'))
+            cargocmd.extend(
+                ('--features', self.py3_features, '--no-default-features')
+            )
         cargocmd.append('--')
         if sys.platform == 'darwin':
-            cargocmd.extend(("-C", "link-arg=-undefined",
-                             "-C", "link-arg=dynamic_lookup"))
+            cargocmd.extend(
+                ("-C", "link-arg=-undefined", "-C", "link-arg=dynamic_lookup")
+            )
         try:
             subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
         except OSError as exc:
@@ -1219,14 +1402,17 @@
                 raise RustCompilationError("Cargo not found")
             elif exc.errno == errno.EACCES:
                 raise RustCompilationError(
-                    "Cargo found, but permisssion to execute it is denied")
+                    "Cargo found, but permisssion to execute it is denied"
+                )
             else:
                 raise
         except subprocess.CalledProcessError:
             raise RustCompilationError(
                 "Cargo failed. Working directory: %r, "
                 "command: %r, environment: %r"
-                % (self.rustsrcdir, cargocmd, env))
+                % (self.rustsrcdir, cargocmd, env)
+            )
+
 
 class RustEnhancedExtension(RustExtension):
     """A C Extension, conditionally enhanced with Rust code.
@@ -1237,8 +1423,9 @@
     """
 
     def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
-        RustExtension.__init__(self, mpath, sources, rustlibname, subcrate,
-                               **kw)
+        RustExtension.__init__(
+            self, mpath, sources, rustlibname, subcrate, **kw
+        )
         if hgrustext != 'direct-ffi':
             return
         self.extra_compile_args.append('-DWITH_RUST')
@@ -1249,11 +1436,12 @@
         if hgrustext == 'direct-ffi':
             RustExtension.rustbuild(self)
 
+
 class RustStandaloneExtension(RustExtension):
-
     def __init__(self, pydottedname, rustcrate, dylibname, **kw):
-        RustExtension.__init__(self, pydottedname, [], dylibname, rustcrate,
-                               **kw)
+        RustExtension.__init__(
+            self, pydottedname, [], dylibname, rustcrate, **kw
+        )
         self.dylibname = dylibname
 
     def build(self, target_dir):
@@ -1261,58 +1449,85 @@
         target = [target_dir]
         target.extend(self.name.split('.'))
         target[-1] += DYLIB_SUFFIX
-        shutil.copy2(os.path.join(self.rusttargetdir,
-                                  self.dylibname + self.rustdylibsuffix()),
-                     os.path.join(*target))
+        shutil.copy2(
+            os.path.join(
+                self.rusttargetdir, self.dylibname + self.rustdylibsuffix()
+            ),
+            os.path.join(*target),
+        )
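
DYLIB_SUFFIX and rustdylibsuffix(), used by build() above, are defined elsewhere in setup.py; they reconcile the cdylib name cargo produces with the filename Python expects. An illustrative, platform-simplified sketch of that kind of mapping (the real helpers may compute it differently):

    import sys

    # illustration only: typical cargo cdylib suffix per platform
    if sys.platform == 'darwin':
        cargo_suffix = '.dylib'
    elif sys.platform == 'win32':
        cargo_suffix = '.dll'
    else:
        cargo_suffix = '.so'
    print('cargo produces librusthg%s' % cargo_suffix)
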
 
 
 extmodules = [
-    Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
-              include_dirs=common_include_dirs,
-              depends=common_depends),
-    Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
-                                       'mercurial/cext/bdiff.c'] + xdiff_srcs,
-              include_dirs=common_include_dirs,
-              depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers),
-    Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
-                                        'mercurial/cext/mpatch.c'],
-              include_dirs=common_include_dirs,
-              depends=common_depends),
+    Extension(
+        'mercurial.cext.base85',
+        ['mercurial/cext/base85.c'],
+        include_dirs=common_include_dirs,
+        depends=common_depends,
+    ),
+    Extension(
+        'mercurial.cext.bdiff',
+        ['mercurial/bdiff.c', 'mercurial/cext/bdiff.c'] + xdiff_srcs,
+        include_dirs=common_include_dirs,
+        depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers,
+    ),
+    Extension(
+        'mercurial.cext.mpatch',
+        ['mercurial/mpatch.c', 'mercurial/cext/mpatch.c'],
+        include_dirs=common_include_dirs,
+        depends=common_depends,
+    ),
     RustEnhancedExtension(
-        'mercurial.cext.parsers', ['mercurial/cext/charencode.c',
-                                   'mercurial/cext/dirs.c',
-                                   'mercurial/cext/manifest.c',
-                                   'mercurial/cext/parsers.c',
-                                   'mercurial/cext/pathencode.c',
-                                   'mercurial/cext/revlog.c'],
+        'mercurial.cext.parsers',
+        [
+            'mercurial/cext/charencode.c',
+            'mercurial/cext/dirs.c',
+            'mercurial/cext/manifest.c',
+            'mercurial/cext/parsers.c',
+            'mercurial/cext/pathencode.c',
+            'mercurial/cext/revlog.c',
+        ],
         'hgdirectffi',
         'hg-direct-ffi',
         include_dirs=common_include_dirs,
-        depends=common_depends + ['mercurial/cext/charencode.h',
-                                  'mercurial/cext/revlog.h',
-                                  'rust/hg-core/src/ancestors.rs',
-                                  'rust/hg-core/src/lib.rs']),
-    Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
-              include_dirs=common_include_dirs,
-              extra_compile_args=osutil_cflags,
-              extra_link_args=osutil_ldflags,
-              depends=common_depends),
+        depends=common_depends
+        + [
+            'mercurial/cext/charencode.h',
+            'mercurial/cext/revlog.h',
+            'rust/hg-core/src/ancestors.rs',
+            'rust/hg-core/src/lib.rs',
+        ],
+    ),
     Extension(
-        'mercurial.thirdparty.zope.interface._zope_interface_coptimizations', [
-        'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
-        ]),
-    Extension('hgext.fsmonitor.pywatchman.bser',
-              ['hgext/fsmonitor/pywatchman/bser.c']),
-    RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
-                            py3_features='python3'),
-    ]
+        'mercurial.cext.osutil',
+        ['mercurial/cext/osutil.c'],
+        include_dirs=common_include_dirs,
+        extra_compile_args=osutil_cflags,
+        extra_link_args=osutil_ldflags,
+        depends=common_depends,
+    ),
+    Extension(
+        'mercurial.thirdparty.zope.interface._zope_interface_coptimizations',
+        [
+            'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
+        ],
+    ),
+    Extension(
+        'hgext.fsmonitor.pywatchman.bser', ['hgext/fsmonitor/pywatchman/bser.c']
+    ),
+    RustStandaloneExtension(
+        'mercurial.rustext', 'hg-cpython', 'librusthg', py3_features='python3'
+    ),
+]
 
 
 sys.path.insert(0, 'contrib/python-zstandard')
 import setup_zstd
-extmodules.append(setup_zstd.get_c_extension(
-    name='mercurial.zstd',
-    root=os.path.abspath(os.path.dirname(__file__))))
+
+extmodules.append(
+    setup_zstd.get_c_extension(
+        name='mercurial.zstd', root=os.path.abspath(os.path.dirname(__file__))
+    )
+)
 
 try:
     from distutils import cygwinccompiler
@@ -1337,6 +1552,7 @@
     class HackedMingw32CCompiler(object):
         pass
 
+
 if os.name == 'nt':
     # Allow compiler/linker flags to be added to Visual Studio builds.  Passing
     # extra_link_args to distutils.extensions.Extension() doesn't have any
@@ -1354,15 +1570,21 @@
 
     msvccompiler.MSVCCompiler = HackedMSVCCompiler
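
The assignment above swaps a subclass in for the stock distutils compiler so extra flags reach every extension build. The same pattern in miniature; the class name and flag values are examples (and Windows-only at runtime), not what Mercurial actually passes:

    from distutils import msvccompiler

    class demomsvccompiler(msvccompiler.MSVCCompiler):
        def initialize(self):
            msvccompiler.MSVCCompiler.initialize(self)
            # compile_options/ldflags_shared are populated by initialize(),
            # so appending here affects every later compile and link
            self.compile_options.append('/Zi')    # example compiler flag
            self.ldflags_shared.append('/DEBUG')  # example linker flag

    msvccompiler.MSVCCompiler = demomsvccompiler
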
 
-packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
-                             'help/*.txt',
-                             'help/internals/*.txt',
-                             'default.d/*.rc',
-                             'dummycert.pem']}
+packagedata = {
+    'mercurial': [
+        'locale/*/LC_MESSAGES/hg.mo',
+        'help/*.txt',
+        'help/internals/*.txt',
+        'default.d/*.rc',
+        'dummycert.pem',
+    ]
+}
+
 
 def ordinarypath(p):
     return p and p[0] != '.' and p[-1] != '~'
 
+
 for root in ('templates',):
     for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
         curdir = curdir.split(os.sep, 1)[1]
@@ -1402,9 +1624,12 @@
 
 if py2exeloaded:
     extra['console'] = [
-        {'script':'hg',
-         'copyright':'Copyright (C) 2005-2019 Matt Mackall and others',
-         'product_version':version}]
+        {
+            'script': 'hg',
+            'copyright': 'Copyright (C) 2005-2019 Matt Mackall and others',
+            'product_version': version,
+        }
+    ]
     # Sub command of 'build' because 'py2exe' does not handle sub_commands.
     # Need to override hgbuild because it has a private copy of
     # build.sub_commands.
@@ -1438,8 +1663,9 @@
         version = version[0]
         if sys.version_info[0] == 3:
             version = version.decode('utf-8')
-        xcode4 = (version.startswith('Xcode') and
-                  StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
+        xcode4 = version.startswith('Xcode') and StrictVersion(
+            version.split()[1]
+        ) >= StrictVersion('4.0')
         xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
     else:
         # xcodebuild returns empty on OS X Lion with XCode 4.3 not
@@ -1463,59 +1689,66 @@
         cflags = get_config_var('CFLAGS')
         if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
             os.environ['CFLAGS'] = (
-                os.environ.get('CFLAGS', '') + ' -Qunused-arguments')
+                os.environ.get('CFLAGS', '') + ' -Qunused-arguments'
+            )
 
-setup(name='mercurial',
-      version=setupversion,
-      author='Matt Mackall and many others',
-      author_email='mercurial@mercurial-scm.org',
-      url='https://mercurial-scm.org/',
-      download_url='https://mercurial-scm.org/release/',
-      description=('Fast scalable distributed SCM (revision control, version '
-                   'control) system'),
-      long_description=('Mercurial is a distributed SCM tool written in Python.'
-                        ' It is used by a number of large projects that require'
-                        ' fast, reliable distributed revision control, such as '
-                        'Mozilla.'),
-      license='GNU GPLv2 or any later version',
-      classifiers=[
-          'Development Status :: 6 - Mature',
-          'Environment :: Console',
-          'Intended Audience :: Developers',
-          'Intended Audience :: System Administrators',
-          'License :: OSI Approved :: GNU General Public License (GPL)',
-          'Natural Language :: Danish',
-          'Natural Language :: English',
-          'Natural Language :: German',
-          'Natural Language :: Italian',
-          'Natural Language :: Japanese',
-          'Natural Language :: Portuguese (Brazilian)',
-          'Operating System :: Microsoft :: Windows',
-          'Operating System :: OS Independent',
-          'Operating System :: POSIX',
-          'Programming Language :: C',
-          'Programming Language :: Python',
-          'Topic :: Software Development :: Version Control',
-      ],
-      scripts=scripts,
-      packages=packages,
-      ext_modules=extmodules,
-      data_files=datafiles,
-      package_data=packagedata,
-      cmdclass=cmdclass,
-      distclass=hgdist,
-      options={
-          'py2exe': {
-              'bundle_files': 3,
-              'dll_excludes': py2exedllexcludes,
-              'excludes': py2exeexcludes,
-              'packages': py2exepackages,
-          },
-          'bdist_mpkg': {
-              'zipdist': False,
-              'license': 'COPYING',
-              'readme': 'contrib/packaging/macosx/Readme.html',
-              'welcome': 'contrib/packaging/macosx/Welcome.html',
-          },
-      },
-      **extra)
+setup(
+    name='mercurial',
+    version=setupversion,
+    author='Matt Mackall and many others',
+    author_email='mercurial@mercurial-scm.org',
+    url='https://mercurial-scm.org/',
+    download_url='https://mercurial-scm.org/release/',
+    description=(
+        'Fast scalable distributed SCM (revision control, version '
+        'control) system'
+    ),
+    long_description=(
+        'Mercurial is a distributed SCM tool written in Python.'
+        ' It is used by a number of large projects that require'
+        ' fast, reliable distributed revision control, such as '
+        'Mozilla.'
+    ),
+    license='GNU GPLv2 or any later version',
+    classifiers=[
+        'Development Status :: 6 - Mature',
+        'Environment :: Console',
+        'Intended Audience :: Developers',
+        'Intended Audience :: System Administrators',
+        'License :: OSI Approved :: GNU General Public License (GPL)',
+        'Natural Language :: Danish',
+        'Natural Language :: English',
+        'Natural Language :: German',
+        'Natural Language :: Italian',
+        'Natural Language :: Japanese',
+        'Natural Language :: Portuguese (Brazilian)',
+        'Operating System :: Microsoft :: Windows',
+        'Operating System :: OS Independent',
+        'Operating System :: POSIX',
+        'Programming Language :: C',
+        'Programming Language :: Python',
+        'Topic :: Software Development :: Version Control',
+    ],
+    scripts=scripts,
+    packages=packages,
+    ext_modules=extmodules,
+    data_files=datafiles,
+    package_data=packagedata,
+    cmdclass=cmdclass,
+    distclass=hgdist,
+    options={
+        'py2exe': {
+            'bundle_files': 3,
+            'dll_excludes': py2exedllexcludes,
+            'excludes': py2exeexcludes,
+            'packages': py2exepackages,
+        },
+        'bdist_mpkg': {
+            'zipdist': False,
+            'license': 'COPYING',
+            'readme': 'contrib/packaging/macosx/Readme.html',
+            'welcome': 'contrib/packaging/macosx/Welcome.html',
+        },
+    },
+    **extra
+)
--- a/tests/artifacts/scripts/generate-churning-bundle.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/artifacts/scripts/generate-churning-bundle.py	Sun Oct 06 09:45:02 2019 -0400
@@ -39,15 +39,17 @@
 #
 # At each revision, the beginning of the file changes,
 # and a set of other lines changes too.
-FILENAME='SPARSE-REVLOG-TEST-FILE'
+FILENAME = 'SPARSE-REVLOG-TEST-FILE'
 NB_LINES = 10500
 ALWAYS_CHANGE_LINES = 500
 OTHER_CHANGES = 300
 
+
 def nextcontent(previous_content):
     """utility to produce a new file content from the previous one"""
     return hashlib.md5(previous_content).hexdigest()
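
The md5 chaining in nextcontent() makes each revision's content a pure function of the seed. One caveat for quick experiments: hexdigest() returns a native str, so on Python 3 the value must be re-encoded before it can be hashed again:

    import hashlib

    content = b'seed'
    for _ in range(3):
        # nextcontent() equivalent, with an explicit encode for Python 3
        content = hashlib.md5(content).hexdigest().encode('ascii')
        print(content)
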
 
+
 def filecontent(iteridx, oldcontent):
     """generate a new file content
 
@@ -72,6 +74,7 @@
             to_write = oldcontent[idx]
         yield to_write
 
+
 def updatefile(filename, idx):
     """update <filename> to be at appropriate content for iteration <idx>"""
     existing = None
@@ -82,6 +85,7 @@
         for line in filecontent(idx, existing):
             target.write(line)
 
+
 def hg(command, *args):
     """call a mercurial command with appropriate config and argument"""
     env = os.environ.copy()
@@ -101,6 +105,7 @@
     env['HGRCPATH'] = ''
     return subprocess.check_call(full_cmd, env=env)
 
+
 def run(target):
     tmpdir = tempfile.mkdtemp(prefix='tmp-hg-test-big-file-bundle-')
     try:
@@ -131,8 +136,8 @@
         shutil.rmtree(tmpdir)
     return 0
 
+
 if __name__ == '__main__':
     orig = os.path.realpath(os.path.dirname(sys.argv[0]))
     target = os.path.join(orig, os.pardir, 'cache', BUNDLE_NAME)
     sys.exit(run(target))
-
--- a/tests/autodiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/autodiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,9 +13,12 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
-@command(b'autodiff',
+
+@command(
+    b'autodiff',
     [(b'', b'git', b'', b'git upgrade mode (yes/no/auto/warn/abort)')],
-    b'[OPTION]... [FILE]...')
+    b'[OPTION]... [FILE]...',
+)
 def autodiff(ui, repo, *pats, **opts):
     opts = pycompat.byteskwargs(opts)
     diffopts = patch.difffeatureopts(ui, opts)
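
The hunk below re-wraps autodiff's losedatafn callbacks. For readers new to that contract: patch.diff() invokes the callback whenever downgrading a git diff would lose data; returning True means proceed anyway, while raising aborts the diff. A standalone sketch of both styles, with RuntimeError standing in for error.Abort:

    brokenfiles = set()

    def warn_losedatafn(fn=None, **kwargs):
        brokenfiles.add(fn)  # remember the file, keep diffing
        return True

    def abort_losedatafn(fn=None, **kwargs):
        raise RuntimeError('losing data for %s' % fn)
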
@@ -31,21 +34,31 @@
     elif git == b'warn':
         diffopts.git = False
         diffopts.upgrade = True
+
         def losedatafn(fn=None, **kwargs):
             brokenfiles.add(fn)
             return True
+
     elif git == b'abort':
         diffopts.git = False
         diffopts.upgrade = True
+
         def losedatafn(fn=None, **kwargs):
             raise error.Abort(b'losing data for %s' % fn)
+
     else:
         raise error.Abort(b'--git must be yes, no or auto')
 
     ctx1, ctx2 = scmutil.revpair(repo, [])
     m = scmutil.match(ctx2, pats, opts)
-    it = patch.diff(repo, ctx1.node(), ctx2.node(), match=m, opts=diffopts,
-                    losedatafn=losedatafn)
+    it = patch.diff(
+        repo,
+        ctx1.node(),
+        ctx2.node(),
+        match=m,
+        opts=diffopts,
+        losedatafn=losedatafn,
+    )
     for chunk in it:
         ui.write(chunk)
     for fn in sorted(brokenfiles):
--- a/tests/badserverext.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/badserverext.py	Sun Oct 06 09:45:02 2019 -0400
@@ -33,29 +33,27 @@
 
 import socket
 
-from mercurial import(
+from mercurial import (
     pycompat,
     registrar,
 )
 
-from mercurial.hgweb import (
-    server,
-)
+from mercurial.hgweb import server
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem(b'badserver', b'closeafteraccept',
-    default=False,
+configitem(
+    b'badserver', b'closeafteraccept', default=False,
 )
-configitem(b'badserver', b'closeafterrecvbytes',
-    default=b'0',
+configitem(
+    b'badserver', b'closeafterrecvbytes', default=b'0',
 )
-configitem(b'badserver', b'closeaftersendbytes',
-    default=b'0',
+configitem(
+    b'badserver', b'closeaftersendbytes', default=b'0',
 )
-configitem(b'badserver', b'closebeforeaccept',
-    default=False,
+configitem(
+    b'badserver', b'closebeforeaccept', default=False,
 )
 
 # We can't adjust __class__ on a socket instance. So we define a proxy type.
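
A standalone miniature of that proxy pattern: state is stashed with object.__setattr__ so that __getattribute__ can forward everything else to the wrapped object without recursing into itself:

    class proxy(object):
        def __init__(self, obj):
            object.__setattr__(self, '_orig', obj)

        def __getattribute__(self, name):
            # fetch our own slot via object.* to avoid infinite recursion
            orig = object.__getattribute__(self, '_orig')
            return getattr(orig, name)

    p = proxy([1, 2, 3])
    print(p.count(2))  # forwarded to the wrapped list -> 1
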
@@ -67,8 +65,9 @@
         '_closeaftersendbytes',
     )
 
-    def __init__(self, obj, logfp, closeafterrecvbytes=0,
-                 closeaftersendbytes=0):
+    def __init__(
+        self, obj, logfp, closeafterrecvbytes=0, closeaftersendbytes=0
+    ):
         object.__setattr__(self, '_orig', obj)
         object.__setattr__(self, '_logfp', logfp)
         object.__setattr__(self, '_closeafterrecvbytes', closeafterrecvbytes)
@@ -97,14 +96,19 @@
         f = object.__getattribute__(self, '_orig').makefile(mode, bufsize)
 
         logfp = object.__getattribute__(self, '_logfp')
-        closeafterrecvbytes = object.__getattribute__(self,
-                                                      '_closeafterrecvbytes')
-        closeaftersendbytes = object.__getattribute__(self,
-                                                      '_closeaftersendbytes')
+        closeafterrecvbytes = object.__getattribute__(
+            self, '_closeafterrecvbytes'
+        )
+        closeaftersendbytes = object.__getattribute__(
+            self, '_closeaftersendbytes'
+        )
 
-        return fileobjectproxy(f, logfp,
-                               closeafterrecvbytes=closeafterrecvbytes,
-                               closeaftersendbytes=closeaftersendbytes)
+        return fileobjectproxy(
+            f,
+            logfp,
+            closeafterrecvbytes=closeafterrecvbytes,
+            closeaftersendbytes=closeaftersendbytes,
+        )
 
     def sendall(self, data, flags=0):
         remaining = object.__getattribute__(self, '_closeaftersendbytes')
@@ -124,8 +128,10 @@
 
         result = object.__getattribute__(self, '_orig').sendall(newdata, flags)
 
-        self._writelog(b'sendall(%d from %d) -> (%d) %s' % (
-            len(newdata), len(data), remaining, newdata))
+        self._writelog(
+            b'sendall(%d from %d) -> (%d) %s'
+            % (len(newdata), len(data), remaining, newdata)
+        )
 
         object.__setattr__(self, '_closeaftersendbytes', remaining)
 
@@ -147,8 +153,9 @@
         '_closeaftersendbytes',
     )
 
-    def __init__(self, obj, logfp, closeafterrecvbytes=0,
-                 closeaftersendbytes=0):
+    def __init__(
+        self, obj, logfp, closeafterrecvbytes=0, closeaftersendbytes=0
+    ):
         object.__setattr__(self, '_orig', obj)
         object.__setattr__(self, '_logfp', logfp)
         object.__setattr__(self, '_closeafterrecvbytes', closeafterrecvbytes)
@@ -192,9 +199,9 @@
         # No read limit. Call original function.
         if not remaining:
             result = object.__getattribute__(self, '_orig').read(size)
-            self._writelog(b'read(%d) -> (%d) (%s) %s' % (size,
-                                                          len(result),
-                                                          result))
+            self._writelog(
+                b'read(%d) -> (%d) %s' % (size, len(result), result)
+            )
             return result
 
         origsize = size
@@ -207,8 +214,10 @@
         result = object.__getattribute__(self, '_orig').read(size)
         remaining -= len(result)
 
-        self._writelog(b'read(%d from %d) -> (%d) %s' % (
-            size, origsize, len(result), result))
+        self._writelog(
+            b'read(%d from %d) -> (%d) %s'
+            % (size, origsize, len(result), result)
+        )
 
         object.__setattr__(self, '_closeafterrecvbytes', remaining)
 
@@ -227,8 +236,9 @@
         # No read limit. Call original function.
         if not remaining:
             result = object.__getattribute__(self, '_orig').readline(size)
-            self._writelog(b'readline(%d) -> (%d) %s' % (
-                size, len(result), result))
+            self._writelog(
+                b'readline(%d) -> (%d) %s' % (size, len(result), result)
+            )
             return result
 
         origsize = size
@@ -241,8 +251,10 @@
         result = object.__getattribute__(self, '_orig').readline(size)
         remaining -= len(result)
 
-        self._writelog(b'readline(%d from %d) -> (%d) %s' % (
-            size, origsize, len(result), result))
+        self._writelog(
+            b'readline(%d from %d) -> (%d) %s'
+            % (size, origsize, len(result), result)
+        )
 
         object.__setattr__(self, '_closeafterrecvbytes', remaining)
 
@@ -271,8 +283,10 @@
 
         remaining -= len(newdata)
 
-        self._writelog(b'write(%d from %d) -> (%d) %s' % (
-            len(newdata), len(data), remaining, newdata))
+        self._writelog(
+            b'write(%d from %d) -> (%d) %s'
+            % (len(newdata), len(data), remaining, newdata)
+        )
 
         result = object.__getattribute__(self, '_orig').write(newdata)
 
@@ -286,6 +300,7 @@
 
         return result
 
+
 def extsetup(ui):
     # Change the base HTTP server class so various events can be performed.
     # See SocketServer.BaseServer for how the specially named methods work.
@@ -310,8 +325,9 @@
                     elif name.lower() == 'server':
                         value = 'badhttpserver'
 
-                    return super(badrequesthandler, self).send_header(name,
-                                                                      value)
+                    return super(badrequesthandler, self).send_header(
+                        name, value
+                    )
 
             self.RequestHandlerClass = badrequesthandler
 
@@ -348,9 +364,12 @@
                 closeaftersendbytes = 0
 
             if closeafterrecvbytes or closeaftersendbytes:
-                socket = socketproxy(socket, self.errorlog,
-                                     closeafterrecvbytes=closeafterrecvbytes,
-                                     closeaftersendbytes=closeaftersendbytes)
+                socket = socketproxy(
+                    socket,
+                    self.errorlog,
+                    closeafterrecvbytes=closeafterrecvbytes,
+                    closeaftersendbytes=closeaftersendbytes,
+                )
 
             return super(badserver, self).process_request(socket, address)
 
--- a/tests/basic_test_result.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/basic_test_result.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,8 +2,8 @@
 
 import unittest
 
+
 class TestResult(unittest._TextTestResult):
-
     def __init__(self, options, *args, **kwargs):
         super(TestResult, self).__init__(*args, **kwargs)
         self._options = options
--- a/tests/blackbox-readonly-dispatch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/blackbox-readonly-dispatch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,6 +6,7 @@
     ui as uimod,
 )
 
+
 def testdispatch(cmd):
     """Simple wrapper around dispatch.dispatch()
 
@@ -18,6 +19,7 @@
     result = dispatch.dispatch(req)
     ui.status(b"result: %r\n" % result)
 
+
 # create file 'foo', add and commit
 f = open(b'foo', 'wb')
 f.write(b'foo\n')
--- a/tests/bruterebase.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/bruterebase.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,6 +23,7 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
+
 @command(b'debugbruterebase')
 def debugbruterebase(ui, repo, source, dest):
     """for every non-empty subset of source, run rebase -r subset -d dest
@@ -45,7 +46,7 @@
             subset = [rev for j, rev in enumerate(srevs) if i & (1 << j) != 0]
             spec = revsetlang.formatspec(b'%ld', subset)
             tr = repo.transaction(b'rebase')
-            tr._report = lambda x: 0 # hide "transaction abort"
+            tr._report = lambda x: 0  # hide "transaction abort"
 
             ui.pushbuffer()
             try:
@@ -70,5 +71,5 @@
             repo.vfs.tryunlink(b'rebasestate')
 
             subsetdesc = b''.join(getdesc(rev) for rev in subset)
-            ui.write((b'%s: %s\n') % (subsetdesc.rjust(len(srevs)), summary))
+            ui.write(b'%s: %s\n' % (subsetdesc.rjust(len(srevs)), summary))
             tr.abort()
--- a/tests/check-perf-code.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/check-perf-code.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,23 +9,31 @@
 
 # write static check patterns here
 perfpypats = [
-  [
-    (r'(branchmap|repoview|repoviewutil)\.subsettable',
-     "use getbranchmapsubsettable() for early Mercurial"),
-    (r'\.(vfs|svfs|opener|sopener)',
-     "use getvfs()/getsvfs() for early Mercurial"),
-    (r'ui\.configint',
-     "use getint() instead of ui.configint() for early Mercurial"),
-  ],
-  # warnings
-  [
-  ]
+    [
+        (
+            r'(branchmap|repoview|repoviewutil)\.subsettable',
+            "use getbranchmapsubsettable() for early Mercurial",
+        ),
+        (
+            r'\.(vfs|svfs|opener|sopener)',
+            "use getvfs()/getsvfs() for early Mercurial",
+        ),
+        (
+            r'ui\.configint',
+            "use getint() instead of ui.configint() for early Mercurial",
+        ),
+    ],
+    # warnings
+    [],
 ]
 
+
 def modulewhitelist(names):
-    replacement = [('.py', ''), ('.c', ''), # trim suffix
-                   ('mercurial%s' % ('/'), ''), # trim "mercurial/" path
-                  ]
+    replacement = [
+        ('.py', ''),
+        ('.c', ''),  # trim suffix
+        ('mercurial%s' % '/', ''),  # trim "mercurial/" path
+    ]
     ignored = {'__init__'}
     modules = {}
 
@@ -45,6 +53,7 @@
 
     return whitelist
 
+
 if __name__ == "__main__":
     # in this case, it is assumed that result of "hg files" at
     # multiple revisions is given via stdin
@@ -61,10 +70,14 @@
         #        bar,
         #        baz
         #    )
-        ((r'from mercurial import [(][a-z0-9, \n#]*\n(?! *%s,|^[ #]*\n|[)])'
-          % ',| *'.join(whitelist)),
-         "import newer module separately in try clause for early Mercurial"
-         ))
+        (
+            (
+                r'from mercurial import [(][a-z0-9, \n#]*\n(?! *%s,|^[ #]*\n|[)])'
+                % ',| *'.join(whitelist)
+            ),
+            "import newer module separately in try clause for early Mercurial",
+        )
+    )
 
     # import contrib/check-code.py as checkcode
     assert 'RUNTESTDIR' in os.environ, "use check-perf-code.py in *.t script"
@@ -73,7 +86,8 @@
     checkcode = __import__('check-code')
 
     # register perf.py specific entry with "checks" in check-code.py
-    checkcode.checks.append(('perf.py', r'contrib/perf.py$', '',
-                             checkcode.pyfilters, perfpypats))
+    checkcode.checks.append(
+        ('perf.py', r'contrib/perf.py$', '', checkcode.pyfilters, perfpypats)
+    )
 
     sys.exit(checkcode.main())
--- a/tests/common-pattern.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/common-pattern.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,110 +5,115 @@
 
 substitutions = [
     # list of possible compressions
-    (br'(zstd,)?zlib,none,bzip2',
-     br'$USUAL_COMPRESSIONS$'
-    ),
-    (br'=(zstd,)?zlib',
-     br'=$BUNDLE2_COMPRESSIONS$'
-    ),
+    (br'(zstd,)?zlib,none,bzip2', br'$USUAL_COMPRESSIONS$'),
+    (br'=(zstd,)?zlib', br'=$BUNDLE2_COMPRESSIONS$'),
     # capabilities sent through http
-    (br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
-     br'bookmarks%250A'
-     br'changegroup%253D01%252C02%250A'
-     br'digests%253Dmd5%252Csha1%252Csha512%250A'
-     br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
-     br'hgtagsfnodes%250A'
-     br'listkeys%250A'
-     br'phases%253Dheads%250A'
-     br'pushkey%250A'
-     br'remote-changegroup%253Dhttp%252Chttps%250A'
-     br'rev-branch-cache%250A'
-     br'stream%253Dv2',
-     # (the replacement patterns)
-     br'$USUAL_BUNDLE_CAPS$'
+    (
+        br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
+        br'bookmarks%250A'
+        br'changegroup%253D01%252C02%250A'
+        br'digests%253Dmd5%252Csha1%252Csha512%250A'
+        br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
+        br'hgtagsfnodes%250A'
+        br'listkeys%250A'
+        br'phases%253Dheads%250A'
+        br'pushkey%250A'
+        br'remote-changegroup%253Dhttp%252Chttps%250A'
+        br'rev-branch-cache%250A'
+        br'stream%253Dv2',
+        # (the replacement patterns)
+        br'$USUAL_BUNDLE_CAPS$',
     ),
-    (br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
-     br'bookmarks%250A'
-     br'changegroup%253D01%252C02%250A'
-     br'digests%253Dmd5%252Csha1%252Csha512%250A'
-     br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
-     br'hgtagsfnodes%250A'
-     br'listkeys%250A'
-     br'phases%253Dheads%250A'
-     br'pushkey%250A'
-     br'remote-changegroup%253Dhttp%252Chttps',
-     # (the replacement patterns)
-     br'$USUAL_BUNDLE_CAPS_SERVER$'
-     ),
+    (
+        br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
+        br'bookmarks%250A'
+        br'changegroup%253D01%252C02%250A'
+        br'digests%253Dmd5%252Csha1%252Csha512%250A'
+        br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
+        br'hgtagsfnodes%250A'
+        br'listkeys%250A'
+        br'phases%253Dheads%250A'
+        br'pushkey%250A'
+        br'remote-changegroup%253Dhttp%252Chttps',
+        # (the replacement patterns)
+        br'$USUAL_BUNDLE_CAPS_SERVER$',
+    ),
     # bundle2 capabilities sent through ssh
-    (br'bundle2=HG20%0A'
-     br'bookmarks%0A'
-     br'changegroup%3D01%2C02%0A'
-     br'digests%3Dmd5%2Csha1%2Csha512%0A'
-     br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
-     br'hgtagsfnodes%0A'
-     br'listkeys%0A'
-     br'phases%3Dheads%0A'
-     br'pushkey%0A'
-     br'remote-changegroup%3Dhttp%2Chttps%0A'
-     br'rev-branch-cache%0A'
-     br'stream%3Dv2',
-     # (replacement patterns)
-     br'$USUAL_BUNDLE2_CAPS$'
+    (
+        br'bundle2=HG20%0A'
+        br'bookmarks%0A'
+        br'changegroup%3D01%2C02%0A'
+        br'digests%3Dmd5%2Csha1%2Csha512%0A'
+        br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
+        br'hgtagsfnodes%0A'
+        br'listkeys%0A'
+        br'phases%3Dheads%0A'
+        br'pushkey%0A'
+        br'remote-changegroup%3Dhttp%2Chttps%0A'
+        br'rev-branch-cache%0A'
+        br'stream%3Dv2',
+        # (replacement patterns)
+        br'$USUAL_BUNDLE2_CAPS$',
     ),
     # bundle2 capabilities advertised by the server
-    (br'bundle2=HG20%0A'
-     br'bookmarks%0A'
-     br'changegroup%3D01%2C02%0A'
-     br'digests%3Dmd5%2Csha1%2Csha512%0A'
-     br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
-     br'hgtagsfnodes%0A'
-     br'listkeys%0A'
-     br'phases%3Dheads%0A'
-     br'pushkey%0A'
-     br'remote-changegroup%3Dhttp%2Chttps%0A'
-     br'rev-branch-cache',
-     # (replacement patterns)
-     br'$USUAL_BUNDLE2_CAPS_SERVER$'
-     ),
     (
-     br'bundle2=HG20%0A'
-     br'bookmarks%0A'
-     br'changegroup%3D01%2C02%0A'
-     br'digests%3Dmd5%2Csha1%2Csha512%0A'
-     br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
-     br'hgtagsfnodes%0A'
-     br'listkeys%0A'
-     br'pushkey%0A'
-     br'remote-changegroup%3Dhttp%2Chttps%0A'
-     br'rev-branch-cache%0A'
-     br'stream%3Dv2',
-     # (replacement patterns)
-     br'$USUAL_BUNDLE2_CAPS_NO_PHASES$'
+        br'bundle2=HG20%0A'
+        br'bookmarks%0A'
+        br'changegroup%3D01%2C02%0A'
+        br'digests%3Dmd5%2Csha1%2Csha512%0A'
+        br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
+        br'hgtagsfnodes%0A'
+        br'listkeys%0A'
+        br'phases%3Dheads%0A'
+        br'pushkey%0A'
+        br'remote-changegroup%3Dhttp%2Chttps%0A'
+        br'rev-branch-cache',
+        # (replacement patterns)
+        br'$USUAL_BUNDLE2_CAPS_SERVER$',
+    ),
+    (
+        br'bundle2=HG20%0A'
+        br'bookmarks%0A'
+        br'changegroup%3D01%2C02%0A'
+        br'digests%3Dmd5%2Csha1%2Csha512%0A'
+        br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
+        br'hgtagsfnodes%0A'
+        br'listkeys%0A'
+        br'pushkey%0A'
+        br'remote-changegroup%3Dhttp%2Chttps%0A'
+        br'rev-branch-cache%0A'
+        br'stream%3Dv2',
+        # (replacement patterns)
+        br'$USUAL_BUNDLE2_CAPS_NO_PHASES$',
     ),
     # HTTP access log dates
-    (br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] "(GET|PUT|POST)',
-     lambda m: br' - - [$LOGDATE$] "' + m.group(1)
+    (
+        br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] "(GET|PUT|POST)',
+        lambda m: br' - - [$LOGDATE$] "' + m.group(1),
     ),
     # HTTP error log dates
-    (br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] (HG error:|Exception)',
-     lambda m: br' - - [$ERRDATE$] ' + m.group(1)
+    (
+        br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] (HG error:|Exception)',
+        lambda m: br' - - [$ERRDATE$] ' + m.group(1),
     ),
     # HTTP header dates- RFC 1123
-    (br'([Dd]ate): [A-Za-z]{3}, \d\d [A-Za-z]{3} \d{4} \d\d:\d\d:\d\d GMT',
-     lambda m: br'%s: $HTTP_DATE$' % m.group(1)
+    (
+        br'([Dd]ate): [A-Za-z]{3}, \d\d [A-Za-z]{3} \d{4} \d\d:\d\d:\d\d GMT',
+        lambda m: br'%s: $HTTP_DATE$' % m.group(1),
     ),
     # LFS expiration value
-    (br'"expires_at": "\d{4}-\d\d-\d\dT\d\d:\d\d:\d\dZ"',
-     br'"expires_at": "$ISO_8601_DATE_TIME$"'
+    (
+        br'"expires_at": "\d{4}-\d\d-\d\dT\d\d:\d\d:\d\dZ"',
+        br'"expires_at": "$ISO_8601_DATE_TIME$"',
     ),
     # Windows has an extra '/' in the following lines that get globbed away:
     #   pushing to file:/*/$TESTTMP/r2 (glob)
     #   comparing with file:/*/$TESTTMP/r2 (glob)
     #   sub/maybelarge.dat: largefile 34..9c not available from
     #       file:/*/$TESTTMP/largefiles-repo (glob)
-    (br'(.*file:/)/?(/\$TESTTMP.*)',
-     lambda m: m.group(1) + b'*' + m.group(2) + b' (glob)'
+    (
+        br'(.*file:/)/?(/\$TESTTMP.*)',
+        lambda m: m.group(1) + b'*' + m.group(2) + b' (glob)',
     ),
 ]
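
Each entry above pairs a regex with either a literal replacement or a callable receiving the match object, which is exactly the interface re.sub accepts. A hedged sketch of how a test runner can apply such a table line by line (patterns here are examples, not entries from the real table):

    import re

    table = [
        (br'\d{4}-\d\d-\d\d', br'$DATE$'),
        (br'(port) \d+', lambda m: m.group(1) + b' $PORT$'),
    ]

    line = b'2019-10-06 listening on port 8000'
    for pat, repl in table:
        line = re.sub(pat, repl, line)
    print(line)  # b'$DATE$ listening on port $PORT$'
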
 
@@ -119,31 +124,26 @@
         # than in Rust, and automatic conversion is not possible
         # because of module member privacy.
         br'No such file or directory \(os error 2\)',
-
         # strerror()
         br'No such file or directory',
-
         # FormatMessage(ERROR_FILE_NOT_FOUND)
         br'The system cannot find the file specified',
     ),
     br'$ENOTDIR$': (
         # strerror()
         br'Not a directory',
-
         # FormatMessage(ERROR_PATH_NOT_FOUND)
         br'The system cannot find the path specified',
     ),
     br'$ECONNRESET$': (
         # strerror()
         br'Connection reset by peer',
-
         # FormatMessage(WSAECONNRESET)
         br'An existing connection was forcibly closed by the remote host',
     ),
     br'$EADDRINUSE$': (
         # strerror()
         br'Address already in use',
-
         # FormatMessage(WSAEADDRINUSE)
         br'Only one usage of each socket address'
         br' \(protocol/network address/port\) is normally permitted',
@@ -151,9 +151,8 @@
     br'$EADDRNOTAVAIL$': (
         # strerror()
         br'Cannot assign requested address',
-
         # FormatMessage(WSAEADDRNOTAVAIL)
-    )
+    ),
 }
 
 for replace, msgs in _errors.items():
@@ -165,41 +164,35 @@
     # cloning subrepo s\ss from $TESTTMP/t/s/ss
     # cloning subrepo foo\bar from http://localhost:$HGPORT/foo/bar
     br'(?m)^cloning subrepo \S+\\.*',
-
     # pulling from $TESTTMP\issue1852a
     br'(?m)^pulling from \$TESTTMP\\.*',
-
     # pushing to $TESTTMP\a
     br'(?m)^pushing to \$TESTTMP\\.*',
-
     # pushing subrepo s\ss to $TESTTMP/t/s/ss
     br'(?m)^pushing subrepo \S+\\\S+ to.*',
-
     # moving d1\d11\a1 to d3/d11/a1
     br'(?m)^moving \S+\\.*',
-
     # d1\a: not recording move - dummy does not exist
     br'\S+\\\S+: not recording move .+',
-
     # reverting s\a
     br'(?m)^reverting (?!subrepo ).*\\.*',
-
     # saved backup bundle to
     #     $TESTTMP\test\.hg\strip-backup/443431ffac4f-2fc5398a-backup.hg
     br'(?m)^saved backup bundle to \$TESTTMP.*\.hg',
-
     # no changes made to subrepo s\ss since last push to ../tcc/s/ss
     br'(?m)^no changes made to subrepo \S+\\\S+ since.*',
-
     # changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing
     #     $TESTTMP\largefiles-repo-hg\.hg\largefiles\76..38
     br'(?m)^changeset .* references (corrupted|missing) \$TESTTMP\\.*',
-
     # stuff/maybelarge.dat: largefile 76..38 not available from
     #     file:/*/$TESTTMP\largefiles-repo (glob)
     br'.*: largefile \S+ not available from file:/\*/.+',
 ]
 
 if os.name == 'nt':
-    substitutions.extend([(s, lambda match: match.group().replace(b'\\', b'/'))
-                          for s in _winpathfixes])
+    substitutions.extend(
+        [
+            (s, lambda match: match.group().replace(b'\\', b'/'))
+            for s in _winpathfixes
+        ]
+    )
--- a/tests/crashgetbundler.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/crashgetbundler.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,14 +1,12 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
-from mercurial import (
-        changegroup,
-        error,
-        extensions
-    )
+from mercurial import changegroup, error, extensions
+
 
 def abort(orig, *args, **kwargs):
     raise error.Abort(_('this is an exercise'))
 
+
 def uisetup(ui):
     extensions.wrapfunction(changegroup, 'getbundler', abort)
--- a/tests/drawdag.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/drawdag.py	Sun Oct 06 09:45:02 2019 -0400
@@ -102,14 +102,19 @@
 command = registrar.command(cmdtable)
 
 _pipechars = b'\\/+-|'
-_nonpipechars = b''.join(pycompat.bytechr(i) for i in range(33, 127)
-                         if pycompat.bytechr(i) not in _pipechars)
+_nonpipechars = b''.join(
+    pycompat.bytechr(i)
+    for i in range(33, 127)
+    if pycompat.bytechr(i) not in _pipechars
+)
+
 
 def _isname(ch):
     """char -> bool. return True if ch looks like part of a name, False
     otherwise"""
     return ch in _nonpipechars
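
A quick illustration of the split _isname() encodes: the five pipe characters draw edges, and every other printable ASCII byte may belong to a node name. Python 3 spelling, with bytes([i]) in place of pycompat.bytechr:

    _pipechars = b'\\/+-|'
    _nonpipechars = b''.join(
        bytes([i]) for i in range(33, 127) if bytes([i]) not in _pipechars
    )

    def _isname(ch):
        return ch in _nonpipechars

    print(_isname(b'A'), _isname(b'|'))  # True False
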
 
+
 def _parseasciigraph(text):
     r"""str -> {str : [str]}. convert the ASCII graph to edges
 
@@ -166,7 +171,7 @@
         if x < 0 or y < 0:
             return b' '
         try:
-            return lines[y][x:x + 1] or b' '
+            return lines[y][x : x + 1] or b' '
         except IndexError:
             return b' '
 
@@ -261,6 +266,7 @@
 
     return dict(edges)
 
+
 class simplefilectx(object):
     def __init__(self, path, data):
         self._data = data
@@ -281,6 +287,7 @@
     def flags(self):
         return b''
 
+
 class simplecommitctx(context.committablectx):
     def __init__(self, repo, name, parentctxs, added):
         opts = {
@@ -306,6 +313,7 @@
     def p2copies(self):
         return {}
 
+
 def _walkgraph(edges):
     """yield node, parents in topologically order"""
     visible = set(edges.keys())
@@ -327,6 +335,7 @@
                 if leaf in v:
                     v.remove(leaf)
 
+
 def _getcomments(text):
     r"""
     >>> [pycompat.sysstr(s) for s in _getcomments(br'''
@@ -345,6 +354,7 @@
             continue
         yield line.split(b' # ', 1)[1].split(b' # ')[0].strip()
 
+
 @command(b'debugdrawdag', [])
 def debugdrawdag(ui, repo, **opts):
     r"""read an ASCII graph from stdin and create changesets
@@ -368,11 +378,10 @@
     edges = _parseasciigraph(text)
     for k, v in edges.items():
         if len(v) > 2:
-            raise error.Abort(_('%s: too many parents: %s')
-                              % (k, b' '.join(v)))
+            raise error.Abort(_('%s: too many parents: %s') % (k, b' '.join(v)))
 
     # parse comments to get extra file content instructions
-    files = collections.defaultdict(dict) # {(name, path): content}
+    files = collections.defaultdict(dict)  # {(name, path): content}
     comments = list(_getcomments(text))
     filere = re.compile(br'^(\w+)/([\w/]+)\s*=\s*(.*)$', re.M)
     for name, path, content in filere.findall(b'\n'.join(comments)):
@@ -410,14 +419,15 @@
         ctx = simplecommitctx(repo, name, pctxs, added)
         n = ctx.commit()
         committed[name] = n
-        tagsmod.tag(repo, [name], n, message=None, user=None, date=None,
-                    local=True)
+        tagsmod.tag(
+            repo, [name], n, message=None, user=None, date=None, local=True
+        )
 
     # handle special comments
     with repo.wlock(), repo.lock(), repo.transaction(b'drawdag'):
         getctx = lambda x: repo.unfiltered()[committed[x.strip()]]
         for comment in comments:
-            rels = [] # obsolete relationships
+            rels = []  # obsolete relationships
             args = comment.split(b':', 1)
             if len(args) <= 1:
                 continue
--- a/tests/dumbhttp.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/dumbhttp.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,36 +23,64 @@
 OptionParser = optparse.OptionParser
 
 if os.environ.get('HGIPV6', '0') == '1':
+
     class simplehttpserver(httpserver.httpserver):
         address_family = socket.AF_INET6
+
+
 else:
     simplehttpserver = httpserver.httpserver
 
+
 class _httprequesthandler(httpserver.simplehttprequesthandler):
     def log_message(self, format, *args):
         httpserver.simplehttprequesthandler.log_message(self, format, *args)
         sys.stderr.flush()
 
+
 class simplehttpservice(object):
     def __init__(self, host, port):
         self.address = (host, port)
+
     def init(self):
         self.httpd = simplehttpserver(self.address, _httprequesthandler)
+
     def run(self):
         self.httpd.serve_forever()
 
+
 if __name__ == '__main__':
     parser = OptionParser()
-    parser.add_option('-p', '--port', dest='port', type='int', default=8000,
-        help='TCP port to listen on', metavar='PORT')
-    parser.add_option('-H', '--host', dest='host', default='localhost',
-        help='hostname or IP to listen on', metavar='HOST')
+    parser.add_option(
+        '-p',
+        '--port',
+        dest='port',
+        type='int',
+        default=8000,
+        help='TCP port to listen on',
+        metavar='PORT',
+    )
+    parser.add_option(
+        '-H',
+        '--host',
+        dest='host',
+        default='localhost',
+        help='hostname or IP to listen on',
+        metavar='HOST',
+    )
     parser.add_option('--logfile', help='file name of access/error log')
-    parser.add_option('--pid', dest='pid',
-        help='file name where the PID of the server is stored')
-    parser.add_option('-f', '--foreground', dest='foreground',
+    parser.add_option(
+        '--pid',
+        dest='pid',
+        help='file name where the PID of the server is stored',
+    )
+    parser.add_option(
+        '-f',
+        '--foreground',
+        dest='foreground',
         action='store_true',
-        help='do not start the HTTP server in the background')
+        help='do not start the HTTP server in the background',
+    )
     parser.add_option('--daemon-postexec', action='append')
 
     (options, args) = parser.parse_args()
@@ -60,18 +88,26 @@
     signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0))
 
     if options.foreground and options.logfile:
-        parser.error("options --logfile and --foreground are mutually "
-                     "exclusive")
+        parser.error(
+            "options --logfile and --foreground are mutually " "exclusive"
+        )
     if options.foreground and options.pid:
         parser.error("options --pid and --foreground are mutually exclusive")
 
-    opts = {b'pid_file': options.pid,
-            b'daemon': not options.foreground,
-            b'daemon_postexec': pycompat.rapply(encoding.strtolocal,
-                                                options.daemon_postexec)}
+    opts = {
+        b'pid_file': options.pid,
+        b'daemon': not options.foreground,
+        b'daemon_postexec': pycompat.rapply(
+            encoding.strtolocal, options.daemon_postexec
+        ),
+    }
     service = simplehttpservice(options.host, options.port)
     runargs = [sys.executable, __file__] + sys.argv[1:]
     runargs = [pycompat.fsencode(a) for a in runargs]
-    server.runservice(opts, initfn=service.init, runfn=service.run,
-                      logfile=options.logfile,
-                      runargs=runargs)
+    server.runservice(
+        opts,
+        initfn=service.init,
+        runfn=service.run,
+        logfile=options.logfile,
+        runargs=runargs,
+    )
--- a/tests/dummysmtpd.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/dummysmtpd.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,10 +18,12 @@
     ui as uimod,
 )
 
+
 def log(msg):
     sys.stdout.write(msg)
     sys.stdout.flush()
 
+
 class dummysmtpserver(smtpd.SMTPServer):
     def __init__(self, localaddr):
         smtpd.SMTPServer.__init__(self, localaddr, remoteaddr=None)
@@ -38,6 +40,7 @@
         # the expected way, and the server is available for subsequent requests.
         traceback.print_exc()
 
+
 class dummysmtpsecureserver(dummysmtpserver):
     def __init__(self, localaddr, certfile):
         dummysmtpserver.__init__(self, localaddr)
@@ -58,25 +61,30 @@
             return
         smtpd.SMTPChannel(self, conn, addr)
 
+
 def run():
     try:
         asyncore.loop()
     except KeyboardInterrupt:
         pass
 
+
 def _encodestrsonly(v):
     if isinstance(v, type(u'')):
         return v.encode('ascii')
     return v
 
+
 def bytesvars(obj):
     unidict = vars(obj)
     bd = {k.encode('ascii'): _encodestrsonly(v) for k, v in unidict.items()}
     if bd[b'daemon_postexec'] is not None:
         bd[b'daemon_postexec'] = [
-            _encodestrsonly(v) for v in bd[b'daemon_postexec']]
+            _encodestrsonly(v) for v in bd[b'daemon_postexec']
+        ]
     return bd
 
+
 def main():
     op = optparse.OptionParser()
     op.add_option('-d', '--daemon', action='store_true')
@@ -92,6 +100,7 @@
         op.error('--certificate must be specified')
 
     addr = (opts.address, opts.port)
+
     def init():
         if opts.tls == 'none':
             dummysmtpserver(addr)
@@ -100,9 +109,13 @@
         log('listening at %s:%d\n' % addr)
 
     server.runservice(
-        bytesvars(opts), initfn=init, runfn=run,
-        runargs=[pycompat.sysexecutable,
-                 pycompat.fsencode(__file__)] + pycompat.sysargv[1:])
+        bytesvars(opts),
+        initfn=init,
+        runfn=run,
+        runargs=[pycompat.sysexecutable, pycompat.fsencode(__file__)]
+        + pycompat.sysargv[1:],
+    )
+
 
 if __name__ == '__main__':
     main()
--- a/tests/failfilemerge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/failfilemerge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,12 +8,13 @@
     filemerge,
 )
 
-def failfilemerge(filemergefn,
-                  premerge, repo, wctx, mynode, orig, fcd, fco, fca,
-                  labels=None):
+
+def failfilemerge(
+    filemergefn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
+):
     raise error.Abort("^C")
     return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
 
+
 def extsetup(ui):
-    extensions.wrapfunction(filemerge, '_filemerge',
-                            failfilemerge)
+    extensions.wrapfunction(filemerge, '_filemerge', failfilemerge)
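
This is the usual shape of extensions.wrapfunction() in these test extensions: the wrapper receives the original callable as its first argument and decides whether to delegate. A simplified model of the mechanism; the real helper also supports unwrapping and other bookkeeping:

    def wrapfunction(container, name, wrapper):
        origfn = getattr(container, name)

        def wrapped(*args, **kwargs):
            # the wrapper sees the original first, then the call's arguments
            return wrapper(origfn, *args, **kwargs)

        setattr(container, name, wrapped)
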
--- a/tests/fakedirstatewritetime.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/fakedirstatewritetime.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
 
 try:
     from mercurial import rustext
+
     rustext.__name__  # force actual import (see hgdemandimport)
 except ImportError:
     rustext = None
@@ -25,13 +26,14 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem(b'fakedirstatewritetime', b'fakenow',
-    default=None,
+configitem(
+    b'fakedirstatewritetime', b'fakenow', default=None,
 )
 
 parsers = policy.importmod(r'parsers')
 rustmod = policy.importrust(r'parsers')
 
+
 def pack_dirstate(fakenow, orig, dmap, copymap, pl, now):
     # execute what the original parsers.pack_dirstate actually does,
     # for consistency
@@ -43,6 +45,7 @@
 
     return orig(dmap, copymap, pl, fakenow)
 
+
 def fakewrite(ui, func):
     # fake "now" of 'pack_dirstate' only if it is invoked while 'func'
 
@@ -62,9 +65,9 @@
         # The Rust implementation does not use public parse/pack dirstate
         # to prevent conversion round-trips
         orig_dirstatemap_write = dirstate.dirstatemap.write
-        wrapper = lambda self, st, now: orig_dirstatemap_write(self,
-                                                               st,
-                                                               fakenow)
+        wrapper = lambda self, st, now: orig_dirstatemap_write(
+            self, st, fakenow
+        )
         dirstate.dirstatemap.write = wrapper
 
     orig_dirstate_getfsnow = dirstate._getfsnow
@@ -83,16 +86,19 @@
         if rustmod is not None:
             dirstate.dirstatemap.write = orig_dirstatemap_write
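
fakewrite() above follows the classic patch-call-restore shape: swap a function in, run the callable, and restore in finally so an exception never leaves the fake in place. The same pattern in miniature, with illustrative names:

    def with_patched(container, name, replacement, func):
        origfn = getattr(container, name)
        setattr(container, name, replacement)
        try:
            return func()
        finally:
            # restore even if func() raises
            setattr(container, name, origfn)
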
 
+
 def _poststatusfixup(orig, workingctx, status, fixup):
     ui = workingctx.repo().ui
-    return fakewrite(ui, lambda : orig(workingctx, status, fixup))
+    return fakewrite(ui, lambda: orig(workingctx, status, fixup))
+
 
 def markcommitted(orig, committablectx, node):
     ui = committablectx.repo().ui
-    return fakewrite(ui, lambda : orig(committablectx, node))
+    return fakewrite(ui, lambda: orig(committablectx, node))
+
 
 def extsetup(ui):
-    extensions.wrapfunction(context.workingctx, '_poststatusfixup',
-                            _poststatusfixup)
-    extensions.wrapfunction(context.workingctx, 'markcommitted',
-                            markcommitted)
+    extensions.wrapfunction(
+        context.workingctx, '_poststatusfixup', _poststatusfixup
+    )
+    extensions.wrapfunction(context.workingctx, 'markcommitted', markcommitted)
--- a/tests/fakemergerecord.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/fakemergerecord.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,9 +12,15 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
-@command(b'fakemergerecord',
-         [(b'X', b'mandatory', None, b'add a fake mandatory record'),
-          (b'x', b'advisory', None, b'add a fake advisory record')], '')
+
+@command(
+    b'fakemergerecord',
+    [
+        (b'X', b'mandatory', None, b'add a fake mandatory record'),
+        (b'x', b'advisory', None, b'add a fake advisory record'),
+    ],
+    '',
+)
 def fakemergerecord(ui, repo, *pats, **opts):
     with repo.wlock():
         ms = merge.mergestate.read(repo)
--- a/tests/fakepatchtime.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/fakepatchtime.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,18 +13,34 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem(b'fakepatchtime', b'fakenow',
-    default=None,
+configitem(
+    b'fakepatchtime', b'fakenow', default=None,
 )
 
-def internalpatch(orig, ui, repo, patchobj, strip,
-                  prefix=b'', files=None,
-                  eolmode=b'strict', similarity=0):
+
+def internalpatch(
+    orig,
+    ui,
+    repo,
+    patchobj,
+    strip,
+    prefix=b'',
+    files=None,
+    eolmode=b'strict',
+    similarity=0,
+):
     if files is None:
         files = set()
-    r = orig(ui, repo, patchobj, strip,
-             prefix=prefix, files=files,
-             eolmode=eolmode, similarity=similarity)
+    r = orig(
+        ui,
+        repo,
+        patchobj,
+        strip,
+        prefix=prefix,
+        files=files,
+        eolmode=eolmode,
+        similarity=similarity,
+    )
 
     fakenow = ui.config(b'fakepatchtime', b'fakenow')
     if fakenow:
@@ -36,5 +52,6 @@
 
     return r
 
+
 def extsetup(ui):
     extensions.wrapfunction(patchmod, 'internalpatch', internalpatch)
--- a/tests/filterpyflakes.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/filterpyflakes.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,7 +22,7 @@
     for pat in pats:
         if re.search(pat, line):
             keep = False
-            break # pattern matches
+            break  # pattern matches
     if keep:
         fn = line.split(':', 1)[0]
         f = open(fn)
--- a/tests/flagprocessorext.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/flagprocessorext.py	Sun Oct 06 09:45:02 2019 -0400
@@ -12,42 +12,49 @@
     revlog,
     util,
 )
-from mercurial.revlogutils import (
-    flagutil,
-)
+from mercurial.revlogutils import flagutil
 
 # Test only: These flags are defined here only in the context of testing the
 # behavior of the flag processor. The canonical way to add flags is to get in
 # touch with the community and make them known in revlog.
-REVIDX_NOOP = (1 << 3)
-REVIDX_BASE64 = (1 << 2)
-REVIDX_GZIP = (1 << 1)
+REVIDX_NOOP = 1 << 3
+REVIDX_BASE64 = 1 << 2
+REVIDX_GZIP = 1 << 1
 REVIDX_FAIL = 1
 
+
 def validatehash(self, text):
     return True
 
+
 def bypass(self, text):
     return False
 
+
 def noopdonothing(self, text, sidedata):
     return (text, True)
 
+
 def noopdonothingread(self, text):
     return (text, True, {})
 
+
 def b64encode(self, text, sidedata):
     return (base64.b64encode(text), False)
 
+
 def b64decode(self, text):
     return (base64.b64decode(text), True, {})
 
+
 def gzipcompress(self, text, sidedata):
     return (zlib.compress(text), False)
 
+
 def gzipdecompress(self, text):
     return (zlib.decompress(text), True, {})
 
+
 def supportedoutgoingversions(orig, repo):
     versions = orig(repo)
     versions.discard(b'01')
@@ -55,16 +62,26 @@
     versions.add(b'03')
     return versions
 
+
 def allsupportedversions(orig, ui):
     versions = orig(ui)
     versions.add(b'03')
     return versions
 
+
 def makewrappedfile(obj):
     class wrappedfile(obj.__class__):
-        def addrevision(self, text, transaction, link, p1, p2,
-                        cachedelta=None, node=None,
-                        flags=flagutil.REVIDX_DEFAULT_FLAGS):
+        def addrevision(
+            self,
+            text,
+            transaction,
+            link,
+            p1,
+            p2,
+            cachedelta=None,
+            node=None,
+            flags=flagutil.REVIDX_DEFAULT_FLAGS,
+        ):
             if b'[NOOP]' in text:
                 flags |= REVIDX_NOOP
 
@@ -79,14 +96,20 @@
             if b'[FAIL]' in text:
                 flags |= REVIDX_FAIL
 
-            return super(wrappedfile, self).addrevision(text, transaction, link,
-                                                        p1, p2,
-                                                        cachedelta=cachedelta,
-                                                        node=node,
-                                                        flags=flags)
+            return super(wrappedfile, self).addrevision(
+                text,
+                transaction,
+                link,
+                p1,
+                p2,
+                cachedelta=cachedelta,
+                node=node,
+                flags=flags,
+            )
 
     obj.__class__ = wrappedfile
 
+
 def reposetup(ui, repo):
     class wrappingflagprocessorrepo(repo.__class__):
         def file(self, f):
@@ -96,15 +119,14 @@
 
     repo.__class__ = wrappingflagprocessorrepo
 
+
 def extsetup(ui):
     # Enable changegroup3 for flags to be sent over the wire
     wrapfunction = extensions.wrapfunction
-    wrapfunction(changegroup,
-                 'supportedoutgoingversions',
-                 supportedoutgoingversions)
-    wrapfunction(changegroup,
-                 'allsupportedversions',
-                 allsupportedversions)
+    wrapfunction(
+        changegroup, 'supportedoutgoingversions', supportedoutgoingversions
+    )
+    wrapfunction(changegroup, 'allsupportedversions', allsupportedversions)
 
     # Teach revlog about our test flags
     flags = [REVIDX_NOOP, REVIDX_BASE64, REVIDX_GZIP, REVIDX_FAIL]
@@ -117,26 +139,11 @@
 
     # Register flag processors for each extension
     flagutil.addflagprocessor(
-        REVIDX_NOOP,
-        (
-            noopdonothingread,
-            noopdonothing,
-            validatehash,
-        )
+        REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
     )
     flagutil.addflagprocessor(
-        REVIDX_BASE64,
-        (
-            b64decode,
-            b64encode,
-            bypass,
-        ),
+        REVIDX_BASE64, (b64decode, b64encode, bypass,),
     )
     flagutil.addflagprocessor(
-        REVIDX_GZIP,
-        (
-            gzipdecompress,
-            gzipcompress,
-            bypass
-        )
+        REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
     )
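
Each flag bit registered above is paired with callables that transform
revision text on read and write. A toy sketch of how such bit-flag
transforms compose (illustrative only, not revlog's machinery; the
validatehash step is omitted):

import base64
import zlib

REVIDX_BASE64 = 1 << 2
REVIDX_GZIP = 1 << 1

# flag -> (read, write) transforms, applied when the bit is set
processors = {
    REVIDX_BASE64: (base64.b64decode, base64.b64encode),
    REVIDX_GZIP: (zlib.decompress, zlib.compress),
}

def pack(text, flags):
    for flag, (_read, write) in processors.items():
        if flags & flag:
            text = write(text)
    return text

def unpack(text, flags):
    # undo the write transforms in reverse order
    for flag, (read, _write) in reversed(list(processors.items())):
        if flags & flag:
            text = read(text)
    return text

flags = REVIDX_BASE64 | REVIDX_GZIP
assert unpack(pack(b'payload', flags), flags) == b'payload'
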
--- a/tests/fsmonitor-run-tests.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/fsmonitor-run-tests.py	Sun Oct 06 09:45:02 2019 -0400
@@ -28,14 +28,18 @@
 
 if sys.version_info > (3, 5, 0):
     PYTHON3 = True
-    xrange = range # we use xrange in one place, and we'd rather not use range
+    xrange = range  # we use xrange in one place, and we'd rather not use range
+
     def _bytespath(p):
         return p.encode('utf-8')
 
+
 elif sys.version_info >= (3, 0, 0):
-    print('%s is only supported on Python 3.5+ and 2.7, not %s' %
-          (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
-    sys.exit(70) # EX_SOFTWARE from `man 3 sysexits`
+    print(
+        '%s is only supported on Python 3.5+ and 2.7, not %s'
+        % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
+    )
+    sys.exit(70)  # EX_SOFTWARE from `man 3 sysexits`
 else:
     PYTHON3 = False
 
@@ -46,21 +50,29 @@
     def _bytespath(p):
         return p
 
+
 def getparser():
     """Obtain the argument parser used by the CLI."""
     parser = argparse.ArgumentParser(
         description='Run tests with fsmonitor enabled.',
-        epilog='Unrecognized options are passed to run-tests.py.')
+        epilog='Unrecognized options are passed to run-tests.py.',
+    )
     # - keep these sorted
     # - none of these options should conflict with any in run-tests.py
-    parser.add_argument('--keep-fsmonitor-tmpdir', action='store_true',
-        help='keep temporary directory with fsmonitor state')
-    parser.add_argument('--watchman',
+    parser.add_argument(
+        '--keep-fsmonitor-tmpdir',
+        action='store_true',
+        help='keep temporary directory with fsmonitor state',
+    )
+    parser.add_argument(
+        '--watchman',
         help='location of watchman binary (default: watchman in PATH)',
-        default='watchman')
+        default='watchman',
+    )
 
     return parser
 
+
 @contextlib.contextmanager
 def watchman(args):
     basedir = tempfile.mkdtemp(prefix='hg-fsmonitor')
@@ -82,19 +94,24 @@
 
         argv = [
             args.watchman,
-            '--sockname', sockfile,
-            '--logfile', logfile,
-            '--pidfile', pidfile,
-            '--statefile', statefile,
+            '--sockname',
+            sockfile,
+            '--logfile',
+            logfile,
+            '--pidfile',
+            pidfile,
+            '--statefile',
+            statefile,
             '--foreground',
-            '--log-level=2', # debug logging for watchman
+            '--log-level=2',  # debug logging for watchman
         ]
 
         envb = osenvironb.copy()
         envb[b'WATCHMAN_CONFIG_FILE'] = _bytespath(cfgfile)
         with open(clilogfile, 'wb') as f:
             proc = subprocess.Popen(
-                argv, env=envb, stdin=None, stdout=f, stderr=f)
+                argv, env=envb, stdin=None, stdout=f, stderr=f
+            )
             try:
                 yield sockfile
             finally:
@@ -106,6 +123,7 @@
         else:
             shutil.rmtree(basedir, ignore_errors=True)
 
+
 def run():
     parser = getparser()
     args, runtestsargv = parser.parse_known_args()
@@ -120,20 +138,23 @@
         blacklist = os.path.join(runtestdir, 'blacklists', 'fsmonitor')
 
         runtestsargv.insert(0, runtests)
-        runtestsargv.extend([
-            '--extra-config',
-            'extensions.fsmonitor=',
-            # specify fsmonitor.mode=paranoid always in order to force the
-            # fsmonitor extension to execute the "paranoid" code path
-            #
-            # TODO: make fsmonitor-run-tests.py accept specific options
-            '--extra-config',
-            'fsmonitor.mode=paranoid',
-            '--blacklist',
-            blacklist,
-        ])
+        runtestsargv.extend(
+            [
+                '--extra-config',
+                'extensions.fsmonitor=',
+                # specify fsmonitor.mode=paranoid always in order to force the
+                # fsmonitor extension to execute the "paranoid" code path
+                #
+                # TODO: make fsmonitor-run-tests.py accept specific options
+                '--extra-config',
+                'fsmonitor.mode=paranoid',
+                '--blacklist',
+                blacklist,
+            ]
+        )
 
         return subprocess.call(runtestsargv)
 
+
 if __name__ == '__main__':
     sys.exit(run())
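
The watchman helper above is a context manager that guarantees the daemon
is torn down even if the body raises. A minimal self-contained sketch of
that start/yield/cleanup pattern (the sleeping child is just a stand-in
for a real daemon):

import contextlib
import subprocess
import sys

@contextlib.contextmanager
def daemon(argv):
    proc = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    try:
        yield proc
    finally:
        # always reached: terminate and reap the child
        proc.terminate()
        proc.wait()

with daemon([sys.executable, '-c', 'import time; time.sleep(60)']) as p:
    pass  # p is cleaned up here no matter what happened in the body
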
--- a/tests/generate-working-copy-states.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/generate-working-copy-states.py	Sun Oct 06 09:45:02 2019 -0400
@@ -43,16 +43,27 @@
     depth = len(parentcontents)
     if depth == maxchangesets + 1:
         for tracked in (b'untracked', b'tracked'):
-            filename = b"_".join([(content is None and b'missing' or content)
-                                for content in parentcontents]) + b"-" + tracked
+            filename = (
+                b"_".join(
+                    [
+                        (content is None and b'missing' or content)
+                        for content in parentcontents
+                    ]
+                )
+                + b"-"
+                + tracked
+            )
             yield (filename, parentcontents)
     else:
-        for content in ({None, b'content' + (b"%d" % (depth + 1))} |
-                      set(parentcontents)):
-            for combination in generatestates(maxchangesets,
-                                              parentcontents + [content]):
+        for content in {None, b'content' + (b"%d" % (depth + 1))} | set(
+            parentcontents
+        ):
+            for combination in generatestates(
+                maxchangesets, parentcontents + [content]
+            ):
                 yield combination
 
+
 # retrieve the command line arguments
 target = sys.argv[1]
 maxchangesets = int(sys.argv[2])
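
generatestates recurses one changeset at a time, branching over "missing"
(None), one fresh content, and every content already used at shallower
depths. A trimmed self-contained sketch of just that recursion (the
filename and tracked/untracked handling is dropped, and str replaces
bytes):

def generatestates(maxchangesets, parentcontents):
    depth = len(parentcontents)
    if depth == maxchangesets:
        yield tuple(parentcontents)
        return
    for content in {None, 'content%d' % (depth + 1)} | set(parentcontents):
        for combination in generatestates(
            maxchangesets, parentcontents + [content]
        ):
            yield combination

print(sorted(generatestates(2, []), key=repr))
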
--- a/tests/get-with-headers.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/get-with-headers.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 except ImportError:
@@ -31,11 +32,14 @@
 parser.add_argument('--headeronly', action='store_true')
 parser.add_argument('--json', action='store_true')
 parser.add_argument('--hgproto')
-parser.add_argument('--requestheader', nargs='*', default=[],
-                    help='Send an additional HTTP request header. Argument '
-                         'value is <header>=<value>')
-parser.add_argument('--bodyfile',
-                    help='Write HTTP response body to a file')
+parser.add_argument(
+    '--requestheader',
+    nargs='*',
+    default=[],
+    help='Send an additional HTTP request header. Argument '
+    'value is <header>=<value>',
+)
+parser.add_argument('--bodyfile', help='Write HTTP response body to a file')
 parser.add_argument('host')
 parser.add_argument('path')
 parser.add_argument('show', nargs='*')
@@ -49,6 +53,8 @@
 requestheaders = args.requestheader
 
 tag = None
+
+
 def request(host, path, show):
     assert not path.startswith('/'), path
     global tag
@@ -65,15 +71,19 @@
     conn = httplib.HTTPConnection(host)
     conn.request("GET", '/' + path, None, headers)
     response = conn.getresponse()
-    stdout.write(b'%d %s\n' % (response.status,
-                               response.reason.encode('ascii')))
+    stdout.write(
+        b'%d %s\n' % (response.status, response.reason.encode('ascii'))
+    )
     if show[:1] == ['-']:
-        show = sorted(h for h, v in response.getheaders()
-                      if h.lower() not in show)
+        show = sorted(
+            h for h, v in response.getheaders() if h.lower() not in show
+        )
     for h in [h.lower() for h in show]:
         if response.getheader(h, None) is not None:
-            stdout.write(b"%s: %s\n" % (h.encode('ascii'),
-                                        response.getheader(h).encode('ascii')))
+            stdout.write(
+                b"%s: %s\n"
+                % (h.encode('ascii'), response.getheader(h).encode('ascii'))
+            )
     if not headeronly:
         stdout.write(b'\n')
         data = response.read()
@@ -104,6 +114,7 @@
 
     return response.status
 
+
 status = request(args.host, args.path, args.show)
 if twice:
     status = request(args.host, args.path, args.show)
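
The request helper above issues a plain GET via httplib and walks the
response headers. A minimal sketch of the same stdlib calls (example.org
is a placeholder host, and actually running this needs network access):

try:
    import http.client as httplib  # Python 3
except ImportError:
    import httplib  # Python 2

conn = httplib.HTTPConnection('example.org')
conn.request('GET', '/', None, {'Accept': 'text/html'})
response = conn.getresponse()
print(response.status, response.reason)
for header, value in response.getheaders():
    print('%s: %s' % (header, value))
print('%d bytes of body' % len(response.read()))
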
--- a/tests/heredoctest.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/heredoctest.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,10 +2,12 @@
 
 import sys
 
+
 def flush():
     sys.stdout.flush()
     sys.stderr.flush()
 
+
 globalvars = {}
 lines = sys.stdin.readlines()
 while lines:
--- a/tests/hghave.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/hghave.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,6 +17,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 except ImportError:
@@ -26,6 +27,7 @@
 stderr = getattr(sys.stderr, 'buffer', sys.stderr)
 
 if sys.version_info[0] >= 3:
+
     def _bytespath(p):
         if p is None:
             return p
@@ -35,35 +37,47 @@
         if p is None:
             return p
         return p.decode('utf-8')
+
+
 else:
+
     def _bytespath(p):
         return p
 
     _strpath = _bytespath
 
+
 def check(name, desc):
     """Registers a check function for a feature."""
+
     def decorator(func):
         checks[name] = (func, desc)
         return func
+
     return decorator
 
+
 def checkvers(name, desc, vers):
     """Registers a check function for each of a series of versions.
 
     vers can be a list or an iterator"""
+
     def decorator(func):
         def funcv(v):
             def f():
                 return func(v)
+
             return f
+
         for v in vers:
             v = str(v)
             f = funcv(v)
             checks['%s%s' % (name, v.replace('.', ''))] = (f, desc % v)
         return func
+
     return decorator
 
+
 def checkfeatures(features):
     result = {
         'error': [],
@@ -94,13 +108,15 @@
 
     return result
 
+
 def require(features):
     """Require that features are available, exiting if not."""
     result = checkfeatures(features)
 
     for missing in result['missing']:
-        stderr.write(('skipped: unknown feature: %s\n'
-                      % missing).encode('utf-8'))
+        stderr.write(
+            ('skipped: unknown feature: %s\n' % missing).encode('utf-8')
+        )
     for msg in result['skipped']:
         stderr.write(('skipped: %s\n' % msg).encode('utf-8'))
     for msg in result['error']:
@@ -112,21 +128,25 @@
     if result['skipped'] or result['error']:
         sys.exit(1)
 
+
 def matchoutput(cmd, regexp, ignorestatus=False):
     """Return the match object if cmd executes successfully and its output
     is matched by the supplied regular expression.
     """
     r = re.compile(regexp)
     p = subprocess.Popen(
-        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+    )
     s = p.communicate()[0]
     ret = p.returncode
     return (ignorestatus or not ret) and r.search(s)
 
+
 @check("baz", "GNU Arch baz client")
 def has_baz():
     return matchoutput('baz --version 2>&1', br'baz Bazaar version')
 
+
 @check("bzr", "Canonical's Bazaar client")
 def has_bzr():
     try:
@@ -135,48 +155,61 @@
         import bzrlib.errors
         import bzrlib.revision
         import bzrlib.revisionspec
+
         bzrlib.revisionspec.RevisionSpec
         return bzrlib.__doc__ is not None
     except (AttributeError, ImportError):
         return False
 
+
 @checkvers("bzr", "Canonical's Bazaar client >= %s", (1.14,))
 def has_bzr_range(v):
     major, minor = v.split('rc')[0].split('.')[0:2]
     try:
         import bzrlib
-        return (bzrlib.__doc__ is not None
-                and bzrlib.version_info[:2] >= (int(major), int(minor)))
+
+        return bzrlib.__doc__ is not None and bzrlib.version_info[:2] >= (
+            int(major),
+            int(minor),
+        )
     except ImportError:
         return False
 
+
 @check("chg", "running with chg")
 def has_chg():
     return 'CHGHG' in os.environ
 
+
 @check("cvs", "cvs client/server")
 def has_cvs():
     re = br'Concurrent Versions System.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
+
 @check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
 def has_cvs112():
     re = br'Concurrent Versions System \(CVS\) 1.12.*?server'
     return matchoutput('cvs --version 2>&1', re) and not has_msys()
 
+
 @check("cvsnt", "cvsnt client/server")
 def has_cvsnt():
     re = br'Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)'
     return matchoutput('cvsnt --version 2>&1', re)
 
+
 @check("darcs", "darcs client")
 def has_darcs():
     return matchoutput('darcs --version', br'\b2\.([2-9]|\d{2})', True)
 
+
 @check("mtn", "monotone client (>= 1.0)")
 def has_mtn():
     return matchoutput('mtn --version', br'monotone', True) and not matchoutput(
-        'mtn --version', br'monotone 0\.', True)
+        'mtn --version', br'monotone 0\.', True
+    )
+
 
 @check("eol-in-paths", "end-of-lines in paths")
 def has_eol_in_paths():
@@ -188,6 +221,7 @@
     except (IOError, OSError):
         return False
 
+
 @check("execbit", "executable bit")
 def has_executablebit():
     try:
@@ -198,7 +232,7 @@
             m = os.stat(fn).st_mode & 0o777
             new_file_has_exec = m & EXECFLAGS
             os.chmod(fn, m ^ EXECFLAGS)
-            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0o777) == m)
+            exec_flags_cannot_flip = (os.stat(fn).st_mode & 0o777) == m
         finally:
             os.unlink(fn)
     except (IOError, OSError):
@@ -206,6 +240,7 @@
         return False
     return not (new_file_has_exec or exec_flags_cannot_flip)
 
+
 @check("icasefs", "case insensitive file system")
 def has_icasefs():
     # Stolen from mercurial.util
@@ -225,6 +260,7 @@
     finally:
         os.remove(path)
 
+
 @check("fifo", "named pipes")
 def has_fifo():
     if getattr(os, "mkfifo", None) is None:
@@ -237,10 +273,12 @@
     except OSError:
         return False
 
+
 @check("killdaemons", 'killdaemons.py support')
 def has_killdaemons():
     return True
 
+
 @check("cacheable", "cacheable filesystem")
 def has_cacheable_fs():
     from mercurial import util
@@ -252,59 +290,71 @@
     finally:
         os.remove(path)
 
+
 @check("lsprof", "python lsprof module")
 def has_lsprof():
     try:
         import _lsprof
-        _lsprof.Profiler # silence unused import warning
+
+        _lsprof.Profiler  # silence unused import warning
         return True
     except ImportError:
         return False
 
+
 def gethgversion():
     m = matchoutput('hg --version --quiet 2>&1', br'(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
 
-@checkvers("hg", "Mercurial >= %s",
-            list([(1.0 * x) / 10 for x in range(9, 99)]))
+
+@checkvers(
+    "hg", "Mercurial >= %s", list([(1.0 * x) / 10 for x in range(9, 99)])
+)
 def has_hg_range(v):
     major, minor = v.split('.')[0:2]
     return gethgversion() >= (int(major), int(minor))
 
+
 @check("hg08", "Mercurial >= 0.8")
 def has_hg08():
     if checks["hg09"][0]():
         return True
     return matchoutput('hg help annotate 2>&1', '--date')
 
+
 @check("hg07", "Mercurial >= 0.7")
 def has_hg07():
     if checks["hg08"][0]():
         return True
     return matchoutput('hg --version --quiet 2>&1', 'Mercurial Distributed SCM')
 
+
 @check("hg06", "Mercurial >= 0.6")
 def has_hg06():
     if checks["hg07"][0]():
         return True
     return matchoutput('hg --version --quiet 2>&1', 'Mercurial version')
 
+
 @check("gettext", "GNU Gettext (msgfmt)")
 def has_gettext():
     return matchoutput('msgfmt --version', br'GNU gettext-tools')
 
+
 @check("git", "git command line client")
 def has_git():
     return matchoutput('git --version 2>&1', br'^git version')
 
+
 def getgitversion():
     m = matchoutput('git --version 2>&1', br'git version (\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
 
+
 # https://github.com/git-lfs/lfs-test-server
 @check("lfs-test-server", "git-lfs test server")
 def has_lfsserver():
@@ -316,40 +366,49 @@
         for path in os.environ["PATH"].split(os.pathsep)
     )
 
+
 @checkvers("git", "git client (with ext::sh support) version >= %s", (1.9,))
 def has_git_range(v):
     major, minor = v.split('.')[0:2]
     return getgitversion() >= (int(major), int(minor))
 
+
 @check("docutils", "Docutils text processing library")
 def has_docutils():
     try:
         import docutils.core
-        docutils.core.publish_cmdline # silence unused import
+
+        docutils.core.publish_cmdline  # silence unused import
         return True
     except ImportError:
         return False
 
+
 def getsvnversion():
     m = matchoutput('svn --version --quiet 2>&1', br'^(\d+)\.(\d+)')
     if not m:
         return (0, 0)
     return (int(m.group(1)), int(m.group(2)))
 
+
 @checkvers("svn", "subversion client and admin tools >= %s", (1.3, 1.5))
 def has_svn_range(v):
     major, minor = v.split('.')[0:2]
     return getsvnversion() >= (int(major), int(minor))
 
+
 @check("svn", "subversion client and admin tools")
 def has_svn():
-    return (matchoutput('svn --version 2>&1', br'^svn, version') and
-            matchoutput('svnadmin --version 2>&1', br'^svnadmin, version'))
+    return matchoutput('svn --version 2>&1', br'^svn, version') and matchoutput(
+        'svnadmin --version 2>&1', br'^svnadmin, version'
+    )
+
 
 @check("svn-bindings", "subversion python bindings")
 def has_svn_bindings():
     try:
         import svn.core
+
         version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
         if version < (1, 4):
             return False
@@ -357,10 +416,13 @@
     except ImportError:
         return False
 
+
 @check("p4", "Perforce server and client")
 def has_p4():
-    return (matchoutput('p4 -V', br'Rev\. P4/') and
-            matchoutput('p4d -V', br'Rev\. P4D/'))
+    return matchoutput('p4 -V', br'Rev\. P4/') and matchoutput(
+        'p4d -V', br'Rev\. P4D/'
+    )
+
 
 @check("symlink", "symbolic links")
 def has_symlink():
@@ -374,9 +436,11 @@
     except (OSError, AttributeError):
         return False
 
+
 @check("hardlink", "hardlinks")
 def has_hardlink():
     from mercurial import util
+
     fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix)
     os.close(fh)
     name = tempfile.mktemp(dir='.', prefix=tempprefix)
@@ -389,15 +453,18 @@
     finally:
         os.unlink(fn)
 
+
 @check("hardlink-whitelisted", "hardlinks on whitelisted filesystems")
 def has_hardlink_whitelisted():
     from mercurial import util
+
     try:
         fstype = util.getfstype(b'.')
     except OSError:
         return False
     return fstype in util._hardlinkfswhitelist
 
+
 @check("rmcwd", "can remove current working directory")
 def has_rmcwd():
     ocwd = os.getcwd()
@@ -418,22 +485,27 @@
         except OSError:
             pass
 
+
 @check("tla", "GNU Arch tla client")
 def has_tla():
     return matchoutput('tla --version 2>&1', br'The GNU Arch Revision')
 
+
 @check("gpg", "gpg client")
 def has_gpg():
     return matchoutput('gpg --version 2>&1', br'GnuPG')
 
+
 @check("gpg2", "gpg client v2")
 def has_gpg2():
     return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.')
 
+
 @check("gpg21", "gpg client v2.1+")
 def has_gpg21():
     return matchoutput('gpg --version 2>&1', br'GnuPG[^0-9]+2\.(?!0)')
 
+
 @check("unix-permissions", "unix-style permissions")
 def has_unix_permissions():
     d = tempfile.mkdtemp(dir='.', prefix=tempprefix)
@@ -451,25 +523,30 @@
     finally:
         os.rmdir(d)
 
+
 @check("unix-socket", "AF_UNIX socket family")
 def has_unix_socket():
     return getattr(socket, 'AF_UNIX', None) is not None
 
+
 @check("root", "root permissions")
 def has_root():
     return getattr(os, 'geteuid', None) and os.geteuid() == 0
 
+
 @check("pyflakes", "Pyflakes python linter")
 def has_pyflakes():
-    return matchoutput("sh -c \"echo 'import re' 2>&1 | pyflakes\"",
-                       br"<stdin>:1: 're' imported but unused",
-                       True)
+    return matchoutput(
+        "sh -c \"echo 'import re' 2>&1 | pyflakes\"",
+        br"<stdin>:1: 're' imported but unused",
+        True,
+    )
+
 
 @check("pylint", "Pylint python linter")
 def has_pylint():
-    return matchoutput("pylint --help",
-                       br"Usage:  pylint",
-                       True)
+    return matchoutput("pylint --help", br"Usage:  pylint", True)
+
 
 @check("clang-format", "clang-format C code formatter")
 def has_clang_format():
@@ -477,49 +554,59 @@
     # style changed somewhere between 4.x and 6.x
     return m and int(m.group(1)) >= 6
 
+
 @check("jshint", "JSHint static code analysis tool")
 def has_jshint():
     return matchoutput("jshint --version 2>&1", br"jshint v")
 
+
 @check("pygments", "Pygments source highlighting library")
 def has_pygments():
     try:
         import pygments
-        pygments.highlight # silence unused import warning
+
+        pygments.highlight  # silence unused import warning
         return True
     except ImportError:
         return False
 
+
 @check("outer-repo", "outer repo")
 def has_outer_repo():
     # failing for other reasons than 'no repo' imply that there is a repo
-    return not matchoutput('hg root 2>&1',
-                           br'abort: no repository found', True)
+    return not matchoutput('hg root 2>&1', br'abort: no repository found', True)
+
 
 @check("ssl", "ssl module available")
 def has_ssl():
     try:
         import ssl
+
         ssl.CERT_NONE
         return True
     except ImportError:
         return False
 
+
 @check("sslcontext", "python >= 2.7.9 ssl")
 def has_sslcontext():
     try:
         import ssl
+
         ssl.SSLContext
         return True
     except (ImportError, AttributeError):
         return False
 
+
 @check("defaultcacerts", "can verify SSL certs by system's CA certs store")
 def has_defaultcacerts():
     from mercurial import sslutil, ui as uimod
+
     ui = uimod.ui.load()
     return sslutil._defaultcacerts(ui) or sslutil._canloaddefaultcerts
 
+
 @check("defaultcacertsloaded", "detected presence of loaded system CA certs")
 def has_defaultcacertsloaded():
     import ssl
@@ -540,67 +627,82 @@
 
     return len(ctx.get_ca_certs()) > 0
 
+
 @check("tls1.2", "TLS 1.2 protocol support")
 def has_tls1_2():
     from mercurial import sslutil
+
     return b'tls1.2' in sslutil.supportedprotocols
 
+
 @check("windows", "Windows")
 def has_windows():
     return os.name == 'nt'
 
+
 @check("system-sh", "system() uses sh")
 def has_system_sh():
     return os.name != 'nt'
 
+
 @check("serve", "platform and python can manage 'hg serve -d'")
 def has_serve():
     return True
 
+
 @check("test-repo", "running tests from repository")
 def has_test_repo():
     t = os.environ["TESTDIR"]
     return os.path.isdir(os.path.join(t, "..", ".hg"))
 
+
 @check("tic", "terminfo compiler and curses module")
 def has_tic():
     try:
         import curses
+
         curses.COLOR_BLUE
         return matchoutput('test -x "`which tic`"', br'')
     except ImportError:
         return False
 
+
 @check("msys", "Windows with MSYS")
 def has_msys():
     return os.getenv('MSYSTEM')
 
+
 @check("aix", "AIX")
 def has_aix():
     return sys.platform.startswith("aix")
 
+
 @check("osx", "OS X")
 def has_osx():
     return sys.platform == 'darwin'
 
+
 @check("osxpackaging", "OS X packaging tools")
 def has_osxpackaging():
     try:
-        return (matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
-                and matchoutput(
-                    'productbuild', br'Usage: productbuild ',
-                    ignorestatus=1)
-                and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
-                and matchoutput(
-                    'xar --help', br'Usage: xar', ignorestatus=1))
+        return (
+            matchoutput('pkgbuild', br'Usage: pkgbuild ', ignorestatus=1)
+            and matchoutput(
+                'productbuild', br'Usage: productbuild ', ignorestatus=1
+            )
+            and matchoutput('lsbom', br'Usage: lsbom', ignorestatus=1)
+            and matchoutput('xar --help', br'Usage: xar', ignorestatus=1)
+        )
     except ImportError:
         return False
 
+
 @check('linuxormacos', 'Linux or MacOS')
 def has_linuxormacos():
     # This isn't a perfect test for MacOS. But it is sufficient for our needs.
     return sys.platform.startswith(('linux', 'darwin'))
 
+
 @check("docker", "docker support")
 def has_docker():
     pat = br'A self-sufficient runtime for'
@@ -618,33 +720,42 @@
         return True
     return False
 
+
 @check("debhelper", "debian packaging tools")
 def has_debhelper():
     # Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
     # quote), so just accept anything in that spot.
-    dpkg = matchoutput('dpkg --version',
-                       br"Debian .dpkg' package management program")
-    dh = matchoutput('dh --help',
-                     br'dh is a part of debhelper.', ignorestatus=True)
-    dh_py2 = matchoutput('dh_python2 --help',
-                         br'other supported Python versions')
+    dpkg = matchoutput(
+        'dpkg --version', br"Debian .dpkg' package management program"
+    )
+    dh = matchoutput(
+        'dh --help', br'dh is a part of debhelper.', ignorestatus=True
+    )
+    dh_py2 = matchoutput(
+        'dh_python2 --help', br'other supported Python versions'
+    )
     # debuild comes from the 'devscripts' package, though you might want
     # the 'build-debs' package instead, which has a dependency on devscripts.
-    debuild = matchoutput('debuild --help',
-                          br'to run debian/rules with given parameter')
+    debuild = matchoutput(
+        'debuild --help', br'to run debian/rules with given parameter'
+    )
     return dpkg and dh and dh_py2 and debuild
 
-@check("debdeps",
-       "debian build dependencies (run dpkg-checkbuilddeps in contrib/)")
+
+@check(
+    "debdeps", "debian build dependencies (run dpkg-checkbuilddeps in contrib/)"
+)
 def has_debdeps():
     # just check exit status (ignoring output)
     path = '%s/../contrib/packaging/debian/control' % os.environ['TESTDIR']
     return matchoutput('dpkg-checkbuilddeps %s' % path, br'')
 
+
 @check("demandimport", "demandimport enabled")
 def has_demandimport():
     # chg disables demandimport intentionally for performance wins.
-    return ((not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable')
+    return (not has_chg()) and os.environ.get('HGDEMANDIMPORT') != 'disable'
+
 
 @checkvers("py", "Python >= %s", (2.7, 3.5, 3.6, 3.7, 3.8, 3.9))
 def has_python_range(v):
@@ -653,73 +764,91 @@
 
     return (py_major, py_minor) >= (int(major), int(minor))
 
+
 @check("py3", "running with Python 3.x")
 def has_py3():
     return 3 == sys.version_info[0]
 
+
 @check("py3exe", "a Python 3.x interpreter is available")
 def has_python3exe():
     return matchoutput('python3 -V', br'^Python 3.(5|6|7|8|9)')
 
+
 @check("pure", "running with pure Python code")
 def has_pure():
-    return any([
-        os.environ.get("HGMODULEPOLICY") == "py",
-        os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
-    ])
+    return any(
+        [
+            os.environ.get("HGMODULEPOLICY") == "py",
+            os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure",
+        ]
+    )
+
 
 @check("slow", "allow slow tests (use --allow-slow-tests)")
 def has_slow():
     return os.environ.get('HGTEST_SLOW') == 'slow'
 
+
 @check("hypothesis", "Hypothesis automated test generation")
 def has_hypothesis():
     try:
         import hypothesis
+
         hypothesis.given
         return True
     except ImportError:
         return False
 
+
 @check("unziplinks", "unzip(1) understands and extracts symlinks")
 def unzip_understands_symlinks():
     return matchoutput('unzip --help', br'Info-ZIP')
 
+
 @check("zstd", "zstd Python module available")
 def has_zstd():
     try:
         import mercurial.zstd
+
         mercurial.zstd.__version__
         return True
     except ImportError:
         return False
 
+
 @check("devfull", "/dev/full special file")
 def has_dev_full():
     return os.path.exists('/dev/full')
 
+
 @check("virtualenv", "Python virtualenv support")
 def has_virtualenv():
     try:
         import virtualenv
+
         virtualenv.ACTIVATE_SH
         return True
     except ImportError:
         return False
 
+
 @check("fsmonitor", "running tests with fsmonitor")
 def has_fsmonitor():
     return 'HGFSMONITOR_TESTS' in os.environ
 
+
 @check("fuzzywuzzy", "Fuzzy string matching library")
 def has_fuzzywuzzy():
     try:
         import fuzzywuzzy
+
         fuzzywuzzy.__version__
         return True
     except ImportError:
         return False
 
+
 @check("clang-libfuzzer", "clang new enough to include libfuzzer")
 def has_clang_libfuzzer():
     mat = matchoutput('clang --version', br'clang version (\d)')
@@ -728,23 +857,28 @@
         return int(mat.group(1)) > 5
     return False
 
+
 @check("clang-6.0", "clang 6.0 with version suffix (libfuzzer included)")
 def has_clang60():
     return matchoutput('clang-6.0 --version', br'clang version 6\.')
 
+
 @check("xdiff", "xdiff algorithm")
 def has_xdiff():
     try:
         from mercurial import policy
+
         bdiff = policy.importmod('bdiff')
         return bdiff.xdiffblocks(b'', b'') == [(0, 0, 0, 0)]
     except (ImportError, AttributeError):
         return False
 
+
 @check('extraextensions', 'whether tests are running with extra extensions')
 def has_extraextensions():
     return 'HGTESTEXTRAEXTENSIONS' in os.environ
 
+
 def getrepofeatures():
     """Obtain set of repository features in use.
 
@@ -783,26 +917,32 @@
 
     return features
 
+
 @check('reporevlogstore', 'repository using the default revlog store')
 def has_reporevlogstore():
     return 'revlogstore' in getrepofeatures()
 
+
 @check('reposimplestore', 'repository using simple storage extension')
 def has_reposimplestore():
     return 'simplestore' in getrepofeatures()
 
+
 @check('repobundlerepo', 'whether we can open bundle files as repos')
 def has_repobundlerepo():
     return 'bundlerepo' in getrepofeatures()
 
+
 @check('repofncache', 'repository has an fncache')
 def has_repofncache():
     return 'fncache' in getrepofeatures()
 
+
 @check('sqlite', 'sqlite3 module is available')
 def has_sqlite():
     try:
         import sqlite3
+
         version = sqlite3.sqlite_version_info
     except ImportError:
         return False
@@ -813,16 +953,19 @@
 
     return matchoutput('sqlite3 -version', br'^3\.\d+')
 
+
 @check('vcr', 'vcr http mocking library')
 def has_vcr():
     try:
         import vcr
+
         vcr.VCR
         return True
     except (ImportError, AttributeError):
         pass
     return False
 
+
 @check('emacs', 'GNU Emacs')
 def has_emacs():
     # Our emacs lisp uses `with-eval-after-load` which is new in emacs
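
All of the hunks above feed the same registry: check is a decorator that
files a probe function plus its description under a feature name. A
self-contained sketch of the registration and probing loop:

checks = {}

def check(name, desc):
    """Register a probe function for a feature."""
    def decorator(func):
        checks[name] = (func, desc)
        return func
    return decorator

@check('py3', 'running with Python 3.x')
def has_py3():
    import sys
    return sys.version_info[0] == 3

present = sorted(name for name, (func, desc) in checks.items() if func())
print(present)
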
--- a/tests/hgweberror.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/hgweberror.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,9 +2,8 @@
 
 from __future__ import absolute_import
 
-from mercurial.hgweb import (
-    webcommands,
-)
+from mercurial.hgweb import webcommands
+
 
 def raiseerror(web):
     '''Dummy web command that raises an uncaught Exception.'''
@@ -19,6 +18,7 @@
 
     raise AttributeError('I am an uncaught error!')
 
+
 def extsetup(ui):
     setattr(webcommands, 'raiseerror', raiseerror)
     webcommands.__all__.append(b'raiseerror')
--- a/tests/httpserverauth.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/httpserverauth.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,9 +4,8 @@
 import hashlib
 
 from mercurial.hgweb import common
-from mercurial import (
-    node,
-)
+from mercurial import node
+
 
 def parse_keqv_list(req, l):
     """Parse list of key=value strings where keys are not duplicated."""
@@ -18,6 +17,7 @@
         parsed[k] = v
     return parsed
 
+
 class digestauthserver(object):
     def __init__(self):
         self._user_hashes = {}
@@ -42,8 +42,10 @@
         # We aren't testing the protocol here, just that the bytes make the
         # proper round trip.  So hardcoded seems fine.
         nonce = b'064af982c5b571cea6450d8eda91c20d'
-        return b'realm="%s", nonce="%s", algorithm=MD5, qop="auth"' % (realm,
-                                                                       nonce)
+        return b'realm="%s", nonce="%s", algorithm=MD5, qop="auth"' % (
+            realm,
+            nonce,
+        )
 
     def checkauth(self, req, header):
         log = req.rawenv[b'wsgi.errors']
@@ -53,8 +55,9 @@
 
         if resp.get(b'algorithm', b'MD5').upper() != b'MD5':
             log.write(b'Unsupported algorithm: %s' % resp.get(b'algorithm'))
-            raise common.ErrorResponse(common.HTTP_FORBIDDEN,
-                                       b"unknown algorithm")
+            raise common.ErrorResponse(
+                common.HTTP_FORBIDDEN, b"unknown algorithm"
+            )
         user = resp[b'username']
         realm = resp[b'realm']
         nonce = resp[b'nonce']
@@ -79,22 +82,29 @@
 
         respdig = kd(ha1, noncebit)
         if respdig != resp[b'response']:
-            log.write(b'User/realm "%s/%s" gave %s, but expected %s'
-                      % (user, realm, resp[b'response'], respdig))
+            log.write(
+                b'User/realm "%s/%s" gave %s, but expected %s'
+                % (user, realm, resp[b'response'], respdig)
+            )
             return False
 
         return True
 
+
 digest = digestauthserver()
 
+
 def perform_authentication(hgweb, req, op):
     auth = req.headers.get(b'Authorization')
 
     if req.headers.get(b'X-HgTest-AuthType') == b'Digest':
         if not auth:
             challenge = digest.makechallenge(b'mercurial')
-            raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who',
-                    [(b'WWW-Authenticate', b'Digest %s' % challenge)])
+            raise common.ErrorResponse(
+                common.HTTP_UNAUTHORIZED,
+                b'who',
+                [(b'WWW-Authenticate', b'Digest %s' % challenge)],
+            )
 
         if not digest.checkauth(req, auth[7:]):
             raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no')
@@ -102,12 +112,16 @@
         return
 
     if not auth:
-        raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, b'who',
-                [(b'WWW-Authenticate', b'Basic Realm="mercurial"')])
+        raise common.ErrorResponse(
+            common.HTTP_UNAUTHORIZED,
+            b'who',
+            [(b'WWW-Authenticate', b'Basic Realm="mercurial"')],
+        )
 
     if base64.b64decode(auth.split()[1]).split(b':', 1) != [b'user', b'pass']:
         raise common.ErrorResponse(common.HTTP_FORBIDDEN, b'no')
 
+
 def extsetup(ui):
     common.permhooks.insert(0, perform_authentication)
     digest.adduser(b'user', b'pass', b'mercurial')
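
checkauth above recomputes the RFC 2617 MD5 digest with qop="auth". A
sketch of that computation with illustrative values (the nc and cnonce
below are made up; only the hashing structure matters):

import hashlib

def md5hex(data):
    return hashlib.md5(data).hexdigest().encode('ascii')

user, realm, password = b'user', b'mercurial', b'pass'
nonce = b'064af982c5b571cea6450d8eda91c20d'
nc, cnonce, qop = b'00000001', b'deadbeef', b'auth'

ha1 = md5hex(b'%s:%s:%s' % (user, realm, password))
ha2 = md5hex(b'GET:/')  # method:uri
noncebit = b':'.join([nonce, nc, cnonce, qop, ha2])
response = md5hex(b'%s:%s' % (ha1, noncebit))
print(response)
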
--- a/tests/hypothesishelpers.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/hypothesishelpers.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,14 +21,14 @@
 from hypothesis import given
 
 # hypothesis stores data regarding generated examples and code
-set_hypothesis_home_dir(os.path.join(
-    os.getenv('TESTTMP'), ".hypothesis"
-))
+set_hypothesis_home_dir(os.path.join(os.getenv('TESTTMP'), ".hypothesis"))
+
 
 def check(*args, **kwargs):
     """decorator to make a function a hypothesis test
 
     Decorated functions are run immediately (to be used doctest style)"""
+
     def accept(f):
         # Workaround for https://github.com/DRMacIver/hypothesis/issues/206
         # Fixed in version 1.13 (released 2015 october 29th)
@@ -39,22 +39,24 @@
         except Exception:
             traceback.print_exc(file=sys.stdout)
             sys.exit(1)
+
     return accept
 
 
 def roundtrips(data, decode, encode):
     """helper to tests function that must do proper encode/decode roundtripping
     """
+
     @given(data)
     def testroundtrips(value):
         encoded = encode(value)
         decoded = decode(encoded)
         if decoded != value:
             raise ValueError(
-                "Round trip failed: %s(%r) -> %s(%r) -> %r" % (
-                    encode.__name__, value, decode.__name__, encoded,
-                    decoded
-                ))
+                "Round trip failed: %s(%r) -> %s(%r) -> %r"
+                % (encode.__name__, value, decode.__name__, encoded, decoded)
+            )
+
     try:
         testroundtrips()
     except Exception:
@@ -66,6 +68,9 @@
 
 # strategy for generating bytestrings that might be an issue for Mercurial
 bytestrings = (
-    st.builds(lambda s, e: s.encode(e), st.text(), st.sampled_from([
-        'utf-8', 'utf-16',
-    ]))) | st.binary()
+    st.builds(
+        lambda s, e: s.encode(e),
+        st.text(),
+        st.sampled_from(['utf-8', 'utf-16',]),
+    )
+) | st.binary()
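
roundtrips drives encode/decode pairs through hypothesis-generated data.
A dependency-free sketch of the same property over a fixed sample list
(losing the generative coverage, of course):

def roundtrips(samples, decode, encode):
    # encode then decode must reproduce every sample exactly
    for value in samples:
        encoded = encode(value)
        decoded = decode(encoded)
        if decoded != value:
            raise ValueError(
                "Round trip failed: %r -> %r -> %r"
                % (value, encoded, decoded)
            )

roundtrips(
    [u'', u'ascii', u'caf\xe9'],
    lambda data: data.decode('utf-8'),
    lambda text: text.encode('utf-8'),
)
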
--- a/tests/killdaemons.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/killdaemons.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,7 +7,7 @@
 import sys
 import time
 
-if os.name =='nt':
+if os.name == 'nt':
     import ctypes
 
     _BOOL = ctypes.c_long
@@ -46,15 +46,17 @@
         WAIT_TIMEOUT = 258
         WAIT_FAILED = _DWORD(0xFFFFFFFF).value
         handle = ctypes.windll.kernel32.OpenProcess(
-                PROCESS_TERMINATE|SYNCHRONIZE|PROCESS_QUERY_INFORMATION,
-                False, pid)
+            PROCESS_TERMINATE | SYNCHRONIZE | PROCESS_QUERY_INFORMATION,
+            False,
+            pid,
+        )
         if handle is None:
-            _check(0, 87) # err 87 when process not found
-            return # process not found, already finished
+            _check(0, 87)  # err 87 when process not found
+            return  # process not found, already finished
         try:
             r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100)
             if r == WAIT_OBJECT_0:
-                pass # terminated, but process handle still available
+                pass  # terminated, but process handle still available
             elif r == WAIT_TIMEOUT:
                 _check(ctypes.windll.kernel32.TerminateProcess(handle, -1))
             elif r == WAIT_FAILED:
@@ -63,19 +65,21 @@
             # TODO?: forcefully kill when timeout
             #        and ?shorter waiting time? when tryhard==True
             r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100)
-                                                       # timeout = 100 ms
+            # timeout = 100 ms
             if r == WAIT_OBJECT_0:
-                pass # process is terminated
+                pass  # process is terminated
             elif r == WAIT_TIMEOUT:
                logfn('# Daemon process %d is stuck' % pid)
             elif r == WAIT_FAILED:
                 _check(0)  # err stored in GetLastError()
-        except: #re-raises
-            ctypes.windll.kernel32.CloseHandle(handle) # no _check, keep error
+        except:  # re-raises
+            ctypes.windll.kernel32.CloseHandle(handle)  # no _check, keep error
             raise
         _check(ctypes.windll.kernel32.CloseHandle(handle))
 
+
 else:
+
     def kill(pid, logfn, tryhard=True):
         try:
             os.kill(pid, 0)
@@ -94,6 +98,7 @@
             if err.errno != errno.ESRCH:
                 raise
 
+
 def killdaemons(pidfile, tryhard=True, remove=False, logfn=None):
     if not logfn:
         logfn = lambda s: s
@@ -107,8 +112,10 @@
                     if pid <= 0:
                         raise ValueError
                 except ValueError:
-                    logfn('# Not killing daemon process %s - invalid pid'
-                          % line.rstrip())
+                    logfn(
+                        '# Not killing daemon process %s - invalid pid'
+                        % line.rstrip()
+                    )
                     continue
                 pids.append(pid)
         for pid in pids:
@@ -118,9 +125,10 @@
     except IOError:
         pass
 
+
 if __name__ == '__main__':
     if len(sys.argv) > 1:
-        path, = sys.argv[1:]
+        (path,) = sys.argv[1:]
     else:
         path = os.environ["DAEMON_PIDS"]
 
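
The POSIX branch of kill is mostly elided by the hunk above. A sketch of
what that probe-then-terminate loop looks like, assuming (from the visible
fragments) that signal 0 probes for existence, SIGTERM asks politely, and
SIGKILL is the fallback:

import errno
import os
import signal
import time

def kill(pid, logfn, tryhard=True):
    try:
        os.kill(pid, 0)  # probe: raises ESRCH once the process is gone
        logfn('# Killing daemon process %d' % pid)
        os.kill(pid, signal.SIGTERM)
        if tryhard:
            for i in range(10):
                time.sleep(0.05)
                os.kill(pid, 0)
        else:
            time.sleep(0.1)
            os.kill(pid, 0)
        logfn('# Daemon process %d is stuck - really killing it' % pid)
        os.kill(pid, signal.SIGKILL)
    except OSError as err:
        if err.errno != errno.ESRCH:
            raise
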
--- a/tests/list-tree.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/list-tree.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,6 +10,7 @@
 ap.add_argument('path', nargs='+')
 opts = ap.parse_args()
 
+
 def gather():
     for p in opts.path:
         if not os.path.exists(p):
@@ -24,4 +25,5 @@
         else:
             yield p
 
+
 print('\n'.join(sorted(gather(), key=lambda x: x.replace(os.path.sep, '/'))))
--- a/tests/lockdelay.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/lockdelay.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,8 +7,8 @@
 import os
 import time
 
+
 def reposetup(ui, repo):
-
     class delayedlockrepo(repo.__class__):
         def lock(self):
             delay = float(os.environ.get('HGPRELOCKDELAY', '0.0'))
@@ -19,4 +19,5 @@
             if delay:
                 time.sleep(delay)
             return res
+
     repo.__class__ = delayedlockrepo
--- a/tests/logexceptions.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/logexceptions.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,14 +18,16 @@
     extensions,
 )
 
+
 def handleexception(orig, ui):
     res = orig(ui)
 
     if not ui.environ.get(b'HGEXCEPTIONSDIR'):
         return res
 
-    dest = os.path.join(ui.environ[b'HGEXCEPTIONSDIR'],
-                        str(uuid.uuid4()).encode('ascii'))
+    dest = os.path.join(
+        ui.environ[b'HGEXCEPTIONSDIR'], str(uuid.uuid4()).encode('ascii')
+    )
 
     exc_type, exc_value, exc_tb = sys.exc_info()
 
@@ -69,6 +71,6 @@
         ]
         fh.write(b'\0'.join(p.encode('utf-8', 'replace') for p in parts))
 
+
 def extsetup(ui):
-    extensions.wrapfunction(dispatch, 'handlecommandexception',
-                            handleexception)
+    extensions.wrapfunction(dispatch, 'handlecommandexception', handleexception)
--- a/tests/ls-l.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/ls-l.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,6 +8,7 @@
 import stat
 import sys
 
+
 def modestr(st):
     mode = st.st_mode
     result = ''
@@ -23,6 +24,7 @@
                 result += '-'
     return result
 
+
 def sizestr(st):
     if st.st_mode & stat.S_IFREG:
         return '%7d' % st.st_size
@@ -30,6 +32,7 @@
         # do not show size for non regular files
         return ' ' * 7
 
+
 os.chdir((sys.argv[1:] + ['.'])[0])
 
 for name in sorted(os.listdir('.')):
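
modestr above renders an ls -l style permission string from st_mode. A
self-contained sketch of the nine-bit walk (a close variant, not
necessarily the file's exact loop):

import stat

def modestr(mode):
    result = 'd' if stat.S_ISDIR(mode) else '-'
    for owner in ('USR', 'GRP', 'OTH'):
        for action in ('R', 'W', 'X'):
            if mode & getattr(stat, 'S_I%s%s' % (action, owner)):
                result += action.lower()
            else:
                result += '-'
    return result

assert modestr(0o40755) == 'drwxr-xr-x'
assert modestr(0o100644) == '-rw-r--r--'
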
--- a/tests/md5sum.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/md5sum.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,6 +14,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
 except ImportError:
--- a/tests/mockblackbox.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/mockblackbox.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,14 +1,15 @@
 from __future__ import absolute_import
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 # XXX: we should probably offer a devel option to do this in blackbox directly
 def getuser():
     return b'bob'
+
+
 def getpid():
     return 5000
 
+
 # mock the date and user apis so the output is always the same
 def uisetup(ui):
     procutil.getuser = getuser
--- a/tests/mockmakedate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/mockmakedate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,6 +7,7 @@
 from mercurial import pycompat
 from mercurial.utils import dateutil
 
+
 def mockmakedate():
     filename = os.path.join(os.environ['TESTTMP'], 'testtime')
     try:
@@ -18,4 +19,5 @@
         timef.write(pycompat.bytestr(time))
     return (time, 0)
 
+
 dateutil.makedate = mockmakedate
--- a/tests/mocktime.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/mocktime.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,6 +3,7 @@
 import os
 import time
 
+
 class mocktime(object):
     def __init__(self, increment):
         self.time = 0
@@ -14,5 +15,6 @@
         self.pos += 1
         return self.time
 
+
 def uisetup(ui):
     time.time = mocktime(os.environ.get('MOCKTIME', '0.1'))
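
The hunk shows only part of mocktime's clock. A self-contained variant
under the assumption that MOCKTIME holds whitespace-separated increments
cycled per call (the elided __init__ body is a guess):

import os
import time

class mocktime(object):
    def __init__(self, increment):
        self.time = 0.0
        self.increments = [float(s) for s in increment.split()]
        self.pos = 0

    def __call__(self):
        # advance deterministically instead of reading the real clock
        self.time += self.increments[self.pos % len(self.increments)]
        self.pos += 1
        return self.time

time.time = mocktime(os.environ.get('MOCKTIME', '0.1'))
print(time.time(), time.time())
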
--- a/tests/printenv.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/printenv.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,6 +19,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
@@ -52,8 +53,7 @@
 
 # variables with empty values may not exist on all platforms, filter
 # them now for portability sake.
-env = [(k, v) for k, v in os.environ.items()
-       if k.startswith("HG_") and v]
+env = [(k, v) for k, v in os.environ.items() if k.startswith("HG_") and v]
 env.sort()
 
 out.write(b"%s hook: " % args.name.encode('ascii'))
@@ -62,8 +62,9 @@
 else:
     filter = lambda x: x
 
-vars = [b"%s=%s" % (k.encode('ascii'), filter(v).encode('ascii'))
-        for k, v in env]
+vars = [
+    b"%s=%s" % (k.encode('ascii'), filter(v).encode('ascii')) for k, v in env
+]
 
 # Print variables on out
 if not args.line:
--- a/tests/printrevset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/printrevset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,16 +1,15 @@
 from __future__ import absolute_import
 from mercurial import (
-  cmdutil,
-  commands,
-  extensions,
-  logcmdutil,
-  revsetlang,
-  smartset,
+    cmdutil,
+    commands,
+    extensions,
+    logcmdutil,
+    revsetlang,
+    smartset,
 )
 
-from mercurial.utils import (
-  stringutil,
-)
+from mercurial.utils import stringutil
+
 
 def logrevset(repo, pats, opts):
     revs = logcmdutil._initialrevs(repo, opts)
@@ -19,6 +18,7 @@
     match, pats, slowpath = logcmdutil._makematcher(repo, revs, pats, opts)
     return logcmdutil._makerevset(repo, match, pats, slowpath, opts)
 
+
 def uisetup(ui):
     def printrevset(orig, repo, pats, opts):
         revs, filematcher = orig(repo, pats, opts)
@@ -35,7 +35,14 @@
             ui.write(stringutil.prettyrepr(revs) + b'\n')
             revs = smartset.baseset()  # display no revisions
         return revs, filematcher
+
     extensions.wrapfunction(logcmdutil, 'getrevs', printrevset)
     aliases, entry = cmdutil.findcmd(b'log', commands.table)
-    entry[1].append((b'', b'print-revset', False,
-                     b'print generated revset and exit (DEPRECATED)'))
+    entry[1].append(
+        (
+            b'',
+            b'print-revset',
+            False,
+            b'print generated revset and exit (DEPRECATED)',
+        )
+    )
--- a/tests/pullext.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/pullext.py	Sun Oct 06 09:45:02 2019 -0400
@@ -14,9 +14,8 @@
     extensions,
     localrepo,
 )
-from mercurial.interfaces import (
-    repository,
-)
+from mercurial.interfaces import repository
+
 
 def clonecommand(orig, ui, repo, *args, **kwargs):
     if kwargs.get(r'include') or kwargs.get(r'exclude'):
@@ -30,9 +29,11 @@
 
     return orig(ui, repo, *args, **kwargs)
 
+
 def featuresetup(ui, features):
     features.add(repository.NARROW_REQUIREMENT)
 
+
 def extsetup(ui):
     entry = extensions.wrapcommand(commands.table, b'clone', clonecommand)
 
@@ -40,13 +41,16 @@
     hasdepth = any(x[1] == b'depth' for x in entry[1])
 
     if not hasinclude:
-        entry[1].append((b'', b'include', [],
-                         _(b'pattern of file/directory to clone')))
-        entry[1].append((b'', b'exclude', [],
-                         _(b'pattern of file/directory to not clone')))
+        entry[1].append(
+            (b'', b'include', [], _(b'pattern of file/directory to clone'))
+        )
+        entry[1].append(
+            (b'', b'exclude', [], _(b'pattern of file/directory to not clone'))
+        )
 
     if not hasdepth:
-        entry[1].append((b'', b'depth', b'',
-                         _(b'ancestry depth of changesets to fetch')))
+        entry[1].append(
+            (b'', b'depth', b'', _(b'ancestry depth of changesets to fetch'))
+        )
 
     localrepo.featuresetupfuncs.add(featuresetup)
--- a/tests/remotefilelog-getflogheads.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/remotefilelog-getflogheads.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,9 +9,8 @@
 cmdtable = {}
 command = registrar.command(cmdtable)
 
-@command(b'getflogheads',
-         [],
-         b'path')
+
+@command(b'getflogheads', [], b'path')
 def getflogheads(ui, repo, path):
     """
     Extension printing a remotefilelog's heads
--- a/tests/revlog-formatv0.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/revlog-formatv0.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,29 +23,39 @@
 import sys
 
 files = [
-    (b'formatv0/.hg/00changelog.i',
-     b'000000000000004400000000000000000000000000000000000000'
-     b'000000000000000000000000000000000000000000000000000000'
-     b'0000a1ef0b125355d27765928be600cfe85784284ab3'),
-    (b'formatv0/.hg/00changelog.d',
-     b'756163613935613961356635353036303562366138343738336237'
-     b'61623536363738616436356635380a757365720a3020300a656d70'
-     b'74790a0a656d7074792066696c65'),
-    (b'formatv0/.hg/00manifest.i',
-     b'000000000000003000000000000000000000000000000000000000'
-     b'000000000000000000000000000000000000000000000000000000'
-     b'0000aca95a9a5f550605b6a84783b7ab56678ad65f58'),
-    (b'formatv0/.hg/00manifest.d',
-     b'75656d707479006238306465356431333837353835343163356630'
-     b'35323635616431343461623966613836643164620a'),
-    (b'formatv0/.hg/data/empty.i',
-     b'000000000000000000000000000000000000000000000000000000'
-     b'000000000000000000000000000000000000000000000000000000'
-     b'0000b80de5d138758541c5f05265ad144ab9fa86d1db'),
-    (b'formatv0/.hg/data/empty.d',
-     b''),
+    (
+        b'formatv0/.hg/00changelog.i',
+        b'000000000000004400000000000000000000000000000000000000'
+        b'000000000000000000000000000000000000000000000000000000'
+        b'0000a1ef0b125355d27765928be600cfe85784284ab3',
+    ),
+    (
+        b'formatv0/.hg/00changelog.d',
+        b'756163613935613961356635353036303562366138343738336237'
+        b'61623536363738616436356635380a757365720a3020300a656d70'
+        b'74790a0a656d7074792066696c65',
+    ),
+    (
+        b'formatv0/.hg/00manifest.i',
+        b'000000000000003000000000000000000000000000000000000000'
+        b'000000000000000000000000000000000000000000000000000000'
+        b'0000aca95a9a5f550605b6a84783b7ab56678ad65f58',
+    ),
+    (
+        b'formatv0/.hg/00manifest.d',
+        b'75656d707479006238306465356431333837353835343163356630'
+        b'35323635616431343461623966613836643164620a',
+    ),
+    (
+        b'formatv0/.hg/data/empty.i',
+        b'000000000000000000000000000000000000000000000000000000'
+        b'000000000000000000000000000000000000000000000000000000'
+        b'0000b80de5d138758541c5f05265ad144ab9fa86d1db',
+    ),
+    (b'formatv0/.hg/data/empty.d', b''),
 ]
 
+
 def makedirs(name):
     """recursive directory creation"""
     parent = os.path.dirname(name)
@@ -53,6 +63,7 @@
         makedirs(parent)
     os.mkdir(name)
 
+
 makedirs(os.path.join(*'formatv0/.hg/data'.split('/')))
 
 for name, data in files:
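
The blank-line-only additions in this file come from black's vertical whitespace rules: two blank lines before and after every top-level function or class, and one blank line before a nested definition that follows other statements. A minimal sketch (helper is a hypothetical name):

    # before: top-level def packed against the surrounding code
    data = [1, 2]
    def helper(name):
        return name
    helper('x')

    # after: two blank lines isolate the top-level def
    data = [1, 2]


    def helper(name):
        return name


    helper('x')
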
--- a/tests/revnamesext.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/revnamesext.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,17 +2,20 @@
 
 from __future__ import absolute_import
 
-from mercurial import (
-    namespaces,
-)
+from mercurial import namespaces
+
 
 def reposetup(ui, repo):
     names = {b'r%d' % rev: repo[rev].node() for rev in repo}
     namemap = lambda r, name: names.get(name)
     nodemap = lambda r, node: [b'r%d' % repo[node].rev()]
 
-    ns = namespaces.namespace(b'revnames', templatename=b'revname',
-                              logname=b'revname',
-                              listnames=lambda r: names.keys(),
-                              namemap=namemap, nodemap=nodemap)
+    ns = namespaces.namespace(
+        b'revnames',
+        templatename=b'revname',
+        logname=b'revname',
+        listnames=lambda r: names.keys(),
+        namemap=namemap,
+        nodemap=nodemap,
+    )
     repo.names.addnamespace(ns)
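
The revnamesext.py hunk above demonstrates two further rules: a parenthesized "from" import that binds a single name collapses onto one line, and a keyword-heavy call that cannot fit gets one keyword argument per line with a trailing comma. A rough sketch with hypothetical names (pkg, mod, make):

    # before
    from pkg import (
        mod,
    )

    ns = make(b'name', first=1, second=2,
              third=3)

    # after
    from pkg import mod

    ns = make(
        b'name',
        first=1,
        second=2,
        third=3,
    )
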
--- a/tests/run-tests.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/run-tests.py	Sun Oct 06 09:45:02 2019 -0400
@@ -75,9 +75,11 @@
 
 try:
     import shlex
+
     shellquote = shlex.quote
 except (ImportError, AttributeError):
     import pipes
+
     shellquote = pipes.quote
 
 processlock = threading.Lock()
@@ -85,13 +87,14 @@
 pygmentspresent = False
 # ANSI color is unsupported prior to Windows 10
 if os.name != 'nt':
-    try: # is pygments installed
+    try:  # is pygments installed
         import pygments
         import pygments.lexers as lexers
         import pygments.lexer as lexer
         import pygments.formatters as formatters
         import pygments.token as token
         import pygments.style as style
+
         pygmentspresent = True
         difflexer = lexers.DiffLexer()
         terminal256formatter = formatters.Terminal256Formatter()
@@ -99,6 +102,7 @@
         pass
 
 if pygmentspresent:
+
     class TestRunnerStyle(style.Style):
         default_style = ""
         skipped = token.string_to_tokentype("Token.Generic.Skipped")
@@ -106,10 +110,10 @@
         skippedname = token.string_to_tokentype("Token.Generic.SName")
         failedname = token.string_to_tokentype("Token.Generic.FName")
         styles = {
-            skipped:         '#e5e5e5',
-            skippedname:     '#00ffff',
-            failed:          '#7f0000',
-            failedname:      '#ff0000',
+            skipped: '#e5e5e5',
+            skippedname: '#00ffff',
+            failed: '#7f0000',
+            failedname: '#ff0000',
         }
 
     class TestRunnerLexer(lexer.RegexLexer):
@@ -127,7 +131,7 @@
             'failed': [
                 (testpattern, token.Generic.FName),
                 (r'(:| ).*', token.Generic.Failed),
-            ]
+            ],
         }
 
     runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
@@ -137,7 +141,8 @@
 
 if sys.version_info > (3, 5, 0):
     PYTHON3 = True
-    xrange = range # we use xrange in one place, and we'd rather not use range
+    xrange = range  # we use xrange in one place, and we'd rather not use range
+
     def _bytespath(p):
         if p is None:
             return p
@@ -158,20 +163,27 @@
                 self.__len__ = strenv.__len__
                 self.clear = strenv.clear
                 self._strenv = strenv
+
             def __getitem__(self, k):
                 v = self._strenv.__getitem__(_strpath(k))
                 return _bytespath(v)
+
             def __setitem__(self, k, v):
                 self._strenv.__setitem__(_strpath(k), _strpath(v))
+
             def __delitem__(self, k):
                 self._strenv.__delitem__(_strpath(k))
+
             def __contains__(self, k):
                 return self._strenv.__contains__(_strpath(k))
+
             def __iter__(self):
                 return iter([_bytespath(k) for k in iter(self._strenv)])
+
             def get(self, k, default=None):
                 v = self._strenv.get(_strpath(k), _strpath(default))
                 return _bytespath(v)
+
             def pop(self, k, default=None):
                 v = self._strenv.pop(_strpath(k), _strpath(default))
                 return _bytespath(v)
@@ -183,9 +195,11 @@
         getcwdb = lambda: _bytespath(os.getcwd())
 
 elif sys.version_info >= (3, 0, 0):
-    print('%s is only supported on Python 3.5+ and 2.7, not %s' %
-          (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
-    sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
+    print(
+        '%s is only supported on Python 3.5+ and 2.7, not %s'
+        % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
+    )
+    sys.exit(70)  # EX_SOFTWARE from `man 3 sysexits`
 else:
     PYTHON3 = False
 
@@ -228,9 +242,11 @@
     else:
         return False
 
+
 # useipv6 will be set by parseargs
 useipv6 = None
 
+
 def checkportisavailable(port):
     """return true if a port seems free to bind on localhost"""
     if useipv6:
@@ -243,19 +259,31 @@
         s.close()
         return True
     except socket.error as exc:
-        if exc.errno not in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
-                             errno.EPROTONOSUPPORT):
+        if exc.errno not in (
+            errno.EADDRINUSE,
+            errno.EADDRNOTAVAIL,
+            errno.EPROTONOSUPPORT,
+        ):
             raise
     return False
 
+
 closefds = os.name == 'posix'
+
+
 def Popen4(cmd, wd, timeout, env=None):
     processlock.acquire()
-    p = subprocess.Popen(_strpath(cmd), shell=True, bufsize=-1,
-                         cwd=_strpath(wd), env=env,
-                         close_fds=closefds,
-                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                         stderr=subprocess.STDOUT)
+    p = subprocess.Popen(
+        _strpath(cmd),
+        shell=True,
+        bufsize=-1,
+        cwd=_strpath(wd),
+        env=env,
+        close_fds=closefds,
+        stdin=subprocess.PIPE,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
     processlock.release()
 
     p.fromchild = p.stdout
@@ -264,17 +292,20 @@
 
     p.timeout = False
     if timeout:
+
         def t():
             start = time.time()
             while time.time() - start < timeout and p.returncode is None:
-                time.sleep(.1)
+                time.sleep(0.1)
             p.timeout = True
             if p.returncode is None:
                 terminate(p)
+
         threading.Thread(target=t).start()
 
     return p
 
+
 if sys.executable:
     sysexecutable = sys.executable
 elif os.environ.get('PYTHONEXECUTABLE'):
@@ -297,9 +328,11 @@
     'shell': ('HGTEST_SHELL', 'sh'),
 }
 
+
 def canonpath(path):
     return os.path.realpath(os.path.expanduser(path))
 
+
 def parselistfiles(files, listtype, warn=True):
     entries = dict()
     for filename in files:
@@ -321,6 +354,7 @@
         f.close()
     return entries
 
+
 def parsettestcases(path):
     """read a .t test file, return a set of test case names
 
@@ -337,131 +371,262 @@
             raise
     return cases
 
+
 def getparser():
     """Obtain the OptionParser used by the CLI."""
     parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
 
     selection = parser.add_argument_group('Test Selection')
-    selection.add_argument('--allow-slow-tests', action='store_true',
-        help='allow extremely slow tests')
-    selection.add_argument("--blacklist", action="append",
-        help="skip tests listed in the specified blacklist file")
-    selection.add_argument("--changed",
-        help="run tests that are changed in parent rev or working directory")
-    selection.add_argument("-k", "--keywords",
-        help="run tests matching keywords")
-    selection.add_argument("-r", "--retest", action="store_true",
-        help = "retest failed tests")
-    selection.add_argument("--test-list", action="append",
-        help="read tests to run from the specified file")
-    selection.add_argument("--whitelist", action="append",
-        help="always run tests listed in the specified whitelist file")
-    selection.add_argument('tests', metavar='TESTS', nargs='*',
-                        help='Tests to run')
+    selection.add_argument(
+        '--allow-slow-tests',
+        action='store_true',
+        help='allow extremely slow tests',
+    )
+    selection.add_argument(
+        "--blacklist",
+        action="append",
+        help="skip tests listed in the specified blacklist file",
+    )
+    selection.add_argument(
+        "--changed",
+        help="run tests that are changed in parent rev or working directory",
+    )
+    selection.add_argument(
+        "-k", "--keywords", help="run tests matching keywords"
+    )
+    selection.add_argument(
+        "-r", "--retest", action="store_true", help="retest failed tests"
+    )
+    selection.add_argument(
+        "--test-list",
+        action="append",
+        help="read tests to run from the specified file",
+    )
+    selection.add_argument(
+        "--whitelist",
+        action="append",
+        help="always run tests listed in the specified whitelist file",
+    )
+    selection.add_argument(
+        'tests', metavar='TESTS', nargs='*', help='Tests to run'
+    )
 
     harness = parser.add_argument_group('Test Harness Behavior')
-    harness.add_argument('--bisect-repo',
-                        metavar='bisect_repo',
-                        help=("Path of a repo to bisect. Use together with "
-                              "--known-good-rev"))
-    harness.add_argument("-d", "--debug", action="store_true",
+    harness.add_argument(
+        '--bisect-repo',
+        metavar='bisect_repo',
+        help=(
+            "Path of a repo to bisect. Use together with " "--known-good-rev"
+        ),
+    )
+    harness.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
         help="debug mode: write output of test scripts to console"
-             " rather than capturing and diffing it (disables timeout)")
-    harness.add_argument("-f", "--first", action="store_true",
-        help="exit on the first test failure")
-    harness.add_argument("-i", "--interactive", action="store_true",
-        help="prompt to accept changed output")
-    harness.add_argument("-j", "--jobs", type=int,
+        " rather than capturing and diffing it (disables timeout)",
+    )
+    harness.add_argument(
+        "-f",
+        "--first",
+        action="store_true",
+        help="exit on the first test failure",
+    )
+    harness.add_argument(
+        "-i",
+        "--interactive",
+        action="store_true",
+        help="prompt to accept changed output",
+    )
+    harness.add_argument(
+        "-j",
+        "--jobs",
+        type=int,
         help="number of jobs to run in parallel"
-             " (default: $%s or %d)" % defaults['jobs'])
-    harness.add_argument("--keep-tmpdir", action="store_true",
-        help="keep temporary directory after running tests")
-    harness.add_argument('--known-good-rev',
-                        metavar="known_good_rev",
-                        help=("Automatically bisect any failures using this "
-                              "revision as a known-good revision."))
-    harness.add_argument("--list-tests", action="store_true",
-        help="list tests instead of running them")
-    harness.add_argument("--loop", action="store_true",
-        help="loop tests repeatedly")
-    harness.add_argument('--random', action="store_true",
-        help='run tests in random order')
-    harness.add_argument('--order-by-runtime', action="store_true",
-        help='run slowest tests first, according to .testtimes')
-    harness.add_argument("-p", "--port", type=int,
+        " (default: $%s or %d)" % defaults['jobs'],
+    )
+    harness.add_argument(
+        "--keep-tmpdir",
+        action="store_true",
+        help="keep temporary directory after running tests",
+    )
+    harness.add_argument(
+        '--known-good-rev',
+        metavar="known_good_rev",
+        help=(
+            "Automatically bisect any failures using this "
+            "revision as a known-good revision."
+        ),
+    )
+    harness.add_argument(
+        "--list-tests",
+        action="store_true",
+        help="list tests instead of running them",
+    )
+    harness.add_argument(
+        "--loop", action="store_true", help="loop tests repeatedly"
+    )
+    harness.add_argument(
+        '--random', action="store_true", help='run tests in random order'
+    )
+    harness.add_argument(
+        '--order-by-runtime',
+        action="store_true",
+        help='run slowest tests first, according to .testtimes',
+    )
+    harness.add_argument(
+        "-p",
+        "--port",
+        type=int,
         help="port on which servers should listen"
-             " (default: $%s or %d)" % defaults['port'])
-    harness.add_argument('--profile-runner', action='store_true',
-                        help='run statprof on run-tests')
-    harness.add_argument("-R", "--restart", action="store_true",
-        help="restart at last error")
-    harness.add_argument("--runs-per-test", type=int, dest="runs_per_test",
-        help="run each test N times (default=1)", default=1)
-    harness.add_argument("--shell",
-        help="shell to use (default: $%s or %s)" % defaults['shell'])
-    harness.add_argument('--showchannels', action='store_true',
-                        help='show scheduling channels')
-    harness.add_argument("--slowtimeout", type=int,
+        " (default: $%s or %d)" % defaults['port'],
+    )
+    harness.add_argument(
+        '--profile-runner',
+        action='store_true',
+        help='run statprof on run-tests',
+    )
+    harness.add_argument(
+        "-R", "--restart", action="store_true", help="restart at last error"
+    )
+    harness.add_argument(
+        "--runs-per-test",
+        type=int,
+        dest="runs_per_test",
+        help="run each test N times (default=1)",
+        default=1,
+    )
+    harness.add_argument(
+        "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
+    )
+    harness.add_argument(
+        '--showchannels', action='store_true', help='show scheduling channels'
+    )
+    harness.add_argument(
+        "--slowtimeout",
+        type=int,
         help="kill errant slow tests after SLOWTIMEOUT seconds"
-             " (default: $%s or %d)" % defaults['slowtimeout'])
-    harness.add_argument("-t", "--timeout", type=int,
+        " (default: $%s or %d)" % defaults['slowtimeout'],
+    )
+    harness.add_argument(
+        "-t",
+        "--timeout",
+        type=int,
         help="kill errant tests after TIMEOUT seconds"
-             " (default: $%s or %d)" % defaults['timeout'])
-    harness.add_argument("--tmpdir",
+        " (default: $%s or %d)" % defaults['timeout'],
+    )
+    harness.add_argument(
+        "--tmpdir",
         help="run tests in the given temporary directory"
-             " (implies --keep-tmpdir)")
-    harness.add_argument("-v", "--verbose", action="store_true",
-        help="output verbose messages")
+        " (implies --keep-tmpdir)",
+    )
+    harness.add_argument(
+        "-v", "--verbose", action="store_true", help="output verbose messages"
+    )
 
     hgconf = parser.add_argument_group('Mercurial Configuration')
-    hgconf.add_argument("--chg", action="store_true",
-        help="install and use chg wrapper in place of hg")
-    hgconf.add_argument("--compiler",
-        help="compiler to build with")
-    hgconf.add_argument('--extra-config-opt', action="append", default=[],
-        help='set the given config opt in the test hgrc')
-    hgconf.add_argument("-l", "--local", action="store_true",
+    hgconf.add_argument(
+        "--chg",
+        action="store_true",
+        help="install and use chg wrapper in place of hg",
+    )
+    hgconf.add_argument("--compiler", help="compiler to build with")
+    hgconf.add_argument(
+        '--extra-config-opt',
+        action="append",
+        default=[],
+        help='set the given config opt in the test hgrc',
+    )
+    hgconf.add_argument(
+        "-l",
+        "--local",
+        action="store_true",
         help="shortcut for --with-hg=<testdir>/../hg, "
-             "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
-    hgconf.add_argument("--ipv6", action="store_true",
-        help="prefer IPv6 to IPv4 for network related tests")
-    hgconf.add_argument("--pure", action="store_true",
-        help="use pure Python code instead of C extensions")
-    hgconf.add_argument("-3", "--py3-warnings", action="store_true",
-        help="enable Py3k warnings on Python 2.7+")
-    hgconf.add_argument("--with-chg", metavar="CHG",
-        help="use specified chg wrapper in place of hg")
-    hgconf.add_argument("--with-hg",
+        "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
+    )
+    hgconf.add_argument(
+        "--ipv6",
+        action="store_true",
+        help="prefer IPv6 to IPv4 for network related tests",
+    )
+    hgconf.add_argument(
+        "--pure",
+        action="store_true",
+        help="use pure Python code instead of C extensions",
+    )
+    hgconf.add_argument(
+        "-3",
+        "--py3-warnings",
+        action="store_true",
+        help="enable Py3k warnings on Python 2.7+",
+    )
+    hgconf.add_argument(
+        "--with-chg",
+        metavar="CHG",
+        help="use specified chg wrapper in place of hg",
+    )
+    hgconf.add_argument(
+        "--with-hg",
         metavar="HG",
         help="test using specified hg script rather than a "
-             "temporary installation")
+        "temporary installation",
+    )
 
     reporting = parser.add_argument_group('Results Reporting')
-    reporting.add_argument("-C", "--annotate", action="store_true",
-        help="output files annotated with coverage")
-    reporting.add_argument("--color", choices=["always", "auto", "never"],
+    reporting.add_argument(
+        "-C",
+        "--annotate",
+        action="store_true",
+        help="output files annotated with coverage",
+    )
+    reporting.add_argument(
+        "--color",
+        choices=["always", "auto", "never"],
         default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
-        help="colorisation: always|auto|never (default: auto)")
-    reporting.add_argument("-c", "--cover", action="store_true",
-        help="print a test coverage report")
-    reporting.add_argument('--exceptions', action='store_true',
-        help='log all exceptions and generate an exception report')
-    reporting.add_argument("-H", "--htmlcov", action="store_true",
-        help="create an HTML report of the coverage of the files")
-    reporting.add_argument("--json", action="store_true",
-        help="store test result data in 'report.json' file")
-    reporting.add_argument("--outputdir",
-        help="directory to write error logs to (default=test directory)")
-    reporting.add_argument("-n", "--nodiff", action="store_true",
-        help="skip showing test changes")
-    reporting.add_argument("-S", "--noskips", action="store_true",
-        help="don't report skip tests verbosely")
-    reporting.add_argument("--time", action="store_true",
-        help="time how long each test takes")
-    reporting.add_argument("--view",
-        help="external diff viewer")
-    reporting.add_argument("--xunit",
-        help="record xunit results at specified path")
+        help="colorisation: always|auto|never (default: auto)",
+    )
+    reporting.add_argument(
+        "-c",
+        "--cover",
+        action="store_true",
+        help="print a test coverage report",
+    )
+    reporting.add_argument(
+        '--exceptions',
+        action='store_true',
+        help='log all exceptions and generate an exception report',
+    )
+    reporting.add_argument(
+        "-H",
+        "--htmlcov",
+        action="store_true",
+        help="create an HTML report of the coverage of the files",
+    )
+    reporting.add_argument(
+        "--json",
+        action="store_true",
+        help="store test result data in 'report.json' file",
+    )
+    reporting.add_argument(
+        "--outputdir",
+        help="directory to write error logs to (default=test directory)",
+    )
+    reporting.add_argument(
+        "-n", "--nodiff", action="store_true", help="skip showing test changes"
+    )
+    reporting.add_argument(
+        "-S",
+        "--noskips",
+        action="store_true",
+        help="don't report skip tests verbosely",
+    )
+    reporting.add_argument(
+        "--time", action="store_true", help="time how long each test takes"
+    )
+    reporting.add_argument("--view", help="external diff viewer")
+    reporting.add_argument(
+        "--xunit", help="record xunit results at specified path"
+    )
 
     for option, (envvar, default) in defaults.items():
         defaults[option] = type(default)(os.environ.get(envvar, default))
@@ -469,6 +634,7 @@
 
     return parser
 
+
 def parseargs(args, parser):
     """Parse arguments with our OptionParser and validate results."""
     options = parser.parse_args(args)
@@ -488,14 +654,18 @@
         for relpath, attr in pathandattrs:
             binpath = os.path.join(reporootdir, relpath)
             if os.name != 'nt' and not os.access(binpath, os.X_OK):
-                parser.error('--local specified, but %r not found or '
-                             'not executable' % binpath)
+                parser.error(
+                    '--local specified, but %r not found or '
+                    'not executable' % binpath
+                )
             setattr(options, attr, _strpath(binpath))
 
     if options.with_hg:
         options.with_hg = canonpath(_bytespath(options.with_hg))
-        if not (os.path.isfile(options.with_hg) and
-                os.access(options.with_hg, os.X_OK)):
+        if not (
+            os.path.isfile(options.with_hg)
+            and os.access(options.with_hg, os.X_OK)
+        ):
             parser.error('--with-hg must specify an executable hg script')
         if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
             sys.stderr.write('warning: --with-hg should specify an hg script\n')
@@ -506,17 +676,23 @@
     if options.with_chg:
         options.chg = False  # no installation to temporary location
         options.with_chg = canonpath(_bytespath(options.with_chg))
-        if not (os.path.isfile(options.with_chg) and
-                os.access(options.with_chg, os.X_OK)):
+        if not (
+            os.path.isfile(options.with_chg)
+            and os.access(options.with_chg, os.X_OK)
+        ):
             parser.error('--with-chg must specify a chg executable')
     if options.chg and options.with_hg:
         # chg shares installation location with hg
-        parser.error('--chg does not work when --with-hg is specified '
-                     '(use --with-chg instead)')
+        parser.error(
+            '--chg does not work when --with-hg is specified '
+            '(use --with-chg instead)'
+        )
 
     if options.color == 'always' and not pygmentspresent:
-        sys.stderr.write('warning: --color=always ignored because '
-                         'pygments is not installed\n')
+        sys.stderr.write(
+            'warning: --color=always ignored because '
+            'pygments is not installed\n'
+        )
 
     if options.bisect_repo and not options.known_good_rev:
         parser.error("--bisect-repo cannot be used without --known-good-rev")
@@ -526,13 +702,15 @@
         useipv6 = checksocketfamily('AF_INET6')
     else:
         # only use IPv6 if IPv4 is unavailable and IPv6 is available
-        useipv6 = ((not checksocketfamily('AF_INET'))
-                   and checksocketfamily('AF_INET6'))
+        useipv6 = (not checksocketfamily('AF_INET')) and checksocketfamily(
+            'AF_INET6'
+        )
 
     options.anycoverage = options.cover or options.annotate or options.htmlcov
     if options.anycoverage:
         try:
             import coverage
+
             covver = version.StrictVersion(coverage.__version__).version
             if covver < (3, 3):
                 parser.error('coverage options require coverage 3.3 or later')
@@ -541,12 +719,14 @@
 
     if options.anycoverage and options.local:
         # this needs some path mangling somewhere, I guess
-        parser.error("sorry, coverage options do not work when --local "
-                     "is specified")
+        parser.error(
+            "sorry, coverage options do not work when --local " "is specified"
+        )
 
     if options.anycoverage and options.with_hg:
-        parser.error("sorry, coverage options do not work when --with-hg "
-                     "is specified")
+        parser.error(
+            "sorry, coverage options do not work when --with-hg " "is specified"
+        )
 
     global verbose
     if options.verbose:
@@ -561,17 +741,16 @@
         parser.error("-i/--interactive and -d/--debug are incompatible")
     if options.debug:
         if options.timeout != defaults['timeout']:
-            sys.stderr.write(
-                'warning: --timeout option ignored with --debug\n')
+            sys.stderr.write('warning: --timeout option ignored with --debug\n')
         if options.slowtimeout != defaults['slowtimeout']:
             sys.stderr.write(
-                'warning: --slowtimeout option ignored with --debug\n')
+                'warning: --slowtimeout option ignored with --debug\n'
+            )
         options.timeout = 0
         options.slowtimeout = 0
     if options.py3_warnings:
         if PYTHON3:
-            parser.error(
-                '--py3-warnings can only be used on Python 2.7')
+            parser.error('--py3-warnings can only be used on Python 2.7')
 
     if options.blacklist:
         options.blacklist = parselistfiles(options.blacklist, 'blacklist')
@@ -585,6 +764,7 @@
 
     return options
 
+
 def rename(src, dst):
     """Like os.rename(), trade atomicity and opened files friendliness
     for existing destination support.
@@ -592,6 +772,7 @@
     shutil.copy(src, dst)
     os.remove(src)
 
+
 def makecleanable(path):
     """Try to fix directory permission recursively so that the entire tree
     can be deleted"""
@@ -603,11 +784,14 @@
             except OSError:
                 pass
 
+
 _unified_diff = difflib.unified_diff
 if PYTHON3:
     import functools
+
     _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
 
+
 def getdiff(expected, output, ref, err):
     servefail = False
     lines = []
@@ -618,12 +802,16 @@
                 line = line[:-2] + b'\n'
         lines.append(line)
         if not servefail and line.startswith(
-                             b'+  abort: child process failed to start'):
+            b'+  abort: child process failed to start'
+        ):
             servefail = True
 
     return servefail, lines
 
+
 verbose = False
+
+
 def vlog(*msg):
     """Log only when in verbose mode."""
     if verbose is False:
@@ -631,6 +819,7 @@
 
     return log(*msg)
 
+
 # Bytes that break XML even in a CDATA block: control characters 0-31
 # sans \t, \n and \r
 CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")
@@ -641,6 +830,7 @@
 #   output..output (feature !)\n
 optline = re.compile(br'(.*) \((.+?) !\)\n$')
 
+
 def cdatasafe(data):
     """Make a string safe to include in a CDATA block.
 
@@ -651,6 +841,7 @@
     """
     return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
 
+
 def log(*msg):
     """Log something to stdout.
 
@@ -664,12 +855,15 @@
         print()
         sys.stdout.flush()
 
+
 def highlightdiff(line, color):
     if not color:
         return line
     assert pygmentspresent
-    return pygments.highlight(line.decode('latin1'), difflexer,
-                              terminal256formatter).encode('latin1')
+    return pygments.highlight(
+        line.decode('latin1'), difflexer, terminal256formatter
+    ).encode('latin1')
+
 
 def highlightmsg(msg, color):
     if not color:
@@ -677,6 +871,7 @@
     assert pygmentspresent
     return pygments.highlight(msg, runnerlexer, runnerformatter)
 
+
 def terminate(proc):
     """Terminate subprocess"""
     vlog('# Terminating process %d' % proc.pid)
@@ -685,10 +880,12 @@
     except OSError:
         pass
 
+
 def killdaemons(pidfile):
     import killdaemons as killmod
-    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
-                               logfn=vlog)
+
+    return killmod.killdaemons(pidfile, tryhard=False, remove=True, logfn=vlog)
+
 
 class Test(unittest.TestCase):
     """Encapsulates a single, runnable test.
@@ -701,14 +898,24 @@
     # Status code reserved for skipped tests (used by hghave).
     SKIPPED_STATUS = 80
 
-    def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
-                 debug=False,
-                 first=False,
-                 timeout=None,
-                 startport=None, extraconfigopts=None,
-                 py3warnings=False, shell=None, hgcommand=None,
-                 slowtimeout=None, usechg=False,
-                 useipv6=False):
+    def __init__(
+        self,
+        path,
+        outputdir,
+        tmpdir,
+        keeptmpdir=False,
+        debug=False,
+        first=False,
+        timeout=None,
+        startport=None,
+        extraconfigopts=None,
+        py3warnings=False,
+        shell=None,
+        hgcommand=None,
+        slowtimeout=None,
+        usechg=False,
+        useipv6=False,
+    ):
         """Create a test from parameters.
 
         path is the full path to the file defining the test.
@@ -783,7 +990,7 @@
         # If we're not in --debug mode and reference output file exists,
         # check test output against it.
         if self._debug:
-            return None # to match "out is None"
+            return None  # to match "out is None"
         elif os.path.exists(self.refpath):
             with open(self.refpath, 'rb') as f:
                 return f.read().splitlines(True)
@@ -830,8 +1037,9 @@
                     raise
 
         if self._usechg:
-            self._chgsockdir = os.path.join(self._threadtmp,
-                                            b'%s.chgsock' % name)
+            self._chgsockdir = os.path.join(
+                self._threadtmp, b'%s.chgsock' % name
+            )
             os.mkdir(self._chgsockdir)
 
     def run(self, result):
@@ -914,7 +1122,7 @@
         self._skipped = False
 
         if ret == self.SKIPPED_STATUS:
-            if out is None: # Debug mode, nothing to parse.
+            if out is None:  # Debug mode, nothing to parse.
                 missing = ['unknown']
                 failed = None
             else:
@@ -934,8 +1142,11 @@
             self.fail('no result code from test')
         elif out != self._refout:
             # Diff generation may rely on written .err file.
-            if ((ret != 0 or out != self._refout) and not self._skipped
-                and not self._debug):
+            if (
+                (ret != 0 or out != self._refout)
+                and not self._skipped
+                and not self._debug
+            ):
                 with open(self.errpath, 'wb') as f:
                     for line in out:
                         f.write(line)
@@ -965,9 +1176,13 @@
         self._daemonpids = []
 
         if self._keeptmpdir:
-            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
-                (self._testtmp.decode('utf-8'),
-                 self._threadtmp.decode('utf-8')))
+            log(
+                '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
+                % (
+                    self._testtmp.decode('utf-8'),
+                    self._threadtmp.decode('utf-8'),
+                )
+            )
         else:
             try:
                 shutil.rmtree(self._testtmp)
@@ -983,8 +1198,12 @@
             # files are deleted
             shutil.rmtree(self._chgsockdir, True)
 
-        if ((self._ret != 0 or self._out != self._refout) and not self._skipped
-            and not self._debug and self._out):
+        if (
+            (self._ret != 0 or self._out != self._refout)
+            and not self._skipped
+            and not self._debug
+            and self._out
+        ):
             with open(self.errpath, 'wb') as f:
                 for line in self._out:
                     f.write(line)
@@ -1017,7 +1236,7 @@
             self._portmap(2),
             (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
             (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
-            ]
+        ]
         r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
 
         replacementfile = os.path.join(self._testdir, b'common-pattern.py')
@@ -1038,10 +1257,15 @@
 
     def _escapepath(self, p):
         if os.name == 'nt':
-            return (
-                (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
-                    c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
-                    for c in [p[i:i + 1] for i in range(len(p))]))
+            return b''.join(
+                c.isalpha()
+                and b'[%s%s]' % (c.lower(), c.upper())
+                or c in b'/\\'
+                and br'[/\\]'
+                or c.isdigit()
+                and c
+                or b'\\' + c
+                for c in [p[i : i + 1] for i in range(len(p))]
             )
         else:
             return re.escape(p)
@@ -1083,9 +1307,11 @@
 
     def _getenv(self):
         """Obtain environment variables to use during test execution."""
+
         def defineport(i):
             offset = '' if i == 0 else '%s' % i
             env["HGPORT%s" % offset] = '%s' % (self._startport + i)
+
         env = os.environ.copy()
         env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
         env['HGEMITWARNINGS'] = '1'
@@ -1097,11 +1323,13 @@
             # This list should be parallel to _portmap in _getreplacements
             defineport(port)
         env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
-        env["DAEMON_PIDS"] = _strpath(os.path.join(self._threadtmp,
-                                                   b'daemon.pids'))
-        env["HGEDITOR"] = ('"' + sysexecutable + '"'
-                           + ' -c "import sys; sys.exit(0)"')
-        env["HGUSER"]   = "test"
+        env["DAEMON_PIDS"] = _strpath(
+            os.path.join(self._threadtmp, b'daemon.pids')
+        )
+        env["HGEDITOR"] = (
+            '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
+        )
+        env["HGUSER"] = "test"
         env["HGENCODING"] = "ascii"
         env["HGENCODINGMODE"] = "strict"
         env["HGHOSTNAME"] = "test-hostname"
@@ -1111,7 +1339,8 @@
             # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
             # non-test one in as a default, otherwise set to devnull
             env['HGTESTCATAPULTSERVERPIPE'] = env.get(
-                'HGCATAPULTSERVERPIPE', os.devnull)
+                'HGCATAPULTSERVERPIPE', os.devnull
+            )
 
         extraextensions = []
         for opt in self._extraconfigopts:
@@ -1187,11 +1416,15 @@
             hgrc.write(b'all-warnings = true\n')
             hgrc.write(b'default-date = 0 0\n')
             hgrc.write(b'[largefiles]\n')
-            hgrc.write(b'usercache = %s\n' %
-                       (os.path.join(self._testtmp, b'.cache/largefiles')))
+            hgrc.write(
+                b'usercache = %s\n'
+                % (os.path.join(self._testtmp, b'.cache/largefiles'))
+            )
             hgrc.write(b'[lfs]\n')
-            hgrc.write(b'usercache = %s\n' %
-                       (os.path.join(self._testtmp, b'.cache/lfs')))
+            hgrc.write(
+                b'usercache = %s\n'
+                % (os.path.join(self._testtmp, b'.cache/lfs'))
+            )
             hgrc.write(b'[web]\n')
             hgrc.write(b'address = localhost\n')
             hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
@@ -1199,8 +1432,9 @@
 
             for opt in self._extraconfigopts:
                 section, key = opt.encode('utf-8').split(b'.', 1)
-                assert b'=' in key, ('extra config opt %s must '
-                                     'have an = for assignment' % opt)
+                assert b'=' in key, (
+                    'extra config opt %s must have an = for assignment' % opt
+                )
                 hgrc.write(b'[%s]\n%s\n' % (section, key))
 
     def fail(self, msg):
@@ -1215,13 +1449,14 @@
         Return a tuple (exitcode, output). output is None in debug mode.
         """
         if self._debug:
-            proc = subprocess.Popen(_strpath(cmd), shell=True,
-                                    cwd=_strpath(self._testtmp),
-                                    env=env)
+            proc = subprocess.Popen(
+                _strpath(cmd), shell=True, cwd=_strpath(self._testtmp), env=env
+            )
             ret = proc.wait()
             return (ret, None)
 
         proc = Popen4(cmd, self._testtmp, self._timeout, env)
+
         def cleanup():
             terminate(proc)
             ret = proc.wait()
@@ -1257,6 +1492,7 @@
 
         return ret, output.splitlines(True)
 
+
 class PythonTest(Test):
     """A Python-based test."""
 
@@ -1270,13 +1506,13 @@
         cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
         vlog("# Running", cmd)
         normalizenewlines = os.name == 'nt'
-        result = self._runcommand(cmd, env,
-                                  normalizenewlines=normalizenewlines)
+        result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
         if self._aborted:
             raise KeyboardInterrupt()
 
         return result
 
+
 # Some glob patterns apply only in some circumstances, so the script
 # might want to remove (glob) annotations that otherwise should be
 # retained.
@@ -1301,9 +1537,11 @@
 
 MARK_OPTIONAL = b" (?)\n"
 
+
 def isoptional(line):
     return line.endswith(MARK_OPTIONAL)
 
+
 class TTest(Test):
     """A "t test" is a test backed by a .t file."""
 
@@ -1376,9 +1614,12 @@
         # TODO do something smarter when all other uses of hghave are gone.
         runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
         tdir = runtestdir.replace(b'\\', b'/')
-        proc = Popen4(b'%s -c "%s/hghave %s"' %
-                      (self._shell, tdir, allreqs),
-                      self._testtmp, 0, self._getenv())
+        proc = Popen4(
+            b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
+            self._testtmp,
+            0,
+            self._getenv(),
+        )
         stdout, stderr = proc.communicate()
         ret = proc.wait()
         if wifexited(ret):
@@ -1419,25 +1660,30 @@
         # up script results with our source. These markers include input
         # line number and the last return code.
         salt = b"SALT%d" % time.time()
+
         def addsalt(line, inpython):
             if inpython:
                 script.append(b'%s %d 0\n' % (salt, line))
             else:
                 script.append(b'echo %s %d $?\n' % (salt, line))
+
         activetrace = []
         session = str(uuid.uuid4())
         if PYTHON3:
             session = session.encode('ascii')
-        hgcatapult = (os.getenv('HGTESTCATAPULTSERVERPIPE') or
-                      os.getenv('HGCATAPULTSERVERPIPE'))
+        hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
+            'HGCATAPULTSERVERPIPE'
+        )
+
         def toggletrace(cmd=None):
             if not hgcatapult or hgcatapult == os.devnull:
                 return
 
             if activetrace:
                 script.append(
-                    b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
-                        session, activetrace[0]))
+                    b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
+                    % (session, activetrace[0])
+                )
             if cmd is None:
                 return
 
@@ -1447,8 +1693,9 @@
                 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
             quoted = quoted.replace(b'\\', b'\\\\')
             script.append(
-                b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
-                    session, quoted))
+                b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
+                % (session, quoted)
+            )
             activetrace[0:] = [quoted]
 
         script = []
@@ -1550,21 +1797,21 @@
                 after.setdefault(pos, []).append(l)
             elif skipping:
                 after.setdefault(pos, []).append(l)
-            elif l.startswith(b'  >>> '): # python inlines
+            elif l.startswith(b'  >>> '):  # python inlines
                 after.setdefault(pos, []).append(l)
                 prepos = pos
                 pos = n
                 if not inpython:
                     # We've just entered a Python block. Add the header.
                     inpython = True
-                    addsalt(prepos, False) # Make sure we report the exit code.
+                    addsalt(prepos, False)  # Make sure we report the exit code.
                     script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
                 addsalt(n, True)
                 script.append(l[2:])
-            elif l.startswith(b'  ... '): # python inlines
+            elif l.startswith(b'  ... '):  # python inlines
                 after.setdefault(prepos, []).append(l)
                 script.append(l[2:])
-            elif l.startswith(b'  $ '): # commands
+            elif l.startswith(b'  $ '):  # commands
                 if inpython:
                     script.append(b'EOF\n')
                     inpython = False
@@ -1578,10 +1825,10 @@
                 if len(cmd) == 2 and cmd[0] == b'cd':
                     l = b'  $ cd %s || exit 1\n' % cmd[1]
                 script.append(rawcmd)
-            elif l.startswith(b'  > '): # continuations
+            elif l.startswith(b'  > '):  # continuations
                 after.setdefault(prepos, []).append(l)
                 script.append(l[4:])
-            elif l.startswith(b'  '): # results
+            elif l.startswith(b'  '):  # results
                 # Queue up a list of expected results.
                 expected.setdefault(pos, []).append(l[2:])
             else:
@@ -1603,7 +1850,7 @@
 
     def _processoutput(self, exitcode, output, salt, after, expected):
         # Merge the script output back into a unified test.
-        warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
+        warnonly = WARN_UNDEFINED  # 1: not yet; 2: yes; 3: for sure not
         if exitcode != 0:
             warnonly = WARN_NO
 
@@ -1614,19 +1861,16 @@
             if salt in out_rawline:
                 out_line, cmd_line = out_rawline.split(salt, 1)
 
-            pos, postout, warnonly = self._process_out_line(out_line,
-                                                            pos,
-                                                            postout,
-                                                            expected,
-                                                            warnonly)
-            pos, postout = self._process_cmd_line(cmd_line, pos, postout,
-                                                  after)
+            pos, postout, warnonly = self._process_out_line(
+                out_line, pos, postout, expected, warnonly
+            )
+            pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
 
         if pos in after:
             postout += after.pop(pos)
 
         if warnonly == WARN_YES:
-            exitcode = False # Set exitcode to warned.
+            exitcode = False  # Set exitcode to warned.
 
         return exitcode, postout
 
@@ -1648,7 +1892,7 @@
                 if isinstance(r, str):
                     if r == '-glob':
                         out_line = ''.join(el.rsplit(' (glob)', 1))
-                        r = '' # Warn only this line.
+                        r = ''  # Warn only this line.
                     elif r == "retry":
                         postout.append(b'  ' + el)
                     else:
@@ -1663,8 +1907,7 @@
                     else:
                         m = optline.match(el)
                         if m:
-                            conditions = [
-                                c for c in m.group(2).split(b' ')]
+                            conditions = [c for c in m.group(2).split(b' ')]
 
                             if not self._iftest(conditions):
                                 optional.append(i)
@@ -1685,10 +1928,11 @@
                 postout.append(b'  ' + el)
             else:
                 if self.NEEDESCAPE(out_line):
-                    out_line = TTest._stringescape(b'%s (esc)\n' %
-                                               out_line.rstrip(b'\n'))
-                postout.append(b'  ' + out_line) # Let diff deal with it.
-                if r != '': # If line failed.
+                    out_line = TTest._stringescape(
+                        b'%s (esc)\n' % out_line.rstrip(b'\n')
+                    )
+                postout.append(b'  ' + out_line)  # Let diff deal with it.
+                if r != '':  # If line failed.
                     warnonly = WARN_NO
                 elif warnonly == WARN_UNDEFINED:
                     warnonly = WARN_YES
@@ -1752,10 +1996,10 @@
         i, n = 0, len(el)
         res = b''
         while i < n:
-            c = el[i:i + 1]
+            c = el[i : i + 1]
             i += 1
-            if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
-                res += el[i - 1:i + 1]
+            if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
+                res += el[i - 1 : i + 1]
                 i += 1
             elif c == b'*':
                 res += b'.*'
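
Beyond wrapping, black also normalizes spacing inside lines, as in the hunk above and several earlier ones: inline comments get exactly two spaces before the hash, bare float literals such as .1 gain a leading zero, and slice bounds that are expressions rather than plain names get spaces around the colon. A small sketch reusing the names from the surrounding code (el, i):

    # before
    time.sleep(.1) # brief pause
    c = el[i:i + 1]

    # after
    time.sleep(0.1)  # brief pause
    c = el[i : i + 1]
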
@@ -1768,12 +2012,12 @@
         return TTest.rematch(res, l)
 
     def linematch(self, el, l):
-        if el == l: # perfect match (fast)
+        if el == l:  # perfect match (fast)
             return True, True
         retry = False
         if isoptional(el):
             retry = "retry"
-            el = el[:-len(MARK_OPTIONAL)] + b"\n"
+            el = el[: -len(MARK_OPTIONAL)] + b"\n"
         else:
             m = optline.match(el)
             if m:
@@ -1817,10 +2061,12 @@
         for line in lines:
             if line.startswith(TTest.SKIPPED_PREFIX):
                 line = line.splitlines()[0]
-                missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
+                missing.append(
+                    line[len(TTest.SKIPPED_PREFIX) :].decode('utf-8')
+                )
             elif line.startswith(TTest.FAILED_PREFIX):
                 line = line.splitlines()[0]
-                failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
+                failed.append(line[len(TTest.FAILED_PREFIX) :].decode('utf-8'))
 
         return missing, failed
 
@@ -1832,12 +2078,15 @@
     def _stringescape(s):
         return TTest.ESCAPESUB(TTest._escapef, s)
 
+
 iolock = threading.RLock()
 firstlock = threading.RLock()
 firsterror = False
 
+
 class TestResult(unittest._TextTestResult):
     """Holds results when executing via unittest."""
+
     # Don't worry too much about accessing the non-public _TextTestResult.
     # It is relatively common in Python testing tools.
     def __init__(self, options, *args, **kwargs):
@@ -1864,7 +2113,7 @@
             self.color = pygmentspresent and self.stream.isatty()
         elif options.color == 'never':
             self.color = False
-        else: # 'always', for testing purposes
+        else:  # 'always', for testing purposes
             self.color = pygmentspresent
 
     def onStart(self, test):
@@ -1942,12 +2191,15 @@
                 pass
             elif self._options.view:
                 v = self._options.view
-                subprocess.call(r'"%s" "%s" "%s"' %
-                                (v, _strpath(test.refpath),
-                                 _strpath(test.errpath)), shell=True)
+                subprocess.call(
+                    r'"%s" "%s" "%s"'
+                    % (v, _strpath(test.refpath), _strpath(test.errpath)),
+                    shell=True,
+                )
             else:
-                servefail, lines = getdiff(expected, got,
-                                           test.refpath, test.errpath)
+                servefail, lines = getdiff(
+                    expected, got, test.refpath, test.errpath
+                )
                 self.stream.write('\n')
                 for line in lines:
                     line = highlightdiff(line, self.color)
@@ -1961,14 +2213,16 @@
 
                 if servefail:
                     raise test.failureException(
-                        'server failed to start (HGPORT=%s)' % test._startport)
+                        'server failed to start (HGPORT=%s)' % test._startport
+                    )
 
             # handle interactive prompt without releasing iolock
             if self._options.interactive:
                 if test.readrefout() != expected:
                     self.stream.write(
                         'Reference output has changed (run again to prompt '
-                        'changes)')
+                        'changes)'
+                    )
                 else:
                     self.stream.write('Accept this change? [n] ')
                     self.stream.flush()
@@ -1992,7 +2246,7 @@
         # This module has one limitation. It can only work for Linux users
         # and not for Windows.
         test.started = os.times()
-        if self._firststarttime is None: # thread racy but irrelevant
+        if self._firststarttime is None:  # thread racy but irrelevant
             self._firststarttime = test.started[4]
 
     def stopTest(self, test, interrupted=False):
@@ -2003,18 +2257,24 @@
         starttime = test.started
         endtime = test.stopped
         origin = self._firststarttime
-        self.times.append((test.name,
-                           endtime[2] - starttime[2], # user space CPU time
-                           endtime[3] - starttime[3], # sys  space CPU time
-                           endtime[4] - starttime[4], # real time
-                           starttime[4] - origin, # start date in run context
-                           endtime[4] - origin, # end date in run context
-                           ))
+        self.times.append(
+            (
+                test.name,
+                endtime[2] - starttime[2],  # user space CPU time
+                endtime[3] - starttime[3],  # sys  space CPU time
+                endtime[4] - starttime[4],  # real time
+                starttime[4] - origin,  # start date in run context
+                endtime[4] - origin,  # end date in run context
+            )
+        )
 
         if interrupted:
             with iolock:
-                self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
-                    test.name, self.times[-1][3]))
+                self.stream.writeln(
+                    'INTERRUPTED: %s (after %d seconds)'
+                    % (test.name, self.times[-1][3])
+                )
+
 
 def getTestResult():
     """
@@ -2026,13 +2286,25 @@
     else:
         return TestResult
 
+
 class TestSuite(unittest.TestSuite):
     """Custom unittest TestSuite that knows how to execute Mercurial tests."""
 
-    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
-                 retest=False, keywords=None, loop=False, runs_per_test=1,
-                 loadtest=None, showchannels=False,
-                 *args, **kwargs):
+    def __init__(
+        self,
+        testdir,
+        jobs=1,
+        whitelist=None,
+        blacklist=None,
+        retest=False,
+        keywords=None,
+        loop=False,
+        runs_per_test=1,
+        loadtest=None,
+        showchannels=False,
+        *args,
+        **kwargs
+    ):
         """Create a new instance that can run tests with a configuration.
 
         testdir specifies the directory where tests are executed from. This
@@ -2079,11 +2351,13 @@
         tests = []
         num_tests = [0]
         for test in self._tests:
+
             def get():
                 num_tests[0] += 1
                 if getattr(test, 'should_reload', False):
                     return self._loadtest(test, num_tests[0])
                 return test
+
             if not os.path.exists(test.path):
                 result.addSkip(test, "Doesn't exist")
                 continue
@@ -2131,7 +2405,7 @@
                 done.put(None)
             except KeyboardInterrupt:
                 pass
-            except: # re-raises
+            except:  # re-raises
                 done.put(('!', test, 'run-test raised an error, see traceback'))
                 raise
             finally:
@@ -2156,7 +2430,7 @@
                     sys.stdout.flush()
                 for x in xrange(10):
                     if channels:
-                        time.sleep(.1)
+                        time.sleep(0.1)
                 count += 1
 
         stoppedearly = False
@@ -2181,15 +2455,15 @@
                     if self._loop:
                         if getattr(test, 'should_reload', False):
                             num_tests[0] += 1
-                            tests.append(
-                                self._loadtest(test, num_tests[0]))
+                            tests.append(self._loadtest(test, num_tests[0]))
                         else:
                             tests.append(test)
                     if self._jobs == 1:
                         job(test, result)
                     else:
-                        t = threading.Thread(target=job, name=test.name,
-                                             args=(test, result))
+                        t = threading.Thread(
+                            target=job, name=test.name, args=(test, result)
+                        )
                         t.start()
                     running += 1
 
@@ -2212,24 +2486,28 @@
 
         return result
 
+
 # Save the most recent 5 wall-clock runtimes of each test to a
 # human-readable text file named .testtimes. Tests are sorted
 # alphabetically, while times for each test are listed from oldest to
 # newest.
 
+
 def loadtimes(outputdir):
     times = []
     try:
         with open(os.path.join(outputdir, b'.testtimes')) as fp:
             for line in fp:
                 m = re.match('(.*?) ([0-9. ]+)', line)
-                times.append((m.group(1),
-                              [float(t) for t in m.group(2).split()]))
+                times.append(
+                    (m.group(1), [float(t) for t in m.group(2).split()])
+                )
     except IOError as err:
         if err.errno != errno.ENOENT:
             raise
     return times
 
+
 def savetimes(outputdir, result):
     saved = dict(loadtimes(outputdir))
     maxruns = 5
@@ -2241,8 +2519,9 @@
             ts.append(real)
             ts[:] = ts[-maxruns:]
 
-    fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
-                                   dir=outputdir, text=True)
+    fd, tmpname = tempfile.mkstemp(
+        prefix=b'.testtimes', dir=outputdir, text=True
+    )
     with os.fdopen(fd, 'w') as fp:
         for name, ts in sorted(saved.items()):
             fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
@@ -2256,6 +2535,7 @@
     except OSError:
         pass
 
+
 class TextTestRunner(unittest.TextTestRunner):
     """Custom unittest test runner that uses appropriate settings."""
 
@@ -2264,8 +2544,9 @@
 
         self._runner = runner
 
-        self._result = getTestResult()(self._runner.options, self.stream,
-                                       self.descriptions, self.verbosity)
+        self._result = getTestResult()(
+            self._runner.options, self.stream, self.descriptions, self.verbosity
+        )
 
     def listtests(self, test):
         test = sorted(test, key=lambda t: t.name)
@@ -2299,17 +2580,20 @@
             self.stream.writeln('')
 
             if not self._runner.options.noskips:
-                for test, msg in sorted(self._result.skipped,
-                                        key=lambda s: s[0].name):
+                for test, msg in sorted(
+                    self._result.skipped, key=lambda s: s[0].name
+                ):
                     formatted = 'Skipped %s: %s\n' % (test.name, msg)
                     msg = highlightmsg(formatted, self._result.color)
                     self.stream.write(msg)
-            for test, msg in sorted(self._result.failures,
-                                    key=lambda f: f[0].name):
+            for test, msg in sorted(
+                self._result.failures, key=lambda f: f[0].name
+            ):
                 formatted = 'Failed %s: %s\n' % (test.name, msg)
                 self.stream.write(highlightmsg(formatted, self._result.color))
-            for test, msg in sorted(self._result.errors,
-                                    key=lambda e: e[0].name):
+            for test, msg in sorted(
+                self._result.errors, key=lambda e: e[0].name
+            ):
                 self.stream.writeln('Errored %s: %s' % (test.name, msg))
 
             if self._runner.options.xunit:
@@ -2329,31 +2613,41 @@
                 self._bisecttests(t for t, m in self._result.failures)
             self.stream.writeln(
                 '# Ran %d tests, %d skipped, %d failed.'
-                % (self._result.testsRun, skipped + ignored, failed))
+                % (self._result.testsRun, skipped + ignored, failed)
+            )
             if failed:
-                self.stream.writeln('python hash seed: %s' %
-                    os.environ['PYTHONHASHSEED'])
+                self.stream.writeln(
+                    'python hash seed: %s' % os.environ['PYTHONHASHSEED']
+                )
             if self._runner.options.time:
                 self.printtimes(self._result.times)
 
             if self._runner.options.exceptions:
                 exceptions = aggregateexceptions(
-                    os.path.join(self._runner._outputdir, b'exceptions'))
+                    os.path.join(self._runner._outputdir, b'exceptions')
+                )
 
                 self.stream.writeln('Exceptions Report:')
-                self.stream.writeln('%d total from %d frames' %
-                                    (exceptions['total'],
-                                     len(exceptions['exceptioncounts'])))
+                self.stream.writeln(
+                    '%d total from %d frames'
+                    % (exceptions['total'], len(exceptions['exceptioncounts']))
+                )
                 combined = exceptions['combined']
                 for key in sorted(combined, key=combined.get, reverse=True):
                     frame, line, exc = key
                     totalcount, testcount, leastcount, leasttest = combined[key]
 
-                    self.stream.writeln('%d (%d tests)\t%s: %s (%s - %d total)'
-                                        % (totalcount,
-                                           testcount,
-                                           frame, exc,
-                                           leasttest, leastcount))
+                    self.stream.writeln(
+                        '%d (%d tests)\t%s: %s (%s - %d total)'
+                        % (
+                            totalcount,
+                            testcount,
+                            frame,
+                            exc,
+                            leasttest,
+                            leastcount,
+                        )
+                    )
 
             self.stream.flush()
 
@@ -2364,14 +2658,17 @@
         bisectrepo = self._runner.options.bisect_repo
         if bisectrepo:
             bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
+
         def pread(args):
             env = os.environ.copy()
             env['HGPLAIN'] = '1'
-            p = subprocess.Popen(args, stderr=subprocess.STDOUT,
-                                 stdout=subprocess.PIPE, env=env)
+            p = subprocess.Popen(
+                args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
+            )
             data = p.stdout.read()
             p.wait()
             return data
+
         for test in tests:
             pread(bisectcmd + ['--reset']),
             pread(bisectcmd + ['--bad', '.'])
@@ -2382,32 +2679,43 @@
             withhg = self._runner.options.with_hg
             if withhg:
                 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
-            rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts,
-                                   test)
+            rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
             data = pread(bisectcmd + ['--command', rtc])
             m = re.search(
-                (br'\nThe first (?P<goodbad>bad|good) revision '
-                 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
-                 br'summary: +(?P<summary>[^\n]+)\n'),
-                data, (re.MULTILINE | re.DOTALL))
+                (
+                    br'\nThe first (?P<goodbad>bad|good) revision '
+                    br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
+                    br'summary: +(?P<summary>[^\n]+)\n'
+                ),
+                data,
+                (re.MULTILINE | re.DOTALL),
+            )
             if m is None:
                 self.stream.writeln(
-                    'Failed to identify failure point for %s' % test)
+                    'Failed to identify failure point for %s' % test
+                )
                 continue
             dat = m.groupdict()
             verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
             self.stream.writeln(
-                '%s %s by %s (%s)' % (
-                    test, verb, dat['node'].decode('ascii'),
-                    dat['summary'].decode('utf8', 'ignore')))
+                '%s %s by %s (%s)'
+                % (
+                    test,
+                    verb,
+                    dat['node'].decode('ascii'),
+                    dat['summary'].decode('utf8', 'ignore'),
+                )
+            )
 
     def printtimes(self, times):
         # iolock held by run
         self.stream.writeln('# Producing time report')
         times.sort(key=lambda t: (t[3]))
         cols = '%7.3f %7.3f %7.3f %7.3f %7.3f   %s'
-        self.stream.writeln('%-7s %-7s %-7s %-7s %-7s   %s' %
-                            ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
+        self.stream.writeln(
+            '%-7s %-7s %-7s %-7s %-7s   %s'
+            % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
+        )
         for tdata in times:
             test = tdata[0]
             cuser, csys, real, start, end = tdata[1:6]
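
`_bisecttests` above drives `hg bisect --reset`/`--bad`/`--good --command` and then recovers the culprit changeset from the command's textual output. A small sketch of just that parsing step, run against fabricated sample output (the changeset hash and summary below are invented, not captured from a real bisect run):

    import re

    # Fabricated tail of `hg bisect` output; the shape is what the regex
    # in _bisecttests above expects.
    data = (b'...\nThe first bad revision is:\n'
            b'changeset:   1234:abc123def456\n'
            b'user:        someone\n'
            b'summary:     break the frobnicator\n')

    m = re.search(
        br'\nThe first (?P<goodbad>bad|good) revision '
        br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
        br'summary: +(?P<summary>[^\n]+)\n',
        data,
        re.MULTILINE | re.DOTALL,
    )
    dat = m.groupdict()
    verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
    print('%s by %s (%s)' % (verb, dat['node'].decode('ascii'),
                             dat['summary'].decode('utf8', 'ignore')))
    # -> broken by abc123def456 (break the frobnicator)
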
@@ -2419,11 +2727,12 @@
         timesd = dict((t[0], t[3]) for t in result.times)
         doc = minidom.Document()
         s = doc.createElement('testsuite')
-        s.setAttribute('errors', "0") # TODO
+        s.setAttribute('errors', "0")  # TODO
         s.setAttribute('failures', str(len(result.failures)))
         s.setAttribute('name', 'run-tests')
-        s.setAttribute('skipped', str(len(result.skipped) +
-                                      len(result.ignored)))
+        s.setAttribute(
+            'skipped', str(len(result.skipped) + len(result.ignored))
+        )
         s.setAttribute('tests', str(result.testsRun))
         doc.appendChild(s)
         for tc in result.successes:
@@ -2474,10 +2783,11 @@
             timesd[test] = tdata[1:]
 
         outcome = {}
-        groups = [('success', ((tc, None)
-                   for tc in result.successes)),
-                  ('failure', result.failures),
-                  ('skip', result.skipped)]
+        groups = [
+            ('success', ((tc, None) for tc in result.successes)),
+            ('failure', result.failures),
+            ('skip', result.skipped),
+        ]
         for res, testcases in groups:
             for tc, __ in testcases:
                 if tc.name in timesd:
@@ -2486,23 +2796,26 @@
                         diff = diff.decode('unicode_escape')
                     except UnicodeDecodeError as e:
                         diff = '%r decoding diff, sorry' % e
-                    tres = {'result': res,
-                            'time': ('%0.3f' % timesd[tc.name][2]),
-                            'cuser': ('%0.3f' % timesd[tc.name][0]),
-                            'csys': ('%0.3f' % timesd[tc.name][1]),
-                            'start': ('%0.3f' % timesd[tc.name][3]),
-                            'end': ('%0.3f' % timesd[tc.name][4]),
-                            'diff': diff,
-                            }
+                    tres = {
+                        'result': res,
+                        'time': ('%0.3f' % timesd[tc.name][2]),
+                        'cuser': ('%0.3f' % timesd[tc.name][0]),
+                        'csys': ('%0.3f' % timesd[tc.name][1]),
+                        'start': ('%0.3f' % timesd[tc.name][3]),
+                        'end': ('%0.3f' % timesd[tc.name][4]),
+                        'diff': diff,
+                    }
                 else:
                     # blacklisted test
                     tres = {'result': res}
 
                 outcome[tc.name] = tres
-        jsonout = json.dumps(outcome, sort_keys=True, indent=4,
-                             separators=(',', ': '))
+        jsonout = json.dumps(
+            outcome, sort_keys=True, indent=4, separators=(',', ': ')
+        )
         outf.writelines(("testreport =", jsonout))
 
+
 def sorttests(testdescs, previoustimes, shuffle=False):
     """Do an in-place sort of tests."""
     if shuffle:
@@ -2510,29 +2823,32 @@
         return
 
     if previoustimes:
+
         def sortkey(f):
             f = f['path']
             if f in previoustimes:
                 # Use most recent time as estimate
-                return -previoustimes[f][-1]
+                return -(previoustimes[f][-1])
             else:
                 # Default to a rather arbitrary value of 1 second for new tests
                 return -1.0
+
     else:
         # keywords for slow tests
-        slow = {b'svn': 10,
-                b'cvs': 10,
-                b'hghave': 10,
-                b'largefiles-update': 10,
-                b'run-tests': 10,
-                b'corruption': 10,
-                b'race': 10,
-                b'i18n': 10,
-                b'check': 100,
-                b'gendoc': 100,
-                b'contrib-perf': 200,
-                b'merge-combination': 100,
-                }
+        slow = {
+            b'svn': 10,
+            b'cvs': 10,
+            b'hghave': 10,
+            b'largefiles-update': 10,
+            b'run-tests': 10,
+            b'corruption': 10,
+            b'race': 10,
+            b'i18n': 10,
+            b'check': 100,
+            b'gendoc': 100,
+            b'contrib-perf': 200,
+            b'merge-combination': 100,
+        }
         perf = {}
 
         def sortkey(f):
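
Both branches of `sortkey` return negated runtime estimates, so the `testdescs.sort(key=sortkey)` call below schedules the slowest tests first and keeps the parallel jobs busy. A toy illustration with invented test names and timings:

    previoustimes = {b'test-slow.t': [9.0, 10.0], b'test-fast.t': [0.3]}
    testdescs = [{'path': b'test-fast.t'},
                 {'path': b'test-new.t'},
                 {'path': b'test-slow.t'}]

    def sortkey(f):
        f = f['path']
        if f in previoustimes:
            return -previoustimes[f][-1]  # most recent run as the estimate
        return -1.0  # arbitrary 1s default for tests never seen before

    testdescs.sort(key=sortkey)
    print([d['path'] for d in testdescs])
    # [b'test-slow.t', b'test-new.t', b'test-fast.t'] -- the unseen test's
    # 1s default slots it between the slow and fast ones
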
@@ -2558,6 +2874,7 @@
 
     testdescs.sort(key=sortkey)
 
+
 class TestRunner(object):
     """Holds context for executing tests.
 
@@ -2614,6 +2931,7 @@
             testdescs = self.findtests(tests)
             if options.profile_runner:
                 import statprof
+
                 statprof.start()
             result = self._run(testdescs)
             if options.profile_runner:
@@ -2668,8 +2986,7 @@
                 d = osenvironb.get(b'TMP', None)
             tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
 
-        self._hgtmp = osenvironb[b'HGTMP'] = (
-            os.path.realpath(tmpdir))
+        self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)
 
         if self.options.with_hg:
             self._installdir = None
@@ -2791,7 +3108,8 @@
             osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
             logexceptions = os.path.join(self._testdir, b'logexceptions.py')
             self.options.extra_config_opt.append(
-                'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
+                'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
+            )
 
         vlog("# Using TESTDIR", self._testdir)
         vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
@@ -2803,7 +3121,7 @@
         try:
             return self._runtests(testdescs) or 0
         finally:
-            time.sleep(.1)
+            time.sleep(0.1)
             self._cleanup()
 
     def findtests(self, args):
@@ -2814,8 +3132,12 @@
         """
         if not args:
             if self.options.changed:
-                proc = Popen4(b'hg st --rev "%s" -man0 .' %
-                              _bytespath(self.options.changed), None, 0)
+                proc = Popen4(
+                    b'hg st --rev "%s" -man0 .'
+                    % _bytespath(self.options.changed),
+                    None,
+                    0,
+                )
                 stdout, stderr = proc.communicate()
                 args = stdout.strip(b'\0').split(b'\0')
             else:
@@ -2832,13 +3154,16 @@
         args = expanded_args
 
         testcasepattern = re.compile(
-            br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))')
+            br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))'
+        )
         tests = []
         for t in args:
             case = []
 
-            if not (os.path.basename(t).startswith(b'test-')
-                    and (t.endswith(b'.py') or t.endswith(b'.t'))):
+            if not (
+                os.path.basename(t).startswith(b'test-')
+                and (t.endswith(b'.py') or t.endswith(b'.t'))
+            ):
 
                 m = testcasepattern.match(os.path.basename(t))
                 if m is not None:
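
`testcasepattern` above splits arguments of the form `test-foo.t#case1#case2` into the test file name and its still-joined, `#`-separated case names. A quick demonstration with a made-up name:

    import re

    testcasepattern = re.compile(br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))')

    m = testcasepattern.match(b'test-example.t#compat#v2')  # invented name
    print(m.group(1))              # b'test-example.t'
    print(m.group(2).split(b'#'))  # [b'compat', b'v2'] -> case dimensions
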
@@ -2854,12 +3179,14 @@
                 casedimensions = parsettestcases(t)
                 if casedimensions:
                     cases = []
+
                     def addcases(case, casedimensions):
                         if not casedimensions:
                             cases.append(case)
                         else:
                             for c in casedimensions[0]:
                                 addcases(case + [c], casedimensions[1:])
+
                     addcases([], casedimensions)
                     if case and case in cases:
                         cases = [case]
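
The recursive `addcases` above expands the parsed case dimensions into every combination; it computes the same thing as a Cartesian product of the dimensions. A self-contained check of that equivalence, with two hypothetical dimensions:

    import itertools

    # Hypothetical dimensions as parsettestcases might return them.
    casedimensions = [[b'flat', b'tree'], [b'v1', b'v2']]

    cases = []
    def addcases(case, casedimensions):
        if not casedimensions:
            cases.append(case)
        else:
            for c in casedimensions[0]:
                addcases(case + [c], casedimensions[1:])

    addcases([], casedimensions)
    assert cases == [list(t) for t in itertools.product(*casedimensions)]
    print(cases)
    # [[b'flat', b'v1'], [b'flat', b'v2'], [b'tree', b'v1'], [b'tree', b'v2']]
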
@@ -2913,16 +3240,19 @@
             if kws is not None and PYTHON3:
                 kws = kws.encode('utf-8')
 
-            suite = TestSuite(self._testdir,
-                              jobs=jobs,
-                              whitelist=self.options.whitelisted,
-                              blacklist=self.options.blacklist,
-                              retest=self.options.retest,
-                              keywords=kws,
-                              loop=self.options.loop,
-                              runs_per_test=self.options.runs_per_test,
-                              showchannels=self.options.showchannels,
-                              tests=tests, loadtest=_reloadtest)
+            suite = TestSuite(
+                self._testdir,
+                jobs=jobs,
+                whitelist=self.options.whitelisted,
+                blacklist=self.options.blacklist,
+                retest=self.options.retest,
+                keywords=kws,
+                loop=self.options.loop,
+                runs_per_test=self.options.runs_per_test,
+                showchannels=self.options.showchannels,
+                tests=tests,
+                loadtest=_reloadtest,
+            )
             verbosity = 1
             if self.options.list_tests:
                 verbosity = 0
@@ -2942,8 +3272,10 @@
                     assert self._installdir
                     self._installchg()
 
-                log('running %d tests using %d parallel processes' % (
-                    num_tests, jobs))
+                log(
+                    'running %d tests using %d parallel processes'
+                    % (num_tests, jobs)
+                )
 
                 result = runner.run(suite)
 
@@ -2962,7 +3294,7 @@
             return 1
 
     def _getport(self, count):
-        port = self._ports.get(count) # do we have a cached entry?
+        port = self._ports.get(count)  # do we have a cached entry?
         if port is None:
             portneeded = 3
             # above 100 tries we just give up and let the test report failure
@@ -3000,18 +3332,23 @@
         # extra keyword parameters. 'case' is used by .t tests
         kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
 
-        t = testcls(refpath, self._outputdir, tmpdir,
-                    keeptmpdir=self.options.keep_tmpdir,
-                    debug=self.options.debug,
-                    first=self.options.first,
-                    timeout=self.options.timeout,
-                    startport=self._getport(count),
-                    extraconfigopts=self.options.extra_config_opt,
-                    py3warnings=self.options.py3_warnings,
-                    shell=self.options.shell,
-                    hgcommand=self._hgcommand,
-                    usechg=bool(self.options.with_chg or self.options.chg),
-                    useipv6=useipv6, **kwds)
+        t = testcls(
+            refpath,
+            self._outputdir,
+            tmpdir,
+            keeptmpdir=self.options.keep_tmpdir,
+            debug=self.options.debug,
+            first=self.options.first,
+            timeout=self.options.timeout,
+            startport=self._getport(count),
+            extraconfigopts=self.options.extra_config_opt,
+            py3warnings=self.options.py3_warnings,
+            shell=self.options.shell,
+            hgcommand=self._hgcommand,
+            usechg=bool(self.options.with_chg or self.options.chg),
+            useipv6=useipv6,
+            **kwds
+        )
         t.should_reload = True
         return t
 
@@ -3036,8 +3373,10 @@
         # os.symlink() is a thing with py3 on Windows, but it requires
         # Administrator rights.
         if getattr(os, 'symlink', None) and os.name != 'nt':
-            vlog("# Making python executable in test path a symlink to '%s'" %
-                 sysexecutable)
+            vlog(
+                "# Making python executable in test path a symlink to '%s'"
+                % sysexecutable
+            )
             mypython = os.path.join(self._tmpbindir, pyexename)
             try:
                 if os.readlink(mypython) == sysexecutable:
@@ -3056,8 +3395,10 @@
                         raise
         else:
             exedir, exename = os.path.split(sysexecutable)
-            vlog("# Modifying search path to find %s as %s in '%s'" %
-                 (exename, pyexename, exedir))
+            vlog(
+                "# Modifying search path to find %s as %s in '%s'"
+                % (exename, pyexename, exedir)
+            )
             path = os.environ['PATH'].split(os.pathsep)
             while exedir in path:
                 path.remove(exedir)
@@ -3097,17 +3438,24 @@
             # least on Windows for now, deal with .pydistutils.cfg bugs
             # when they happen.
             nohome = b''
-        cmd = (b'"%(exe)s" setup.py %(pure)s clean --all'
-               b' build %(compiler)s --build-base="%(base)s"'
-               b' install --force --prefix="%(prefix)s"'
-               b' --install-lib="%(libdir)s"'
-               b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
-               % {b'exe': exe, b'pure': pure,
-                  b'compiler': compiler,
-                  b'base': os.path.join(self._hgtmp, b"build"),
-                  b'prefix': self._installdir, b'libdir': self._pythondir,
-                  b'bindir': self._bindir,
-                  b'nohome': nohome, b'logfile': installerrs})
+        cmd = (
+            b'"%(exe)s" setup.py %(pure)s clean --all'
+            b' build %(compiler)s --build-base="%(base)s"'
+            b' install --force --prefix="%(prefix)s"'
+            b' --install-lib="%(libdir)s"'
+            b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
+            % {
+                b'exe': exe,
+                b'pure': pure,
+                b'compiler': compiler,
+                b'base': os.path.join(self._hgtmp, b"build"),
+                b'prefix': self._installdir,
+                b'libdir': self._pythondir,
+                b'bindir': self._bindir,
+                b'nohome': nohome,
+                b'logfile': installerrs,
+            }
+        )
 
         # setuptools requires install directories to exist.
         def makedirs(p):
@@ -3116,6 +3464,7 @@
             except OSError as e:
                 if e.errno != errno.EEXIST:
                     raise
+
         makedirs(self._pythondir)
         makedirs(self._bindir)
 
@@ -3155,8 +3504,10 @@
             with open(hgbat, 'rb') as f:
                 data = f.read()
             if br'"%~dp0..\python" "%~dp0hg" %*' in data:
-                data = data.replace(br'"%~dp0..\python" "%~dp0hg" %*',
-                                    b'"%~dp0python" "%~dp0hg" %*')
+                data = data.replace(
+                    br'"%~dp0..\python" "%~dp0hg" %*',
+                    b'"%~dp0python" "%~dp0hg" %*',
+                )
                 with open(hgbat, 'wb') as f:
                     f.write(data)
             else:
@@ -3182,17 +3533,20 @@
     def _checkhglib(self, verb):
         """Ensure that the 'mercurial' package imported by python is
         the one we expect it to be.  If not, print a warning to stderr."""
-        if ((self._bindir == self._pythondir) and
-            (self._bindir != self._tmpbindir)):
+        if (self._bindir == self._pythondir) and (
+            self._bindir != self._tmpbindir
+        ):
             # The pythondir has been inferred from --with-hg flag.
             # We cannot expect anything sensible here.
             return
         expecthg = os.path.join(self._pythondir, b'mercurial')
         actualhg = self._gethgpath()
         if os.path.abspath(actualhg) != os.path.abspath(expecthg):
-            sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
-                             '         (expected %s)\n'
-                             % (verb, actualhg, expecthg))
+            sys.stderr.write(
+                'warning: %s with unexpected mercurial lib: %s\n'
+                '         (expected %s)\n' % (verb, actualhg, expecthg)
+            )
+
     def _gethgpath(self):
         """Return the path to the mercurial package that is actually found by
         the current Python interpreter."""
@@ -3216,14 +3570,20 @@
         vlog('# Performing temporary installation of CHG')
         assert os.path.dirname(self._bindir) == self._installdir
         assert self._hgroot, 'must be called after _installhg()'
-        cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
-               % {b'make': b'make',  # TODO: switch by option or environment?
-                  b'prefix': self._installdir})
+        cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
+            b'make': b'make',  # TODO: switch by option or environment?
+            b'prefix': self._installdir,
+        }
         cwd = os.path.join(self._hgroot, b'contrib', b'chg')
         vlog("# Running", cmd)
-        proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
-                                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-                                stderr=subprocess.STDOUT)
+        proc = subprocess.Popen(
+            cmd,
+            shell=True,
+            cwd=cwd,
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        )
         out, _err = proc.communicate()
         if proc.returncode != 0:
             if PYTHON3:
@@ -3235,6 +3595,7 @@
     def _outputcoverage(self):
         """Produce code coverage output."""
         import coverage
+
         coverage = coverage.coverage
 
         vlog('# Producing coverage report')
@@ -3280,8 +3641,11 @@
             if found:
                 vlog("# Found prerequisite", p, "at", found)
             else:
-                print("WARNING: Did not find prerequisite tool: %s " %
-                      p.decode("utf-8"))
+                print(
+                    "WARNING: Did not find prerequisite tool: %s "
+                    % p.decode("utf-8")
+                )
+
 
 def aggregateexceptions(path):
     exceptioncounts = collections.Counter()
@@ -3322,10 +3686,12 @@
     # impacted tests.
     combined = {}
     for key in exceptioncounts:
-        combined[key] = (exceptioncounts[key],
-                         len(testsbyfailure[key]),
-                         leastfailing[key][0],
-                         leastfailing[key][1])
+        combined[key] = (
+            exceptioncounts[key],
+            len(testsbyfailure[key]),
+            leastfailing[key][0],
+            leastfailing[key][1],
+        )
 
     return {
         'exceptioncounts': exceptioncounts,
@@ -3336,11 +3702,13 @@
         'bytest': failuresbytest,
     }
 
+
 if __name__ == '__main__':
     runner = TestRunner()
 
     try:
         import msvcrt
+
         msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
         msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
         msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
--- a/tests/seq.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/seq.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,7 @@
 
 try:
     import msvcrt
+
     msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
     msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
--- a/tests/silenttestrunner.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/silenttestrunner.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,6 +3,7 @@
 import sys
 import unittest
 
+
 def main(modulename):
     '''run the tests found in module, printing nothing when all tests pass'''
     module = sys.modules[modulename]
@@ -20,5 +21,6 @@
             sys.stdout.write(exc)
         sys.exit(1)
 
+
 if os.environ.get('SILENT_BE_NOISY'):
     main = unittest.main
--- a/tests/simplestorerepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/simplestorerepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -21,9 +21,7 @@
     nullid,
     nullrev,
 )
-from mercurial.thirdparty import (
-    attr,
-)
+from mercurial.thirdparty import attr
 from mercurial import (
     ancestor,
     bundlerepo,
@@ -44,9 +42,7 @@
     cborutil,
     storageutil,
 )
-from mercurial.revlogutils import (
-    flagutil,
-)
+from mercurial.revlogutils import flagutil
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
@@ -56,6 +52,7 @@
 
 REQUIREMENT = 'testonly-simplestore'
 
+
 def validatenode(node):
     if isinstance(node, int):
         raise ValueError('expected node; got int')
@@ -63,13 +60,16 @@
     if len(node) != 20:
         raise ValueError('expected 20 byte node')
 
+
 def validaterev(rev):
     if not isinstance(rev, int):
         raise ValueError('expected int')
 
+
 class simplestoreerror(error.StorageError):
     pass
 
+
 @interfaceutil.implementer(repository.irevisiondelta)
 @attr.s(slots=True)
 class simplestorerevisiondelta(object):
@@ -83,6 +83,7 @@
     delta = attr.ib()
     linknode = attr.ib(default=None)
 
+
 @interfaceutil.implementer(repository.iverifyproblem)
 @attr.s(frozen=True)
 class simplefilestoreproblem(object):
@@ -90,6 +91,7 @@
     error = attr.ib(default=None)
     node = attr.ib(default=None)
 
+
 @interfaceutil.implementer(repository.ifilestorage)
 class filestorage(object):
     """Implements storage for a tracked path.
@@ -152,8 +154,9 @@
             p1rev, p2rev = self.parentrevs(self.rev(entry[b'node']))
 
             # start, length, rawsize, chainbase, linkrev, p1, p2, node
-            self._index.append((0, 0, 0, -1, entry[b'linkrev'], p1rev, p2rev,
-                                entry[b'node']))
+            self._index.append(
+                (0, 0, 0, -1, entry[b'linkrev'], p1rev, p2rev, entry[b'node'])
+            )
 
         self._index.append((0, 0, 0, -1, -1, -1, -1, nullid))
 
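
The comment above spells out the revlog-style 8-tuple each `_index` row mimics; simplestore stubs the storage fields and only really tracks linkrev, parents, and node. A sketch of the resulting shape with invented node values (`nullid` is stood in for so the snippet runs without Mercurial on the path):

    # Illustrative shape of the in-memory index rows built above.
    nullid = b'\x00' * 20  # stand-in for mercurial.node.nullid

    index = []
    # (start, length, rawsize, chainbase, linkrev, p1rev, p2rev, node)
    index.append((0, 0, 0, -1, 0, -1, -1, b'\x11' * 20))  # rev 0, a root
    index.append((0, 0, 0, -1, 1, 0, -1, b'\x22' * 20))   # rev 1, child of 0
    index.append((0, 0, 0, -1, -1, -1, -1, nullid))       # trailing null row

    for rev, row in enumerate(index[:-1]):
        print(rev, 'linkrev=%d p1=%d p2=%d' % row[4:7])
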
@@ -261,8 +264,9 @@
         validaterev(baserev)
         validaterev(rev)
 
-        if ((self._flags(baserev) & revlog.REVIDX_RAWTEXT_CHANGING_FLAGS)
-            or (self._flags(rev) & revlog.REVIDX_RAWTEXT_CHANGING_FLAGS)):
+        if (self._flags(baserev) & revlog.REVIDX_RAWTEXT_CHANGING_FLAGS) or (
+            self._flags(rev) & revlog.REVIDX_RAWTEXT_CHANGING_FLAGS
+        ):
             return False
 
         return True
@@ -271,8 +275,9 @@
         if p1 is None and p2 is None:
             p1, p2 = self.parents(node)
         if node != storageutil.hashrevisionsha1(text, p1, p2):
-            raise simplestoreerror(_("integrity check failed on %s") %
-                self._path)
+            raise simplestoreerror(
+                _("integrity check failed on %s") % self._path
+            )
 
     def revision(self, nodeorrev, raw=False):
         if isinstance(nodeorrev, int):
@@ -313,7 +318,7 @@
             return revision
 
         start = revision.index(b'\1\n', 2)
-        return revision[start + 2:]
+        return revision[start + 2 :]
 
     def renamed(self, node):
         validatenode(node)
@@ -405,9 +410,14 @@
 
         return [b'/'.join((self._storepath, f)) for f in entries]
 
-    def storageinfo(self, exclusivefiles=False, sharedfiles=False,
-                    revisionscount=False, trackedsize=False,
-                    storedsize=False):
+    def storageinfo(
+        self,
+        exclusivefiles=False,
+        sharedfiles=False,
+        revisionscount=False,
+        trackedsize=False,
+        storedsize=False,
+    ):
         # TODO do a real implementation of this
         return {
             'exclusivefiles': [],
@@ -425,22 +435,31 @@
                 self.revision(node)
             except Exception as e:
                 yield simplefilestoreproblem(
-                    error='unpacking %s: %s' % (node, e),
-                    node=node)
+                    error='unpacking %s: %s' % (node, e), node=node
+                )
                 state['skipread'].add(node)
 
-    def emitrevisions(self, nodes, nodesorder=None, revisiondata=False,
-                      assumehaveparentrevisions=False,
-                      deltamode=repository.CG_DELTAMODE_STD):
+    def emitrevisions(
+        self,
+        nodes,
+        nodesorder=None,
+        revisiondata=False,
+        assumehaveparentrevisions=False,
+        deltamode=repository.CG_DELTAMODE_STD,
+    ):
         # TODO this will probably break on some ordering options.
         nodes = [n for n in nodes if n != nullid]
         if not nodes:
             return
         for delta in storageutil.emitrevisions(
-                self, nodes, nodesorder, simplestorerevisiondelta,
-                revisiondata=revisiondata,
-                assumehaveparentrevisions=assumehaveparentrevisions,
-                deltamode=deltamode):
+            self,
+            nodes,
+            nodesorder,
+            simplestorerevisiondelta,
+            revisiondata=revisiondata,
+            assumehaveparentrevisions=assumehaveparentrevisions,
+            deltamode=deltamode,
+        ):
             yield delta
 
     def add(self, text, meta, transaction, linkrev, p1, p2):
@@ -449,8 +468,17 @@
 
         return self.addrevision(text, transaction, linkrev, p1, p2)
 
-    def addrevision(self, text, transaction, linkrev, p1, p2, node=None,
-                    flags=revlog.REVIDX_DEFAULT_FLAGS, cachedelta=None):
+    def addrevision(
+        self,
+        text,
+        transaction,
+        linkrev,
+        p1,
+        p2,
+        node=None,
+        flags=revlog.REVIDX_DEFAULT_FLAGS,
+        cachedelta=None,
+    ):
         validatenode(p1)
         validatenode(p2)
 
@@ -467,8 +495,9 @@
         if validatehash:
             self.checkhash(rawtext, node, p1=p1, p2=p2)
 
-        return self._addrawrevision(node, rawtext, transaction, linkrev, p1, p2,
-                                    flags)
+        return self._addrawrevision(
+            node, rawtext, transaction, linkrev, p1, p2, flags
+        )
 
     def _addrawrevision(self, node, rawtext, transaction, link, p1, p2, flags):
         transaction.addbackup(self._indexpath)
@@ -477,13 +506,15 @@
 
         self._svfs.write(path, rawtext)
 
-        self._indexdata.append({
-            b'node': node,
-            b'p1': p1,
-            b'p2': p2,
-            b'linkrev': link,
-            b'flags': flags,
-        })
+        self._indexdata.append(
+            {
+                b'node': node,
+                b'p1': p1,
+                b'p2': p2,
+                b'linkrev': link,
+                b'flags': flags,
+            }
+        )
 
         self._reflectindexupdate()
 
@@ -491,14 +522,22 @@
 
     def _reflectindexupdate(self):
         self._refreshindex()
-        self._svfs.write(self._indexpath,
-                         ''.join(cborutil.streamencode(self._indexdata)))
+        self._svfs.write(
+            self._indexpath, ''.join(cborutil.streamencode(self._indexdata))
+        )
 
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None,
-                 maybemissingparents=False):
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        maybemissingparents=False,
+    ):
         if maybemissingparents:
-            raise error.Abort(_('simple store does not support missing parents '
-                                'write mode'))
+            raise error.Abort(
+                _('simple store does not support missing parents ' 'write mode')
+            )
 
         nodes = []
 
@@ -519,8 +558,9 @@
             else:
                 text = mdiff.patch(self.revision(deltabase), delta)
 
-            self._addrawrevision(node, text, transaction, linkrev, p1, p2,
-                                 flags)
+            self._addrawrevision(
+                node, text, transaction, linkrev, p1, p2, flags
+            )
 
             if addrevisioncb:
                 addrevisioncb(self, node)
@@ -535,8 +575,7 @@
             revishead[self.rev(entry[b'p1'])] = False
             revishead[self.rev(entry[b'p2'])] = False
 
-        return [rev for rev, ishead in sorted(revishead.items())
-                if ishead]
+        return [rev for rev, ishead in sorted(revishead.items()) if ishead]
 
     def heads(self, start=None, stop=None):
         # This is copied from revlog.py.
@@ -584,8 +623,12 @@
 
     def getstrippoint(self, minlink):
         return storageutil.resolvestripinfo(
-            minlink, len(self) - 1, self._headrevs(), self.linkrev,
-            self.parentrevs)
+            minlink,
+            len(self) - 1,
+            self._headrevs(),
+            self.linkrev,
+            self.parentrevs,
+        )
 
     def strip(self, minlink, transaction):
         if not len(self):
@@ -599,6 +642,7 @@
         self._indexdata[rev:] = []
         self._reflectindexupdate()
 
+
 def issimplestorefile(f, kind, st):
     if kind != stat.S_IFREG:
         return False
@@ -613,6 +657,7 @@
     # Otherwise assume it belongs to the simple store.
     return True
 
+
 class simplestore(store.encodedstore):
     def datafiles(self):
         for x in super(simplestore, self).datafiles():
@@ -629,6 +674,7 @@
 
             yield unencoded, encoded, size
 
+
 def reposetup(ui, repo):
     if not repo.local():
         return
@@ -642,9 +688,11 @@
 
     repo.__class__ = simplestorerepo
 
+
 def featuresetup(ui, supported):
     supported.add(REQUIREMENT)
 
+
 def newreporequirements(orig, ui, createopts):
     """Modifies default requirements for new repos to use the simple store."""
     requirements = orig(ui, createopts)
@@ -652,21 +700,23 @@
     # These requirements are only used to affect creation of the store
     # object. We have our own store. So we can remove them.
     # TODO do this once we feel like taking the test hit.
-    #if 'fncache' in requirements:
+    # if 'fncache' in requirements:
     #    requirements.remove('fncache')
-    #if 'dotencode' in requirements:
+    # if 'dotencode' in requirements:
     #    requirements.remove('dotencode')
 
     requirements.add(REQUIREMENT)
 
     return requirements
 
+
 def makestore(orig, requirements, path, vfstype):
     if REQUIREMENT not in requirements:
         return orig(requirements, path, vfstype)
 
     return simplestore(path, vfstype)
 
+
 def verifierinit(orig, self, *args, **kwargs):
     orig(self, *args, **kwargs)
 
@@ -674,10 +724,12 @@
     # advertised. So suppress these warnings.
     self.warnorphanstorefiles = False
 
+
 def extsetup(ui):
     localrepo.featuresetupfuncs.add(featuresetup)
 
-    extensions.wrapfunction(localrepo, 'newreporequirements',
-                            newreporequirements)
+    extensions.wrapfunction(
+        localrepo, 'newreporequirements', newreporequirements
+    )
     extensions.wrapfunction(localrepo, 'makestore', makestore)
     extensions.wrapfunction(verify.verifier, '__init__', verifierinit)
--- a/tests/sitecustomize.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/sitecustomize.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,8 +6,9 @@
         import coverage
         import uuid
 
-        covpath = os.path.join(os.environ['COVERAGE_DIR'],
-                               'cov.%s' % uuid.uuid1())
+        covpath = os.path.join(
+            os.environ['COVERAGE_DIR'], 'cov.%s' % uuid.uuid1()
+        )
         cov = coverage.coverage(data_file=covpath, auto_data=True)
         cov._warn_no_data = False
         cov._warn_unimported_source = False
--- a/tests/sshprotoext.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/sshprotoext.py	Sun Oct 06 09:45:02 2019 -0400
@@ -25,14 +25,17 @@
 configitem(b'sshpeer', b'mode', default=None)
 configitem(b'sshpeer', b'handshake-mode', default=None)
 
+
 class bannerserver(wireprotoserver.sshserver):
     """Server that sends a banner to stdout."""
+
     def serve_forever(self):
         for i in range(10):
             self._fout.write(b'banner: line %d\n' % i)
 
         super(bannerserver, self).serve_forever()
 
+
 class prehelloserver(wireprotoserver.sshserver):
     """Tests behavior when connecting to <0.9.1 servers.
 
@@ -41,6 +44,7 @@
     to SSH servers. This mock server tests behavior of the handshake
     when ``hello`` is not supported.
     """
+
     def serve_forever(self):
         l = self._fin.readline()
         assert l == b'hello\n'
@@ -48,13 +52,15 @@
         wireprotoserver._sshv1respondbytes(self._fout, b'')
         l = self._fin.readline()
         assert l == b'between\n'
-        proto = wireprotoserver.sshv1protocolhandler(self._ui, self._fin,
-                                                     self._fout)
+        proto = wireprotoserver.sshv1protocolhandler(
+            self._ui, self._fin, self._fout
+        )
         rsp = wireprotov1server.dispatch(self._repo, proto, b'between')
         wireprotoserver._sshv1respondbytes(self._fout, rsp.data)
 
         super(prehelloserver, self).serve_forever()
 
+
 def performhandshake(orig, ui, stdin, stdout, stderr):
     """Wrapped version of sshpeer._performhandshake to send extra commands."""
     mode = ui.config(b'sshpeer', b'handshake-mode')
@@ -73,8 +79,8 @@
         stdin.flush()
         return orig(ui, stdin, stdout, stderr)
     else:
-        raise error.ProgrammingError(b'unknown HANDSHAKECOMMANDMODE: %s' %
-                                     mode)
+        raise error.ProgrammingError(b'unknown HANDSHAKECOMMANDMODE: %s' % mode)
+
 
 def extsetup(ui):
     # It's easier for tests to define the server behavior via environment
--- a/tests/svnurlof.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/svnurlof.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,6 +6,7 @@
     util,
 )
 
+
 def main(argv):
     enc = util.urlreq.quote(pycompat.sysbytes(argv[1]))
     if pycompat.iswindows:
@@ -14,5 +15,6 @@
         fmt = 'file://%s'
     print(fmt % pycompat.sysstr(enc))
 
+
 if __name__ == '__main__':
     main(sys.argv)
--- a/tests/svnxml.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/svnxml.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,10 +5,10 @@
 import sys
 import xml.dom.minidom
 
+
 def xmltext(e):
-    return ''.join(c.data for c
-                   in e.childNodes
-                   if c.nodeType == c.TEXT_NODE)
+    return ''.join(c.data for c in e.childNodes if c.nodeType == c.TEXT_NODE)
+
 
 def parseentry(entry):
     e = {}
@@ -27,6 +27,7 @@
             e['paths'].append((path, action, frompath, fromrev))
     return e
 
+
 def parselog(data):
     entries = []
     doc = xml.dom.minidom.parseString(data)
@@ -34,6 +35,7 @@
         entries.append(parseentry(e))
     return entries
 
+
 def printentries(entries):
     try:
         fp = sys.stdout.buffer
@@ -49,8 +51,8 @@
             p = b' %s %s%s\n' % (action, path, frominfo)
             fp.write(p)
 
+
 if __name__ == '__main__':
     data = sys.stdin.read()
     entries = parselog(data)
     printentries(entries)
-
--- a/tests/test-absorb-filefixupstate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-absorb-filefixupstate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,6 +4,7 @@
 from mercurial import pycompat
 from hgext import absorb
 
+
 class simplefctx(object):
     def __init__(self, content):
         self.content = content
@@ -11,6 +12,7 @@
     def data(self):
         return self.content
 
+
 def insertreturns(x):
     # insert "\n"s after each single char
     if isinstance(x, bytes):
@@ -18,6 +20,7 @@
     else:
         return pycompat.maplist(insertreturns, x)
 
+
 def removereturns(x):
     # the revert of "insertreturns"
     if isinstance(x, bytes):
@@ -25,10 +28,14 @@
     else:
         return pycompat.maplist(removereturns, x)
 
+
 def assertlistequal(lhs, rhs, decorator=lambda x: x):
     if lhs != rhs:
-        raise RuntimeError('mismatch:\n actual:   %r\n expected: %r'
-                           % tuple(map(decorator, [lhs, rhs])))
+        raise RuntimeError(
+            'mismatch:\n actual:   %r\n expected: %r'
+            % tuple(map(decorator, [lhs, rhs]))
+        )
+
 
 def testfilefixup(oldcontents, workingcopy, expectedcontents, fixups=None):
     """([str], str, [str], [(rev, a1, a2, b1, b2)]?) -> None
@@ -43,22 +50,24 @@
     expectedcontents = insertreturns(expectedcontents)
     oldcontents = insertreturns(oldcontents)
     workingcopy = insertreturns(workingcopy)
-    state = absorb.filefixupstate(pycompat.maplist(simplefctx, oldcontents),
-                                  'path')
+    state = absorb.filefixupstate(
+        pycompat.maplist(simplefctx, oldcontents), 'path'
+    )
     state.diffwith(simplefctx(workingcopy))
     if fixups is not None:
         assertlistequal(state.fixups, fixups)
     state.apply()
     assertlistequal(state.finalcontents, expectedcontents, removereturns)
 
+
 def buildcontents(linesrevs):
     # linesrevs: [(linecontent : str, revs : [int])]
     revs = set(itertools.chain(*[revs for line, revs in linesrevs]))
     return [b''] + [
-        b''.join([l for l, rs in linesrevs if r in rs])
-        for r in sorted(revs)
+        b''.join([l for l, rs in linesrevs if r in rs]) for r in sorted(revs)
     ]
 
+
 # input case 0: one single commit
 case0 = [b'', b'11']
 
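
`buildcontents` above turns `(line, revisions-containing-it)` pairs into per-revision file contents, with a leading empty pseudo-revision. A worked run (same function, copied here so the example stands alone):

    import itertools

    def buildcontents(linesrevs):
        # linesrevs: [(linecontent : str, revs : [int])]
        revs = set(itertools.chain(*[revs for line, revs in linesrevs]))
        return [b''] + [
            b''.join([l for l, rs in linesrevs if r in rs])
            for r in sorted(revs)
        ]

    print(buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3])]))
    # [b'', b'1', b'12', b'123'] -- rev 1 holds line 1, rev 2 adds line 2,
    # rev 3 adds line 3; this is exactly what case1 below expands to
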
@@ -69,11 +78,7 @@
 testfilefixup(case0, b'222', [b'', b'222'])
 
 # input case 1: 3 lines, each commit adds one line
-case1 = buildcontents([
-    (b'1', [1, 2, 3]),
-    (b'2', [   2, 3]),
-    (b'3', [      3]),
-])
+case1 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3]),])
 
 # 1:1 line mapping
 testfilefixup(case1, b'123', case1)
@@ -90,10 +95,10 @@
 testfilefixup(case1, b'ab', case1)
 
 # deletion
-testfilefixup(case1, b'',   [b'', b'', b'', b''])
-testfilefixup(case1, b'1',  [b'', b'1', b'1', b'1'])
-testfilefixup(case1, b'2',  [b'', b'', b'2', b'2'])
-testfilefixup(case1, b'3',  [b'', b'', b'', b'3'])
+testfilefixup(case1, b'', [b'', b'', b'', b''])
+testfilefixup(case1, b'1', [b'', b'1', b'1', b'1'])
+testfilefixup(case1, b'2', [b'', b'', b'2', b'2'])
+testfilefixup(case1, b'3', [b'', b'', b'', b'3'])
 testfilefixup(case1, b'13', [b'', b'1', b'1', b'13'])
 
 # replaces
@@ -116,15 +121,12 @@
 testfilefixup(case1, b'12b3', case1)
 
 # input case 2: delete in the middle
-case2 = buildcontents([
-    (b'11', [1, 2]),
-    (b'22', [1   ]),
-    (b'33', [1, 2]),
-])
+case2 = buildcontents([(b'11', [1, 2]), (b'22', [1]), (b'33', [1, 2]),])
 
 # deletion (optimize code should make it 2 chunks)
-testfilefixup(case2, b'', [b'', b'22', b''],
-              fixups=[(4, 0, 2, 0, 0), (4, 2, 4, 0, 0)])
+testfilefixup(
+    case2, b'', [b'', b'22', b''], fixups=[(4, 0, 2, 0, 0), (4, 2, 4, 0, 0)]
+)
 
 # 1:1 line mapping
 testfilefixup(case2, b'aaaa', [b'', b'aa22aa', b'aaaa'])
@@ -134,11 +136,7 @@
 testfilefixup(case2, b'aaa', case2)
 
 # input case 3: rev 3 reverts rev 2
-case3 = buildcontents([
-    (b'1', [1, 2, 3]),
-    (b'2', [   2   ]),
-    (b'3', [1, 2, 3]),
-])
+case3 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2]), (b'3', [1, 2, 3]),])
 
 # 1:1 line mapping
 testfilefixup(case3, b'13', case3)
@@ -157,24 +155,26 @@
 testfilefixup(case3, b'a13c', [b'', b'a13c', b'a123c', b'a13c'])
 
 # input case 4: a slightly complex case
-case4 = buildcontents([
-    (b'1', [1, 2, 3]),
-    (b'2', [   2, 3]),
-    (b'3', [1, 2,  ]),
-    (b'4', [1,    3]),
-    (b'5', [      3]),
-    (b'6', [   2, 3]),
-    (b'7', [   2   ]),
-    (b'8', [   2, 3]),
-    (b'9', [      3]),
-])
+case4 = buildcontents(
+    [
+        (b'1', [1, 2, 3]),
+        (b'2', [2, 3]),
+        (b'3', [1, 2,]),
+        (b'4', [1, 3]),
+        (b'5', [3]),
+        (b'6', [2, 3]),
+        (b'7', [2]),
+        (b'8', [2, 3]),
+        (b'9', [3]),
+    ]
+)
 
 testfilefixup(case4, b'1245689', case4)
 testfilefixup(case4, b'1a2456bbb', case4)
 testfilefixup(case4, b'1abc5689', case4)
 testfilefixup(case4, b'1ab5689', [b'', b'134', b'1a3678', b'1ab5689'])
 testfilefixup(case4, b'aa2bcd8ee', [b'', b'aa34', b'aa23d78', b'aa2bcd8ee'])
-testfilefixup(case4, b'aa2bcdd8ee',[b'', b'aa34', b'aa23678', b'aa24568ee'])
+testfilefixup(case4, b'aa2bcdd8ee', [b'', b'aa34', b'aa23678', b'aa24568ee'])
 testfilefixup(case4, b'aaaaaa', case4)
 testfilefixup(case4, b'aa258b', [b'', b'aa34', b'aa2378', b'aa258b'])
 testfilefixup(case4, b'25bb', [b'', b'34', b'23678', b'25689'])
@@ -183,11 +183,7 @@
 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
 
 # input case 5: replace a small chunk which is near a deleted line
-case5 = buildcontents([
-    (b'12', [1, 2]),
-    (b'3',  [1]),
-    (b'4',  [1, 2]),
-])
+case5 = buildcontents([(b'12', [1, 2]), (b'3', [1]), (b'4', [1, 2]),])
 
 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
 
--- a/tests/test-ancestor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-ancestor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,6 +22,7 @@
     long = int
     xrange = range
 
+
 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
     '''nodes: total number of nodes in the graph
     rootprob: probability that a new node (not 0) will be a root
@@ -51,6 +52,7 @@
 
     return graph
 
+
 def buildancestorsets(graph):
     ancs = [None] * len(graph)
     for i in xrange(len(graph)):
@@ -61,17 +63,21 @@
             ancs[i].update(ancs[p])
     return ancs
 
+
 class naiveincrementalmissingancestors(object):
     def __init__(self, ancs, bases):
         self.ancs = ancs
         self.bases = set(bases)
+
     def addbases(self, newbases):
         self.bases.update(newbases)
+
     def removeancestorsfrom(self, revs):
         for base in self.bases:
             if base != nullrev:
                 revs.difference_update(self.ancs[base])
         revs.discard(nullrev)
+
     def missingancestors(self, revs):
         res = set()
         for rev in revs:
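
`naiveincrementalmissingancestors` above is the brute-force oracle the randomized test checks the real implementation against. Driving its `missingancestors` logic by hand on a toy graph (the four-node ancestor table below is invented for illustration; `ancs[i]` is the full ancestor set of `i`, including `i` itself):

    nullrev = -1

    # Toy ancestor table: 0 is a root, 1 -> 0, and 2 and 3 both -> 1.
    ancs = [{0}, {0, 1}, {0, 1, 2}, {0, 1, 3}]

    class naiveoracle(object):
        def __init__(self, ancs, bases):
            self.ancs, self.bases = ancs, set(bases)

        def missingancestors(self, revs):
            res = set()
            for rev in revs:
                if rev != nullrev:
                    res.update(self.ancs[rev])
            for base in self.bases:
                if base != nullrev:
                    res.difference_update(self.ancs[base])
            return sorted(res)

    print(naiveoracle(ancs, [2]).missingancestors([3]))
    # [3] -- every other ancestor of 3 is already reachable from base 2
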
@@ -82,6 +88,7 @@
                 res.difference_update(self.ancs[base])
         return sorted(res)
 
+
 def test_missingancestors(seed, rng):
     # empirically observed to take around 1 second
     graphcount = 100
@@ -138,8 +145,14 @@
                     inc.removeancestorsfrom(hrevs)
                     naiveinc.removeancestorsfrom(rrevs)
                     if hrevs != rrevs:
-                        err(seed, graph, bases, seq, sorted(hrevs),
-                            sorted(rrevs))
+                        err(
+                            seed,
+                            graph,
+                            bases,
+                            seq,
+                            sorted(hrevs),
+                            sorted(rrevs),
+                        )
                 else:
                     revs = samplerevs(graphnodes)
                     seq.append(('missingancestors', revs))
@@ -148,6 +161,7 @@
                     if h != r:
                         err(seed, graph, bases, seq, h, r)
 
+
 # graph is a dict of child->parent adjacency lists for this graph:
 # o  13
 # |
@@ -177,9 +191,23 @@
 # |
 # o  0
 
-graph = {0: [-1, -1], 1: [0, -1], 2: [1, -1], 3: [1, -1], 4: [2, -1],
-         5: [4, -1], 6: [4, -1], 7: [4, -1], 8: [-1, -1], 9: [6, 7],
-         10: [5, -1], 11: [3, 7], 12: [9, -1], 13: [8, -1]}
+graph = {
+    0: [-1, -1],
+    1: [0, -1],
+    2: [1, -1],
+    3: [1, -1],
+    4: [2, -1],
+    5: [4, -1],
+    6: [4, -1],
+    7: [4, -1],
+    8: [-1, -1],
+    9: [6, 7],
+    10: [5, -1],
+    11: [3, 7],
+    12: [9, -1],
+    13: [8, -1],
+}
+
 
 def test_missingancestors_explicit():
     """A few explicit cases, easier to check for catching errors in refactors.
@@ -187,43 +215,128 @@
     The bigger graph at the end has been produced by the random generator
     above, and we have some evidence that the other tests don't cover it.
     """
-    for i, (bases, revs) in enumerate((({1, 2, 3, 4, 7}, set(xrange(10))),
-                                       ({10}, set({11, 12, 13, 14})),
-                                       ({7}, set({1, 2, 3, 4, 5})),
-                                       )):
+    for i, (bases, revs) in enumerate(
+        (
+            ({1, 2, 3, 4, 7}, set(xrange(10))),
+            ({10}, set({11, 12, 13, 14})),
+            ({7}, set({1, 2, 3, 4, 5})),
+        )
+    ):
         print("%% removeancestorsfrom(), example %d" % (i + 1))
         missanc = ancestor.incrementalmissingancestors(graph.get, bases)
         missanc.removeancestorsfrom(revs)
         print("remaining (sorted): %s" % sorted(list(revs)))
 
-    for i, (bases, revs) in enumerate((({10}, {11}),
-                                       ({11}, {10}),
-                                       ({7}, {9, 11}),
-                                       )):
+    for i, (bases, revs) in enumerate(
+        (({10}, {11}), ({11}, {10}), ({7}, {9, 11}),)
+    ):
         print("%% missingancestors(), example %d" % (i + 1))
         missanc = ancestor.incrementalmissingancestors(graph.get, bases)
         print("return %s" % missanc.missingancestors(revs))
 
     print("% removeancestorsfrom(), bigger graph")
     vecgraph = [
-        [-1, -1], [0, -1], [1, 0], [2, 1], [3, -1], [4, -1], [5, 1],
-        [2, -1], [7, -1], [8, -1], [9, -1], [10, 1], [3, -1], [12, -1],
-        [13, -1], [14, -1], [4, -1], [16, -1], [17, -1], [18, -1],
-        [19, 11], [20, -1], [21, -1], [22, -1], [23, -1], [2, -1],
-        [3, -1], [26, 24], [27, -1], [28, -1], [12, -1], [1, -1], [1, 9],
-        [32, -1], [33, -1], [34, 31], [35, -1], [36, 26], [37, -1],
-        [38, -1], [39, -1], [40, -1], [41, -1], [42, 26], [0, -1],
-        [44, -1], [45, 4], [40, -1], [47, -1], [36, 0], [49, -1],
-        [-1, -1], [51, -1], [52, -1], [53, -1], [14, -1],
-        [55, -1], [15, -1], [23, -1], [58, -1], [59, -1], [2, -1],
-        [61, 59], [62, -1], [63, -1], [-1, -1], [65, -1],
-        [66, -1], [67, -1], [68, -1], [37, 28], [69, 25],
-        [71, -1], [72, -1], [50, 2], [74, -1], [12, -1],
-        [18, -1], [77, -1], [78, -1], [79, -1], [43, 33],
-        [81, -1], [82, -1], [83, -1], [84, 45], [85, -1],
-        [86, -1], [-1, -1], [88, -1], [-1, -1], [76, 83], [44, -1],
-        [92, -1], [93, -1], [9, -1], [95, 67], [96, -1], [97, -1],
-        [-1, -1]]
+        [-1, -1],
+        [0, -1],
+        [1, 0],
+        [2, 1],
+        [3, -1],
+        [4, -1],
+        [5, 1],
+        [2, -1],
+        [7, -1],
+        [8, -1],
+        [9, -1],
+        [10, 1],
+        [3, -1],
+        [12, -1],
+        [13, -1],
+        [14, -1],
+        [4, -1],
+        [16, -1],
+        [17, -1],
+        [18, -1],
+        [19, 11],
+        [20, -1],
+        [21, -1],
+        [22, -1],
+        [23, -1],
+        [2, -1],
+        [3, -1],
+        [26, 24],
+        [27, -1],
+        [28, -1],
+        [12, -1],
+        [1, -1],
+        [1, 9],
+        [32, -1],
+        [33, -1],
+        [34, 31],
+        [35, -1],
+        [36, 26],
+        [37, -1],
+        [38, -1],
+        [39, -1],
+        [40, -1],
+        [41, -1],
+        [42, 26],
+        [0, -1],
+        [44, -1],
+        [45, 4],
+        [40, -1],
+        [47, -1],
+        [36, 0],
+        [49, -1],
+        [-1, -1],
+        [51, -1],
+        [52, -1],
+        [53, -1],
+        [14, -1],
+        [55, -1],
+        [15, -1],
+        [23, -1],
+        [58, -1],
+        [59, -1],
+        [2, -1],
+        [61, 59],
+        [62, -1],
+        [63, -1],
+        [-1, -1],
+        [65, -1],
+        [66, -1],
+        [67, -1],
+        [68, -1],
+        [37, 28],
+        [69, 25],
+        [71, -1],
+        [72, -1],
+        [50, 2],
+        [74, -1],
+        [12, -1],
+        [18, -1],
+        [77, -1],
+        [78, -1],
+        [79, -1],
+        [43, 33],
+        [81, -1],
+        [82, -1],
+        [83, -1],
+        [84, 45],
+        [85, -1],
+        [86, -1],
+        [-1, -1],
+        [88, -1],
+        [-1, -1],
+        [76, 83],
+        [44, -1],
+        [92, -1],
+        [93, -1],
+        [9, -1],
+        [95, 67],
+        [96, -1],
+        [97, -1],
+        [-1, -1],
+    ]
     problem_rev = 28
     problem_base = 70
     # problem_rev is a parent of problem_base, but a faulty implementation
@@ -239,16 +352,24 @@
     else:
         print("Ok")
 
+
 def genlazyancestors(revs, stoprev=0, inclusive=False):
-    print(("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" %
-           (revs, stoprev, inclusive)))
-    return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev,
-                                  inclusive=inclusive)
+    print(
+        (
+            "%% lazy ancestor set for %s, stoprev = %s, inclusive = %s"
+            % (revs, stoprev, inclusive)
+        )
+    )
+    return ancestor.lazyancestors(
+        graph.get, revs, stoprev=stoprev, inclusive=inclusive
+    )
+
 
 def printlazyancestors(s, l):
     print('membership: %r' % [n for n in l if n in s])
     print('iteration:  %r' % list(s))
 
+
 def test_lazyancestors():
     # Empty revs
     s = genlazyancestors([])
@@ -282,6 +403,7 @@
     s = genlazyancestors([10, 1], inclusive=True)
     printlazyancestors(s, [2, 10, 4, 5, -1, 0, 1])
 
+
 # The C gca algorithm requires a real repo. These are textual descriptions of
 # DAGs that have been known to be problematic, and, optionally, known pairs
 # of revisions and their expected ancestor list.
@@ -290,6 +412,8 @@
     (b'+3*3/*2*2/*4*4/*4/2*4/2*2', {}),
     (b'+2*2*/2*4*/4*/3*2/4', {(6, 7): [3, 5]}),
 ]
+
+
 def test_gca():
     u = uimod.ui.load()
     for i, (dag, tests) in enumerate(dagtests):
@@ -312,19 +436,21 @@
                 if (a, b) in tests:
                     expected = tests[(a, b)]
                 if cgcas != pygcas or (expected and cgcas != expected):
-                    print("test_gca: for dag %s, gcas for %d, %d:"
-                          % (dag, a, b))
+                    print(
+                        "test_gca: for dag %s, gcas for %d, %d:" % (dag, a, b)
+                    )
                     print("  C returned:      %s" % cgcas)
                     print("  Python returned: %s" % pygcas)
                     if expected:
                         print("  expected:        %s" % expected)
 
+
 def main():
     seed = None
     opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed='])
     for o, a in opts:
         if o in ('-s', '--seed'):
-            seed = long(a, base=0) # accepts base 10 or 16 strings
+            seed = long(a, base=0)  # accepts base 10 or 16 strings
 
     if seed is None:
         try:
@@ -338,5 +464,6 @@
     test_lazyancestors()
     test_gca()
 
+
 if __name__ == '__main__':
     main()
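
genlazyancestors() above hands ancestor.lazyancestors a parent-lookup callable, a revision set, and the stoprev/inclusive knobs. As a reading aid, here is a toy generator with the same call shape and semantics on a small DAG; it is an illustration only, not Mercurial's implementation (which additionally guarantees a useful iteration order):

    def lazy_ancestors(pfunc, revs, stoprev=0, inclusive=False):
        # Toy stand-in for ancestor.lazyancestors: walk ancestors lazily,
        # skipping anything below stoprev.
        seen = set()
        visit = list(revs) if inclusive else [p for r in revs for p in pfunc(r)]
        while visit:
            rev = visit.pop()
            if rev in seen or rev < stoprev:
                continue
            seen.add(rev)
            yield rev
            visit.extend(pfunc(rev))

    parents = {0: [], 1: [0], 2: [1], 3: [1], 4: [2, 3]}  # tiny DAG
    print(sorted(lazy_ancestors(parents.get, [4])))                  # [0, 1, 2, 3]
    print(sorted(lazy_ancestors(parents.get, [4], inclusive=True)))  # [0, 1, 2, 3, 4]
    print(sorted(lazy_ancestors(parents.get, [4], stoprev=2)))       # [2, 3]
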
--- a/tests/test-annotate.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-annotate.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,15 +13,19 @@
     _annotatepair,
 )
 
+
 def tr(a):
-    return [annotateline(fctx, lineno, skip)
-            for fctx, lineno, skip in zip(a.fctxs, a.linenos, a.skips)]
+    return [
+        annotateline(fctx, lineno, skip)
+        for fctx, lineno, skip in zip(a.fctxs, a.linenos, a.skips)
+    ]
+
 
 class AnnotateTests(unittest.TestCase):
     """Unit tests for annotate code."""
 
     def testannotatepair(self):
-        self.maxDiff = None # camelcase-required
+        self.maxDiff = None  # camelcase-required
 
         oldfctx = b'old'
         p1fctx, p2fctx, childfctx = b'p1', b'p2', b'c'
@@ -41,70 +45,94 @@
         oldann = decorate(olddata, oldfctx)
         p1ann = decorate(p1data, p1fctx)
         p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
-        self.assertEqual(tr(p1ann), [
-            annotateline(b'old', 1),
-            annotateline(b'old', 2),
-            annotateline(b'p1', 3),
-        ])
+        self.assertEqual(
+            tr(p1ann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'old', 2),
+                annotateline(b'p1', 3),
+            ],
+        )
 
         p2ann = decorate(p2data, p2fctx)
         p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
-        self.assertEqual(tr(p2ann), [
-            annotateline(b'old', 1),
-            annotateline(b'p2', 2),
-            annotateline(b'p2', 3),
-        ])
+        self.assertEqual(
+            tr(p2ann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'p2', 2),
+                annotateline(b'p2', 3),
+            ],
+        )
 
         # Test with multiple parents (note the difference caused by ordering)
 
         childann = decorate(childdata, childfctx)
-        childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
-                                 diffopts)
-        self.assertEqual(tr(childann), [
-            annotateline(b'old', 1),
-            annotateline(b'c', 2),
-            annotateline(b'p2', 2),
-            annotateline(b'c', 4),
-            annotateline(b'p2', 3),
-        ])
+        childann = _annotatepair(
+            [p1ann, p2ann], childfctx, childann, False, diffopts
+        )
+        self.assertEqual(
+            tr(childann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'c', 2),
+                annotateline(b'p2', 2),
+                annotateline(b'c', 4),
+                annotateline(b'p2', 3),
+            ],
+        )
 
         childann = decorate(childdata, childfctx)
-        childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
-                                 diffopts)
-        self.assertEqual(tr(childann), [
-            annotateline(b'old', 1),
-            annotateline(b'c', 2),
-            annotateline(b'p1', 3),
-            annotateline(b'c', 4),
-            annotateline(b'p2', 3),
-        ])
+        childann = _annotatepair(
+            [p2ann, p1ann], childfctx, childann, False, diffopts
+        )
+        self.assertEqual(
+            tr(childann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'c', 2),
+                annotateline(b'p1', 3),
+                annotateline(b'c', 4),
+                annotateline(b'p2', 3),
+            ],
+        )
 
         # Test with skipchild (note the difference caused by ordering)
 
         childann = decorate(childdata, childfctx)
-        childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
-                                 diffopts)
-        self.assertEqual(tr(childann), [
-            annotateline(b'old', 1),
-            annotateline(b'old', 2, True),
-            # note that this line was carried over from earlier so it is *not*
-            # marked skipped
-            annotateline(b'p2', 2),
-            annotateline(b'p2', 2, True),
-            annotateline(b'p2', 3),
-        ])
+        childann = _annotatepair(
+            [p1ann, p2ann], childfctx, childann, True, diffopts
+        )
+        self.assertEqual(
+            tr(childann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'old', 2, True),
+                # note that this line was carried over from earlier so it is *not*
+                # marked skipped
+                annotateline(b'p2', 2),
+                annotateline(b'p2', 2, True),
+                annotateline(b'p2', 3),
+            ],
+        )
 
         childann = decorate(childdata, childfctx)
-        childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
-                                 diffopts)
-        self.assertEqual(tr(childann), [
-            annotateline(b'old', 1),
-            annotateline(b'old', 2, True),
-            annotateline(b'p1', 3),
-            annotateline(b'p1', 3, True),
-            annotateline(b'p2', 3),
-        ])
+        childann = _annotatepair(
+            [p2ann, p1ann], childfctx, childann, True, diffopts
+        )
+        self.assertEqual(
+            tr(childann),
+            [
+                annotateline(b'old', 1),
+                annotateline(b'old', 2, True),
+                annotateline(b'p1', 3),
+                annotateline(b'p1', 3, True),
+                annotateline(b'p2', 3),
+            ],
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
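
The pairs of assertions above differ only in parent order ([p1ann, p2ann] versus [p2ann, p1ann]) because a child line present in several parents is credited to the first parent consulted. A hypothetical attribution loop making that rule explicit (toyline is a local stand-in, not the annotateline imported above, and this simplification skips the diff-based matching the real _annotatepair does):

    import collections

    toyline = collections.namedtuple('toyline', 'fctx lineno skip')

    def attribute(child_lines, parents):
        out = []
        for i, line in enumerate(child_lines, 1):
            for name, plines in parents:
                if line in plines:
                    # credit the first parent that already has this line
                    out.append(toyline(name, plines.index(line) + 1, False))
                    break
            else:
                # no parent has it: the child introduced this line
                out.append(toyline(b'c', i, False))
        return out

    child = [b'x', b'y', b'z']
    p1, p2 = [b'x'], [b'z', b'x']
    print(attribute(child, [(b'p1', p1), (b'p2', p2)]))  # b'x' goes to p1
    print(attribute(child, [(b'p2', p2), (b'p1', p1)]))  # b'x' goes to p2
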
--- a/tests/test-atomictempfile.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-atomictempfile.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,11 +11,13 @@
     pycompat,
     util,
 )
+
 atomictempfile = util.atomictempfile
 
 if pycompat.ispy3:
     xrange = range
 
+
 class testatomictempfile(unittest.TestCase):
     def setUp(self):
         self._testdir = tempfile.mkdtemp(b'atomictempfiletest')
@@ -28,15 +30,19 @@
         file = atomictempfile(self._filename)
         self.assertFalse(os.path.isfile(self._filename))
         tempfilename = file._tempname
-        self.assertTrue(tempfilename in glob.glob(
-            os.path.join(self._testdir, b'.testfilename-*')))
+        self.assertTrue(
+            tempfilename
+            in glob.glob(os.path.join(self._testdir, b'.testfilename-*'))
+        )
 
         file.write(b'argh\n')
         file.close()
 
         self.assertTrue(os.path.isfile(self._filename))
-        self.assertTrue(tempfilename not in glob.glob(
-            os.path.join(self._testdir, b'.testfilename-*')))
+        self.assertTrue(
+            tempfilename
+            not in glob.glob(os.path.join(self._testdir, b'.testfilename-*'))
+        )
 
     # discard() removes the temp file without making the write permanent
     def testdiscard(self):
@@ -84,7 +90,7 @@
 
             # st_mtime should be advanced "repetition" times, because
             # all atomicwrite() occurred at same time (in sec)
-            oldtime = (oldstat[stat.ST_MTIME] + repetition) & 0x7fffffff
+            oldtime = (oldstat[stat.ST_MTIME] + repetition) & 0x7FFFFFFF
             self.assertTrue(newstat[stat.ST_MTIME] == oldtime)
             # no more examination is needed, if assumption above is true
             break
@@ -120,6 +126,8 @@
             pass
         self.assertFalse(os.path.isfile(b'foo'))
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
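
For context on the behaviour exercised above: util.atomictempfile writes into a dot-prefixed temp file in the same directory and only renames it over the target when close() is called, so readers never see a half-written file, and discard() simply drops the temp file. A bare-bones sketch of that pattern (POSIX rename semantics assumed; the real class also handles createmode, checkambig, and more):

    import os
    import tempfile

    def atomic_write(path, data):
        dirname = os.path.dirname(path) or '.'
        fd, tmp = tempfile.mkstemp(prefix='.tmp-', dir=dirname)
        try:
            with os.fdopen(fd, 'wb') as fp:
                fp.write(data)
            os.rename(tmp, path)  # the close()-makes-it-permanent step
        except Exception:
            os.unlink(tmp)        # the discard() step: target never appears
            raise

    atomic_write('demo.txt', b'argh\n')
    print(open('demo.txt', 'rb').read())  # b'argh\n'
    os.unlink('demo.txt')
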
--- a/tests/test-batching.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-batching.py	Sun Oct 06 09:45:02 2019 -0400
@@ -15,22 +15,27 @@
     wireprotov1peer,
 )
 
+
 def bprint(*bs):
     print(*[pycompat.sysstr(b) for b in bs])
 
+
 # equivalent of repo.repository
 class thing(object):
     def hello(self):
         return b"Ready."
 
+
 # equivalent of localrepo.localrepository
 class localthing(thing):
     def foo(self, one, two=None):
         if one:
             return b"%s and %s" % (one, two,)
         return b"Nope"
+
     def bar(self, b, a):
         return b"%s und %s" % (b, a,)
+
     def greet(self, name=None):
         return b"Hello, %s" % name
 
@@ -42,6 +47,7 @@
         finally:
             e.close()
 
+
 # usage of "thing" interface
 def use(it):
 
@@ -63,6 +69,7 @@
     bprint(fbar.result())
     bprint(fbar2.result())
 
+
 # local usage
 mylocal = localthing()
 print()
@@ -73,18 +80,24 @@
 
 # shared
 
+
 def escapearg(plain):
-    return (plain
-            .replace(b':', b'::')
-            .replace(b',', b':,')
-            .replace(b';', b':;')
-            .replace(b'=', b':='))
+    return (
+        plain.replace(b':', b'::')
+        .replace(b',', b':,')
+        .replace(b';', b':;')
+        .replace(b'=', b':=')
+    )
+
+
 def unescapearg(escaped):
-    return (escaped
-            .replace(b':=', b'=')
-            .replace(b':;', b';')
-            .replace(b':,', b',')
-            .replace(b'::', b':'))
+    return (
+        escaped.replace(b':=', b'=')
+        .replace(b':;', b';')
+        .replace(b':,', b',')
+        .replace(b'::', b':')
+    )
+
 
 # server side
 
@@ -92,9 +105,11 @@
 class server(object):
     def __init__(self, local):
         self.local = local
+
     def _call(self, name, args):
         args = dict(arg.split(b'=', 1) for arg in args)
         return getattr(self, name)(**args)
+
     def perform(self, req):
         bprint(b"REQ:", req)
         name, args = req.split(b'?', 1)
@@ -103,6 +118,7 @@
         res = getattr(self, pycompat.sysstr(name))(**pycompat.strkwargs(vals))
         bprint(b"  ->", res)
         return res
+
     def batch(self, cmds):
         res = []
         for pair in cmds.split(b';'):
@@ -112,15 +128,25 @@
                 if a:
                     n, v = a.split(b'=')
                     vals[n] = unescapearg(v)
-            res.append(escapearg(getattr(self, pycompat.sysstr(name))(
-                **pycompat.strkwargs(vals))))
+            res.append(
+                escapearg(
+                    getattr(self, pycompat.sysstr(name))(
+                        **pycompat.strkwargs(vals)
+                    )
+                )
+            )
         return b';'.join(res)
+
     def foo(self, one, two):
         return mangle(self.local.foo(unmangle(one), unmangle(two)))
+
     def bar(self, b, a):
         return mangle(self.local.bar(unmangle(b), unmangle(a)))
+
     def greet(self, name):
         return mangle(self.local.greet(unmangle(name)))
+
+
 myserver = server(mylocal)
 
 # local side
@@ -129,16 +155,21 @@
 # here we just transform the strings a bit to check we're properly en-/decoding
 def mangle(s):
     return b''.join(pycompat.bytechr(ord(c) + 1) for c in pycompat.bytestr(s))
+
+
 def unmangle(s):
     return b''.join(pycompat.bytechr(ord(c) - 1) for c in pycompat.bytestr(s))
 
+
 # equivalent of wireproto.wirerepository and something like http's wire format
 class remotething(thing):
     def __init__(self, server):
         self.server = server
+
     def _submitone(self, name, args):
         req = name + b'?' + b'&'.join([b'%s=%s' % (n, v) for n, v in args])
         return self.server.perform(req)
+
     def _submitbatch(self, cmds):
         req = []
         for name, args in cmds:
@@ -176,6 +207,7 @@
     def greet(self, name=None):
         return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
 
+
 # demo remote usage
 
 myproxy = remotething(myserver)
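
A quick sanity check on the framing above: batch() splits commands on ';' and arguments on ',', and '=' separates names from values, so argument values must escape ':', ',', ';' and '='. Round-tripping a few awkward values through the exact replacement rules of escapearg()/unescapearg() shows the scheme is lossless:

    def escapearg(plain):
        return (plain.replace(b':', b'::')
                     .replace(b',', b':,')
                     .replace(b';', b':;')
                     .replace(b'=', b':='))

    def unescapearg(escaped):
        return (escaped.replace(b':=', b'=')
                       .replace(b':;', b';')
                       .replace(b':,', b',')
                       .replace(b'::', b':'))

    for s in (b'a=b', b'x;y', b'p,q', b'r:s', b'::=;,'):
        assert unescapearg(escapearg(s)) == s, s
    print('escaping round-trips cleanly')
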
--- a/tests/test-bdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-bdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,25 +3,28 @@
 import struct
 import unittest
 
-from mercurial import (
-    mdiff,
-)
+from mercurial import mdiff
 
-class diffreplace(
-    collections.namedtuple('diffreplace', 'start end from_ to')):
+
+class diffreplace(collections.namedtuple('diffreplace', 'start end from_ to')):
     def __repr__(self):
         return 'diffreplace(%r, %r, %r, %r)' % self
 
+
 class BdiffTests(unittest.TestCase):
-
     def assert_bdiff_applies(self, a, b):
         d = mdiff.textdiff(a, b)
         c = a
         if d:
             c = mdiff.patches(a, [d])
         self.assertEqual(
-            c, b, ("bad diff+patch result from\n  %r to\n  "
-                   "%r: \nbdiff: %r\npatched: %r" % (a, b, d, c[:200])))
+            c,
+            b,
+            (
+                "bad diff+patch result from\n  %r to\n  "
+                "%r: \nbdiff: %r\npatched: %r" % (a, b, d, c[:200])
+            ),
+        )
 
     def assert_bdiff(self, a, b):
         self.assert_bdiff_applies(a, b)
@@ -58,11 +61,11 @@
         q = 0
         actions = []
         while pos < len(bin):
-            p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12])
+            p1, p2, l = struct.unpack(">lll", bin[pos : pos + 12])
             pos += 12
             if p1:
                 actions.append(a[q:p1])
-            actions.append(diffreplace(p1, p2, a[p1:p2], bin[pos:pos + l]))
+            actions.append(diffreplace(p1, p2, a[p1:p2], bin[pos : pos + l]))
             pos += l
             q = p2
         if q < len(a):
@@ -71,37 +74,55 @@
 
     def test_issue1295(self):
         cases = [
-            (b"x\n\nx\n\nx\n\nx\n\nz\n", b"x\n\nx\n\ny\n\nx\n\nx\n\nz\n",
-             [b'x\n\nx\n\n',
-              diffreplace(6, 6, b'', b'y\n\n'),
-              b'x\n\nx\n\nz\n']),
-            (b"x\n\nx\n\nx\n\nx\n\nz\n", b"x\n\nx\n\ny\n\nx\n\ny\n\nx\n\nz\n",
-             [b'x\n\nx\n\n',
-              diffreplace(6, 6, b'', b'y\n\n'),
-              b'x\n\n',
-              diffreplace(9, 9, b'', b'y\n\n'),
-              b'x\n\nz\n']),
+            (
+                b"x\n\nx\n\nx\n\nx\n\nz\n",
+                b"x\n\nx\n\ny\n\nx\n\nx\n\nz\n",
+                [
+                    b'x\n\nx\n\n',
+                    diffreplace(6, 6, b'', b'y\n\n'),
+                    b'x\n\nx\n\nz\n',
+                ],
+            ),
+            (
+                b"x\n\nx\n\nx\n\nx\n\nz\n",
+                b"x\n\nx\n\ny\n\nx\n\ny\n\nx\n\nz\n",
+                [
+                    b'x\n\nx\n\n',
+                    diffreplace(6, 6, b'', b'y\n\n'),
+                    b'x\n\n',
+                    diffreplace(9, 9, b'', b'y\n\n'),
+                    b'x\n\nz\n',
+                ],
+            ),
         ]
         for old, new, want in cases:
             self.assertEqual(self.showdiff(old, new), want)
 
     def test_issue1295_varies_on_pure(self):
-            # we should pick up abbbc. rather than bc.de as the longest match
-        got = self.showdiff(b"a\nb\nb\nb\nc\n.\nd\ne\n.\nf\n",
-                            b"a\nb\nb\na\nb\nb\nb\nc\n.\nb\nc\n.\nd\ne\nf\n")
-        want_c = [b'a\nb\nb\n',
-                  diffreplace(6, 6, b'', b'a\nb\nb\nb\nc\n.\n'),
-                  b'b\nc\n.\nd\ne\n',
-                  diffreplace(16, 18, b'.\n', b''),
-                  b'f\n']
-        want_pure = [diffreplace(0, 0, b'', b'a\nb\nb\n'),
-                     b'a\nb\nb\nb\nc\n.\n',
-                     diffreplace(12, 12, b'', b'b\nc\n.\n'),
-                     b'd\ne\n',
-                     diffreplace(16, 18, b'.\n', b''), b'f\n']
-        self.assertTrue(got in (want_c, want_pure),
-                        'got: %r, wanted either %r or %r' % (
-                            got, want_c, want_pure))
+        # we should pick up abbbc. rather than bc.de as the longest match
+        got = self.showdiff(
+            b"a\nb\nb\nb\nc\n.\nd\ne\n.\nf\n",
+            b"a\nb\nb\na\nb\nb\nb\nc\n.\nb\nc\n.\nd\ne\nf\n",
+        )
+        want_c = [
+            b'a\nb\nb\n',
+            diffreplace(6, 6, b'', b'a\nb\nb\nb\nc\n.\n'),
+            b'b\nc\n.\nd\ne\n',
+            diffreplace(16, 18, b'.\n', b''),
+            b'f\n',
+        ]
+        want_pure = [
+            diffreplace(0, 0, b'', b'a\nb\nb\n'),
+            b'a\nb\nb\nb\nc\n.\n',
+            diffreplace(12, 12, b'', b'b\nc\n.\n'),
+            b'd\ne\n',
+            diffreplace(16, 18, b'.\n', b''),
+            b'f\n',
+        ]
+        self.assertTrue(
+            got in (want_c, want_pure),
+            'got: %r, wanted either %r or %r' % (got, want_c, want_pure),
+        )
 
     def test_fixws(self):
         cases = [
@@ -113,39 +134,55 @@
         for a, b, allws in cases:
             c = mdiff.fixws(a, allws)
             self.assertEqual(
-                c, b, 'fixws(%r) want %r got %r (allws=%r)' % (a, b, c, allws))
+                c, b, 'fixws(%r) want %r got %r (allws=%r)' % (a, b, c, allws)
+            )
 
     def test_nice_diff_for_trivial_change(self):
-        self.assertEqual(self.showdiff(
-            b''.join(b'<%d\n-\n' % i for i in range(5)),
-            b''.join(b'>%d\n-\n' % i for i in range(5))),
-                         [diffreplace(0, 3, b'<0\n', b'>0\n'),
-                          b'-\n',
-                          diffreplace(5, 8, b'<1\n', b'>1\n'),
-                          b'-\n',
-                          diffreplace(10, 13, b'<2\n', b'>2\n'),
-                          b'-\n',
-                          diffreplace(15, 18, b'<3\n', b'>3\n'),
-                          b'-\n',
-                          diffreplace(20, 23, b'<4\n', b'>4\n'),
-                          b'-\n'])
+        self.assertEqual(
+            self.showdiff(
+                b''.join(b'<%d\n-\n' % i for i in range(5)),
+                b''.join(b'>%d\n-\n' % i for i in range(5)),
+            ),
+            [
+                diffreplace(0, 3, b'<0\n', b'>0\n'),
+                b'-\n',
+                diffreplace(5, 8, b'<1\n', b'>1\n'),
+                b'-\n',
+                diffreplace(10, 13, b'<2\n', b'>2\n'),
+                b'-\n',
+                diffreplace(15, 18, b'<3\n', b'>3\n'),
+                b'-\n',
+                diffreplace(20, 23, b'<4\n', b'>4\n'),
+                b'-\n',
+            ],
+        )
 
     def test_prefer_appending(self):
         # 1 line to 3 lines
-        self.assertEqual(self.showdiff(b'a\n', b'a\n' * 3),
-                         [b'a\n', diffreplace(2, 2, b'', b'a\na\n')])
+        self.assertEqual(
+            self.showdiff(b'a\n', b'a\n' * 3),
+            [b'a\n', diffreplace(2, 2, b'', b'a\na\n')],
+        )
         # 1 line to 5 lines
-        self.assertEqual(self.showdiff(b'a\n', b'a\n' * 5),
-                         [b'a\n', diffreplace(2, 2, b'', b'a\na\na\na\n')])
+        self.assertEqual(
+            self.showdiff(b'a\n', b'a\n' * 5),
+            [b'a\n', diffreplace(2, 2, b'', b'a\na\na\na\n')],
+        )
 
     def test_prefer_removing_trailing(self):
         # 3 lines to 1 line
-        self.assertEqual(self.showdiff(b'a\n' * 3, b'a\n'),
-                         [b'a\n', diffreplace(2, 6, b'a\na\n', b'')])
+        self.assertEqual(
+            self.showdiff(b'a\n' * 3, b'a\n'),
+            [b'a\n', diffreplace(2, 6, b'a\na\n', b'')],
+        )
         # 5 lines to 1 line
-        self.assertEqual(self.showdiff(b'a\n' * 5, b'a\n'),
-                         [b'a\n', diffreplace(2, 10, b'a\na\na\na\n', b'')])
+        self.assertEqual(
+            self.showdiff(b'a\n' * 5, b'a\n'),
+            [b'a\n', diffreplace(2, 10, b'a\na\na\na\n', b'')],
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
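
The binary delta that showdiff() decodes above is bdiff's patch format: a sequence of big-endian '>lll' headers (start, end, length), each followed by `length` bytes that replace a[start:end]. A small pure-Python applier for that layout (a sketch assuming well-formed input; mdiff.patches is the real, optimized implementation):

    import struct

    def apply_bdiff(a, delta):
        out, pos, last = [], 0, 0
        while pos < len(delta):
            p1, p2, l = struct.unpack(">lll", delta[pos:pos + 12])
            pos += 12
            out.append(a[last:p1])          # unchanged bytes before the hunk
            out.append(delta[pos:pos + l])  # replacement bytes
            pos += l
            last = p2
        out.append(a[last:])                # unchanged suffix
        return b''.join(out)

    # replace bytes 0..1 of b'abc' with b'X': header (0, 1, 1) plus b'X'
    delta = struct.pack(">lll", 0, 1, 1) + b'X'
    print(apply_bdiff(b'abc', delta))       # b'Xbc'
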
--- a/tests/test-cappedreader.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-cappedreader.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,9 +3,8 @@
 import io
 import unittest
 
-from mercurial import (
-    util,
-)
+from mercurial import util
+
 
 class CappedReaderTests(unittest.TestCase):
     def testreadfull(self):
@@ -86,6 +85,8 @@
         self.assertEqual(res, b'')
         self.assertEqual(source.tell(), 100)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
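
For context: util.cappedreader wraps a file object and never returns more than a fixed number of bytes in total, while leaving the underlying offset wherever the capped reads stopped; that is what testreadfull() and the tell() assertions above verify. A minimal stand-in with the same contract (a sketch, not the real class):

    import io

    class cappedreadersketch(object):
        """Hypothetical stand-in for util.cappedreader."""

        def __init__(self, fh, limit):
            self._fh = fh
            self._left = limit

        def read(self, n=-1):
            if n < 0 or n > self._left:
                n = self._left          # clamp to the remaining budget
            data = self._fh.read(n)
            self._left -= len(data)
            return data

    source = io.BytesIO(b'x' * 100)
    reader = cappedreadersketch(source, 10)
    print(len(reader.read(1000)))  # 10: capped, not 100
    print(reader.read(5))          # b'': budget exhausted
    print(source.tell())           # 10: underlying offset stops at the cap
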
--- a/tests/test-cbor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-cbor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,44 +5,46 @@
 import unittest
 
 # TODO migrate to canned cbor test strings and stop using thirdparty.cbor
-tpp = os.path.normpath(os.path.join(os.path.dirname(__file__),
-                                    '..', 'mercurial', 'thirdparty'))
+tpp = os.path.normpath(
+    os.path.join(os.path.dirname(__file__), '..', 'mercurial', 'thirdparty')
+)
 if not os.path.exists(tpp):
     # skip, not in a repo
     sys.exit(80)
 sys.path[0:0] = [tpp]
 import cbor
+
 del sys.path[0]
 
-from mercurial.utils import (
-    cborutil,
-)
+from mercurial.utils import cborutil
+
 
 class TestCase(unittest.TestCase):
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
+
 
 def loadit(it):
     return cbor.loads(b''.join(it))
 
+
 class BytestringTests(TestCase):
     def testsimple(self):
         self.assertEqual(
-            list(cborutil.streamencode(b'foobar')),
-            [b'\x46', b'foobar'])
+            list(cborutil.streamencode(b'foobar')), [b'\x46', b'foobar']
+        )
+
+        self.assertEqual(loadit(cborutil.streamencode(b'foobar')), b'foobar')
+
+        self.assertEqual(cborutil.decodeall(b'\x46foobar'), [b'foobar'])
 
         self.assertEqual(
-            loadit(cborutil.streamencode(b'foobar')),
-            b'foobar')
-
-        self.assertEqual(cborutil.decodeall(b'\x46foobar'),
-                         [b'foobar'])
-
-        self.assertEqual(cborutil.decodeall(b'\x46foobar\x45fizbi'),
-                         [b'foobar', b'fizbi'])
+            cborutil.decodeall(b'\x46foobar\x45fizbi'), [b'foobar', b'fizbi']
+        )
 
     def testlong(self):
         source = b'x' * 1048576
@@ -65,19 +67,26 @@
                 b'\x43',
                 b'\xee\xff\x99',
                 b'\xff',
-            ])
+            ],
+        )
 
         self.assertEqual(
             loadit(cborutil.streamencodebytestringfromiter(source)),
-            b''.join(source))
+            b''.join(source),
+        )
 
-        self.assertEqual(cborutil.decodeall(b'\x5f\x44\xaa\xbb\xcc\xdd'
-                                            b'\x43\xee\xff\x99\xff'),
-                         [b'\xaa\xbb\xcc\xdd', b'\xee\xff\x99', b''])
+        self.assertEqual(
+            cborutil.decodeall(
+                b'\x5f\x44\xaa\xbb\xcc\xdd' b'\x43\xee\xff\x99\xff'
+            ),
+            [b'\xaa\xbb\xcc\xdd', b'\xee\xff\x99', b''],
+        )
 
         for i, chunk in enumerate(
-            cborutil.decodeall(b'\x5f\x44\xaa\xbb\xcc\xdd'
-                               b'\x43\xee\xff\x99\xff')):
+            cborutil.decodeall(
+                b'\x5f\x44\xaa\xbb\xcc\xdd' b'\x43\xee\xff\x99\xff'
+            )
+        ):
             self.assertIsInstance(chunk, cborutil.bytestringchunk)
 
             if i == 0:
@@ -95,7 +104,8 @@
 
         self.assertEqual(
             loadit(cborutil.streamencodebytestringfromiter(source)),
-            b''.join(source))
+            b''.join(source),
+        )
 
     def testindefinite(self):
         source = b'\x00\x01\x02\x03' + b'\xff' * 16384
@@ -110,8 +120,9 @@
         self.assertEqual(next(it), b'\x42')
         self.assertEqual(next(it), b'\xff\xff')
 
-        dest = b''.join(cborutil.streamencodeindefinitebytestring(
-            source, chunksize=42))
+        dest = b''.join(
+            cborutil.streamencodeindefinitebytestring(source, chunksize=42)
+        )
         self.assertEqual(cbor.loads(dest), source)
 
         self.assertEqual(b''.join(cborutil.decodeall(dest)), source)
@@ -140,27 +151,42 @@
             elif len(source) < 1048576:
                 hlen = 5
 
-            self.assertEqual(cborutil.decodeitem(encoded),
-                             (True, source, hlen + len(source),
-                              cborutil.SPECIAL_NONE))
+            self.assertEqual(
+                cborutil.decodeitem(encoded),
+                (True, source, hlen + len(source), cborutil.SPECIAL_NONE),
+            )
 
     def testpartialdecode(self):
         encoded = b''.join(cborutil.streamencode(b'foobar'))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -6, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -5, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:7]),
-                         (True, b'foobar', 7, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -6, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -5, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:7]),
+            (True, b'foobar', 7, cborutil.SPECIAL_NONE),
+        )
 
     def testpartialdecodevariouslengths(self):
         lens = [
@@ -192,11 +218,11 @@
         for size in lens:
             if size < 24:
                 hlen = 1
-            elif size < 2**8:
+            elif size < 2 ** 8:
                 hlen = 2
-            elif size < 2**16:
+            elif size < 2 ** 16:
                 hlen = 3
-            elif size < 2**32:
+            elif size < 2 ** 32:
                 hlen = 5
             else:
                 assert False
@@ -207,107 +233,158 @@
             res = cborutil.decodeitem(encoded[0:1])
 
             if hlen > 1:
-                self.assertEqual(res, (False, None, -(hlen - 1),
-                                       cborutil.SPECIAL_NONE))
+                self.assertEqual(
+                    res, (False, None, -(hlen - 1), cborutil.SPECIAL_NONE)
+                )
             else:
-                self.assertEqual(res, (False, None, -(size + hlen - 1),
-                                       cborutil.SPECIAL_NONE))
+                self.assertEqual(
+                    res,
+                    (False, None, -(size + hlen - 1), cborutil.SPECIAL_NONE),
+                )
 
             # Decoding partial header reports remaining header size.
             for i in range(hlen - 1):
-                self.assertEqual(cborutil.decodeitem(encoded[0:i + 1]),
-                                 (False, None, -(hlen - i - 1),
-                                  cborutil.SPECIAL_NONE))
+                self.assertEqual(
+                    cborutil.decodeitem(encoded[0 : i + 1]),
+                    (False, None, -(hlen - i - 1), cborutil.SPECIAL_NONE),
+                )
 
             # Decoding complete header reports item size.
-            self.assertEqual(cborutil.decodeitem(encoded[0:hlen]),
-                             (False, None, -size, cborutil.SPECIAL_NONE))
+            self.assertEqual(
+                cborutil.decodeitem(encoded[0:hlen]),
+                (False, None, -size, cborutil.SPECIAL_NONE),
+            )
 
             # Decoding single byte after header reports item size - 1
-            self.assertEqual(cborutil.decodeitem(encoded[0:hlen + 1]),
-                             (False, None, -(size - 1), cborutil.SPECIAL_NONE))
+            self.assertEqual(
+                cborutil.decodeitem(encoded[0 : hlen + 1]),
+                (False, None, -(size - 1), cborutil.SPECIAL_NONE),
+            )
 
             # Decoding all but the last byte reports -1 needed.
-            self.assertEqual(cborutil.decodeitem(encoded[0:hlen + size - 1]),
-                             (False, None, -1, cborutil.SPECIAL_NONE))
+            self.assertEqual(
+                cborutil.decodeitem(encoded[0 : hlen + size - 1]),
+                (False, None, -1, cborutil.SPECIAL_NONE),
+            )
 
             # Decoding last byte retrieves value.
-            self.assertEqual(cborutil.decodeitem(encoded[0:hlen + size]),
-                             (True, source, hlen + size, cborutil.SPECIAL_NONE))
+            self.assertEqual(
+                cborutil.decodeitem(encoded[0 : hlen + size]),
+                (True, source, hlen + size, cborutil.SPECIAL_NONE),
+            )
 
     def testindefinitepartialdecode(self):
-        encoded = b''.join(cborutil.streamencodebytestringfromiter(
-            [b'foobar', b'biz']))
+        encoded = b''.join(
+            cborutil.streamencodebytestringfromiter([b'foobar', b'biz'])
+        )
 
         # First item should be begin of bytestring special.
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (True, None, 1,
-                          cborutil.SPECIAL_START_INDEFINITE_BYTESTRING))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (True, None, 1, cborutil.SPECIAL_START_INDEFINITE_BYTESTRING),
+        )
 
         # Second item should be the first chunk. But only available when
         # we give it 7 bytes (1 byte header + 6 byte chunk).
-        self.assertEqual(cborutil.decodeitem(encoded[1:2]),
-                         (False, None, -6, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[1:3]),
-                         (False, None, -5, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[1:4]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[1:5]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[1:6]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[1:7]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:2]),
+            (False, None, -6, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:3]),
+            (False, None, -5, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:4]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:5]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:6]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:7]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
 
-        self.assertEqual(cborutil.decodeitem(encoded[1:8]),
-                         (True, b'foobar', 7, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[1:8]),
+            (True, b'foobar', 7, cborutil.SPECIAL_NONE),
+        )
 
         # Third item should be second chunk. But only available when
         # we give it 4 bytes (1 byte header + 3 byte chunk).
-        self.assertEqual(cborutil.decodeitem(encoded[8:9]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[8:10]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[8:11]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[8:9]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[8:10]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[8:11]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
 
-        self.assertEqual(cborutil.decodeitem(encoded[8:12]),
-                         (True, b'biz', 4, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[8:12]),
+            (True, b'biz', 4, cborutil.SPECIAL_NONE),
+        )
 
         # Fourth item should be end of indefinite stream marker.
-        self.assertEqual(cborutil.decodeitem(encoded[12:13]),
-                         (True, None, 1, cborutil.SPECIAL_INDEFINITE_BREAK))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[12:13]),
+            (True, None, 1, cborutil.SPECIAL_INDEFINITE_BREAK),
+        )
 
         # Now test the behavior when going through the decoder.
 
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:1]),
-                         (False, 1, 0))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:2]),
-                         (False, 1, 6))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:3]),
-                         (False, 1, 5))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:4]),
-                         (False, 1, 4))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:5]),
-                         (False, 1, 3))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:6]),
-                         (False, 1, 2))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:7]),
-                         (False, 1, 1))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:8]),
-                         (True, 8, 0))
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:1]), (False, 1, 0)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:2]), (False, 1, 6)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:3]), (False, 1, 5)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:4]), (False, 1, 4)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:5]), (False, 1, 3)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:6]), (False, 1, 2)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:7]), (False, 1, 1)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:8]), (True, 8, 0)
+        )
 
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:9]),
-                         (True, 8, 3))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:10]),
-                         (True, 8, 2))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:11]),
-                         (True, 8, 1))
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:12]),
-                         (True, 12, 0))
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:9]), (True, 8, 3)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:10]), (True, 8, 2)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:11]), (True, 8, 1)
+        )
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:12]), (True, 12, 0)
+        )
 
-        self.assertEqual(cborutil.sansiodecoder().decode(encoded[0:13]),
-                         (True, 13, 0))
+        self.assertEqual(
+            cborutil.sansiodecoder().decode(encoded[0:13]), (True, 13, 0)
+        )
 
         decoder = cborutil.sansiodecoder()
         decoder.decode(encoded[0:8])
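
The framing being picked apart byte by byte here: an indefinite-length bytestring is the initial byte 0x5f, then ordinary definite-length chunks, then a 0xff break, which is why the chunk boundaries in this test land at offsets 1, 8 and 12. A sketch of the encoder side under those assumptions (Python 3; single-byte chunk headers only):

    def encode_indefinite(chunks):
        yield b'\x5f'                         # start indefinite-length bytestring
        for chunk in chunks:
            assert len(chunk) < 24            # keep chunk headers to one byte
            yield bytes([0x40 | len(chunk)])  # definite-length chunk header
            yield chunk
        yield b'\xff'                         # break marker

    blob = b''.join(encode_indefinite([b'foobar', b'biz']))
    print(blob)       # b'_FfoobarCbiz\xff'
    print(len(blob))  # 13 bytes; the break sits at offset 12, as asserted above
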
@@ -316,27 +393,28 @@
         self.assertTrue(values[0].isfirst)
         self.assertFalse(values[0].islast)
 
-        self.assertEqual(decoder.decode(encoded[8:12]),
-                         (True, 4, 0))
+        self.assertEqual(decoder.decode(encoded[8:12]), (True, 4, 0))
         values = decoder.getavailable()
         self.assertEqual(values, [b'biz'])
         self.assertFalse(values[0].isfirst)
         self.assertFalse(values[0].islast)
 
-        self.assertEqual(decoder.decode(encoded[12:]),
-                         (True, 1, 0))
+        self.assertEqual(decoder.decode(encoded[12:]), (True, 1, 0))
         values = decoder.getavailable()
         self.assertEqual(values, [b''])
         self.assertFalse(values[0].isfirst)
         self.assertTrue(values[0].islast)
 
+
 class StringTests(TestCase):
     def testdecodeforbidden(self):
         encoded = b'\x63foo'
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'string major type not supported'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'string major type not supported'
+        ):
             cborutil.decodeall(encoded)
 
+
 class IntTests(TestCase):
     def testsmall(self):
         self.assertEqual(list(cborutil.streamencode(0)), [b'\x00'])
@@ -355,8 +433,9 @@
         self.assertEqual(cborutil.decodeall(b'\x04'), [4])
 
         # Multiple value decode works.
-        self.assertEqual(cborutil.decodeall(b'\x00\x01\x02\x03\x04'),
-                         [0, 1, 2, 3, 4])
+        self.assertEqual(
+            cborutil.decodeall(b'\x00\x01\x02\x03\x04'), [0, 1, 2, 3, 4]
+        )
 
     def testnegativesmall(self):
         self.assertEqual(list(cborutil.streamencode(-1)), [b'\x20'])
@@ -375,8 +454,9 @@
         self.assertEqual(cborutil.decodeall(b'\x24'), [-5])
 
         # Multiple value decode works.
-        self.assertEqual(cborutil.decodeall(b'\x20\x21\x22\x23\x24'),
-                         [-1, -2, -3, -4, -5])
+        self.assertEqual(
+            cborutil.decodeall(b'\x20\x21\x22\x23\x24'), [-1, -2, -3, -4, -5]
+        )
 
     def testrange(self):
         for i in range(-70000, 70000, 10):
@@ -388,117 +468,196 @@
     def testdecodepartialubyte(self):
         encoded = b''.join(cborutil.streamencode(250))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 250, 2, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 250, 2, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartialbyte(self):
         encoded = b''.join(cborutil.streamencode(-42))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, -42, 2, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, -42, 2, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartialushort(self):
-        encoded = b''.join(cborutil.streamencode(2**15))
+        encoded = b''.join(cborutil.streamencode(2 ** 15))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, 2**15, 3, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, 2 ** 15, 3, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartialshort(self):
         encoded = b''.join(cborutil.streamencode(-1024))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (True, -1024, 3, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (True, -1024, 3, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartialulong(self):
-        encoded = b''.join(cborutil.streamencode(2**28))
+        encoded = b''.join(cborutil.streamencode(2 ** 28))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, 2**28, 5, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, 2 ** 28, 5, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartiallong(self):
         encoded = b''.join(cborutil.streamencode(-1048580))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, -1048580, 5, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, -1048580, 5, cborutil.SPECIAL_NONE),
+        )
 
     def testdecodepartialulonglong(self):
-        encoded = b''.join(cborutil.streamencode(2**32))
+        encoded = b''.join(cborutil.streamencode(2 ** 32))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -8, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -7, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -6, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -5, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:7]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:8]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:9]),
-                         (True, 2**32, 9, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -8, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -7, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -6, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -5, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:7]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:8]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:9]),
+            (True, 2 ** 32, 9, cborutil.SPECIAL_NONE),
+        )
 
         with self.assertRaisesRegex(
-            cborutil.CBORDecodeError, 'input data not fully consumed'):
+            cborutil.CBORDecodeError, 'input data not fully consumed'
+        ):
             cborutil.decodeall(encoded[0:1])
 
         with self.assertRaisesRegex(
-            cborutil.CBORDecodeError, 'input data not fully consumed'):
+            cborutil.CBORDecodeError, 'input data not fully consumed'
+        ):
             cborutil.decodeall(encoded[0:2])
 
     def testdecodepartiallonglong(self):
         encoded = b''.join(cborutil.streamencode(-7000000000))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -8, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -7, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -6, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -5, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:7]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:8]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:9]),
-                         (True, -7000000000, 9, cborutil.SPECIAL_NONE))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -8, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -7, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -6, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -5, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:7]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:8]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:9]),
+            (True, -7000000000, 9, cborutil.SPECIAL_NONE),
+        )
+
 
 class ArrayTests(TestCase):
     def testempty(self):
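
A reading aid for the long runs of assertions in the IntTests hunks above: cborutil.decodeitem() returns a 4-tuple (complete, value, readcount, special), where a negative readcount means "that many more bytes are needed". A toy decoder for just the unsigned-integer major type that follows the same convention (None standing in for SPECIAL_NONE; Python 3 only; not cborutil's code):

    def decode_uint(data):
        # Hypothetical illustration of the decodeitem() return convention.
        if not data:
            return (False, None, -1, None)
        sub = data[0] & 0x1f                   # low 5 bits of the initial byte
        if sub < 24:
            return (True, sub, 1, None)        # value fits in the initial byte
        width = {24: 1, 25: 2, 26: 4, 27: 8}[sub]
        if len(data) < 1 + width:
            return (False, None, len(data) - (1 + width), None)
        return (True, int.from_bytes(data[1:1 + width], 'big'), 1 + width, None)

    enc = b'\x19\x80\x00'         # 2 ** 15 encoded as a two-byte uint
    print(decode_uint(enc[:1]))   # (False, None, -2, None): two more bytes needed
    print(decode_uint(enc))       # (True, 32768, 3, None)
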
@@ -510,35 +669,36 @@
     def testbasic(self):
         source = [b'foo', b'bar', 1, -10]
 
-        chunks = [
-            b'\x84', b'\x43', b'foo', b'\x43', b'bar', b'\x01', b'\x29']
+        chunks = [b'\x84', b'\x43', b'foo', b'\x43', b'bar', b'\x01', b'\x29']
 
         self.assertEqual(list(cborutil.streamencode(source)), chunks)
 
         self.assertEqual(cborutil.decodeall(b''.join(chunks)), [source])
 
     def testemptyfromiter(self):
-        self.assertEqual(b''.join(cborutil.streamencodearrayfromiter([])),
-                         b'\x9f\xff')
+        self.assertEqual(
+            b''.join(cborutil.streamencodearrayfromiter([])), b'\x9f\xff'
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length uint not allowed'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'indefinite length uint not allowed'
+        ):
             cborutil.decodeall(b'\x9f\xff')
 
     def testfromiter1(self):
         source = [b'foo']
 
-        self.assertEqual(list(cborutil.streamencodearrayfromiter(source)), [
-            b'\x9f',
-            b'\x43', b'foo',
-            b'\xff',
-        ])
+        self.assertEqual(
+            list(cborutil.streamencodearrayfromiter(source)),
+            [b'\x9f', b'\x43', b'foo', b'\xff',],
+        )
 
         dest = b''.join(cborutil.streamencodearrayfromiter(source))
         self.assertEqual(cbor.loads(dest), source)
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length uint not allowed'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'indefinite length uint not allowed'
+        ):
             cborutil.decodeall(dest)
 
     def testtuple(self):
@@ -552,37 +712,59 @@
     def testpartialdecode(self):
         source = list(range(4))
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (True, 4, 1, cborutil.SPECIAL_START_ARRAY))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 4, 1, cborutil.SPECIAL_START_ARRAY))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (True, 4, 1, cborutil.SPECIAL_START_ARRAY),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 4, 1, cborutil.SPECIAL_START_ARRAY),
+        )
 
         source = list(range(23))
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (True, 23, 1, cborutil.SPECIAL_START_ARRAY))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 23, 1, cborutil.SPECIAL_START_ARRAY))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (True, 23, 1, cborutil.SPECIAL_START_ARRAY),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 23, 1, cborutil.SPECIAL_START_ARRAY),
+        )
 
         source = list(range(24))
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 24, 2, cborutil.SPECIAL_START_ARRAY))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (True, 24, 2, cborutil.SPECIAL_START_ARRAY))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 24, 2, cborutil.SPECIAL_START_ARRAY),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (True, 24, 2, cborutil.SPECIAL_START_ARRAY),
+        )
 
         source = list(range(256))
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (True, 256, 3, cborutil.SPECIAL_START_ARRAY))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (True, 256, 3, cborutil.SPECIAL_START_ARRAY))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (True, 256, 3, cborutil.SPECIAL_START_ARRAY),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (True, 256, 3, cborutil.SPECIAL_START_ARRAY),
+        )
 
     def testnested(self):
         source = [[], [], [[], [], []]]
@@ -607,17 +789,18 @@
         # Single value array whose value is an empty indefinite bytestring.
         encoded = b'\x81\x5f\x40\xff'
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length bytestrings not '
-                                    'allowed as array values'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError,
+            'indefinite length bytestrings not ' 'allowed as array values',
+        ):
             cborutil.decodeall(encoded)
 
+
 class SetTests(TestCase):
     def testempty(self):
-        self.assertEqual(list(cborutil.streamencode(set())), [
-            b'\xd9\x01\x02',
-            b'\x80',
-        ])
+        self.assertEqual(
+            list(cborutil.streamencode(set())), [b'\xd9\x01\x02', b'\x80',]
+        )
 
         self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
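
A brief aside on the byte values these set tests assert, since the same constants recur below. Judging by the assertions, cborutil.decodeitem returns a 4-tuple of (complete, value, bytes consumed, special marker), and a negative third element means "this many more bytes are needed". The b'\xd9\x01\x02' prefix is CBOR semantic tag 258 (a finite set), followed by an ordinary array header. A minimal standalone check of that layout (Python 3 syntax, not part of the changeset):

    # CBOR initial bytes carry a 3-bit major type and 5-bit additional info.
    tag = b'\xd9\x01\x02'
    assert tag[0] >> 5 == 6                        # major type 6: semantic tag
    assert tag[0] & 0x1F == 25                     # 25: 16-bit argument follows
    assert int.from_bytes(tag[1:], 'big') == 258   # IANA tag for finite sets
    assert b'\x80'[0] >> 5 == 4                    # major type 4: array...
    assert b'\x80'[0] & 0x1F == 0                  # ...of length 0
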
 
@@ -633,99 +816,135 @@
         # Must use array to encode sets.
         encoded = b'\xd9\x01\x02\xa0'
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'expected array after finite set '
-                                    'semantic tag'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError,
+            'expected array after finite set ' 'semantic tag',
+        ):
             cborutil.decodeall(encoded)
 
     def testpartialdecode(self):
         # Semantic tag item will be 3 bytes. Set header will be variable
         # depending on length.
         encoded = b''.join(cborutil.streamencode({i for i in range(23)}))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (True, 23, 4, cborutil.SPECIAL_START_SET))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, 23, 4, cborutil.SPECIAL_START_SET))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (True, 23, 4, cborutil.SPECIAL_START_SET),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, 23, 4, cborutil.SPECIAL_START_SET),
+        )
 
         encoded = b''.join(cborutil.streamencode({i for i in range(24)}))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, 24, 5, cborutil.SPECIAL_START_SET))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (True, 24, 5, cborutil.SPECIAL_START_SET))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, 24, 5, cborutil.SPECIAL_START_SET),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (True, 24, 5, cborutil.SPECIAL_START_SET),
+        )
 
         encoded = b''.join(cborutil.streamencode({i for i in range(256)}))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (True, 256, 6, cborutil.SPECIAL_START_SET))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (True, 256, 6, cborutil.SPECIAL_START_SET),
+        )
 
     def testinvalidvalue(self):
-        encoded = b''.join([
-            b'\xd9\x01\x02', # semantic tag
-            b'\x81', # array of size 1
-            b'\x5f\x43foo\xff', # indefinite length bytestring "foo"
-        ])
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',  # semantic tag
+                b'\x81',  # array of size 1
+                b'\x5f\x43foo\xff',  # indefinite length bytestring "foo"
+            ]
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length bytestrings not '
-                                    'allowed as set values'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError,
+            'indefinite length bytestrings not ' 'allowed as set values',
+        ):
+            cborutil.decodeall(encoded)
+
+        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\x80',])  # empty array
+
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'collections not allowed as set values'
+        ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([
-            b'\xd9\x01\x02',
-            b'\x81',
-            b'\x80', # empty array
-        ])
+        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\xa0',])  # empty map
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'collections not allowed as set values'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'collections not allowed as set values'
+        ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([
-            b'\xd9\x01\x02',
-            b'\x81',
-            b'\xa0', # empty map
-        ])
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',
+                b'\x81',
+                b'\xd9\x01\x02\x81\x01',  # set with integer 1
+            ]
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'collections not allowed as set values'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'collections not allowed as set values'
+        ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([
-            b'\xd9\x01\x02',
-            b'\x81',
-            b'\xd9\x01\x02\x81\x01', # set with integer 1
-        ])
-
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'collections not allowed as set values'):
-            cborutil.decodeall(encoded)
 
 class BoolTests(TestCase):
     def testbasic(self):
-        self.assertEqual(list(cborutil.streamencode(True)),  [b'\xf5'])
+        self.assertEqual(list(cborutil.streamencode(True)), [b'\xf5'])
         self.assertEqual(list(cborutil.streamencode(False)), [b'\xf4'])
 
         self.assertIs(loadit(cborutil.streamencode(True)), True)
@@ -734,8 +953,10 @@
         self.assertEqual(cborutil.decodeall(b'\xf4'), [False])
         self.assertEqual(cborutil.decodeall(b'\xf5'), [True])
 
-        self.assertEqual(cborutil.decodeall(b'\xf4\xf5\xf5\xf4'),
-                         [False, True, True, False])
+        self.assertEqual(
+            cborutil.decodeall(b'\xf4\xf5\xf5\xf4'), [False, True, True, False]
+        )
+
 
 class NoneTests(TestCase):
     def testbasic(self):
@@ -746,6 +967,7 @@
         self.assertEqual(cborutil.decodeall(b'\xf6'), [None])
         self.assertEqual(cborutil.decodeall(b'\xf6\xf6'), [None, None])
 
+
 class MapTests(TestCase):
     def testempty(self):
         self.assertEqual(list(cborutil.streamencode({})), [b'\xa0'])
@@ -754,19 +976,23 @@
         self.assertEqual(cborutil.decodeall(b'\xa0'), [{}])
 
     def testemptyindefinite(self):
-        self.assertEqual(list(cborutil.streamencodemapfromiter([])), [
-            b'\xbf', b'\xff'])
+        self.assertEqual(
+            list(cborutil.streamencodemapfromiter([])), [b'\xbf', b'\xff']
+        )
 
         self.assertEqual(loadit(cborutil.streamencodemapfromiter([])), {})
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length uint not allowed'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'indefinite length uint not allowed'
+        ):
             cborutil.decodeall(b'\xbf\xff')
 
     def testone(self):
         source = {b'foo': b'bar'}
-        self.assertEqual(list(cborutil.streamencode(source)), [
-            b'\xa1', b'\x43', b'foo', b'\x43', b'bar'])
+        self.assertEqual(
+            list(cborutil.streamencode(source)),
+            [b'\xa1', b'\x43', b'foo', b'\x43', b'bar'],
+        )
 
         self.assertEqual(loadit(cborutil.streamencode(source)), source)
 
@@ -781,8 +1007,8 @@
         self.assertEqual(loadit(cborutil.streamencode(source)), source)
 
         self.assertEqual(
-            loadit(cborutil.streamencodemapfromiter(source.items())),
-            source)
+            loadit(cborutil.streamencodemapfromiter(source.items())), source
+        )
 
         encoded = b''.join(cborutil.streamencode(source))
         self.assertEqual(cborutil.decodeall(encoded), [source])
@@ -793,12 +1019,11 @@
             2: -10,
         }
 
-        self.assertEqual(loadit(cborutil.streamencode(source)),
-                         source)
+        self.assertEqual(loadit(cborutil.streamencode(source)), source)
 
         self.assertEqual(
-            loadit(cborutil.streamencodemapfromiter(source.items())),
-            source)
+            loadit(cborutil.streamencodemapfromiter(source.items())), source
+        )
 
         encoded = b''.join(cborutil.streamencode(source))
         self.assertEqual(cborutil.decodeall(encoded), [source])
@@ -819,88 +1044,124 @@
         self.assertEqual(cborutil.decodeall(encoded), [source])
 
     def testillegalkey(self):
-        encoded = b''.join([
-            # map header + len 1
-            b'\xa1',
-            # indefinite length bytestring "foo" in key position
-            b'\x5f\x03foo\xff'
-        ])
+        encoded = b''.join(
+            [
+                # map header + len 1
+                b'\xa1',
+                # indefinite length bytestring "foo" in key position
+                b'\x5f\x03foo\xff',
+            ]
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length bytestrings not '
-                                    'allowed as map keys'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError,
+            'indefinite length bytestrings not ' 'allowed as map keys',
+        ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([
-            b'\xa1',
-            b'\x80', # empty array
-            b'\x43foo',
-        ])
+        encoded = b''.join([b'\xa1', b'\x80', b'\x43foo',])  # empty array
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'collections not supported as map keys'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'collections not supported as map keys'
+        ):
             cborutil.decodeall(encoded)
 
     def testillegalvalue(self):
-        encoded = b''.join([
-            b'\xa1', # map headers
-            b'\x43foo', # key
-            b'\x5f\x03bar\xff', # indefinite length value
-        ])
+        encoded = b''.join(
+            [
+                b'\xa1',  # map headers
+                b'\x43foo',  # key
+                b'\x5f\x03bar\xff',  # indefinite length value
+            ]
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'indefinite length bytestrings not '
-                                    'allowed as map values'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError,
+            'indefinite length bytestrings not ' 'allowed as map values',
+        ):
             cborutil.decodeall(encoded)
 
     def testpartialdecode(self):
         source = {b'key1': b'value1'}
         encoded = b''.join(cborutil.streamencode(source))
 
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (True, 1, 1, cborutil.SPECIAL_START_MAP))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 1, 1, cborutil.SPECIAL_START_MAP))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (True, 1, 1, cborutil.SPECIAL_START_MAP),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 1, 1, cborutil.SPECIAL_START_MAP),
+        )
 
         source = {b'key%d' % i: None for i in range(23)}
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (True, 23, 1, cborutil.SPECIAL_START_MAP))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (True, 23, 1, cborutil.SPECIAL_START_MAP),
+        )
 
         source = {b'key%d' % i: None for i in range(24)}
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (True, 24, 2, cborutil.SPECIAL_START_MAP))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (True, 24, 2, cborutil.SPECIAL_START_MAP))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (True, 24, 2, cborutil.SPECIAL_START_MAP),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (True, 24, 2, cborutil.SPECIAL_START_MAP),
+        )
 
         source = {b'key%d' % i: None for i in range(256)}
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (True, 256, 3, cborutil.SPECIAL_START_MAP))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (True, 256, 3, cborutil.SPECIAL_START_MAP))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (True, 256, 3, cborutil.SPECIAL_START_MAP),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (True, 256, 3, cborutil.SPECIAL_START_MAP),
+        )
 
         source = {b'key%d' % i: None for i in range(65536)}
         encoded = b''.join(cborutil.streamencode(source))
-        self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                         (False, None, -4, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                         (False, None, -3, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:3]),
-                         (False, None, -2, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:4]),
-                         (False, None, -1, cborutil.SPECIAL_NONE))
-        self.assertEqual(cborutil.decodeitem(encoded[0:5]),
-                         (True, 65536, 5, cborutil.SPECIAL_START_MAP))
-        self.assertEqual(cborutil.decodeitem(encoded[0:6]),
-                         (True, 65536, 5, cborutil.SPECIAL_START_MAP))
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:1]),
+            (False, None, -4, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:2]),
+            (False, None, -3, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:3]),
+            (False, None, -2, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:4]),
+            (False, None, -1, cborutil.SPECIAL_NONE),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:5]),
+            (True, 65536, 5, cborutil.SPECIAL_START_MAP),
+        )
+        self.assertEqual(
+            cborutil.decodeitem(encoded[0:6]),
+            (True, 65536, 5, cborutil.SPECIAL_START_MAP),
+        )
+
 
 class SemanticTagTests(TestCase):
     def testdecodeforbidden(self):
@@ -908,8 +1169,7 @@
             if i == cborutil.SEMANTIC_TAG_FINITE_SET:
                 continue
 
-            tag = cborutil.encodelength(cborutil.MAJOR_TYPE_SEMANTIC,
-                                        i)
+            tag = cborutil.encodelength(cborutil.MAJOR_TYPE_SEMANTIC, i)
 
             encoded = tag + cborutil.encodelength(cborutil.MAJOR_TYPE_UINT, 42)
 
@@ -917,18 +1177,26 @@
             if i < 24:
                 pass
             elif i < 256:
-                self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                                 (False, None, -1, cborutil.SPECIAL_NONE))
+                self.assertEqual(
+                    cborutil.decodeitem(encoded[0:1]),
+                    (False, None, -1, cborutil.SPECIAL_NONE),
+                )
             elif i < 65536:
-                self.assertEqual(cborutil.decodeitem(encoded[0:1]),
-                                 (False, None, -2, cborutil.SPECIAL_NONE))
-                self.assertEqual(cborutil.decodeitem(encoded[0:2]),
-                                 (False, None, -1, cborutil.SPECIAL_NONE))
+                self.assertEqual(
+                    cborutil.decodeitem(encoded[0:1]),
+                    (False, None, -2, cborutil.SPECIAL_NONE),
+                )
+                self.assertEqual(
+                    cborutil.decodeitem(encoded[0:2]),
+                    (False, None, -1, cborutil.SPECIAL_NONE),
+                )
 
-            with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                        r'semantic tag \d+ not allowed'):
+            with self.assertRaisesRegex(
+                cborutil.CBORDecodeError, r'semantic tag \d+ not allowed'
+            ):
                 cborutil.decodeitem(encoded)
 
+
 class SpecialTypesTests(TestCase):
     def testforbiddentypes(self):
         for i in range(256):
@@ -941,15 +1209,18 @@
 
             encoded = cborutil.encodelength(cborutil.MAJOR_TYPE_SPECIAL, i)
 
-            with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                        r'special type \d+ not allowed'):
+            with self.assertRaisesRegex(
+                cborutil.CBORDecodeError, r'special type \d+ not allowed'
+            ):
                 cborutil.decodeitem(encoded)
 
+
 class SansIODecoderTests(TestCase):
     def testemptyinput(self):
         decoder = cborutil.sansiodecoder()
         self.assertEqual(decoder.decode(b''), (False, 0, 0))
 
+
 class BufferingDecoderTests(TestCase):
     def testsimple(self):
         source = [
@@ -969,7 +1240,7 @@
             start = 0
 
             while start < len(encoded):
-                decoder.decode(encoded[start:start + step])
+                decoder.decode(encoded[start : start + step])
                 start += step
 
             self.assertEqual(decoder.getavailable(), [source])
@@ -982,20 +1253,23 @@
 
         self.assertEqual(decoder.getavailable(), [b'foobar'])
 
+
 class DecodeallTests(TestCase):
     def testemptyinput(self):
         self.assertEqual(cborutil.decodeall(b''), [])
 
     def testpartialinput(self):
-        encoded = b''.join([
-            b'\x82', # array of 2 elements
-            b'\x01', # integer 1
-        ])
+        encoded = b''.join(
+            [b'\x82', b'\x01',]  # array of 2 elements  # integer 1
+        )
 
-        with self.assertRaisesRegex(cborutil.CBORDecodeError,
-                                    'input data not complete'):
+        with self.assertRaisesRegex(
+            cborutil.CBORDecodeError, 'input data not complete'
+        ):
             cborutil.decodeall(encoded)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
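
The hunks above are typical of this changeset: any call that overflows the line limit is exploded to one argument per line with a trailing comma, adjacent string literals (the 'not ' 'allowed' pairs) are rewrapped but never merged, and slice colons gain spaces only around compound bounds (encoded[start : start + step], while encoded[0:1] stays tight). A hypothetical reproduction using black's Python API — it assumes a black new enough to expose format_str and FileMode, and line_length=80 with string_normalization=False only approximates the settings behind this changeset:

    import black

    SRC = (
        "self.assertEqual(cborutil.decodeitem(encoded[0:1]),\n"
        "                 (True, 23, 1, cborutil.SPECIAL_START_ARRAY))\n"
    )

    mode = black.FileMode(line_length=80, string_normalization=False)
    # Prints the call with each argument on its own line plus a trailing
    # comma -- the same shape as the reformatted assertions above.
    print(black.format_str(SRC, mode=mode))
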
--- a/tests/test-check-interfaces.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-check-interfaces.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,6 +3,7 @@
 from __future__ import absolute_import, print_function
 
 from mercurial import encoding
+
 encoding.environ[b'HGREALINTERFACES'] = b'1'
 
 import os
@@ -10,20 +11,17 @@
 import sys
 
 # Only run if tests are run in a repo
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
-                    'test-repo']):
+if subprocess.call(
+    ['python', '%s/hghave' % os.environ['TESTDIR'], 'test-repo']
+):
     sys.exit(80)
 
 from mercurial.interfaces import (
     dirstate as intdirstate,
     repository,
 )
-from mercurial.thirdparty.zope import (
-    interface as zi,
-)
-from mercurial.thirdparty.zope.interface import (
-    verify as ziverify,
-)
+from mercurial.thirdparty.zope import interface as zi
+from mercurial.thirdparty.zope.interface import verify as ziverify
 from mercurial import (
     bundlerepo,
     dirstate,
@@ -49,8 +47,10 @@
 
 sys.path[0:0] = [testdir]
 import simplestorerepo
+
 del sys.path[0]
 
+
 def checkzobject(o, allowextra=False):
     """Verify an object with a zope interface."""
     ifaces = zi.providedBy(o)
@@ -75,35 +75,45 @@
     public = {a for a in dir(o) if not a.startswith('_')}
 
     for attr in sorted(public - allowed):
-        print('public attribute not declared in interfaces: %s.%s' % (
-            o.__class__.__name__, attr))
+        print(
+            'public attribute not declared in interfaces: %s.%s'
+            % (o.__class__.__name__, attr)
+        )
+
 
 # Facilitates testing localpeer.
 class dummyrepo(object):
     def __init__(self):
         self.ui = uimod.ui()
+
     def filtered(self, name):
         pass
+
     def _restrictcapabilities(self, caps):
         pass
 
+
 class dummyopener(object):
     handlers = []
 
+
 # Facilitates testing sshpeer without requiring a server.
 class badpeer(httppeer.httppeer):
     def __init__(self):
-        super(badpeer, self).__init__(None, None, None, dummyopener(), None,
-                                      None)
+        super(badpeer, self).__init__(
+            None, None, None, dummyopener(), None, None
+        )
         self.badattribute = True
 
     def badmethod(self):
         pass
 
+
 class dummypipe(object):
     def close(self):
         pass
 
+
 def main():
     ui = uimod.ui()
     # Needed so we can open a local repo with obsstore without a warning.
@@ -117,25 +127,44 @@
     ziverify.verifyClass(repository.ipeerv2, httppeer.httpv2peer)
     checkzobject(httppeer.httpv2peer(None, b'', b'', None, None, None))
 
-    ziverify.verifyClass(repository.ipeerbase,
-                         localrepo.localpeer)
+    ziverify.verifyClass(repository.ipeerbase, localrepo.localpeer)
     checkzobject(localrepo.localpeer(dummyrepo()))
 
-    ziverify.verifyClass(repository.ipeercommandexecutor,
-                         localrepo.localcommandexecutor)
+    ziverify.verifyClass(
+        repository.ipeercommandexecutor, localrepo.localcommandexecutor
+    )
     checkzobject(localrepo.localcommandexecutor(None))
 
-    ziverify.verifyClass(repository.ipeercommandexecutor,
-                         wireprotov1peer.peerexecutor)
+    ziverify.verifyClass(
+        repository.ipeercommandexecutor, wireprotov1peer.peerexecutor
+    )
     checkzobject(wireprotov1peer.peerexecutor(None))
 
     ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv1peer)
-    checkzobject(sshpeer.sshv1peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
-                                   dummypipe(), None, None))
+    checkzobject(
+        sshpeer.sshv1peer(
+            ui,
+            b'ssh://localhost/foo',
+            b'',
+            dummypipe(),
+            dummypipe(),
+            None,
+            None,
+        )
+    )
 
     ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv2peer)
-    checkzobject(sshpeer.sshv2peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
-                                   dummypipe(), None, None))
+    checkzobject(
+        sshpeer.sshv2peer(
+            ui,
+            b'ssh://localhost/foo',
+            b'',
+            dummypipe(),
+            dummypipe(),
+            None,
+            None,
+        )
+    )
 
     ziverify.verifyClass(repository.ipeerbase, bundlerepo.bundlepeer)
     checkzobject(bundlerepo.bundlepeer(dummyrepo()))
@@ -146,21 +175,29 @@
     ziverify.verifyClass(repository.ipeerbase, unionrepo.unionpeer)
     checkzobject(unionrepo.unionpeer(dummyrepo()))
 
-    ziverify.verifyClass(repository.ilocalrepositorymain,
-                         localrepo.localrepository)
-    ziverify.verifyClass(repository.ilocalrepositoryfilestorage,
-                         localrepo.revlogfilestorage)
+    ziverify.verifyClass(
+        repository.ilocalrepositorymain, localrepo.localrepository
+    )
+    ziverify.verifyClass(
+        repository.ilocalrepositoryfilestorage, localrepo.revlogfilestorage
+    )
     repo = localrepo.makelocalrepository(ui, rootdir)
     checkzobject(repo)
 
-    ziverify.verifyClass(wireprototypes.baseprotocolhandler,
-                         wireprotoserver.sshv1protocolhandler)
-    ziverify.verifyClass(wireprototypes.baseprotocolhandler,
-                         wireprotoserver.sshv2protocolhandler)
-    ziverify.verifyClass(wireprototypes.baseprotocolhandler,
-                         wireprotoserver.httpv1protocolhandler)
-    ziverify.verifyClass(wireprototypes.baseprotocolhandler,
-                         wireprotov2server.httpv2protocolhandler)
+    ziverify.verifyClass(
+        wireprototypes.baseprotocolhandler, wireprotoserver.sshv1protocolhandler
+    )
+    ziverify.verifyClass(
+        wireprototypes.baseprotocolhandler, wireprotoserver.sshv2protocolhandler
+    )
+    ziverify.verifyClass(
+        wireprototypes.baseprotocolhandler,
+        wireprotoserver.httpv1protocolhandler,
+    )
+    ziverify.verifyClass(
+        wireprototypes.baseprotocolhandler,
+        wireprotov2server.httpv2protocolhandler,
+    )
 
     sshv1 = wireprotoserver.sshv1protocolhandler(None, None, None)
     checkzobject(sshv1)
@@ -174,22 +211,28 @@
 
     ziverify.verifyClass(repository.ifilestorage, filelog.filelog)
     ziverify.verifyClass(repository.imanifestdict, manifest.manifestdict)
-    ziverify.verifyClass(repository.imanifestrevisionstored,
-                         manifest.manifestctx)
-    ziverify.verifyClass(repository.imanifestrevisionwritable,
-                         manifest.memmanifestctx)
-    ziverify.verifyClass(repository.imanifestrevisionstored,
-                         manifest.treemanifestctx)
-    ziverify.verifyClass(repository.imanifestrevisionwritable,
-                         manifest.memtreemanifestctx)
+    ziverify.verifyClass(
+        repository.imanifestrevisionstored, manifest.manifestctx
+    )
+    ziverify.verifyClass(
+        repository.imanifestrevisionwritable, manifest.memmanifestctx
+    )
+    ziverify.verifyClass(
+        repository.imanifestrevisionstored, manifest.treemanifestctx
+    )
+    ziverify.verifyClass(
+        repository.imanifestrevisionwritable, manifest.memtreemanifestctx
+    )
     ziverify.verifyClass(repository.imanifestlog, manifest.manifestlog)
     ziverify.verifyClass(repository.imanifeststorage, manifest.manifestrevlog)
 
-    ziverify.verifyClass(repository.irevisiondelta,
-                         simplestorerepo.simplestorerevisiondelta)
+    ziverify.verifyClass(
+        repository.irevisiondelta, simplestorerepo.simplestorerevisiondelta
+    )
     ziverify.verifyClass(repository.ifilestorage, simplestorerepo.filestorage)
-    ziverify.verifyClass(repository.iverifyproblem,
-                         simplestorerepo.simplefilestoreproblem)
+    ziverify.verifyClass(
+        repository.iverifyproblem, simplestorerepo.simplefilestoreproblem
+    )
 
     ziverify.verifyClass(intdirstate.idirstate, dirstate.dirstate)
 
@@ -198,8 +241,9 @@
     checkzobject(fl, allowextra=True)
 
     # Conforms to imanifestlog.
-    ml = manifest.manifestlog(vfs, repo, manifest.manifestrevlog(repo.svfs),
-                              repo.narrowmatch())
+    ml = manifest.manifestlog(
+        vfs, repo, manifest.manifestrevlog(repo.svfs), repo.narrowmatch()
+    )
     checkzobject(ml)
     checkzobject(repo.manifestlog)
 
@@ -217,8 +261,7 @@
     mrl = manifest.manifestrevlog(vfs)
     checkzobject(mrl)
 
-    ziverify.verifyClass(repository.irevisiondelta,
-                         revlog.revlogrevisiondelta)
+    ziverify.verifyClass(repository.irevisiondelta, revlog.revlogrevisiondelta)
 
     rd = revlog.revlogrevisiondelta(
         node=b'',
@@ -229,11 +272,12 @@
         flags=b'',
         baserevisionsize=None,
         revision=b'',
-        delta=None)
+        delta=None,
+    )
     checkzobject(rd)
 
-    ziverify.verifyClass(repository.iverifyproblem,
-                         revlog.revlogproblem)
+    ziverify.verifyClass(repository.iverifyproblem, revlog.revlogproblem)
     checkzobject(revlog.revlogproblem())
 
+
 main()
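
For readers unfamiliar with the verification calls that dominate this file: verifyClass checks that a class declares every attribute an interface promises, while checkzobject (defined above) audits the reverse direction. A toy sketch of the same pattern — not taken from Mercurial, assuming only the zope.interface package:

    from zope.interface import Interface, implementer
    from zope.interface.verify import verifyClass

    class IGreeter(Interface):
        def greet(name):
            """Return a greeting for ``name``."""

    @implementer(IGreeter)
    class Greeter(object):
        def greet(self, name):
            return 'hello %s' % name

    # Raises zope.interface.exceptions.BrokenImplementation (or a sibling
    # error) if Greeter fails to provide something IGreeter declares.
    verifyClass(IGreeter, Greeter)
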
--- a/tests/test-config-env.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-config-env.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,9 +11,7 @@
     util,
 )
 
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 testtmp = encoding.environ[b'TESTTMP']
 
@@ -21,6 +19,7 @@
 def join(name):
     return os.path.join(testtmp, name)
 
+
 with open(join(b'sysrc'), 'wb') as f:
     f.write(b'[ui]\neditor=e0\n[pager]\npager=p0\n')
 
@@ -31,24 +30,28 @@
 def systemrcpath():
     return [join(b'sysrc')]
 
+
 def userrcpath():
     return [join(b'userrc')]
 
+
 rcutil.systemrcpath = systemrcpath
 rcutil.userrcpath = userrcpath
-os.path.isdir = lambda x: False # hack: do not load default.d/*.rc
+os.path.isdir = lambda x: False  # hack: do not load default.d/*.rc
 
 # utility to print configs
 def printconfigs(env):
     encoding.environ = env
-    rcutil._rccomponents = None # reset cache
+    rcutil._rccomponents = None  # reset cache
     ui = uimod.ui.load()
     for section, name, value in ui.walkconfig():
         source = ui.configsource(section, name)
-        procutil.stdout.write(b'%s.%s=%s # %s\n'
-                              % (section, name, value, util.pconvert(source)))
+        procutil.stdout.write(
+            b'%s.%s=%s # %s\n' % (section, name, value, util.pconvert(source))
+        )
     procutil.stdout.write(b'\n')
 
+
 # environment variable overrides
 printconfigs({})
 printconfigs({b'EDITOR': b'e2', b'PAGER': b'p2'})
--- a/tests/test-context.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-context.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,6 +13,8 @@
 )
 
 print_ = print
+
+
 def print(*args, **kwargs):
     """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
 
@@ -22,11 +24,13 @@
     print_(*args, **kwargs)
     sys.stdout.flush()
 
+
 def printb(data, end=b'\n'):
     out = getattr(sys.stdout, 'buffer', sys.stdout)
     out.write(data + end)
     out.flush()
 
+
 ui = uimod.ui.load()
 
 repo = hg.repository(ui, b'test1', create=1)
@@ -49,12 +53,18 @@
 
 # test memctx with non-ASCII commit message
 
+
 def filectxfn(repo, memctx, path):
     return context.memfilectx(repo, memctx, b"foo", b"")
 
-ctx = context.memctx(repo, [b'tip', None],
-                     encoding.tolocal(b"Gr\xc3\xbcezi!"),
-                     [b"foo"], filectxfn)
+
+ctx = context.memctx(
+    repo,
+    [b'tip', None],
+    encoding.tolocal(b"Gr\xc3\xbcezi!"),
+    [b"foo"],
+    filectxfn,
+)
 ctx.commit()
 for enc in "ASCII", "Latin-1", "UTF-8":
     encoding.encoding = enc
@@ -62,17 +72,27 @@
 
 # test performing a status
 
+
 def getfilectx(repo, memctx, f):
     fctx = memctx.p1()[f]
     data, flags = fctx.data(), fctx.flags()
     if f == b'foo':
         data += b'bar\n'
     return context.memfilectx(
-        repo, memctx, f, data, b'l' in flags, b'x' in flags)
+        repo, memctx, f, data, b'l' in flags, b'x' in flags
+    )
+
 
 ctxa = repo[0]
-ctxb = context.memctx(repo, [ctxa.node(), None], b"test diff", [b"foo"],
-                      getfilectx, ctxa.user(), ctxa.date())
+ctxb = context.memctx(
+    repo,
+    [ctxa.node(), None],
+    b"test diff",
+    [b"foo"],
+    getfilectx,
+    ctxa.user(),
+    ctxa.date(),
+)
 
 print(ctxb.status(ctxa))
 
@@ -114,11 +134,13 @@
 print('wctx._status=%s' % (str(wctx._status)))
 
 print('=== with "pattern match":')
-print(actx1.status(other=wctx,
-                   match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
+print(
+    actx1.status(other=wctx, match=scmutil.matchfiles(repo, [b'bar-m', b'foo']))
+)
 print('wctx._status=%s' % (str(wctx._status)))
-print(actx2.status(other=wctx,
-                   match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
+print(
+    actx2.status(other=wctx, match=scmutil.matchfiles(repo, [b'bar-m', b'foo']))
+)
 print('wctx._status=%s' % (str(wctx._status)))
 
 print('=== with "always match" and "listclean=True":')
@@ -129,12 +151,12 @@
 
 print("== checking workingcommitctx.status:")
 
-wcctx = context.workingcommitctx(repo,
-                                 scmutil.status([b'bar-m'],
-                                                [b'bar-a'],
-                                                [],
-                                                [], [], [], []),
-                                 text=b'', date=b'0 0')
+wcctx = context.workingcommitctx(
+    repo,
+    scmutil.status([b'bar-m'], [b'bar-a'], [], [], [], [], []),
+    text=b'',
+    date=b'0 0',
+)
 print('wcctx._status=%s' % (str(wcctx._status)))
 
 print('=== with "always match":')
@@ -150,21 +172,35 @@
 print('wcctx._status=%s' % (str(wcctx._status)))
 
 print('=== with "pattern match":')
-print(actx1.status(other=wcctx,
-                   match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
+print(
+    actx1.status(
+        other=wcctx, match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])
+    )
+)
 print('wcctx._status=%s' % (str(wcctx._status)))
-print(actx2.status(other=wcctx,
-                   match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])))
+print(
+    actx2.status(
+        other=wcctx, match=scmutil.matchfiles(repo, [b'bar-m', b'foo'])
+    )
+)
 print('wcctx._status=%s' % (str(wcctx._status)))
 
 print('=== with "pattern match" and "listclean=True":')
-print(actx1.status(other=wcctx,
-                   match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
-                   listclean=True))
+print(
+    actx1.status(
+        other=wcctx,
+        match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
+        listclean=True,
+    )
+)
 print('wcctx._status=%s' % (str(wcctx._status)))
-print(actx2.status(other=wcctx,
-                   match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
-                   listclean=True))
+print(
+    actx2.status(
+        other=wcctx,
+        match=scmutil.matchfiles(repo, [b'bar-r', b'foo']),
+        listclean=True,
+    )
+)
 print('wcctx._status=%s' % (str(wcctx._status)))
 
 os.chdir('..')
@@ -180,17 +216,19 @@
     with open(i, 'wb') as f:
         f.write(i)
     status = scmutil.status([], [i], [], [], [], [], [])
-    ctx = context.workingcommitctx(repo, status, text=i, user=b'test@test.com',
-                                   date=(0, 0))
-    ctx.p1().manifest() # side effect: cache manifestctx
+    ctx = context.workingcommitctx(
+        repo, status, text=i, user=b'test@test.com', date=(0, 0)
+    )
+    ctx.p1().manifest()  # side effect: cache manifestctx
     n = repo.commitctx(ctx)
     printb(b'commit %s: %s' % (i, hex(n)))
 
     # touch 00manifest.i mtime so storecache could expire.
     # repo.__dict__['manifestlog'] is deleted by transaction releasefn.
     st = repo.svfs.stat(b'00manifest.i')
-    repo.svfs.utime(b'00manifest.i',
-                    (st[stat.ST_MTIME] + 1, st[stat.ST_MTIME] + 1))
+    repo.svfs.utime(
+        b'00manifest.i', (st[stat.ST_MTIME] + 1, st[stat.ST_MTIME] + 1)
+    )
 
     # read the file just committed
     try:
--- a/tests/test-demandimport.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-demandimport.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,6 +1,7 @@
 from __future__ import absolute_import, print_function
 
 from mercurial import demandimport
+
 demandimport.enable()
 
 import os
@@ -12,8 +13,9 @@
 ispy3 = sys.version_info[0] >= 3
 
 # Only run if demandimport is allowed
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
-                    'demandimport']):
+if subprocess.call(
+    ['python', '%s/hghave' % os.environ['TESTDIR'], 'demandimport']
+):
     sys.exit(80)
 
 # We rely on assert, which gets optimized out.
@@ -33,8 +35,11 @@
 if os.name != 'nt':
     try:
         import distutils.msvc9compiler
-        print('distutils.msvc9compiler needs to be an immediate '
-              'importerror on non-windows platforms')
+
+        print(
+            'distutils.msvc9compiler needs to be an immediate '
+            'importerror on non-windows platforms'
+        )
         distutils.msvc9compiler
     except ImportError:
         pass
@@ -42,6 +47,8 @@
 import re
 
 rsub = re.sub
+
+
 def f(obj):
     l = repr(obj)
     l = rsub("0x[0-9a-fA-F]+", "0x?", l)
@@ -49,6 +56,7 @@
     l = rsub("'<[a-z]*>'", "'<whatever>'", l)
     return l
 
+
 demandimport.disable()
 os.environ['HGDEMANDIMPORT'] = 'disable'
 # this enable call should not actually enable demandimport!
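
One behavior worth calling out, since it also explains the odd-looking 'not ' 'allowed' pairs in the cborutil hunks: black relocates adjacent string literals but never merges them, because implicit concatenation already yields a single string at compile time. A quick standalone check (mine, not from the changeset):

    # Implicit concatenation: two literals, one resulting string object.
    msg = ('distutils.msvc9compiler needs to be an immediate '
           'importerror on non-windows platforms')
    assert msg == ('distutils.msvc9compiler needs to be an immediate'
                   ' importerror on non-windows platforms')
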
--- a/tests/test-dispatch.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-dispatch.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,15 +1,15 @@
 from __future__ import absolute_import, print_function
 import os
 import sys
-from mercurial import (
-    dispatch,
-)
+from mercurial import dispatch
+
 
 def printb(data, end=b'\n'):
     out = getattr(sys.stdout, 'buffer', sys.stdout)
     out.write(data + end)
     out.flush()
 
+
 def testdispatch(cmd):
     """Simple wrapper around dispatch.dispatch()
 
@@ -20,6 +20,7 @@
     result = dispatch.dispatch(req)
     printb(b"result: %r" % (result,))
 
+
 testdispatch(b"init test1")
 os.chdir('test1')
 
--- a/tests/test-doctest.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-doctest.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,22 +7,30 @@
 import re
 import sys
 
-ispy3 = (sys.version_info[0] >= 3)
+ispy3 = sys.version_info[0] >= 3
 
 if 'TERM' in os.environ:
     del os.environ['TERM']
 
+
 class py3docchecker(doctest.OutputChecker):
     def check_output(self, want, got, optionflags):
         want2 = re.sub(r'''\bu(['"])(.*?)\1''', r'\1\2\1', want)  # py2: u''
         got2 = re.sub(r'''\bb(['"])(.*?)\1''', r'\1\2\1', got)  # py3: b''
         # py3: <exc.name>: b'<msg>' -> <name>: <msg>
         #      <exc.name>: <others> -> <name>: <others>
-        got2 = re.sub(r'''^mercurial\.\w+\.(\w+): (['"])(.*?)\2''', r'\1: \3',
-                      got2, re.MULTILINE)
+        got2 = re.sub(
+            r'''^mercurial\.\w+\.(\w+): (['"])(.*?)\2''',
+            r'\1: \3',
+            got2,
+            re.MULTILINE,
+        )
         got2 = re.sub(r'^mercurial\.\w+\.(\w+): ', r'\1: ', got2, re.MULTILINE)
-        return any(doctest.OutputChecker.check_output(self, w, g, optionflags)
-                   for w, g in [(want, got), (want2, got2)])
+        return any(
+            doctest.OutputChecker.check_output(self, w, g, optionflags)
+            for w, g in [(want, got), (want2, got2)]
+        )
+
 
 def testmod(name, optionflags=0, testtarget=None):
     __import__(name)
@@ -40,6 +48,7 @@
         runner.run(test)
     runner.summarize()
 
+
 testmod('mercurial.changegroup')
 testmod('mercurial.changelog')
 testmod('mercurial.cmdutil')
--- a/tests/test-duplicateoptions.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-duplicateoptions.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,7 +10,8 @@
 
 try:
     import sqlite3
-    del sqlite3 # unused, just checking that import works
+
+    del sqlite3  # unused, just checking that import works
 except ImportError:
     ignore.add(b'sqlitestore')
 
@@ -41,8 +42,9 @@
     seenshort = globalshort.copy()
     seenlong = globallong.copy()
     for option in entry[1]:
-        if ((option[0] and option[0] in seenshort) or
-            (option[1] and option[1] in seenlong)):
+        if (option[0] and option[0] in seenshort) or (
+            option[1] and option[1] in seenlong
+        ):
             print("command '" + cmd + "' has duplicate option " + str(option))
         seenshort.add(option[0])
         seenlong.add(option[1])
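
The condition rewrite above is purely cosmetic: black re-breaks the parentheses around the or instead of continuing inside each clause. A small equivalence check, illustrative only and with made-up data:

    option = ('q', 'quiet')
    seenshort, seenlong = {'q'}, set()

    # pre-black layout:
    dup_before = ((option[0] and option[0] in seenshort) or
                  (option[1] and option[1] in seenlong))

    # post-black layout -- the same expression, re-parenthesized:
    dup_after = (option[0] and option[0] in seenshort) or (
        option[1] and option[1] in seenlong
    )

    assert dup_before == dup_after == True
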
--- a/tests/test-encoding-func.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-encoding-func.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,9 +2,8 @@
 
 import unittest
 
-from mercurial import (
-    encoding,
-)
+from mercurial import encoding
+
 
 class IsasciistrTest(unittest.TestCase):
     asciistrs = [
@@ -28,12 +27,14 @@
                 t[i] |= 0x80
                 self.assertFalse(encoding.isasciistr(bytes(t)))
 
+
 class LocalEncodingTest(unittest.TestCase):
     def testasciifastpath(self):
         s = b'\0' * 100
         self.assertTrue(s is encoding.tolocal(s))
         self.assertTrue(s is encoding.fromlocal(s))
 
+
 class Utf8bEncodingTest(unittest.TestCase):
     def setUp(self):
         self.origencoding = encoding.encoding
@@ -75,6 +76,8 @@
         self.assertEqual(l, b'\xc5\xed')  # lossless
         self.assertEqual(s, encoding.toutf8b(l))  # convert back to utf-8
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
--- a/tests/test-extensions-wrapfunction.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-extensions-wrapfunction.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,28 +2,36 @@
 
 from mercurial import extensions
 
+
 def genwrapper(x):
     def f(orig, *args, **kwds):
         return [x] + orig(*args, **kwds)
+
     f.x = x
     return f
 
+
 def getid(wrapper):
     return getattr(wrapper, 'x', '-')
 
+
 wrappers = [genwrapper(i) for i in range(5)]
 
+
 class dummyclass(object):
     def getstack(self):
         return ['orig']
 
+
 dummy = dummyclass()
 
+
 def batchwrap(wrappers):
     for w in wrappers:
         extensions.wrapfunction(dummy, 'getstack', w)
         print('wrap %d: %s' % (getid(w), dummy.getstack()))
 
+
 def batchunwrap(wrappers):
     for w in wrappers:
         result = None
@@ -34,9 +42,14 @@
             msg = e.__class__.__name__
         print('unwrap %s: %s: %s' % (getid(w), getid(result), msg))
 
+
 batchwrap(wrappers + [wrappers[0]])
-batchunwrap([(wrappers[i] if i is not None and i >= 0 else None)
-             for i in [3, None, 0, 4, 0, 2, 1, None]])
+batchunwrap(
+    [
+        (wrappers[i] if i is not None and i >= 0 else None)
+        for i in [3, None, 0, 4, 0, 2, 1, None]
+    ]
+)
 
 wrap0 = extensions.wrappedfunction(dummy, 'getstack', wrappers[0])
 wrap1 = extensions.wrappedfunction(dummy, 'getstack', wrappers[1])
@@ -59,6 +72,8 @@
 class callableobj(object):
     def __call__(self):
         return ['orig']
+
+
 dummy.cobj = callableobj()
 extensions.wrapfunction(dummy, 'cobj', wrappers[0])
 print('wrap callable object', dummy.cobj())
--- a/tests/test-fastannotate-revmap.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-fastannotate-revmap.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,19 +13,23 @@
 if pycompat.ispy3:
     xrange = range
 
+
 def genhsh(i):
     return pycompat.bytechr(i) + b'\0' * 19
 
+
 def gettemppath():
     fd, path = tempfile.mkstemp()
     os.close(fd)
     os.unlink(path)
     return path
 
+
 def ensure(condition):
     if not condition:
         raise RuntimeError('Unexpected')
 
+
 def testbasicreadwrite():
     path = gettemppath()
 
@@ -36,7 +40,19 @@
     ensure(rm.hsh2rev(b'\0' * 20) is None)
 
     paths = [
-        b'', b'a', None, b'b', b'b', b'c', b'c', None, b'a', b'b', b'a', b'a']
+        b'',
+        b'a',
+        None,
+        b'b',
+        b'b',
+        b'c',
+        b'c',
+        None,
+        b'a',
+        b'b',
+        b'a',
+        b'a',
+    ]
     for i in xrange(1, 5):
         ensure(rm.append(genhsh(i), sidebranch=(i & 1), path=paths[i]) == i)
 
@@ -56,8 +72,10 @@
 
     # append without calling save() explicitly
     for i in xrange(5, 12):
-        ensure(rm.append(genhsh(i), sidebranch=(i & 1), path=paths[i],
-                         flush=True) == i)
+        ensure(
+            rm.append(genhsh(i), sidebranch=(i & 1), path=paths[i], flush=True)
+            == i
+        )
 
     # re-load and verify
     rm = revmap.revmap(path)
@@ -85,6 +103,7 @@
     except Exception:
         pass
 
+
 def testcorruptformat():
     path = gettemppath()
 
@@ -127,12 +146,15 @@
 
     os.unlink(path)
 
+
 def testcopyfrom():
     path = gettemppath()
     rm = revmap.revmap(path)
     for i in xrange(1, 10):
-        ensure(rm.append(genhsh(i),
-                         sidebranch=(i & 1), path=(b'%d' % (i // 3))) == i)
+        ensure(
+            rm.append(genhsh(i), sidebranch=(i & 1), path=(b'%d' % (i // 3)))
+            == i
+        )
     rm.flush()
 
     # copy rm to rm2
@@ -148,6 +170,7 @@
     os.unlink(path)
     os.unlink(path2)
 
+
 class fakefctx(object):
     def __init__(self, node, path=None):
         self._node = node
@@ -159,6 +182,7 @@
     def path(self):
         return self._path
 
+
 def testcontains():
     path = gettemppath()
 
@@ -181,6 +205,7 @@
         ensure(fakefctx(genhsh(i), path=(b'%d' % (i // 2))) in rm)
         ensure(fakefctx(genhsh(i), path=b'a') not in rm)
 
+
 def testlastnode():
     path = gettemppath()
     ensure(revmap.getlastnode(path) is None)
@@ -193,6 +218,7 @@
         rm2 = revmap.revmap(path)
         ensure(rm2.rev2hsh(rm2.maxrev) == hsh)
 
+
 testbasicreadwrite()
 testcorruptformat()
 testcopyfrom()
--- a/tests/test-filecache.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-filecache.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,11 +4,14 @@
 import subprocess
 import sys
 
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
-                    'cacheable']):
+if subprocess.call(
+    ['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']
+):
     sys.exit(80)
 
 print_ = print
+
+
 def print(*args, **kwargs):
     """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
 
@@ -18,6 +21,7 @@
     print_(*args, **kwargs)
     sys.stdout.flush()
 
+
 from mercurial import (
     extensions,
     hg,
@@ -31,12 +35,12 @@
 if pycompat.ispy3:
     xrange = range
 
+
 class fakerepo(object):
     def __init__(self):
         self._filecache = {}
 
     class fakevfs(object):
-
         def join(self, p):
             return p
 
@@ -60,6 +64,7 @@
             except AttributeError:
                 pass
 
+
 def basic(repo):
     print("* neither file exists")
     # calls function
@@ -137,6 +142,7 @@
     print("* both files changed inode")
     repo.cached
 
+
 def fakeuncacheable():
     def wrapcacheable(orig, *args, **kwargs):
         return False
@@ -145,8 +151,9 @@
         pass
 
     originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
-    origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
-                                            wrapcacheable)
+    origcacheable = extensions.wrapfunction(
+        util.cachestat, 'cacheable', wrapcacheable
+    )
 
     for fn in ['x', 'y']:
         try:
@@ -159,6 +166,7 @@
     util.cachestat.cacheable = origcacheable
     util.cachestat.__init__ = originit
 
+
 def test_filecache_synced():
     # test old behavior that caused filecached properties to go out of sync
     os.system('hg init && echo a >> a && hg ci -qAm.')
@@ -174,6 +182,7 @@
     # it
     repo.commit(b'.')
 
+
 def setbeforeget(repo):
     os.remove('x')
     os.remove('y')
@@ -200,6 +209,7 @@
     print("* file y created")
     print(repo.cached)
 
+
 def antiambiguity():
     filename = 'ambigcheck'
 
@@ -236,11 +246,17 @@
 
         # st_mtime should be advanced "repetition * 2" times, because
         # all changes occurred at same time (in sec)
-        expected = (oldstat[stat.ST_MTIME] + repetition * 2) & 0x7fffffff
+        expected = (oldstat[stat.ST_MTIME] + repetition * 2) & 0x7FFFFFFF
         if newstat[stat.ST_MTIME] != expected:
-            print("'newstat[stat.ST_MTIME] %s is not %s (as %s + %s * 2)" %
-                  (newstat[stat.ST_MTIME], expected,
-                   oldstat[stat.ST_MTIME], repetition))
+            print(
+                "'newstat[stat.ST_MTIME] %s is not %s (as %s + %s * 2)"
+                % (
+                    newstat[stat.ST_MTIME],
+                    expected,
+                    oldstat[stat.ST_MTIME],
+                    repetition,
+                )
+            )
 
         # no more examination is needed regardless of result
         break
@@ -251,6 +267,7 @@
         # on other faster platforms can detect problems
         pass
 
+
 print('basic:')
 print()
 basic(fakerepo())
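
The 0x7fffffff to 0x7FFFFFFF change above is black normalizing hex digits to upper case; the mask's value is untouched, so the mtime arithmetic behaves identically. A one-liner to convince yourself (not from the changeset):

    # Same 31-bit mask either way.
    assert 0x7fffffff == 0x7FFFFFFF == 2 ** 31 - 1
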
--- a/tests/test-filelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-filelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
 
 fl = repo.file(b'foobar')
 
+
 def addrev(text, renamed=False):
     if renamed:
         # data doesn't matter. Just make sure filelog.renamed() returns True
@@ -37,9 +38,11 @@
         if lock:
             lock.release()
 
+
 def error(text):
     print('ERROR: ' + text)
 
+
 textwith = b'\1\nfoo'
 without = b'foo'
 
@@ -49,8 +52,10 @@
 if fl.cmp(node, textwith) or not fl.cmp(node, without):
     error('filelog.cmp for data starting with \\1\\n')
 if fl.size(0) != len(textwith):
-    error('FIXME: This is a known failure of filelog.size for data starting '
-        'with \\1\\n')
+    error(
+        'FIXME: This is a known failure of filelog.size for data starting '
+        'with \\1\\n'
+    )
 
 node = addrev(textwith, renamed=True)
 if not textwith == fl.read(node):
--- a/tests/test-flagprocessor.t	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-flagprocessor.t	Sun Oct 06 09:45:02 2019 -0400
@@ -204,7 +204,7 @@
     File "*/mercurial/extensions.py", line *, in _runextsetup (glob)
       extsetup(ui)
     File "*/tests/flagprocessorext.py", line *, in extsetup (glob)
-      validatehash,
+      REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
     File "*/mercurial/revlogutils/flagutil.py", line *, in addflagprocessor (glob)
       insertflagprocessor(flag, processor, flagprocessors)
     File "*/mercurial/revlogutils/flagutil.py", line *, in insertflagprocessor (glob)
--- a/tests/test-hg-parseurl.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-hg-parseurl.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,33 +2,49 @@
 
 import unittest
 
-from mercurial import (
-    hg,
-)
+from mercurial import hg
+
 
 class ParseRequestTests(unittest.TestCase):
     def testparse(self):
 
-        self.assertEqual(hg.parseurl(b'http://example.com/no/anchor'),
-                         (b'http://example.com/no/anchor', (None, [])))
-        self.assertEqual(hg.parseurl(b'http://example.com/an/anchor#foo'),
-                         (b'http://example.com/an/anchor', (b'foo', [])))
+        self.assertEqual(
+            hg.parseurl(b'http://example.com/no/anchor'),
+            (b'http://example.com/no/anchor', (None, [])),
+        )
+        self.assertEqual(
+            hg.parseurl(b'http://example.com/an/anchor#foo'),
+            (b'http://example.com/an/anchor', (b'foo', [])),
+        )
         self.assertEqual(
             hg.parseurl(b'http://example.com/no/anchor/branches', [b'foo']),
-            (b'http://example.com/no/anchor/branches', (None, [b'foo'])))
+            (b'http://example.com/no/anchor/branches', (None, [b'foo'])),
+        )
         self.assertEqual(
             hg.parseurl(b'http://example.com/an/anchor/branches#bar', [b'foo']),
-            (b'http://example.com/an/anchor/branches', (b'bar', [b'foo'])))
-        self.assertEqual(hg.parseurl(
-            b'http://example.com/an/anchor/branches-None#foo', None),
-            (b'http://example.com/an/anchor/branches-None', (b'foo', [])))
-        self.assertEqual(hg.parseurl(b'http://example.com/'),
-                         (b'http://example.com/', (None, [])))
-        self.assertEqual(hg.parseurl(b'http://example.com'),
-                         (b'http://example.com/', (None, [])))
-        self.assertEqual(hg.parseurl(b'http://example.com#foo'),
-                         (b'http://example.com/', (b'foo', [])))
+            (b'http://example.com/an/anchor/branches', (b'bar', [b'foo'])),
+        )
+        self.assertEqual(
+            hg.parseurl(
+                b'http://example.com/an/anchor/branches-None#foo', None
+            ),
+            (b'http://example.com/an/anchor/branches-None', (b'foo', [])),
+        )
+        self.assertEqual(
+            hg.parseurl(b'http://example.com/'),
+            (b'http://example.com/', (None, [])),
+        )
+        self.assertEqual(
+            hg.parseurl(b'http://example.com'),
+            (b'http://example.com/', (None, [])),
+        )
+        self.assertEqual(
+            hg.parseurl(b'http://example.com#foo'),
+            (b'http://example.com/', (b'foo', [])),
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
--- a/tests/test-hgweb-auth.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-hgweb-auth.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,6 +1,8 @@
 from __future__ import absolute_import, print_function
 
-from mercurial import demandimport; demandimport.enable()
+from mercurial import demandimport
+
+demandimport.enable()
 from mercurial import (
     error,
     pycompat,
@@ -8,35 +10,39 @@
     url,
     util,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 urlerr = util.urlerr
 urlreq = util.urlreq
 
+
 class myui(uimod.ui):
     def interactive(self):
         return False
 
+
 origui = myui.load()
 
+
 def writeauth(items):
     ui = origui.copy()
     for name, value in items.items():
         ui.setconfig(b'auth', name, value)
     return ui
 
+
 def _stringifyauthinfo(ai):
     if ai is None:
         return ai
     realm, authuris, user, passwd = ai
-    return (pycompat.strurl(realm),
-            [pycompat.strurl(u) for u in authuris],
-            pycompat.strurl(user),
-            pycompat.strurl(passwd),
+    return (
+        pycompat.strurl(realm),
+        [pycompat.strurl(u) for u in authuris],
+        pycompat.strurl(user),
+        pycompat.strurl(passwd),
     )
 
+
 def test(auth, urls=None):
     print('CFG:', pycompat.sysstr(stringutil.pprint(auth, bprefix=True)))
     prefixes = set()
@@ -57,11 +63,15 @@
             u, authinfo = util.url(uri).authinfo()
             if authinfo is not None:
                 pm.add_password(*_stringifyauthinfo(authinfo))
-            print('    ', tuple(pycompat.strurl(a) for a in
-                                pm.find_user_password('test',
-                                                      pycompat.strurl(u))))
+            print(
+                '    ',
+                tuple(
+                    pycompat.strurl(a)
+                    for a in pm.find_user_password('test', pycompat.strurl(u))
+                ),
+            )
         except error.Abort:
-            print('    ','abort')
+            print('    ', 'abort')
 
     if not urls:
         urls = [
@@ -73,7 +83,7 @@
             b'https://example.org/bar',
             b'https://x@example.org/bar',
             b'https://y@example.org/bar',
-            ]
+        ]
     for u in urls:
         _test(u)
 
@@ -90,64 +100,100 @@
 test({b'x.prefix': b'example.org', b'x.schemes': b'http https'})
 
 print('\n*** Test prefix matching\n')
-test({b'x.prefix': b'http://example.org/foo',
-      b'y.prefix': b'http://example.org/bar'})
-test({b'x.prefix': b'http://example.org/foo',
-      b'y.prefix': b'http://example.org/foo/bar'})
+test(
+    {
+        b'x.prefix': b'http://example.org/foo',
+        b'y.prefix': b'http://example.org/bar',
+    }
+)
+test(
+    {
+        b'x.prefix': b'http://example.org/foo',
+        b'y.prefix': b'http://example.org/foo/bar',
+    }
+)
 test({b'x.prefix': b'*', b'y.prefix': b'https://example.org/bar'})
 
 print('\n*** Test user matching\n')
-test({b'x.prefix': b'http://example.org/foo',
-      b'x.username': None,
-      b'x.password': b'xpassword'},
-     urls=[b'http://y@example.org/foo'])
-test({b'x.prefix': b'http://example.org/foo',
-      b'x.username': None,
-      b'x.password': b'xpassword',
-      b'y.prefix': b'http://example.org/foo',
-      b'y.username': b'y',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
-test({b'x.prefix': b'http://example.org/foo/bar',
-      b'x.username': None,
-      b'x.password': b'xpassword',
-      b'y.prefix': b'http://example.org/foo',
-      b'y.username': b'y',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo/bar'])
+test(
+    {
+        b'x.prefix': b'http://example.org/foo',
+        b'x.username': None,
+        b'x.password': b'xpassword',
+    },
+    urls=[b'http://y@example.org/foo'],
+)
+test(
+    {
+        b'x.prefix': b'http://example.org/foo',
+        b'x.username': None,
+        b'x.password': b'xpassword',
+        b'y.prefix': b'http://example.org/foo',
+        b'y.username': b'y',
+        b'y.password': b'ypassword',
+    },
+    urls=[b'http://y@example.org/foo'],
+)
+test(
+    {
+        b'x.prefix': b'http://example.org/foo/bar',
+        b'x.username': None,
+        b'x.password': b'xpassword',
+        b'y.prefix': b'http://example.org/foo',
+        b'y.username': b'y',
+        b'y.password': b'ypassword',
+    },
+    urls=[b'http://y@example.org/foo/bar'],
+)
 
 print('\n*** Test user matching with name in prefix\n')
 
 # prefix, username and URL have the same user
-test({b'x.prefix': b'https://example.org/foo',
-      b'x.username': None,
-      b'x.password': b'xpassword',
-      b'y.prefix': b'http://y@example.org/foo',
-      b'y.username': b'y',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
+test(
+    {
+        b'x.prefix': b'https://example.org/foo',
+        b'x.username': None,
+        b'x.password': b'xpassword',
+        b'y.prefix': b'http://y@example.org/foo',
+        b'y.username': b'y',
+        b'y.password': b'ypassword',
+    },
+    urls=[b'http://y@example.org/foo'],
+)
 # Prefix has a different user from username and URL
-test({b'y.prefix': b'http://z@example.org/foo',
-      b'y.username': b'y',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
+test(
+    {
+        b'y.prefix': b'http://z@example.org/foo',
+        b'y.username': b'y',
+        b'y.password': b'ypassword',
+    },
+    urls=[b'http://y@example.org/foo'],
+)
 # Prefix has a different user from URL; no username
-test({b'y.prefix': b'http://z@example.org/foo',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
+test(
+    {b'y.prefix': b'http://z@example.org/foo', b'y.password': b'ypassword'},
+    urls=[b'http://y@example.org/foo'],
+)
 # Prefix and URL have same user, but doesn't match username
-test({b'y.prefix': b'http://y@example.org/foo',
-      b'y.username': b'z',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
+test(
+    {
+        b'y.prefix': b'http://y@example.org/foo',
+        b'y.username': b'z',
+        b'y.password': b'ypassword',
+    },
+    urls=[b'http://y@example.org/foo'],
+)
 # Prefix and URL have the same user; no username
-test({b'y.prefix': b'http://y@example.org/foo',
-      b'y.password': b'ypassword'},
-     urls=[b'http://y@example.org/foo'])
+test(
+    {b'y.prefix': b'http://y@example.org/foo', b'y.password': b'ypassword'},
+    urls=[b'http://y@example.org/foo'],
+)
 # Prefix user, but no URL user or username
-test({b'y.prefix': b'http://y@example.org/foo',
-      b'y.password': b'ypassword'},
-     urls=[b'http://example.org/foo'])
+test(
+    {b'y.prefix': b'http://y@example.org/foo', b'y.password': b'ypassword'},
+    urls=[b'http://example.org/foo'],
+)
+
 
 def testauthinfo(fullurl, authurl):
     print('URIs:', fullurl, authurl)
@@ -156,5 +202,6 @@
     pm.add_password(*ai)
     print(pm.find_user_password('test', authurl))
 
+
 print('\n*** Test urllib2 and util.url\n')
 testauthinfo('http://user@example.com:8080/foo', 'http://example.com:8080/foo')
--- a/tests/test-hgwebdir-paths.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-hgwebdir-paths.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,9 +5,8 @@
     hg,
     ui as uimod,
 )
-from mercurial.hgweb import (
-    hgwebdir_mod,
-)
+from mercurial.hgweb import hgwebdir_mod
+
 hgwebdir = hgwebdir_mod.hgwebdir
 
 os.mkdir(b'webdir')
@@ -24,10 +23,12 @@
 hg.repository(u, b'c', create=1)
 os.chdir(b'..')
 
-paths = {b't/a/': b'%s/a' % webdir,
-         b'b': b'%s/b' % webdir,
-         b'coll': b'%s/*' % webdir,
-         b'rcoll': b'%s/**' % webdir}
+paths = {
+    b't/a/': b'%s/a' % webdir,
+    b'b': b'%s/b' % webdir,
+    b'coll': b'%s/*' % webdir,
+    b'rcoll': b'%s/**' % webdir,
+}
 
 config = os.path.join(webdir, b'hgwebdir.conf')
 configfile = open(config, 'wb')
--- a/tests/test-hook.t	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-hook.t	Sun Oct 06 09:45:02 2019 -0400
@@ -1138,6 +1138,7 @@
   Traceback (most recent call last):
   ImportError: No module named hgext_importfail
   Traceback (most recent call last):
+      "precommit", throw=True, parent1=hookp1, parent2=hookp2
   HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
   abort: precommit.importfail hook is invalid: import of "importfail" failed
 
--- a/tests/test-hybridencode.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-hybridencode.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,9 +2,8 @@
 
 import unittest
 
-from mercurial import (
-    store,
-)
+from mercurial import store
+
 
 class hybridencodetests(unittest.TestCase):
     def hybridencode(self, input, want):
@@ -19,72 +18,112 @@
     def testnoencodingrequired(self):
         self.hybridencode(
             b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}',
-            b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}')
+            b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}',
+        )
 
-    def testuppercasechars(self): # uppercase char X is encoded as _x
+    def testuppercasechars(self):  # uppercase char X is encoded as _x
         self.hybridencode(
             b'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ',
-            b'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z')
+            b'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z',
+        )
 
-    def testunderbar(self): # underbar is doubled
+    def testunderbar(self):  # underbar is doubled
         self.hybridencode(b'data/_', b'data/__')
 
-    def testtilde(self): # tilde is character-encoded
+    def testtilde(self):  # tilde is character-encoded
         self.hybridencode(b'data/~', b'data/~7e')
 
-    def testcontrolchars(self): # characters in ASCII code range 1..31
+    def testcontrolchars(self):  # characters in ASCII code range 1..31
         self.hybridencode(
-            (b'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
-             b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e'
-             b'\x1f'),
-            (b'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13'
-             b'~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f'))
+            (
+                b'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
+                b'\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e'
+                b'\x1f'
+            ),
+            (
+                b'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13'
+                b'~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f'
+            ),
+        )
 
-    def testhighascii(self):# characters in ASCII code range 126..255
+    def testhighascii(self):  # characters in ASCII code range 126..255
         self.hybridencode(
-            (b'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c'
-             b'\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b'
-             b'\x9c\x9d\x9e\x9f'),
-            (b'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90'
-             b'~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f'))
+            (
+                b'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c'
+                b'\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b'
+                b'\x9c\x9d\x9e\x9f'
+            ),
+            (
+                b'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90'
+                b'~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f'
+            ),
+        )
         self.hybridencode(
-            (b'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad'
-             b'\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc'
-             b'\xbd\xbe\xbf'),
-            (b'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2'
-             b'~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf'))
+            (
+                b'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad'
+                b'\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc'
+                b'\xbd\xbe\xbf'
+            ),
+            (
+                b'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2'
+                b'~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf'
+            ),
+        )
         self.hybridencode(
-            (b'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca'
-             b'\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6'
-             b'\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf'),
-            (b'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2'
-             b'~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df'))
+            (
+                b'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca'
+                b'\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6'
+                b'\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf'
+            ),
+            (
+                b'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2'
+                b'~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df'
+            ),
+        )
         self.hybridencode(
-            (b'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed'
-             b'\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd'
-             b'\xfe\xff'),
-            (b'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2'
-             b'~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff'))
+            (
+                b'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed'
+                b'\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd'
+                b'\xfe\xff'
+            ),
+            (
+                b'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2'
+                b'~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff'
+            ),
+        )
 
-    def testwinreserved(self): # Windows reserved characters
+    def testwinreserved(self):  # Windows reserved characters
         self.hybridencode(
-            (b'data/less <, greater >, colon :, double-quote ", backslash \\, '
-             b'pipe |, question-mark ?, asterisk *'),
-            (b'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, '
-             b'backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a'))
+            (
+                b'data/less <, greater >, colon :, double-quote ", backslash \\, '
+                b'pipe |, question-mark ?, asterisk *'
+            ),
+            (
+                b'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, '
+                b'backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a'
+            ),
+        )
 
     def testhgreserved(self):
         # encoding directories ending in .hg, .i or .d with '.hg' suffix
-        self.hybridencode(b'data/x.h.i/x.hg/x.i/x.d/foo',
-                          b'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo')
-        self.hybridencode(b'data/a.hg/a.i/a.d/foo',
-                          b'data/a.hg.hg/a.i.hg/a.d.hg/foo')
-        self.hybridencode(b'data/au.hg/au.i/au.d/foo',
-                          b'data/au.hg.hg/au.i.hg/au.d.hg/foo')
-        self.hybridencode(b'data/aux.hg/aux.i/aux.d/foo',
-                          b'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo')
-        self.hybridencode(b'data/auxy.hg/auxy.i/auxy.d/foo',
-                          b'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo')
+        self.hybridencode(
+            b'data/x.h.i/x.hg/x.i/x.d/foo',
+            b'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo',
+        )
+        self.hybridencode(
+            b'data/a.hg/a.i/a.d/foo', b'data/a.hg.hg/a.i.hg/a.d.hg/foo'
+        )
+        self.hybridencode(
+            b'data/au.hg/au.i/au.d/foo', b'data/au.hg.hg/au.i.hg/au.d.hg/foo'
+        )
+        self.hybridencode(
+            b'data/aux.hg/aux.i/aux.d/foo',
+            b'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo',
+        )
+        self.hybridencode(
+            b'data/auxy.hg/auxy.i/auxy.d/foo',
+            b'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo',
+        )
         # but these are not encoded on *filenames*
         self.hybridencode(b'data/foo/x.hg', b'data/foo/x.hg')
         self.hybridencode(b'data/foo/x.i', b'data/foo/x.i')
@@ -103,775 +142,1293 @@
         self.hybridencode(b'data/foo/auxy.d', b'data/foo/auxy.d')
 
         # plain .hg, .i and .d directories have the leading dot encoded
-        self.hybridencode(b'data/.hg/.i/.d/foo',
-                          b'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo')
+        self.hybridencode(
+            b'data/.hg/.i/.d/foo', b'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo'
+        )
 
     def testmisclongcases(self):
         self.hybridencode(
-            (b'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/'
-             b'coma/foo.NUL/normal.c.i'),
-            (b'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3'
-             b'/nu~6c/coma/foo._n_u_l/normal.c.i'))
+            (
+                b'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/'
+                b'coma/foo.NUL/normal.c.i'
+            ),
+            (
+                b'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3'
+                b'/nu~6c/coma/foo._n_u_l/normal.c.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH'
-             b'/TENTH/ELEVENTH/LOREMIPSUM.TXT.i'),
-            (b'dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/'
-             b'nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i'))
+            (
+                b'data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH'
+                b'/TENTH/ELEVENTH/LOREMIPSUM.TXT.i'
+            ),
+            (
+                b'dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/'
+                b'nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/enterprise/openesbaddons/contrib-imola/corba-bc/'
-             b'netbeansplugin/wsdlExtension/src/main/java/META-INF/services'
-             b'/org.netbeans.modules.xml.wsdl.bindingsupport.spi.'
-             b'ExtensibilityElementTemplateProvider.i'),
-            (b'dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/'
-             b'main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i'))
+            (
+                b'data/enterprise/openesbaddons/contrib-imola/corba-bc/'
+                b'netbeansplugin/wsdlExtension/src/main/java/META-INF/services'
+                b'/org.netbeans.modules.xml.wsdl.bindingsupport.spi.'
+                b'ExtensibilityElementTemplateProvider.i'
+            ),
+            (
+                b'dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/'
+                b'main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-'
-             b'QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i'),
-            (b'dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-'
-             b'quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i'))
+            (
+                b'data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-'
+                b'QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i'
+            ),
+            (
+                b'dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-'
+                b'quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/Project Planning/Resources/AnotherLongDirectoryName/Follow'
-             b'edbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'),
-            (b'dh/project_/resource/anotherl/followed/andanoth/andthenanextrem'
-             b'elylongfilenaf93030515d9849cfdca52937c2204d19f83913e5.txt'))
+            (
+                b'data/Project Planning/Resources/AnotherLongDirectoryName/Follow'
+                b'edbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'
+            ),
+            (
+                b'dh/project_/resource/anotherl/followed/andanoth/andthenanextrem'
+                b'elylongfilenaf93030515d9849cfdca52937c2204d19f83913e5.txt'
+            ),
+        )
         self.hybridencode(
-            (b'data/Project.Planning/Resources/AnotherLongDirectoryName/Follo'
-             b'wedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'),
-            (b'dh/project_/resource/anotherl/followed/andanoth/andthenanextre'
-             b'melylongfilena0fd7c506f5c9d58204444fc67e9499006bd2d445.txt'))
+            (
+                b'data/Project.Planning/Resources/AnotherLongDirectoryName/Follo'
+                b'wedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt'
+            ),
+            (
+                b'dh/project_/resource/anotherl/followed/andanoth/andthenanextre'
+                b'melylongfilena0fd7c506f5c9d58204444fc67e9499006bd2d445.txt'
+            ),
+        )
         self.hybridencode(
             b'data/foo.../foo   / /a./_. /__/.x../    bla/.FOO/something.i',
-            (b'data/foo..~2e/foo  ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20   bla/'
-             b'~2e_f_o_o/something.i'))
+            (
+                b'data/foo..~2e/foo  ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20   bla/'
+                b'~2e_f_o_o/something.i'
+            ),
+        )
         self.hybridencode(
             b'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9',
-            (b'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/'
-             b'co~6d7/co~6d8/co~6d9'))
+            (
+                b'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/'
+                b'co~6d7/co~6d8/co~6d9'
+            ),
+        )
         self.hybridencode(
             b'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9',
-            (b'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/'
-             b'_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9'))
+            (
+                b'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/'
+                b'_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9'
+            ),
+        )
         self.hybridencode(
-            (b'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/'
-             b'com6.x/com7.x/com8.x/com9.x'),
-            (b'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x'
-             b'/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x'))
+            (
+                b'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/'
+                b'com6.x/com7.x/com8.x/com9.x'
+            ),
+            (
+                b'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x'
+                b'/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x'
+            ),
+        )
         self.hybridencode(
-            (b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
-             b'/x.com7/x.com8/x.com9'),
-            (b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
-             b'/x.com7/x.com8/x.com9'))
+            (
+                b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
+                b'/x.com7/x.com8/x.com9'
+            ),
+            (
+                b'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6'
+                b'/x.com7/x.com8/x.com9'
+            ),
+        )
         self.hybridencode(
-            (b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
-             b'com7x/com8x/com9x'),
-            (b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
-             b'com7x/com8x/com9x'))
+            (
+                b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
+                b'com7x/com8x/com9x'
+            ),
+            (
+                b'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/'
+                b'com7x/com8x/com9x'
+            ),
+        )
         self.hybridencode(
-            (b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
-             b'xcom8/xcom9'),
-            (b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
-             b'xcom8/xcom9'))
+            (
+                b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
+                b'xcom8/xcom9'
+            ),
+            (
+                b'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/'
+                b'xcom8/xcom9'
+            ),
+        )
         self.hybridencode(
             b'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9',
-            (b'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/'
-             b'lp~747/lp~748/lp~749'))
+            (
+                b'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/'
+                b'lp~747/lp~748/lp~749'
+            ),
+        )
         self.hybridencode(
             b'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9',
-            (b'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/'
-             b'_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9'))
+            (
+                b'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/'
+                b'_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9'
+            ),
+        )
         self.hybridencode(
-            (b'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/'
-             b'lpt6.x/lpt7.x/lpt8.x/lpt9.x'),
-            (b'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/'
-             b'lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x'))
+            (
+                b'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/'
+                b'lpt6.x/lpt7.x/lpt8.x/lpt9.x'
+            ),
+            (
+                b'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/'
+                b'lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x'
+            ),
+        )
         self.hybridencode(
-            (b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/'
-             b'x.lpt6/x.lpt7/x.lpt8/x.lpt9'),
-            (b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5'
-             b'/x.lpt6/x.lpt7/x.lpt8/x.lpt9'))
+            (
+                b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/'
+                b'x.lpt6/x.lpt7/x.lpt8/x.lpt9'
+            ),
+            (
+                b'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5'
+                b'/x.lpt6/x.lpt7/x.lpt8/x.lpt9'
+            ),
+        )
         self.hybridencode(
-            (b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
-             b'lpt7x/lpt8x/lpt9x'),
-            (b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
-             b'lpt7x/lpt8x/lpt9x'))
+            (
+                b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
+                b'lpt7x/lpt8x/lpt9x'
+            ),
+            (
+                b'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/'
+                b'lpt7x/lpt8x/lpt9x'
+            ),
+        )
         self.hybridencode(
-            (b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
-             b'xlpt7/xlpt8/xlpt9'),
-            (b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
-             b'xlpt7/xlpt8/xlpt9'))
-        self.hybridencode(b'data/con/p/pr/prn/a/au/aux/n/nu/nul',
-                          b'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c')
+            (
+                b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
+                b'xlpt7/xlpt8/xlpt9'
+            ),
+            (
+                b'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/'
+                b'xlpt7/xlpt8/xlpt9'
+            ),
+        )
+        self.hybridencode(
+            b'data/con/p/pr/prn/a/au/aux/n/nu/nul',
+            b'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c',
+        )
         self.hybridencode(
             b'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL',
-            b'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l')
+            b'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l',
+        )
         self.hybridencode(
             b'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x',
-            b'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x')
+            b'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x',
+        )
         self.hybridencode(
             b'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul',
-            b'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul')
-        self.hybridencode(b'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx',
-                          b'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx')
-        self.hybridencode(b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul',
-                          b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul')
-        self.hybridencode(b'data/a./au./aux./auxy./aux.',
-                          b'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e')
-        self.hybridencode(b'data/c./co./con./cony./con.',
-                          b'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e')
-        self.hybridencode(b'data/p./pr./prn./prny./prn.',
-                          b'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e')
-        self.hybridencode(b'data/n./nu./nul./nuly./nul.',
-                          b'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e')
+            b'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul',
+        )
+        self.hybridencode(
+            b'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx',
+            b'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx',
+        )
+        self.hybridencode(
+            b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul',
+            b'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul',
+        )
+        self.hybridencode(
+            b'data/a./au./aux./auxy./aux.',
+            b'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e',
+        )
+        self.hybridencode(
+            b'data/c./co./con./cony./con.',
+            b'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e',
+        )
+        self.hybridencode(
+            b'data/p./pr./prn./prny./prn.',
+            b'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e',
+        )
+        self.hybridencode(
+            b'data/n./nu./nul./nuly./nul.',
+            b'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e',
+        )
         self.hybridencode(
             b'data/l./lp./lpt./lpt1./lpt1y./lpt1.',
-            b'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e')
-        self.hybridencode(b'data/lpt9./lpt9y./lpt9.',
-                          b'data/lp~749~2e/lpt9y~2e/lp~749~2e')
-        self.hybridencode(b'data/com./com1./com1y./com1.',
-                          b'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e')
-        self.hybridencode(b'data/com9./com9y./com9.',
-                          b'data/co~6d9~2e/com9y~2e/co~6d9~2e')
-        self.hybridencode(b'data/a /au /aux /auxy /aux ',
-                          b'data/a~20/au~20/aux~20/auxy~20/aux~20')
+            b'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e',
+        )
+        self.hybridencode(
+            b'data/lpt9./lpt9y./lpt9.', b'data/lp~749~2e/lpt9y~2e/lp~749~2e'
+        )
+        self.hybridencode(
+            b'data/com./com1./com1y./com1.',
+            b'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e',
+        )
+        self.hybridencode(
+            b'data/com9./com9y./com9.', b'data/co~6d9~2e/com9y~2e/co~6d9~2e'
+        )
+        self.hybridencode(
+            b'data/a /au /aux /auxy /aux ',
+            b'data/a~20/au~20/aux~20/auxy~20/aux~20',
+        )
 
     def testhashingboundarycases(self):
         # largest unhashed path
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
-             b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
-            (b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
-             b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
+                b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-unhashed'
+                b'--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+        )
         # shortest hashed path
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
-             b'-xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
+                b'-xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71'
+            ),
+        )
 
     def testhashing(self):
         # changing one char in part that's hashed away produces a different hash
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456'),
-            (b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
-             b'-xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456'
+            ),
+            (
+                b'dh/123456789-123456789-123456789-123456789-123456789-hashed---'
+                b'-xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706'
+            ),
+        )
         # uppercase hitting length limit due to encoding
         self.hybridencode(
-            (b'data/A23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/a23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxx'
-             b'cbbc657029b41b94ed510d05feb6716a5c03bc6b'))
+            (
+                b'data/A23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/a23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+                b'cbbc657029b41b94ed510d05feb6716a5c03bc6b'
+            ),
+        )
         self.hybridencode(
-            (b'data/Z23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx'
-             b'-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd'))
+            (
+                b'data/Z23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx'
+                b'-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd'
+            ),
+        )
         # compare with lowercase not hitting limit
         self.hybridencode(
-            (b'data/a23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'),
-            (b'data/a23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'))
+            (
+                b'data/a23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+            (
+                b'data/a23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+        )
         self.hybridencode(
-            (b'data/z23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
-             b'-12345'),
-            (b'data/z23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'))
+            (
+                b'data/z23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
+                b'-12345'
+            ),
+            (
+                b'data/z23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+        )
         # not hitting limit with any of these
         self.hybridencode(
-            (b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
-             b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
-            (b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
-             b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
+            (
+                b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+                b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+            (
+                b'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}'
+                b'xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+        )
         # underbar hitting length limit due to encoding
         self.hybridencode(
-            (b'data/_23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'),
-            (b'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-'
-             b'xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b'))
+            (
+                b'data/_23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+            (
+                b'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-'
+                b'xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b'
+            ),
+        )
 
         # tilde hitting length limit due to encoding
         self.hybridencode(
-            (b'data/~23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'),
-            (b'dh/~7e23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'9cec6f97d569c10995f785720044ea2e4227481b'))
+            (
+                b'data/~23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+            (
+                b'dh/~7e23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'9cec6f97d569c10995f785720044ea2e4227481b'
+            ),
+        )
 
     def testwinreservedoverlimit(self):
         # Windows reserved characters hitting length limit
         self.hybridencode(
-            (b'data/<23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~3c23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxee'
-             b'67d8f275876ca1ef2500fc542e63c885c4e62d'))
+            (
+                b'data/<23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~3c23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxee'
+                b'67d8f275876ca1ef2500fc542e63c885c4e62d'
+            ),
+        )
         self.hybridencode(
-            (b'data/>23456789-123456789-123456789-123456789-123456789-'
-             b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/~3e23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'387a85a5b1547cc9136310c974df716818458ddb'))
+            (
+                b'data/>23456789-123456789-123456789-123456789-123456789-'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/~3e23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'387a85a5b1547cc9136310c974df716818458ddb'
+            ),
+        )
         self.hybridencode(
-            (b'data/:23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/~3a23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'2e4154fb571d13d22399c58cc4ef4858e4b75999'))
+            (
+                b'data/:23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/~3a23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'2e4154fb571d13d22399c58cc4ef4858e4b75999'
+            ),
+        )
         self.hybridencode(
-            (b'data/"23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~2223456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'fc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5'))
+            (
+                b'data/"23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~2223456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'fc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5'
+            ),
+        )
         self.hybridencode(
-            (b'data/\\23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/~5c23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'944e1f2b7110687e116e0d151328ac648b06ab4a'))
+            (
+                b'data/\\23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/~5c23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'944e1f2b7110687e116e0d151328ac648b06ab4a'
+            ),
+        )
         self.hybridencode(
-            (b'data/|23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~7c23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'28b23dd3fd0242946334126ab62bcd772aac32f4'))
+            (
+                b'data/|23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~7c23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'28b23dd3fd0242946334126ab62bcd772aac32f4'
+            ),
+        )
         self.hybridencode(
-            (b'data/?23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~3f23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'a263022d3994d2143d98f94f431eef8b5e7e0f8a'))
+            (
+                b'data/?23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~3f23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'a263022d3994d2143d98f94f431eef8b5e7e0f8a'
+            ),
+        )
         self.hybridencode(
-            (b'data/*23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/~2a23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'0e7e6020e3c00ba7bb7893d84ca2966fbf53e140'))
+            (
+                b'data/*23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/~2a23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'0e7e6020e3c00ba7bb7893d84ca2966fbf53e140'
+            ),
+        )
 
     def testinitialspacelenlimit(self):
         # initial space hitting length limit
         self.hybridencode(
-            (b'data/ 23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/~2023456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'92acbc78ef8c0b796111629a02601f07d8aec4ea'))
+            (
+                b'data/ 23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/~2023456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'92acbc78ef8c0b796111629a02601f07d8aec4ea'
+            ),
+        )
 
     def testinitialdotlenlimit(self):
         # initial dot hitting length limit
         self.hybridencode(
-            (b'data/.23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~2e23456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'dbe19cc6505b3515ab9228cebf877ad07075168f'))
+            (
+                b'data/.23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~2e23456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'dbe19cc6505b3515ab9228cebf877ad07075168f'
+            ),
+        )
 
     def testtrailingspacelenlimit(self):
         # trailing space in filename hitting length limit
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-1234 '),
-            (b'dh/123456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
-             b'0025dc73e04f97426db4893e3bf67d581dc6d066'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-1234 '
+            ),
+            (
+                b'dh/123456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+                b'0025dc73e04f97426db4893e3bf67d581dc6d066'
+            ),
+        )
 
     def testtrailingdotlenlimit(self):
         # trailing dot in filename hitting length limit
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'1234.'),
-            (b'dh/123456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
-             b'85a16cf03ee7feba8a5abc626f1ba9886d01e89d'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'1234.'
+            ),
+            (
+                b'dh/123456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxx'
+                b'85a16cf03ee7feba8a5abc626f1ba9886d01e89d'
+            ),
+        )
 
     def testinitialspacedirlenlimit(self):
         # initial space in directory hitting length limit
         self.hybridencode(
-            (b'data/ x/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~20x/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516'))
+            (
+                b'data/ x/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~20x/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516'
+            ),
+        )
 
     def testinitialdotdirlenlimit(self):
         # initial dot in directory hitting length limit
         self.hybridencode(
-            (b'data/.x/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/~2ex/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'39dbc4c193a5643a8936fc69c3363cd7ac91ab14'))
+            (
+                b'data/.x/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/~2ex/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'39dbc4c193a5643a8936fc69c3363cd7ac91ab14'
+            ),
+        )
 
     def testtrailspacedirlenlimit(self):
         # trailing space in directory hitting length limit
         self.hybridencode(
-            (b'data/x /456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/x~20/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'2253c341df0b5290790ad312cd8499850f2273e5'))
+            (
+                b'data/x /456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/x~20/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'2253c341df0b5290790ad312cd8499850f2273e5'
+            ),
+        )
 
     def testtrailingdotdirlenlimit(self):
         # trailing dot in directory hitting length limit
         self.hybridencode(
-            (b'data/x./456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/x~2e/456789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'cc0324d696d34562b44b5138db08ee1594ccc583'))
+            (
+                b'data/x./456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/x~2e/456789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'cc0324d696d34562b44b5138db08ee1594ccc583'
+            ),
+        )
 
     def testdirencodinglenlimit(self):
         # with directories that need direncoding, hitting length limit
         self.hybridencode(
-            (b'data/x.i/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
-             b'12345'),
-            (b'dh/x.i.hg/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxx'
-             b'a4c4399bdf81c67dbbbb7060aa0124d8dea94f74'))
+            (
+                b'data/x.i/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-'
+                b'12345'
+            ),
+            (
+                b'dh/x.i.hg/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+                b'a4c4399bdf81c67dbbbb7060aa0124d8dea94f74'
+            ),
+        )
         self.hybridencode(
-            (b'data/x.d/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/x.d.hg/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxx'
-             b'1303fa90473b230615f5b3ea7b660e881ae5270a'))
+            (
+                b'data/x.d/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/x.d.hg/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+                b'1303fa90473b230615f5b3ea7b660e881ae5270a'
+            ),
+        )
         self.hybridencode(
-            (b'data/x.hg/5789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxx'
-             b'26d724a8af68e7a4e4455e6602ea9adbd0eb801f'))
+            (
+                b'data/x.hg/5789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxx'
+                b'26d724a8af68e7a4e4455e6602ea9adbd0eb801f'
+            ),
+        )
 
     def testwinreservedfilenameslimit(self):
         # Windows reserved filenames, hitting length limit
         self.hybridencode(
-            (b'data/con/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-12345'),
-            (b'dh/co~6e/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'c0794d4f4c605a2617900eb2563d7113cf6ea7d3'))
+            (
+                b'data/con/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-12345'
+            ),
+            (
+                b'dh/co~6e/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'c0794d4f4c605a2617900eb2563d7113cf6ea7d3'
+            ),
+        )
         self.hybridencode(
-            (b'data/prn/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/pr~6e/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'64db876e1a9730e27236cb9b167aff942240e932'))
+            (
+                b'data/prn/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/pr~6e/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'64db876e1a9730e27236cb9b167aff942240e932'
+            ),
+        )
         self.hybridencode(
-            (b'data/aux/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/au~78/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'8a178558405ca6fb4bbd75446dfa186f06751a0d'))
+            (
+                b'data/aux/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/au~78/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'8a178558405ca6fb4bbd75446dfa186f06751a0d'
+            ),
+        )
         self.hybridencode(
-            (b'data/nul/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/nu~6c/56789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'c5e51b6fec1bd07bd243b053a0c3f7209855b886'))
+            (
+                b'data/nul/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/nu~6c/56789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'c5e51b6fec1bd07bd243b053a0c3f7209855b886'
+            ),
+        )
         self.hybridencode(
-            (b'data/com1/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/co~6d1/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'32f5f44ece3bb62b9327369ca84cc19c86259fcd'))
+            (
+                b'data/com1/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/co~6d1/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'32f5f44ece3bb62b9327369ca84cc19c86259fcd'
+            ),
+        )
         self.hybridencode(
-            (b'data/com9/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/co~6d9/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'734360b28c66a3230f55849fe8926206d229f990'))
+            (
+                b'data/com9/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/co~6d9/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'734360b28c66a3230f55849fe8926206d229f990'
+            ),
+        )
         self.hybridencode(
-            (b'data/lpt1/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/lp~741/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'e6f16ab4b6b0637676b2842b3345c9836df46ef7'))
+            (
+                b'data/lpt1/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/lp~741/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'e6f16ab4b6b0637676b2842b3345c9836df46ef7'
+            ),
+        )
         self.hybridencode(
-            (b'data/lpt9/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-12345'),
-            (b'dh/lp~749/6789-123456789-123456789-123456789-123456789'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
-             b'a475814c51acead3e44f2ff801f0c4903f986157'))
+            (
+                b'data/lpt9/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-12345'
+            ),
+            (
+                b'dh/lp~749/6789-123456789-123456789-123456789-123456789'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxx'
+                b'a475814c51acead3e44f2ff801f0c4903f986157'
+            ),
+        )
 
     def testnonreservednolimit(self):
         # non-reserved names, just not hitting limit
         self.hybridencode(
-            (b'data/123456789-123456789-123456789-123456789-123456789-'
-             b'/com/com0/lpt/lpt0/'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'),
-            (b'data/123456789-123456789-123456789-123456789-123456789-'
-             b'/com/com0/lpt/lpt0/'
-             b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'))
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-'
+                b'/com/com0/lpt/lpt0/'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+            (
+                b'data/123456789-123456789-123456789-123456789-123456789-'
+                b'/com/com0/lpt/lpt0/'
+                b'-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345'
+            ),
+        )
 
     def testhashedpathuntrucfirst(self):
         # hashed path with largest untruncated 1st dir
         self.hybridencode(
-            (b'data/12345678/-123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/-123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c'))
+            (
+                b'data/12345678/-123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/-123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c'
+            ),
+        )
 
     def testhashedpathsmallesttrucdir(self):
         # hashed path with smallest truncated 1st dir
         self.hybridencode(
-            (b'data/123456789/123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/123456789-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490'))
+            (
+                b'data/123456789/123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/123456789-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490'
+            ),
+        )
 
     def testhashedlargesttwountruc(self):
         # hashed path with largest untruncated two dirs
         self.hybridencode(
-            (b'data/12345678/12345678/9-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/9-123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d'))
+            (
+                b'data/12345678/12345678/9-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/9-123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d'
+            ),
+        )
 
     def testhashedpathsmallesttrunctwodirs(self):
         # hashed path with smallest truncated two dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/123456789-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx'
-             b'9699559798247dffa18717138859be5f8874840e'))
+            (
+                b'data/123456789/123456789/123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/123456789-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx'
+                b'9699559798247dffa18717138859be5f8874840e'
+            ),
+        )
 
     def testhashuntruncthree(self):
         # hashed path with largest untruncated three dirs
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/89-123456789-123456789-'
-             b'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/89-123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439'))
+            (
+                b'data/12345678/12345678/12345678/89-123456789-123456789-'
+                b'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/89-123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439'
+            ),
+        )
 
     def testhashtruncthree(self):
         # hashed path with smallest truncated three dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/123456789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-'
-             b'1c6f8284967384ec13985a046d3553179d9d03cd'))
+            (
+                b'data/123456789/123456789/123456789/123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/123456789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-'
+                b'1c6f8284967384ec13985a046d3553179d9d03cd'
+            ),
+        )
 
     def testhashuntrucfour(self):
         # hashed path with largest untruncated four dirs
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/789-123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd'))
+            (
+                b'data/12345678/12345678/12345678/12345678/789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/789-123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd'
+            ),
+        )
 
     def testhashtruncfour(self):
         # hashed path with smallest truncated four dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789/123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/123456789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-x'
-             b'46162779e1a771810b37a737f82ae7ed33771402'))
+            (
+                b'data/123456789/123456789/123456789/123456789/123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/123456789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-x'
+                b'46162779e1a771810b37a737f82ae7ed33771402'
+            ),
+        )
 
     def testhashuntruncfive(self):
         # hashed path with largest untruncated five dirs
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/6789-hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed'
-             b'----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/6789-hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed'
+                b'----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390'
+            ),
+        )
 
     def testhashtruncfive(self):
         # hashed path with smallest truncated five dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789/123456789/hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/hashed'
-             b'----xxxxxxxxx-xxxxxxxxx-xx'
-             b'b94c27b3532fa880cdd572b1c514785cab7b6ff2'))
+            (
+                b'data/123456789/123456789/123456789/123456789/123456789/hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/hashed'
+                b'----xxxxxxxxx-xxxxxxxxx-xx'
+                b'b94c27b3532fa880cdd572b1c514785cab7b6ff2'
+            ),
+        )
 
     def testhashuntruncsix(self):
         # hashed path with largest untruncated six dirs
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'ed----xxxxxxxxx-xxxxxxx'
-             b'cd8cc5483a0f3be409e0e5d4bf9e36e113c59235'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'ed----xxxxxxxxx-xxxxxxx'
+                b'cd8cc5483a0f3be409e0e5d4bf9e36e113c59235'
+            ),
+        )
 
     def testhashtruncsix(self):
         # hashed path with smallest truncated six dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789/123456789/'
-              b'123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-              b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'xxxxxxxxx-xxxxxxxxx-xxx'
-             b'47dd6f616f833a142da00701b334cebbf640da06'))
+            (
+                b'data/123456789/123456789/123456789/123456789/123456789/'
+                b'123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'xxxxxxxxx-xxxxxxxxx-xxx'
+                b'47dd6f616f833a142da00701b334cebbf640da06'
+            ),
+        )
 
     def testhashuntrunc7(self):
         # hashed path with largest untruncated seven dirs
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/xxxxxx-xxxxxxx'
-             b'1c8ed635229fc22efe51035feeadeb4c8a0ecb82'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/xxxxxx-xxxxxxx'
+                b'1c8ed635229fc22efe51035feeadeb4c8a0ecb82'
+            ),
+        )
 
     def testhashtrunc7(self):
         # hashed path with smallest truncated seven dirs
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789/123456789/'
-              b'123456789/123456789/'
-              b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/123'
-             b'45678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926'))
+            (
+                b'data/123456789/123456789/123456789/123456789/123456789/'
+                b'123456789/123456789/'
+                b'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/123'
+                b'45678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926'
+            ),
+        )
 
     def testhashuntrunc8(self):
         # hashed path with largest untruncated eight dirs
         # (directory 8 is dropped because it hits _maxshortdirslen)
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1'
-             b'2345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1'
+                b'2345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d'
+            ),
+        )
 
     def testhashtrunc8(self):
         # hashed path with smallest truncated eight dirs
         # (directory 8 is dropped because it hits _maxshortdirslen)
         self.hybridencode(
-            (b'data/123456789/123456789/123456789/123456789/123456789/'
-             b'123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/xxxxxxxxx-xxxx'
-             b'4fa04a839a6bda93e1c21c713f2edcbd16e8890d'))
+            (
+                b'data/123456789/123456789/123456789/123456789/123456789/'
+                b'123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/xxxxxxxxx-xxxx'
+                b'4fa04a839a6bda93e1c21c713f2edcbd16e8890d'
+            ),
+        )
 
     def testhashnondropped8(self):
         # hashed path with largest non-dropped directory 8
         # (just not hitting the _maxshortdirslen boundary)
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
-             b'-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345/-xxxxxxx'
-             b'4d43d1ccaa20efbfe99ec779dc063611536ff2c5'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789'
+                b'-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345/-xxxxxxx'
+                b'4d43d1ccaa20efbfe99ec779dc063611536ff2c5'
+            ),
+        )
         # ...adding one truncated char to dir 1..7 won't drop dir 8
         self.hybridencode(
-            (b'data/12345678x/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e'))
+            (
+                b'data/12345678x/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678x/12345678/12345678/12345678/12345678'
-             b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921'))
+            (
+                b'data/12345678/12345678x/12345678/12345678/12345678/12345678'
+                b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678x/12345678/12345678/12345678/12'
-             b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327'))
+            (
+                b'data/12345678/12345678/12345678x/12345678/12345678/12345678/12'
+                b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678x/12345678/12345678/12'
-             b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92'))
+            (
+                b'data/12345678/12345678/12345678/12345678x/12345678/12345678/12'
+                b'345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678x/12345678/'
-             b'12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678x/12345678/'
+                b'12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678x'
-             b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
-             b'5678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678x'
+                b'/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/1234'
+                b'5678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345'
-             b'678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345'
+                b'678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3'
+            ),
+        )
 
     def testhashedpathshortestdropped8(self):
         # hashed path with shortest dropped directory 8
         # (just hitting the _maxshortdirslen boundary)
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/xxxxxxxxx-xxxx'
-             b'11fa9873cc6c3215eae864528b5530a04efc6cfe'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/xxxxxxxxx-xxxx'
+                b'11fa9873cc6c3215eae864528b5530a04efc6cfe'
+            ),
+        )
 
     def testhashedpathdropsdir8fortrailingdotspace(self):
         # hashed path that drops dir 8 because its trailing dot or space
         # gets encoded, which pushes the shortened dirs past _maxshortdirslen
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/-xxxxxxxxx-xxx'
-             b'602df9b45bec564e2e1f0645d5140dddcc76ed58'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/-xxxxxxxxx-xxx'
+                b'602df9b45bec564e2e1f0645d5140dddcc76ed58'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
-             b'123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/-xxxxxxxxx-xxx'
-             b'd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-'
+                b'123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/-xxxxxxxxx-xxx'
+                b'd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce'
+            ),
+        )
         # ... with dir 8 short enough for encoding
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
-             b'-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12~2e/'
-             b'xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx'
+                b'-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12~2e/'
+                b'xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12 '
-             b'/xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12~20/'
-             b'xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12 '
+                b'/xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12~20/'
+                b'xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182'
+            ),
+        )
 
     def testextensionsreplicatedonhashedpaths(self):
         # Extensions are replicated on hashed paths. Note that
         # we only get to encode files that end in .i or .d inside the
         # store. Encoded filenames are thus bound in length.
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'45.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'45.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'45.d'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'45.d'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'4567.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'4567.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'45678.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'45678.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-1.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-1.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-12.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-12.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-123.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-123.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-1234.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-1234.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-12345.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-12345.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i'
+            ),
+        )
         self.hybridencode(
-            (b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
-             b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
-             b'456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-'
-             b'abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ'
-             b'-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx'
-             b'-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww'
-             b'-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i'),
-            (b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
-             b'345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i'))
+            (
+                b'data/12345678/12345678/12345678/12345678/12345678/12345678/'
+                b'12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3'
+                b'456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-'
+                b'abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ'
+                b'-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx'
+                b'-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww'
+                b'-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i'
+            ),
+            (
+                b'dh/12345678/12345678/12345678/12345678/12345678/12345678/12'
+                b'345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i'
+            ),
+        )
 
     def testpathsoutsidedata(self):
         # paths outside data/ can be encoded
-        self.hybridencode(b'metadata/dir/00manifest.i',
-                          b'metadata/dir/00manifest.i')
+        self.hybridencode(
+            b'metadata/dir/00manifest.i', b'metadata/dir/00manifest.i'
+        )
         self.hybridencode(
-            (b'metadata/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345678/00manifest.i'),
-            (b'dh/ata/12345678/12345678/12345678/12345678/12345678'
-             b'/12345678/12345678/00manife'
-             b'0a4da1f89aa2aa9eb0896eb451288419049781b4.i'))
+            (
+                b'metadata/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345678/00manifest.i'
+            ),
+            (
+                b'dh/ata/12345678/12345678/12345678/12345678/12345678'
+                b'/12345678/12345678/00manife'
+                b'0a4da1f89aa2aa9eb0896eb451288419049781b4.i'
+            ),
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
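
The expected values above all share one shape: a too-long "data/..." path is
rewritten under "dh/" as a truncated run of directory components followed by
the 40-character SHA-1 hex digest of the original path, with any trailing
".i" or ".d" extension preserved. The sketch below illustrates only that
shape; it is not Mercurial's real store.hybridencode, which additionally
encodes Windows-reserved names (co~6e, pr~6e, ...), trailing dots and
spaces, and caps the joined directory prefix at _maxshortdirslen.

    import hashlib

    def sketch_hashed_path(path, maxlen=120):
        # Illustration only; callers pass bytes shaped like the test
        # inputs above, e.g. b'data/.../name.i'.
        if len(path) <= maxlen:
            return path
        digest = hashlib.sha1(path).hexdigest().encode('ascii')
        ext = b''
        for e in (b'.i', b'.d'):
            if path.endswith(e):
                ext = e
        # keep enough of the original name that the result stays short
        budget = maxlen - len(b'dh/') - len(digest) - len(ext)
        prefix = path[len(b'data/'):len(b'data/') + budget]
        return b'dh/' + prefix + digest + ext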
--- a/tests/test-lfs-pointer.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-lfs-pointer.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,10 +2,12 @@
 
 # Import something from Mercurial, so the module loader gets initialized.
 from mercurial import pycompat
+
 del pycompat  # unused for now
 
 from hgext.lfs import pointer
 
+
 def tryparse(text):
     r = {}
     try:
@@ -19,11 +21,14 @@
             print('reconstructed text differs')
     return r
 
-t = (b'version https://git-lfs.github.com/spec/v1\n'
-     b'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1'
-     b'258daaa5e2ca24d17e2393\n'
-     b'size 12345\n'
-     b'x-foo extra-information\n')
+
+t = (
+    b'version https://git-lfs.github.com/spec/v1\n'
+    b'oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1'
+    b'258daaa5e2ca24d17e2393\n'
+    b'size 12345\n'
+    b'x-foo extra-information\n'
+)
 
 tryparse(b'')
 tryparse(t)
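
The pointer blob above is the Git-LFS text format: one "key value" pair per
line. A minimal parse of that shape, bytes in and a dict out (the real
hgext.lfs pointer class validates required keys such as oid and size on top
of this):

    def parse_pointer_lines(text):
        # split b'key value' lines into a dict; no validation here
        d = {}
        for line in text.splitlines():
            key, _, value = line.partition(b' ')
            d[key] = value
        return d

    # parse_pointer_lines(t)[b'size'] == b'12345'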
--- a/tests/test-lfs-serve-access.t	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-lfs-serve-access.t	Sun Oct 06 09:45:02 2019 -0400
@@ -353,7 +353,7 @@
   $LOCALIP - - [$ERRDATE$] HG error:  Traceback (most recent call last): (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      localstore.download(oid, req.bodyfh) (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      super(badstore, self).download(oid, src) (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      % oid) (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      _(b'corrupt remote lfs object: %s') % oid (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  LfsCorruptionError: corrupt remote lfs object: b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c (glob)
   $LOCALIP - - [$ERRDATE$] HG error:   (glob)
   $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d': (glob)
@@ -362,9 +362,9 @@
       self.do_hgweb()
       for chunk in self.server.application(env, self._start_response):
       for r in self._runwsgi(req, res, repo):
-      rctx, req, res, self.check_perm)
+      rctx, req, res, self.check_perm
       return func(*(args + a), **kw) (no-py3 !)
-      lambda perm:
+      rctx.repo, req, res, lambda perm: checkperm(rctx, req, perm)
       res.setbodybytes(localstore.read(oid))
       blob = self._read(self.vfs, oid, verify)
       raise IOError(errno.EIO, r'%s: I/O error' % oid.decode("utf-8"))
@@ -375,7 +375,7 @@
   $LOCALIP - - [$ERRDATE$] HG error:      res.setbodybytes(localstore.read(oid)) (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      blob = self._read(self.vfs, oid, verify) (glob)
   $LOCALIP - - [$ERRDATE$] HG error:      blobstore._verify(oid, b'dummy content') (glob)
-  $LOCALIP - - [$ERRDATE$] HG error:      hint=_(b'run hg verify')) (glob)
+  $LOCALIP - - [$ERRDATE$] HG error:      hint=_(b'run hg verify'), (glob)
   $LOCALIP - - [$ERRDATE$] HG error:  LfsCorruptionError: detected corrupt lfs object: 276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d (glob)
   $LOCALIP - - [$ERRDATE$] HG error:   (glob)
 
--- a/tests/test-linelog.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-linelog.py	Sun Oct 06 09:45:02 2019 -0400
@@ -6,12 +6,13 @@
 
 from mercurial import linelog
 
-vecratio = 3 # number of replacelines / number of replacelines_vec
-maxlinenum = 0xffffff
-maxb1 = 0xffffff
+vecratio = 3  # number of replacelines / number of replacelines_vec
+maxlinenum = 0xFFFFFF
+maxb1 = 0xFFFFFF
 maxdeltaa = 10
 maxdeltab = 10
 
+
 def _genedits(seed, endrev):
     lines = []
     random.seed(seed)
@@ -23,22 +24,26 @@
         b2 = random.randint(b1, b1 + maxdeltab)
         usevec = not bool(random.randint(0, vecratio))
         if usevec:
-            blines = [(random.randint(0, rev), random.randint(0, maxlinenum))
-                      for _ in range(b1, b2)]
+            blines = [
+                (random.randint(0, rev), random.randint(0, maxlinenum))
+                for _ in range(b1, b2)
+            ]
         else:
             blines = [(rev, bidx) for bidx in range(b1, b2)]
         lines[a1:a2] = blines
         yield lines, rev, a1, a2, b1, b2, blines, usevec
 
+
 class linelogtests(unittest.TestCase):
     def testlinelogencodedecode(self):
-        program = [linelog._eof(0, 0),
-                   linelog._jge(41, 42),
-                   linelog._jump(0, 43),
-                   linelog._eof(0, 0),
-                   linelog._jl(44, 45),
-                   linelog._line(46, 47),
-                   ]
+        program = [
+            linelog._eof(0, 0),
+            linelog._jge(41, 42),
+            linelog._jump(0, 43),
+            linelog._eof(0, 0),
+            linelog._jl(44, 45),
+            linelog._line(46, 47),
+        ]
         ll = linelog.linelog(program, maxrev=100)
         enc = ll.encode()
         # round-trips okay
@@ -46,89 +51,80 @@
         self.assertEqual(linelog.linelog.fromdata(enc), ll)
         # This encoding matches (or at least is supposed to match) the
         # encoding used by hg-experimental's linelog file.
-        self.assertEqual(enc, (b'\x00\x00\x01\x90\x00\x00\x00\x06'
-                               b'\x00\x00\x00\xa4\x00\x00\x00*'
-                               b'\x00\x00\x00\x00\x00\x00\x00+'
-                               b'\x00\x00\x00\x00\x00\x00\x00\x00'
-                               b'\x00\x00\x00\xb1\x00\x00\x00-'
-                               b'\x00\x00\x00\xba\x00\x00\x00/'))
+        self.assertEqual(
+            enc,
+            (
+                b'\x00\x00\x01\x90\x00\x00\x00\x06'
+                b'\x00\x00\x00\xa4\x00\x00\x00*'
+                b'\x00\x00\x00\x00\x00\x00\x00+'
+                b'\x00\x00\x00\x00\x00\x00\x00\x00'
+                b'\x00\x00\x00\xb1\x00\x00\x00-'
+                b'\x00\x00\x00\xba\x00\x00\x00/'
+            ),
+        )
 
     def testsimpleedits(self):
         ll = linelog.linelog()
         # Initial revision: add lines 0, 1, and 2
         ll.replacelines(1, 0, 0, 0, 3)
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(1)],
-                         [(1, 0),
-                          (1, 1),
-                          (1, 2),
-                         ])
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in ll.annotate(1)],
+            [(1, 0), (1, 1), (1, 2),],
+        )
         # Replace line 1 with a new line
         ll.replacelines(2, 1, 2, 1, 2)
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(2)],
-                         [(1, 0),
-                          (2, 1),
-                          (1, 2),
-                         ])
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in ll.annotate(2)],
+            [(1, 0), (2, 1), (1, 2),],
+        )
         # delete a line out of 2
         ll.replacelines(3, 1, 2, 0, 0)
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(3)],
-                         [(1, 0),
-                          (1, 2),
-                         ])
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in ll.annotate(3)], [(1, 0), (1, 2),]
+        )
         # annotation of 1 is unchanged
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(1)],
-                         [(1, 0),
-                          (1, 1),
-                          (1, 2),
-                         ])
-        ll.annotate(3) # set internal state to revision 3
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in ll.annotate(1)],
+            [(1, 0), (1, 1), (1, 2),],
+        )
+        ll.annotate(3)  # set internal state to revision 3
         start = ll.getoffset(0)
         end = ll.getoffset(1)
-        self.assertEqual(ll.getalllines(start, end), [
-            (1, 0),
-            (2, 1),
-            (1, 1),
-        ])
-        self.assertEqual(ll.getalllines(), [
-            (1, 0),
-            (2, 1),
-            (1, 1),
-            (1, 2),
-        ])
+        self.assertEqual(ll.getalllines(start, end), [(1, 0), (2, 1), (1, 1),])
+        self.assertEqual(ll.getalllines(), [(1, 0), (2, 1), (1, 1), (1, 2),])
 
     def testparseclinelogfile(self):
         # This data is what the replacements in testsimpleedits
         # produce when fed to the original linelog.c implementation.
-        data = (b'\x00\x00\x00\x0c\x00\x00\x00\x0f'
-                b'\x00\x00\x00\x00\x00\x00\x00\x02'
-                b'\x00\x00\x00\x05\x00\x00\x00\x06'
-                b'\x00\x00\x00\x06\x00\x00\x00\x00'
-                b'\x00\x00\x00\x00\x00\x00\x00\x07'
-                b'\x00\x00\x00\x06\x00\x00\x00\x02'
-                b'\x00\x00\x00\x00\x00\x00\x00\x00'
-                b'\x00\x00\x00\t\x00\x00\x00\t'
-                b'\x00\x00\x00\x00\x00\x00\x00\x0c'
-                b'\x00\x00\x00\x08\x00\x00\x00\x05'
-                b'\x00\x00\x00\x06\x00\x00\x00\x01'
-                b'\x00\x00\x00\x00\x00\x00\x00\x05'
-                b'\x00\x00\x00\x0c\x00\x00\x00\x05'
-                b'\x00\x00\x00\n\x00\x00\x00\x01'
-                b'\x00\x00\x00\x00\x00\x00\x00\t')
+        data = (
+            b'\x00\x00\x00\x0c\x00\x00\x00\x0f'
+            b'\x00\x00\x00\x00\x00\x00\x00\x02'
+            b'\x00\x00\x00\x05\x00\x00\x00\x06'
+            b'\x00\x00\x00\x06\x00\x00\x00\x00'
+            b'\x00\x00\x00\x00\x00\x00\x00\x07'
+            b'\x00\x00\x00\x06\x00\x00\x00\x02'
+            b'\x00\x00\x00\x00\x00\x00\x00\x00'
+            b'\x00\x00\x00\t\x00\x00\x00\t'
+            b'\x00\x00\x00\x00\x00\x00\x00\x0c'
+            b'\x00\x00\x00\x08\x00\x00\x00\x05'
+            b'\x00\x00\x00\x06\x00\x00\x00\x01'
+            b'\x00\x00\x00\x00\x00\x00\x00\x05'
+            b'\x00\x00\x00\x0c\x00\x00\x00\x05'
+            b'\x00\x00\x00\n\x00\x00\x00\x01'
+            b'\x00\x00\x00\x00\x00\x00\x00\t'
+        )
         llc = linelog.linelog.fromdata(data)
-        self.assertEqual([(l.rev, l.linenum) for l in llc.annotate(1)],
-                         [(1, 0),
-                          (1, 1),
-                          (1, 2),
-                         ])
-        self.assertEqual([(l.rev, l.linenum) for l in llc.annotate(2)],
-                         [(1, 0),
-                          (2, 1),
-                          (1, 2),
-                         ])
-        self.assertEqual([(l.rev, l.linenum) for l in llc.annotate(3)],
-                         [(1, 0),
-                          (1, 2),
-                         ])
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in llc.annotate(1)],
+            [(1, 0), (1, 1), (1, 2),],
+        )
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in llc.annotate(2)],
+            [(1, 0), (2, 1), (1, 2),],
+        )
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in llc.annotate(3)], [(1, 0), (1, 2),]
+        )
         # Check we emit the same bytecode.
         ll = linelog.linelog()
         # Initial revision: add lines 0, 1, and 2
@@ -137,9 +133,15 @@
         ll.replacelines(2, 1, 2, 1, 2)
         # delete a line out of 2
         ll.replacelines(3, 1, 2, 0, 0)
-        diff = '\n   ' + '\n   '.join(difflib.unified_diff(
-            ll.debugstr().splitlines(), llc.debugstr().splitlines(),
-            'python', 'c', lineterm=''))
+        diff = '\n   ' + '\n   '.join(
+            difflib.unified_diff(
+                ll.debugstr().splitlines(),
+                llc.debugstr().splitlines(),
+                'python',
+                'c',
+                lineterm='',
+            )
+        )
         self.assertEqual(ll._program, llc._program, 'Program mismatch: ' + diff)
         # Done as a secondary step so we get a better result if the
         # program is where the mismatch is.
@@ -150,13 +152,12 @@
         ll = linelog.linelog()
         ll.replacelines(3, 0, 0, 0, 2)
         ll.replacelines(4, 0, 2, 0, 0)
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(4)],
-                         [])
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(3)],
-                         [(3, 0), (3, 1)])
+        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(4)], [])
+        self.assertEqual(
+            [(l.rev, l.linenum) for l in ll.annotate(3)], [(3, 0), (3, 1)]
+        )
         # rev 2 is empty because contents were only ever introduced in rev 3
-        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(2)],
-                         [])
+        self.assertEqual([(l.rev, l.linenum) for l in ll.annotate(2)], [])
 
     def testrandomedits(self):
         # Inspired by original linelog tests.
@@ -165,7 +166,8 @@
         ll = linelog.linelog()
         # Populate linelog
         for lines, rev, a1, a2, b1, b2, blines, usevec in _genedits(
-                seed, numrevs):
+            seed, numrevs
+        ):
             if usevec:
                 ll.replacelines_vec(rev, a1, a2, blines)
             else:
@@ -174,7 +176,8 @@
             self.assertEqual(ll.annotateresult, lines)
         # Verify we can get back these states by annotating each rev
         for lines, rev, a1, a2, b1, b2, blines, usevec in _genedits(
-                seed, numrevs):
+            seed, numrevs
+        ):
             ar = ll.annotate(rev)
             self.assertEqual([(l.rev, l.linenum) for l in ar], lines)
 
@@ -187,6 +190,8 @@
             # should not be an infinite loop and raise
             ll.annotate(1)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
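
The collapsed assertEqual literals above show a quirk of the black release used for this reformat: a bracketed literal that fits on one line is joined even when it carries a trailing comma, hence `[(1, 0), (1, 1), (1, 2),]`. A minimal runnable sketch of that behaviour, with invented values:

    # The trailing comma survives the join but does not force the
    # one-element-per-line layout (newer black releases treat such a
    # comma as "magic" and keep the exploded form instead).
    annotations = [(1, 0), (1, 1), (1, 2),]
    assert annotations == [(1, 0), (1, 1), (1, 2)]
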
--- a/tests/test-linerange.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-linerange.py	Sun Oct 06 09:45:02 2019 -0400
@@ -17,7 +17,9 @@
            09 at OLD
            10 at OLD
            11 at OLD
-'''[1:] # strip initial LF
+'''[
+    1:
+]  # strip initial LF
 
 text2 = b'''
 00 at NEW
@@ -32,7 +34,10 @@
 09 at NEW
 10 at NEW
 11 at NEW
-'''[1:] # strip initial LF
+'''[
+    1:
+]  # strip initial LF
+
 
 def filteredblocks(blocks, rangeb):
     """return `rangea` extracted from `blocks` coming from
@@ -42,8 +47,8 @@
     skipped = [b not in filtered for b in blocks]
     return rangea, skipped
 
+
 class blocksinrangetests(unittest.TestCase):
-
     def setUp(self):
         self.blocks = list(mdiff.allblocks(text1, text2))
         assert self.blocks == [
@@ -227,6 +232,8 @@
             else:
                 self.fail('%s not raised' % exctype.__name__)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
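
The `'''[1:]` hunks are black splitting a subscript whose subscripted expression is long: the index moves onto its own indented line between the brackets. A runnable sketch (the strings here are short, so black itself would keep this on one line; the split form is shown for illustration):

    text1 = b'''
    00 at OLD
    01 at OLD
    '''[
        1:
    ]  # strip initial LF
    assert b'00 at OLD' in text1
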
--- a/tests/test-lock.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-lock.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,19 +19,24 @@
 
 # work around http://bugs.python.org/issue1515
 if types.MethodType not in copy._deepcopy_dispatch:
+
     def _deepcopy_method(x, memo):
         return type(x)(x.__func__, copy.deepcopy(x.__self__, memo), x.im_class)
+
     copy._deepcopy_dispatch[types.MethodType] = _deepcopy_method
 
+
 class lockwrapper(lock.lock):
     def __init__(self, pidoffset, *args, **kwargs):
         # lock.lock.__init__() calls lock(), so the pidoffset assignment needs
         # to be earlier
         self._pidoffset = pidoffset
         super(lockwrapper, self).__init__(*args, **kwargs)
+
     def _getpid(self):
         return super(lockwrapper, self)._getpid() + self._pidoffset
 
+
 class teststate(object):
     def __init__(self, testcase, dir, pidoffset=0):
         self._testcase = testcase
@@ -42,9 +47,15 @@
         self._pidoffset = pidoffset
 
     def makelock(self, *args, **kwargs):
-        l = lockwrapper(self._pidoffset, self.vfs, testlockname,
-                        releasefn=self.releasefn, acquirefn=self.acquirefn,
-                        *args, **kwargs)
+        l = lockwrapper(
+            self._pidoffset,
+            self.vfs,
+            testlockname,
+            releasefn=self.releasefn,
+            acquirefn=self.acquirefn,
+            *args,
+            **kwargs
+        )
         l.postrelease.append(self.postreleasefn)
         return l
 
@@ -59,39 +70,42 @@
 
     def assertacquirecalled(self, called):
         self._testcase.assertEqual(
-            self._acquirecalled, called,
-            'expected acquire to be %s but was actually %s' % (
-                self._tocalled(called),
-                self._tocalled(self._acquirecalled),
-            ))
+            self._acquirecalled,
+            called,
+            'expected acquire to be %s but was actually %s'
+            % (self._tocalled(called), self._tocalled(self._acquirecalled),),
+        )
 
     def resetacquirefn(self):
         self._acquirecalled = False
 
     def assertreleasecalled(self, called):
         self._testcase.assertEqual(
-            self._releasecalled, called,
-            'expected release to be %s but was actually %s' % (
-                self._tocalled(called),
-                self._tocalled(self._releasecalled),
-            ))
+            self._releasecalled,
+            called,
+            'expected release to be %s but was actually %s'
+            % (self._tocalled(called), self._tocalled(self._releasecalled),),
+        )
 
     def assertpostreleasecalled(self, called):
         self._testcase.assertEqual(
-            self._postreleasecalled, called,
-            'expected postrelease to be %s but was actually %s' % (
+            self._postreleasecalled,
+            called,
+            'expected postrelease to be %s but was actually %s'
+            % (
                 self._tocalled(called),
                 self._tocalled(self._postreleasecalled),
-            ))
+            ),
+        )
 
     def assertlockexists(self, exists):
         actual = self.vfs.lexists(testlockname)
         self._testcase.assertEqual(
-            actual, exists,
-            'expected lock to %s but actually did %s' % (
-                self._toexists(exists),
-                self._toexists(actual),
-            ))
+            actual,
+            exists,
+            'expected lock to %s but actually did %s'
+            % (self._toexists(exists), self._toexists(actual),),
+        )
 
     def _tocalled(self, called):
         if called:
@@ -105,6 +119,7 @@
         else:
             return 'not exist'
 
+
 class testlock(unittest.TestCase):
     def testlock(self):
         state = teststate(self, tempfile.mkdtemp(dir=encoding.getcwd()))
@@ -125,12 +140,12 @@
         # recursive lock should not call acquirefn again
         state.assertacquirecalled(False)
 
-        lock.release() # brings lock refcount down from 2 to 1
+        lock.release()  # brings lock refcount down from 2 to 1
         state.assertreleasecalled(False)
         state.assertpostreleasecalled(False)
         state.assertlockexists(True)
 
-        lock.release() # releases the lock
+        lock.release()  # releases the lock
         state.assertreleasecalled(True)
         state.assertpostreleasecalled(True)
         state.assertlockexists(False)
@@ -256,8 +271,10 @@
     def testinheritcheck(self):
         d = tempfile.mkdtemp(dir=encoding.getcwd())
         state = teststate(self, d)
+
         def check():
             raise error.LockInheritanceContractViolation('check failed')
+
         lock = state.makelock(inheritchecker=check)
         state.assertacquirecalled(True)
 
@@ -279,6 +296,7 @@
 
         def emulatefrequentlock(*args):
             raise OSError(errno.EEXIST, "File exists")
+
         def emulatefrequentunlock(*args):
             raise OSError(errno.ENOENT, "No such file or directory")
 
@@ -293,5 +311,6 @@
             self.assertTrue(why.locker == b"")
             state.assertlockexists(False)
 
+
 if __name__ == '__main__':
     silenttestrunner.main(__name__)
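
Two recurring fixups in the test-lock.py hunks: inline comments are normalized to exactly two spaces before the `#`, and function definitions gain surrounding blank lines, one for nested defs and two at top level. A runnable sketch with invented names:

    import copy


    def snapshot(value):  # top-level defs get two blank lines above
        return copy.deepcopy(value)  # inline comments get two spaces before '#'


    assert snapshot([1, 2]) == [1, 2]
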
--- a/tests/test-lrucachedict.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-lrucachedict.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,9 +4,8 @@
 
 import silenttestrunner
 
-from mercurial import (
-    util,
-)
+from mercurial import util
+
 
 class testlrucachedict(unittest.TestCase):
     def testsimple(self):
@@ -363,5 +362,6 @@
         self.assertIn('d', d)
         self.assertIn('e', d)
 
+
 if __name__ == '__main__':
     silenttestrunner.main(__name__)
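
The import rewrite above is black dropping redundant parentheses: a parenthesized single-name import that fits on one line is collapsed, trailing comma and all. Sketch using a stdlib module as a stand-in:

    # before black:
    #     from textwrap import (
    #         dedent,
    #     )
    # after black:
    from textwrap import dedent

    assert dedent('    x\n') == 'x\n'
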
--- a/tests/test-manifest.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-manifest.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,13 +20,8 @@
 HASH_3 = b'1234567890abcdef0987654321deadbeef0fcafe'
 BIN_HASH_3 = binascii.unhexlify(HASH_3)
 A_SHORT_MANIFEST = (
-    b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n'
-    b'foo\0%(hash1)s%(flag1)s\n'
-    ) % {b'hash1': HASH_1,
-         b'flag1': b'',
-         b'hash2': HASH_2,
-         b'flag2': b'l',
-         }
+    b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n' b'foo\0%(hash1)s%(flag1)s\n'
+) % {b'hash1': HASH_1, b'flag1': b'', b'hash2': HASH_2, b'flag2': b'l',}
 
 A_DEEPER_MANIFEST = (
     b'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
@@ -47,12 +42,13 @@
     b'a/purple.py\0%(hash2)s%(flag1)s\n'
     b'app.py\0%(hash3)s%(flag1)s\n'
     b'readme.txt\0%(hash2)s%(flag1)s\n'
-    ) % {b'hash1': HASH_1,
-         b'flag1': b'',
-         b'hash2': HASH_2,
-         b'flag2': b'l',
-         b'hash3': HASH_3,
-         }
+) % {
+    b'hash1': HASH_1,
+    b'flag1': b'',
+    b'hash2': HASH_2,
+    b'flag2': b'l',
+    b'hash3': HASH_3,
+}
 
 HUGE_MANIFEST_ENTRIES = 200001
 
@@ -60,11 +56,17 @@
 if 'xrange' not in globals():
     xrange = range
 
-A_HUGE_MANIFEST = b''.join(sorted(
-    b'file%d\0%s%s\n' % (i, h, f) for i, h, f in
-    izip(xrange(200001),
-         itertools.cycle((HASH_1, HASH_2)),
-         itertools.cycle((b'', b'x', b'l')))))
+A_HUGE_MANIFEST = b''.join(
+    sorted(
+        b'file%d\0%s%s\n' % (i, h, f)
+        for i, h, f in izip(
+            xrange(200001),
+            itertools.cycle((HASH_1, HASH_2)),
+            itertools.cycle((b'', b'x', b'l')),
+        )
+    )
+)
+
 
 class basemanifesttests(object):
     def parsemanifest(self, text):
@@ -97,8 +99,7 @@
         m = self.parsemanifest(A_SHORT_MANIFEST)
         m[b'a'] = want
         self.assertEqual(want, m[b'a'])
-        self.assertEqual(b'a\0' + HASH_1 + b'\n' + A_SHORT_MANIFEST,
-                         m.text())
+        self.assertEqual(b'a\0' + HASH_1 + b'\n' + A_SHORT_MANIFEST, m.text())
 
     def testSetFlag(self):
         want = b'x'
@@ -115,15 +116,16 @@
         m[b'a'] = BIN_HASH_1
         m.setflag(b'a', want)
         self.assertEqual(want, m.flags(b'a'))
-        self.assertEqual(b'a\0' + HASH_1 + want + b'\n' + A_SHORT_MANIFEST,
-                         m.text())
+        self.assertEqual(
+            b'a\0' + HASH_1 + want + b'\n' + A_SHORT_MANIFEST, m.text()
+        )
 
     def testCopy(self):
         m = self.parsemanifest(A_SHORT_MANIFEST)
         m[b'a'] = BIN_HASH_1
         m2 = m.copy()
         del m
-        del m2 # make sure we don't double free() anything
+        del m2  # make sure we don't double free() anything
 
     def testCompaction(self):
         unhex = binascii.unhexlify
@@ -133,7 +135,10 @@
         m[b'beta'] = h2
         del m[b'foo']
         want = b'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
-            HASH_1, HASH_2, HASH_2)
+            HASH_1,
+            HASH_2,
+            HASH_2,
+        )
         self.assertEqual(want, m.text())
         self.assertEqual(3, len(m))
         self.assertEqual([b'alpha', b'bar/baz/qux.py', b'beta'], list(m))
@@ -155,9 +160,10 @@
         # Merge code wants to set 21-byte fake hashes at times
         m[b'foo'] = want
         self.assertEqual(want, m[b'foo'])
-        self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
-                          (b'foo', BIN_HASH_1 + b'a')],
-                         list(m.items()))
+        self.assertEqual(
+            [(b'bar/baz/qux.py', BIN_HASH_2), (b'foo', BIN_HASH_1 + b'a')],
+            list(m.items()),
+        )
         # Sometimes it even tries a 22-byte fake hash, but we can
         # return 21 and it'll work out
         m[b'foo'] = want + b'+'
@@ -170,9 +176,9 @@
         m2 = m.copy()
         self.assertEqual(want, m2[b'foo'])
         # suffix with iteration
-        self.assertEqual([(b'bar/baz/qux.py', BIN_HASH_2),
-                          (b'foo', want)],
-                         list(m.items()))
+        self.assertEqual(
+            [(b'bar/baz/qux.py', BIN_HASH_2), (b'foo', want)], list(m.items())
+        )
 
         # shows up in diff
         self.assertEqual({b'foo': ((want, f), (h, b''))}, m.diff(clean))
@@ -181,10 +187,12 @@
     def testMatchException(self):
         m = self.parsemanifest(A_SHORT_MANIFEST)
         match = matchmod.match(b'', b'', [b're:.*'])
+
         def filt(path):
             if path == b'foo':
                 assert False
             return True
+
         match.matchfn = filt
         with self.assertRaises(AssertionError):
             m.matches(match)
@@ -206,28 +214,28 @@
         addl = b'z-only-in-left\0' + HASH_1 + b'\n'
         addr = b'z-only-in-right\0' + HASH_2 + b'x\n'
         left = self.parsemanifest(
-            A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + b'x') + addl)
+            A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + b'x') + addl
+        )
         right = self.parsemanifest(A_SHORT_MANIFEST + addr)
         want = {
-            b'foo': ((BIN_HASH_3, b'x'),
-                     (BIN_HASH_1, b'')),
+            b'foo': ((BIN_HASH_3, b'x'), (BIN_HASH_1, b'')),
             b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
             b'z-only-in-right': (MISSING, (BIN_HASH_2, b'x')),
-            }
+        }
         self.assertEqual(want, left.diff(right))
 
         want = {
             b'bar/baz/qux.py': (MISSING, (BIN_HASH_2, b'l')),
             b'foo': (MISSING, (BIN_HASH_3, b'x')),
             b'z-only-in-left': (MISSING, (BIN_HASH_1, b'')),
-            }
+        }
         self.assertEqual(want, self.parsemanifest(EMTPY_MANIFEST).diff(left))
 
         want = {
             b'bar/baz/qux.py': ((BIN_HASH_2, b'l'), MISSING),
             b'foo': ((BIN_HASH_3, b'x'), MISSING),
             b'z-only-in-left': ((BIN_HASH_1, b''), MISSING),
-            }
+        }
         self.assertEqual(want, left.diff(self.parsemanifest(EMTPY_MANIFEST)))
         copy = right.copy()
         del copy[b'z-only-in-right']
@@ -235,7 +243,7 @@
         want = {
             b'foo': (MISSING, (BIN_HASH_1, b'')),
             b'z-only-in-right': ((BIN_HASH_2, b'x'), MISSING),
-            }
+        }
         self.assertEqual(want, right.diff(copy))
 
         short = self.parsemanifest(A_SHORT_MANIFEST)
@@ -243,21 +251,22 @@
         del pruned[b'foo']
         want = {
             b'foo': ((BIN_HASH_1, b''), MISSING),
-            }
+        }
         self.assertEqual(want, short.diff(pruned))
         want = {
             b'foo': (MISSING, (BIN_HASH_1, b'')),
-            }
+        }
         self.assertEqual(want, pruned.diff(short))
         want = {
             b'bar/baz/qux.py': None,
             b'foo': (MISSING, (BIN_HASH_1, b'')),
-            }
+        }
         self.assertEqual(want, pruned.diff(short, clean=True))
 
     def testReversedLines(self):
         backwards = b''.join(
-            l + b'\n' for l in reversed(A_SHORT_MANIFEST.split(b'\n')) if l)
+            l + b'\n' for l in reversed(A_SHORT_MANIFEST.split(b'\n')) if l
+        )
         try:
             self.parsemanifest(backwards)
             self.fail('Should have raised ValueError')
@@ -292,9 +301,11 @@
         match = matchmod.exact([b'file1', b'file200', b'file300'])
         m2 = m.matches(match)
 
-        w = (b'file1\0%sx\n'
-             b'file200\0%sl\n'
-             b'file300\0%s\n') % (HASH_2, HASH_1, HASH_1)
+        w = (b'file1\0%sx\n' b'file200\0%sl\n' b'file300\0%s\n') % (
+            HASH_2,
+            HASH_1,
+            HASH_1,
+        )
         self.assertEqual(w, m2.text())
 
     def testMatchesNonexistentFile(self):
@@ -303,13 +314,14 @@
         '''
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
-        match = matchmod.exact([b'a/b/c/bar.txt', b'a/b/d/qux.py',
-                                b'readme.txt', b'nonexistent'])
+        match = matchmod.exact(
+            [b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt', b'nonexistent']
+        )
         m2 = m.matches(match)
 
         self.assertEqual(
-                [b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt'],
-                m2.keys())
+            [b'a/b/c/bar.txt', b'a/b/d/qux.py', b'readme.txt'], m2.keys()
+        )
 
     def testMatchesNonexistentDirectory(self):
         '''Tests matches() for a relpath match on a directory that doesn't
@@ -349,11 +361,20 @@
         match = matchmod.match(b'/', b'', [b'a/b'], default=b'relpath')
         m2 = m.matches(match)
 
-        self.assertEqual([
-            b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
-            b'a/b/c/foo.txt',
-            b'a/b/d/baz.py', b'a/b/d/qux.py', b'a/b/d/ten.txt', b'a/b/dog.py',
-            b'a/b/fish.py'], m2.keys())
+        self.assertEqual(
+            [
+                b'a/b/c/bar.py',
+                b'a/b/c/bar.txt',
+                b'a/b/c/foo.py',
+                b'a/b/c/foo.txt',
+                b'a/b/d/baz.py',
+                b'a/b/d/qux.py',
+                b'a/b/d/ten.txt',
+                b'a/b/dog.py',
+                b'a/b/fish.py',
+            ],
+            m2.keys(),
+        )
 
     def testMatchesExactPath(self):
         '''Tests matches() on an exact match on a directory, which should
@@ -374,10 +395,20 @@
         match = matchmod.match(b'/', b'a/b', [b'.'], default=b'relpath')
         m2 = m.matches(match)
 
-        self.assertEqual([
-            b'a/b/c/bar.py', b'a/b/c/bar.txt', b'a/b/c/foo.py',
-            b'a/b/c/foo.txt', b'a/b/d/baz.py', b'a/b/d/qux.py',
-            b'a/b/d/ten.txt', b'a/b/dog.py', b'a/b/fish.py'], m2.keys())
+        self.assertEqual(
+            [
+                b'a/b/c/bar.py',
+                b'a/b/c/bar.txt',
+                b'a/b/c/foo.py',
+                b'a/b/c/foo.txt',
+                b'a/b/d/baz.py',
+                b'a/b/d/qux.py',
+                b'a/b/d/ten.txt',
+                b'a/b/dog.py',
+                b'a/b/fish.py',
+            ],
+            m2.keys(),
+        )
 
     def testMatchesWithPattern(self):
         '''Tests matches() for files matching a pattern that reside
@@ -388,8 +419,9 @@
         m2 = m.matches(match)
 
         self.assertEqual(
-                [b'a/b/c/bar.txt', b'a/b/c/foo.txt', b'a/b/d/ten.txt'],
-                m2.keys())
+            [b'a/b/c/bar.txt', b'a/b/c/foo.txt', b'a/b/d/ten.txt'], m2.keys()
+        )
+
 
 class testmanifestdict(unittest.TestCase, basemanifesttests):
     def parsemanifest(self, text):
@@ -414,10 +446,12 @@
             b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
             b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
             b'\x00\x00\xc0\x8aey\x1d}\x01\xd8\xe0\xb9\xf3\xde\x1b\xcf\x17'
-            b'\xac\xbe')
+            b'\xac\xbe'
+        )
         with self.assertRaises(ValueError):
             self.parsemanifest(data)
 
+
 class testtreemanifest(unittest.TestCase, basemanifesttests):
     def parsemanifest(self, text):
         return manifestmod.treemanifest(b'', text)
@@ -427,17 +461,16 @@
 
         dirs = [s._dir for s in m.walksubtrees()]
         self.assertEqual(
-            sorted([
-                b'', b'a/', b'a/c/', b'a/d/', b'a/b/', b'a/b/c/', b'a/b/d/']),
-            sorted(dirs)
+            sorted(
+                [b'', b'a/', b'a/c/', b'a/d/', b'a/b/', b'a/b/c/', b'a/b/d/']
+            ),
+            sorted(dirs),
         )
 
         match = matchmod.match(b'/', b'', [b'path:a/b/'])
         dirs = [s._dir for s in m.walksubtrees(matcher=match)]
-        self.assertEqual(
-            sorted([b'a/b/', b'a/b/c/', b'a/b/d/']),
-            sorted(dirs)
-        )
+        self.assertEqual(sorted([b'a/b/', b'a/b/c/', b'a/b/d/']), sorted(dirs))
+
 
 if __name__ == '__main__':
     silenttestrunner.main(__name__)
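
Several hunks in test-manifest.py show black's treatment of long `%`-formats: the `%` operator moves to the head of a continuation line, or the operand tuple is exploded one element per line with a trailing comma. A runnable sketch with invented stand-in hashes:

    hash1 = b'1' * 40  # invented stand-ins for HASH_1 / HASH_2
    hash2 = b'2' * 40
    want = b'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % (
        hash1,
        hash2,
        hash2,
    )
    assert want.count(b'\x00') == 3
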
--- a/tests/test-match.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-match.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,8 +9,8 @@
     util,
 )
 
+
 class BaseMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
         m = matchmod.basematcher()
         self.assertTrue(m.visitdir(b''))
@@ -21,8 +21,8 @@
         self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
 
+
 class AlwaysMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
         m = matchmod.alwaysmatcher()
         self.assertEqual(m.visitdir(b''), b'all')
@@ -33,8 +33,8 @@
         self.assertEqual(m.visitchildrenset(b''), b'all')
         self.assertEqual(m.visitchildrenset(b'dir'), b'all')
 
+
 class NeverMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
         m = matchmod.nevermatcher()
         self.assertFalse(m.visitdir(b''))
@@ -45,6 +45,7 @@
         self.assertEqual(m.visitchildrenset(b''), set())
         self.assertEqual(m.visitchildrenset(b'dir'), set())
 
+
 class PredicateMatcherTests(unittest.TestCase):
     # predicatematcher does not currently define either of these methods, so
     # this is equivalent to BaseMatcherTests.
@@ -59,8 +60,8 @@
         self.assertEqual(m.visitchildrenset(b''), b'this')
         self.assertEqual(m.visitchildrenset(b'dir'), b'this')
 
+
 class PatternMatcherTests(unittest.TestCase):
-
     def testVisitdirPrefix(self):
         m = matchmod.match(b'x', b'', patterns=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.patternmatcher)
@@ -122,8 +123,8 @@
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'this')
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), b'this')
 
+
 class IncludeMatcherTests(unittest.TestCase):
-
     def testVisitdirPrefix(self):
         m = matchmod.match(b'x', b'', include=[b'path:dir/subdir'])
         assert isinstance(m, matchmod.includematcher)
@@ -182,8 +183,8 @@
         self.assertEqual(m.visitchildrenset(b'dir/subdir'), b'this')
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), b'this')
 
+
 class ExactMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
         m = matchmod.exact(files=[b'dir/subdir/foo.txt'])
         assert isinstance(m, matchmod.exactmatcher)
@@ -206,11 +207,15 @@
         self.assertEqual(m.visitchildrenset(b'folder'), set())
 
     def testVisitchildrensetFilesAndDirs(self):
-        m = matchmod.exact(files=[b'rootfile.txt',
-                                  b'a/file1.txt',
-                                  b'a/b/file2.txt',
-                                  # no file in a/b/c
-                                  b'a/b/c/d/file4.txt'])
+        m = matchmod.exact(
+            files=[
+                b'rootfile.txt',
+                b'a/file1.txt',
+                b'a/b/file2.txt',
+                # no file in a/b/c
+                b'a/b/c/d/file4.txt',
+            ]
+        )
         assert isinstance(m, matchmod.exactmatcher)
         self.assertEqual(m.visitchildrenset(b''), {b'a', b'rootfile.txt'})
         self.assertEqual(m.visitchildrenset(b'a'), {b'b', b'file1.txt'})
@@ -220,8 +225,8 @@
         self.assertEqual(m.visitchildrenset(b'a/b/c/d/e'), set())
         self.assertEqual(m.visitchildrenset(b'folder'), set())
 
+
 class DifferenceMatcherTests(unittest.TestCase):
-
     def testVisitdirM2always(self):
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.alwaysmatcher()
@@ -341,8 +346,8 @@
         self.assertEqual(dm.visitchildrenset(b'dir/subdir/z'), b'this')
         self.assertEqual(dm.visitchildrenset(b'dir/subdir/x'), b'this')
 
+
 class IntersectionMatcherTests(unittest.TestCase):
-
     def testVisitdirM2always(self):
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.alwaysmatcher()
@@ -533,8 +538,8 @@
         self.assertEqual(im.visitchildrenset(b'dir/subdir/z'), set())
         self.assertEqual(im.visitchildrenset(b'dir/subdir/x'), set())
 
+
 class UnionMatcherTests(unittest.TestCase):
-
     def testVisitdirM2always(self):
         m1 = matchmod.alwaysmatcher()
         m2 = matchmod.alwaysmatcher()
@@ -748,8 +753,8 @@
         self.assertEqual(um.visitchildrenset(b'dir/subdir/z'), b'all')
         self.assertEqual(um.visitchildrenset(b'dir/subdir/x'), b'all')
 
+
 class SubdirMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
         m = matchmod.match(b'', b'', include=[b'path:dir/subdir'])
         sm = matchmod.subdirmatcher(b'dir', m)
@@ -772,11 +777,12 @@
         self.assertEqual(sm.visitchildrenset(b'subdir/z'), b'this')
         self.assertEqual(sm.visitchildrenset(b'foo'), set())
 
+
 class PrefixdirMatcherTests(unittest.TestCase):
-
     def testVisitdir(self):
-        m = matchmod.match(util.localpath(b'root/d'), b'e/f',
-                [b'../a.txt', b'b.txt'])
+        m = matchmod.match(
+            util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']
+        )
         pm = matchmod.prefixdirmatcher(b'd', m)
 
         # `m` elides 'd' because it's part of the root, and the rest of the
@@ -807,8 +813,9 @@
         self.assertEqual(pm.visitdir(b'd/e/f/g'), False)
 
     def testVisitchildrenset(self):
-        m = matchmod.match(util.localpath(b'root/d'), b'e/f',
-                [b'../a.txt', b'b.txt'])
+        m = matchmod.match(
+            util.localpath(b'root/d'), b'e/f', [b'../a.txt', b'b.txt']
+        )
         pm = matchmod.prefixdirmatcher(b'd', m)
 
         # OPT: visitchildrenset could possibly return {'e'} and {'f'} for these
@@ -828,5 +835,6 @@
         self.assertEqual(pm.visitchildrenset(b'd/e/f'), b'this')
         self.assertEqual(pm.visitchildrenset(b'd/e/f/g'), set())
 
+
 if __name__ == '__main__':
     silenttestrunner.main(__name__)
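
The small hunks in test-match.py are black deleting the blank line that sat directly under each `class` header, while adding the two blank lines required before the class itself. Sketch:

    # before black:
    #     class BaseTests:
    #
    #         def test(self):
    #             ...
    # after black, the blank line under the header is gone:
    class BaseTests:
        def test(self):
            return True

    assert BaseTests().test()
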
--- a/tests/test-mdiff.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-mdiff.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,22 +3,23 @@
 
 import unittest
 
-from mercurial import (
-    mdiff,
-)
+from mercurial import mdiff
+
 
 class splitnewlinesTests(unittest.TestCase):
-
     def test_splitnewlines(self):
-        cases = {b'a\nb\nc\n': [b'a\n', b'b\n', b'c\n'],
-                 b'a\nb\nc': [b'a\n', b'b\n', b'c'],
-                 b'a\nb\nc\n\n': [b'a\n', b'b\n', b'c\n', b'\n'],
-                 b'': [],
-                 b'abcabc': [b'abcabc'],
-                 }
+        cases = {
+            b'a\nb\nc\n': [b'a\n', b'b\n', b'c\n'],
+            b'a\nb\nc': [b'a\n', b'b\n', b'c'],
+            b'a\nb\nc\n\n': [b'a\n', b'b\n', b'c\n', b'\n'],
+            b'': [],
+            b'abcabc': [b'abcabc'],
+        }
         for inp, want in cases.items():
             self.assertEqual(mdiff.splitnewlines(inp), want)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
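
The `cases` rewrite is the standard black layout for a mapping that overflows the line: one key/value pair per line, trailing comma included. A runnable sketch, using `bytes.splitlines` as a rough stand-in for `mdiff.splitnewlines` (the two agree on these LF-only inputs):

    cases = {
        b'a\nb\nc\n': [b'a\n', b'b\n', b'c\n'],
        b'': [],
        b'abcabc': [b'abcabc'],
    }
    for inp, want in cases.items():
        assert inp.splitlines(True) == want
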
--- a/tests/test-minifileset.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-minifileset.py	Sun Oct 06 09:45:02 2019 -0400
@@ -3,6 +3,7 @@
 
 from mercurial import minifileset
 
+
 def check(text, truecases, falsecases):
     f = minifileset.compile(text)
     for args in truecases:
@@ -12,24 +13,31 @@
         if f(*args):
             print('unexpected: %r should exclude %r' % (text, args))
 
+
 check(b'all()', [(b'a.php', 123), (b'b.txt', 0)], [])
 check(b'none()', [], [(b'a.php', 123), (b'b.txt', 0)])
 check(b'!!!!((!(!!all())))', [], [(b'a.php', 123), (b'b.txt', 0)])
 
-check(b'"path:a" & (**.b | **.c)',
-      [(b'a/b.b', 0), (b'a/c.c', 0)], [(b'b/c.c', 0)])
-check(b'(path:a & **.b) | **.c',
-      [(b'a/b.b', 0), (b'a/c.c', 0), (b'b/c.c', 0)], [])
+check(
+    b'"path:a" & (**.b | **.c)', [(b'a/b.b', 0), (b'a/c.c', 0)], [(b'b/c.c', 0)]
+)
+check(
+    b'(path:a & **.b) | **.c', [(b'a/b.b', 0), (b'a/c.c', 0), (b'b/c.c', 0)], []
+)
 
-check(b'**.bin - size("<20B")',
-      [(b'b.bin', 21)], [(b'a.bin', 11), (b'b.txt', 21)])
+check(
+    b'**.bin - size("<20B")', [(b'b.bin', 21)], [(b'a.bin', 11), (b'b.txt', 21)]
+)
 
-check(b'!!**.bin or size(">20B") + "path:bin" or !size(">10")',
-      [(b'a.bin', 11), (b'b.txt', 21), (b'bin/abc', 11)],
-      [(b'a.notbin', 11), (b'b.txt', 11), (b'bin2/abc', 11)])
+check(
+    b'!!**.bin or size(">20B") + "path:bin" or !size(">10")',
+    [(b'a.bin', 11), (b'b.txt', 21), (b'bin/abc', 11)],
+    [(b'a.notbin', 11), (b'b.txt', 11), (b'bin2/abc', 11)],
+)
 
 check(
     b'(**.php and size(">10KB")) | **.zip | ("path:bin" & !"path:bin/README") '
     b' | size(">1M")',
     [(b'a.php', 15000), (b'a.zip', 0), (b'bin/a', 0), (b'bin/README', 1e7)],
-    [(b'a.php', 5000), (b'b.zip2', 0), (b't/bin/a', 0), (b'bin/README', 1)])
+    [(b'a.php', 5000), (b'b.zip2', 0), (b't/bin/a', 0), (b'bin/README', 1)],
+)
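
The reflowed `check(...)` calls illustrate black's splitting ladder: try the whole call on one line; failing that, place all arguments together on one indented continuation line (no trailing comma, as above); only when that still overflows, explode one argument per line. Sketch with a stand-in helper:

    def check(text, truecases, falsecases):  # stand-in for the test helper
        return (text, truecases, falsecases)


    assert check(
        b'**.bin - size("<20B")', [(b'b.bin', 21)], [(b'a.bin', 11)]
    )
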
--- a/tests/test-minirst.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-minirst.py	Sun Oct 06 09:45:02 2019 -0400
@@ -1,10 +1,7 @@
 from __future__ import absolute_import, print_function
-from mercurial import (
-    minirst,
-)
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial import minirst
+from mercurial.utils import stringutil
+
 
 def debugformat(text, form, **kwargs):
     blocks, pruned = minirst.parse(text, **kwargs)
@@ -23,12 +20,14 @@
     print("-" * 70)
     print()
 
+
 def debugformats(title, text, **kwargs):
     print("== %s ==" % title)
     debugformat(text, 60, **kwargs)
     debugformat(text, 30, **kwargs)
     debugformat(text, b'html', **kwargs)
 
+
 paragraphs = b"""
 This is some text in the first paragraph.
 
@@ -188,8 +187,9 @@
 debugformats('containers (normal)', containers)
 debugformats('containers (verbose)', containers, keep=[b'verbose'])
 debugformats('containers (debug)', containers, keep=[b'debug'])
-debugformats('containers (verbose debug)', containers,
-            keep=[b'verbose', b'debug'])
+debugformats(
+    'containers (verbose debug)', containers, keep=[b'verbose', b'debug']
+)
 
 roles = b"""Please see :hg:`add`."""
 debugformats('roles', roles)
@@ -245,9 +245,11 @@
 debugformats('comments', comments)
 
 
-data = [[b'a', b'b', b'c'],
-         [b'1', b'2', b'3'],
-         [b'foo', b'bar', b'baz this list is very very very long man']]
+data = [
+    [b'a', b'b', b'c'],
+    [b'1', b'2', b'3'],
+    [b'foo', b'bar', b'baz this list is very very very long man'],
+]
 
 rst = minirst.maketable(data, 2, True)
 table = b''.join(rst)
@@ -256,8 +258,10 @@
 
 debugformats('table', table)
 
-data = [[b's', b'long', b'line\ngoes on here'],
-        [b'', b'xy', b'tried to fix here\n        by indenting']]
+data = [
+    [b's', b'long', b'line\ngoes on here'],
+    [b'', b'xy', b'tried to fix here\n        by indenting'],
+]
 
 rst = minirst.maketable(data, 1, False)
 table = b''.join(rst)
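
The `data` rewrites follow the same rule as the mappings earlier: a nested list that overflows is exploded one row per line, each row kept intact, with a trailing comma after the last. Runnable sketch reusing the table from the hunk:

    data = [
        [b'a', b'b', b'c'],
        [b'1', b'2', b'3'],
        [b'foo', b'bar', b'baz this list is very very very long man'],
    ]
    assert all(len(row) == 3 for row in data)
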
--- a/tests/test-parseindex2.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-parseindex2.py	Sun Oct 06 09:45:02 2019 -0400
@@ -26,12 +26,15 @@
 def gettype(q):
     return int(q & 0xFFFF)
 
+
 def offset_type(offset, type):
     return int(int(offset) << 16 | type)
 
+
 indexformatng = ">Qiiiiii20s12x"
 
-def py_parseindex(data, inline) :
+
+def py_parseindex(data, inline):
     s = 64
     cache = None
     index = []
@@ -43,7 +46,7 @@
     if inline:
         cache = (0, data)
         while off <= l:
-            e = struct.unpack(indexformatng, data[off:off + s])
+            e = struct.unpack(indexformatng, data[off : off + s])
             nodemap[e[7]] = n
             append(e)
             n += 1
@@ -52,7 +55,7 @@
             off += e[1] + s
     else:
         while off <= l:
-            e = struct.unpack(indexformatng, data[off:off + s])
+            e = struct.unpack(indexformatng, data[off : off + s])
             nodemap[e[7]] = n
             append(e)
             n += 1
@@ -65,6 +68,7 @@
 
     return index, cache
 
+
 data_inlined = (
     b'\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c'
     b'\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff'
@@ -89,7 +93,7 @@
     b'\x83\x00\x9f$z\xb8#\xa5\xb1\xdf\x98\xd9\xec\x1b\x89O\xe3Ts\x9a4'
     b'\x17m\x8b\xfc\x8f\xa5\x95\x9a\xfc\xfa\xed,\xe5|\xa1\xfe\x15\xb9'
     b'\xbc\xb2\x93\x1f\xf2\x95\xff\xdf,\x1a\xc5\xe7\x17*\x93Oz:>\x0e'
-    )
+)
 
 data_non_inlined = (
     b'\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19'
@@ -108,57 +112,81 @@
     b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
     b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
     b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-    )
+)
+
 
 def parse_index2(data, inline):
     index, chunkcache = parsers.parse_index2(data, inline)
     return list(index), chunkcache
 
+
 def importparsers(hexversion):
     """Import mercurial.parsers with the given sys.hexversion."""
     # The file parsers.c inspects sys.hexversion to determine the version
     # of the currently-running Python interpreter, so we monkey-patch
     # sys.hexversion to simulate using different versions.
-    code = ("import sys; sys.hexversion=%s; "
-            "import mercurial.cext.parsers" % hexversion)
+    code = (
+        "import sys; sys.hexversion=%s; "
+        "import mercurial.cext.parsers" % hexversion
+    )
     cmd = "python -c \"%s\"" % code
     # We need to do these tests inside a subprocess because parser.c's
     # version-checking code happens inside the module init function, and
     # when using reload() to reimport an extension module, "The init function
     # of extension modules is not called a second time"
     # (from http://docs.python.org/2/library/functions.html?#reload).
-    p = subprocess.Popen(cmd, shell=True,
-                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    p = subprocess.Popen(
+        cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+    )
     return p.communicate()  # returns stdout, stderr
 
+
 def hexfailmsg(testnumber, hexversion, stdout, expected):
     try:
         hexstring = hex(hexversion)
     except TypeError:
         hexstring = None
-    return ("FAILED: version test #%s with Python %s and patched "
-            "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n" %
-            (testnumber, sys.version_info, hexversion, hexstring, expected,
-             stdout))
+    return (
+        "FAILED: version test #%s with Python %s and patched "
+        "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n"
+        % (
+            testnumber,
+            sys.version_info,
+            hexversion,
+            hexstring,
+            expected,
+            stdout,
+        )
+    )
+
 
 def makehex(major, minor, micro):
     return int("%x%02x%02x00" % (major, minor, micro), 16)
 
+
 class parseindex2tests(unittest.TestCase):
-
     def assertversionokay(self, testnumber, hexversion):
         stdout, stderr = importparsers(hexversion)
         self.assertFalse(
-            stdout, hexfailmsg(testnumber, hexversion, stdout, 'no stdout'))
+            stdout, hexfailmsg(testnumber, hexversion, stdout, 'no stdout')
+        )
 
     def assertversionfail(self, testnumber, hexversion):
         stdout, stderr = importparsers(hexversion)
         # We include versionerrortext to distinguish from other ImportErrors.
         errtext = b"ImportError: %s" % pycompat.sysbytes(
-            parsers.versionerrortext)
-        self.assertIn(errtext, stdout,
-                      hexfailmsg(testnumber, hexversion, stdout,
-                                 expected="stdout to contain %r" % errtext))
+            parsers.versionerrortext
+        )
+        self.assertIn(
+            errtext,
+            stdout,
+            hexfailmsg(
+                testnumber,
+                hexversion,
+                stdout,
+                expected="stdout to contain %r" % errtext,
+            ),
+        )
 
     def testversiondetection(self):
         """Check the version-detection logic when importing parsers."""
@@ -189,11 +217,11 @@
 
         want = py_parseindex(data_inlined, True)
         got = parse_index2(data_inlined, True)
-        self.assertEqual(want, got) # inline data
+        self.assertEqual(want, got)  # inline data
 
         want = py_parseindex(data_non_inlined, False)
         got = parse_index2(data_non_inlined, False)
-        self.assertEqual(want, got) # no inline data
+        self.assertEqual(want, got)  # no inline data
 
         ix = parsers.parse_index2(data_inlined, True)[0]
         for i, r in enumerate(ix):
@@ -201,8 +229,10 @@
                 i = -1
             try:
                 self.assertEqual(
-                    ix[r[7]], i,
-                    'Reverse lookup inconsistent for %r' % nodemod.hex(r[7]))
+                    ix[r[7]],
+                    i,
+                    'Reverse lookup inconsistent for %r' % nodemod.hex(r[7]),
+                )
             except TypeError:
                 # pure version doesn't support this
                 break
@@ -211,12 +241,14 @@
         want = (0, 0, 0, -1, -1, -1, -1, nullid)
         index, junk = parsers.parse_index2(data_inlined, True)
         got = index[-1]
-        self.assertEqual(want, got) # inline data
+        self.assertEqual(want, got)  # inline data
 
         index, junk = parsers.parse_index2(data_non_inlined, False)
         got = index[-1]
-        self.assertEqual(want, got) # no inline data
+        self.assertEqual(want, got)  # no inline data
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
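
The `data[off : off + s]` hunks apply black's PEP 8 slice rule: when a slice bound is an expression, the colon is treated as a binary operator and gets a space on each side; simple bounds stay tight. Runnable sketch:

    data = bytes(range(16))
    off, s = 4, 8
    assert data[off : off + s] == data[4:12]  # expression bounds: spaced colon
    assert data[:off] == data[:4]  # simple bound: no spaces
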
--- a/tests/test-pathencode.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-pathencode.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,9 +31,12 @@
 for c in (b'\0', b'/'):
     validchars.remove(c)
 
-winreserved = (b'aux con prn nul'.split() +
-               [b'com%d' % i for i in xrange(1, 10)] +
-               [b'lpt%d' % i for i in xrange(1, 10)])
+winreserved = (
+    b'aux con prn nul'.split()
+    + [b'com%d' % i for i in xrange(1, 10)]
+    + [b'lpt%d' % i for i in xrange(1, 10)]
+)
+
 
 def casecombinations(names):
     '''Build all case-diddled combinations of names.'''
@@ -45,10 +48,11 @@
             for c in itertools.combinations(xrange(len(r)), i):
                 d = r
                 for j in c:
-                    d = b''.join((d[:j], d[j:j + 1].upper(), d[j + 1:]))
+                    d = b''.join((d[:j], d[j : j + 1].upper(), d[j + 1 :]))
                 combos.add(d)
     return sorted(combos)
 
+
 def buildprobtable(fp, cmd='hg manifest tip'):
     '''Construct and print a table of probabilities for path name
     components.  The numbers are percentages.'''
@@ -65,8 +69,9 @@
         counts.pop(c, None)
     t = sum(counts.itervalues()) / 100.0
     fp.write('probtable = (')
-    for i, (k, v) in enumerate(sorted(counts.items(), key=lambda x: x[1],
-                                      reverse=True)):
+    for i, (k, v) in enumerate(
+        sorted(counts.items(), key=lambda x: x[1], reverse=True)
+    ):
         if (i % 5) == 0:
             fp.write('\n    ')
         vt = v / t
@@ -75,29 +80,83 @@
         fp.write('(%r, %.03f), ' % (k, vt))
     fp.write('\n    )\n')
 
+
 # A table of character frequencies (as percentages), gleaned by
 # looking at filelog names from a real-world, very large repo.
 
 probtable = (
-    (b't', 9.828), (b'e', 9.042), (b's', 8.011), (b'a', 6.801), (b'i', 6.618),
-    (b'g', 5.053), (b'r', 5.030), (b'o', 4.887), (b'p', 4.363), (b'n', 4.258),
-    (b'l', 3.830), (b'h', 3.693), (b'_', 3.659), (b'.', 3.377), (b'm', 3.194),
-    (b'u', 2.364), (b'd', 2.296), (b'c', 2.163), (b'b', 1.739), (b'f', 1.625),
-    (b'6', 0.666), (b'j', 0.610), (b'y', 0.554), (b'x', 0.487), (b'w', 0.477),
-    (b'k', 0.476), (b'v', 0.473), (b'3', 0.336), (b'1', 0.335), (b'2', 0.326),
-    (b'4', 0.310), (b'5', 0.305), (b'9', 0.302), (b'8', 0.300), (b'7', 0.299),
-    (b'q', 0.298), (b'0', 0.250), (b'z', 0.223), (b'-', 0.118), (b'C', 0.095),
-    (b'T', 0.087), (b'F', 0.085), (b'B', 0.077), (b'S', 0.076), (b'P', 0.076),
-    (b'L', 0.059), (b'A', 0.058), (b'N', 0.051), (b'D', 0.049), (b'M', 0.046),
-    (b'E', 0.039), (b'I', 0.035), (b'R', 0.035), (b'G', 0.028), (b'U', 0.026),
-    (b'W', 0.025), (b'O', 0.017), (b'V', 0.015), (b'H', 0.013), (b'Q', 0.011),
-    (b'J', 0.007), (b'K', 0.005), (b'+', 0.004), (b'X', 0.003), (b'Y', 0.001),
-    )
+    (b't', 9.828),
+    (b'e', 9.042),
+    (b's', 8.011),
+    (b'a', 6.801),
+    (b'i', 6.618),
+    (b'g', 5.053),
+    (b'r', 5.030),
+    (b'o', 4.887),
+    (b'p', 4.363),
+    (b'n', 4.258),
+    (b'l', 3.830),
+    (b'h', 3.693),
+    (b'_', 3.659),
+    (b'.', 3.377),
+    (b'm', 3.194),
+    (b'u', 2.364),
+    (b'd', 2.296),
+    (b'c', 2.163),
+    (b'b', 1.739),
+    (b'f', 1.625),
+    (b'6', 0.666),
+    (b'j', 0.610),
+    (b'y', 0.554),
+    (b'x', 0.487),
+    (b'w', 0.477),
+    (b'k', 0.476),
+    (b'v', 0.473),
+    (b'3', 0.336),
+    (b'1', 0.335),
+    (b'2', 0.326),
+    (b'4', 0.310),
+    (b'5', 0.305),
+    (b'9', 0.302),
+    (b'8', 0.300),
+    (b'7', 0.299),
+    (b'q', 0.298),
+    (b'0', 0.250),
+    (b'z', 0.223),
+    (b'-', 0.118),
+    (b'C', 0.095),
+    (b'T', 0.087),
+    (b'F', 0.085),
+    (b'B', 0.077),
+    (b'S', 0.076),
+    (b'P', 0.076),
+    (b'L', 0.059),
+    (b'A', 0.058),
+    (b'N', 0.051),
+    (b'D', 0.049),
+    (b'M', 0.046),
+    (b'E', 0.039),
+    (b'I', 0.035),
+    (b'R', 0.035),
+    (b'G', 0.028),
+    (b'U', 0.026),
+    (b'W', 0.025),
+    (b'O', 0.017),
+    (b'V', 0.015),
+    (b'H', 0.013),
+    (b'Q', 0.011),
+    (b'J', 0.007),
+    (b'K', 0.005),
+    (b'+', 0.004),
+    (b'X', 0.003),
+    (b'Y', 0.001),
+)
 
 for c, _ in probtable:
     validchars.remove(c)
 validchars = list(validchars)
 
+
 def pickfrom(rng, table):
     c = 0
     r = rng.random() * sum(i[1] for i in table)
@@ -106,6 +165,7 @@
         if c >= r:
             return i
 
+
 reservedcombos = casecombinations(winreserved)
 
 # The first component of a name following a slash.
@@ -114,7 +174,7 @@
     (lambda rng: pickfrom(rng, probtable), 90),
     (lambda rng: rng.choice(validchars), 5),
     (lambda rng: rng.choice(reservedcombos), 5),
-    )
+)
 
 # Components of a name following the first.
 
@@ -129,7 +189,8 @@
 lasttable = resttable + (
     (lambda rng: b'', 95),
     (lambda rng: rng.choice(internalsuffixcombos), 5),
-    )
+)
+
 
 def makepart(rng, k):
     '''Construct a part of a pathname, without slashes.'''
@@ -145,29 +206,37 @@
     ps.append(pickfrom(rng, lasttable)(rng))
     return b''.join(ps)
 
+
 def makepath(rng, j, k):
     '''Construct a complete pathname.'''
 
-    return (b'data/' + b'/'.join(makepart(rng, k) for _ in xrange(j)) +
-            rng.choice([b'.d', b'.i']))
+    return (
+        b'data/'
+        + b'/'.join(makepart(rng, k) for _ in xrange(j))
+        + rng.choice([b'.d', b'.i'])
+    )
+
 
 def genpath(rng, count):
     '''Generate random pathnames with gradually increasing lengths.'''
 
     mink, maxk = 1, 4096
+
     def steps():
         for i in xrange(count):
             yield mink + int(round(math.sqrt((maxk - mink) * float(i) / count)))
+
     for k in steps():
         x = rng.randint(1, k)
         y = rng.randint(1, k)
         yield makepath(rng, x, y)
 
+
 def runtests(rng, seed, count):
     nerrs = 0
     for p in genpath(rng, count):
-        h = store._pathencode(p)    # uses C implementation, if available
-        r = store._hybridencode(p, True) # reference implementation in Python
+        h = store._pathencode(p)  # uses C implementation, if available
+        r = store._hybridencode(p, True)  # reference implementation in Python
         if h != r:
             if nerrs == 0:
                 print('seed:', hex(seed)[:-1], file=sys.stderr)
@@ -177,23 +246,27 @@
             nerrs += 1
     return nerrs
 
+
 def main():
     import getopt
 
     # Empirically observed to take about a second to run
     count = 100
     seed = None
-    opts, args = getopt.getopt(sys.argv[1:], 'c:s:',
-                               ['build', 'count=', 'seed='])
+    opts, args = getopt.getopt(
+        sys.argv[1:], 'c:s:', ['build', 'count=', 'seed=']
+    )
     for o, a in opts:
         if o in ('-c', '--count'):
             count = int(a)
         elif o in ('-s', '--seed'):
-            seed = int(a, base=0) # accepts base 10 or 16 strings
+            seed = int(a, base=0)  # accepts base 10 or 16 strings
         elif o == '--build':
-            buildprobtable(sys.stdout,
-                           'find .hg/store/data -type f && '
-                           'cat .hg/store/fncache 2>/dev/null')
+            buildprobtable(
+                sys.stdout,
+                'find .hg/store/data -type f && '
+                'cat .hg/store/fncache 2>/dev/null',
+            )
             sys.exit(0)
 
     if seed is None:
@@ -206,5 +279,6 @@
     if runtests(rng, seed, count):
         sys.exit(1)
 
+
 if __name__ == '__main__':
     main()
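
The `winreserved` rewrite shows black wrapping an overflowing expression in parentheses and breaking before each binary operator rather than after. The same expression, runnable on Python 3 with `range` standing in for `xrange`:

    winreserved = (
        b'aux con prn nul'.split()
        + [b'com%d' % i for i in range(1, 10)]
        + [b'lpt%d' % i for i in range(1, 10)]
    )
    assert len(winreserved) == 22
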
--- a/tests/test-propertycache.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-propertycache.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,13 +16,13 @@
     util,
 )
 
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
 
 # create some special property cache that trace they call
 
 calllog = []
+
+
 @util.propertycache
 def testcachedfoobar(repo):
     name = repo.filtername
@@ -32,7 +32,10 @@
     calllog.append(val)
     return val
 
+
 unficalllog = []
+
+
 @localrepo.unfilteredpropertycache
 def testcachedunfifoobar(repo):
     name = repo.filtername
@@ -42,7 +45,8 @@
     unficalllog.append(val)
     return val
 
-#plug them on repo
+
+# plug them on repo
 localrepo.localrepository.testcachedfoobar = testcachedfoobar
 localrepo.localrepository.testcachedunfifoobar = testcachedunfifoobar
 
@@ -50,8 +54,12 @@
 # Create an empty repo and instantiate it. It is important to run
 # these tests on the real object to detect regression.
 repopath = pycompat.fsencode(os.path.join(os.environ['TESTTMP'], 'repo'))
-assert subprocess.call(pycompat.rapply(procutil.tonativestr,
-                                       [b'hg', b'init', repopath])) == 0
+assert (
+    subprocess.call(
+        pycompat.rapply(procutil.tonativestr, [b'hg', b'init', repopath])
+    )
+    == 0
+)
 
 ui = uimod.ui.load()
 repo = hg.repository(ui, path=repopath).unfiltered()
@@ -61,57 +69,75 @@
 print('=== property cache ===')
 print('')
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
 
 print('')
 print('= first access on unfiltered, should do a call')
 print('access:', repo.testcachedfoobar)
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
 
 print('')
 print('= second access on unfiltered, should not do call')
 print('access', repo.testcachedfoobar)
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
 
 print('')
 print('= first access on "visible" view, should do a call')
 visibleview = repo.filtered('visible')
-print('cached value ("visible" view):',
-    vars(visibleview).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value ("visible" view):',
+    vars(visibleview).get('testcachedfoobar', 'NOCACHE'),
+)
 print('access:', visibleview.testcachedfoobar)
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
-print('cached value ("visible" view):',
-    vars(visibleview).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
+print(
+    'cached value ("visible" view):',
+    vars(visibleview).get('testcachedfoobar', 'NOCACHE'),
+)
 
 print('')
 print('= second access on "visible view", should not do call')
 print('access:', visibleview.testcachedfoobar)
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
-print('cached value ("visible" view):',
-    vars(visibleview).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
+print(
+    'cached value ("visible" view):',
+    vars(visibleview).get('testcachedfoobar', 'NOCACHE'),
+)
 
 print('')
 print('= no effect on other view')
 immutableview = repo.filtered('immutable')
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedfoobar', 'NOCACHE'),
+)
 print('access:', immutableview.testcachedfoobar)
 print('calllog:', calllog)
-print('cached value (unfiltered):',
-    vars(repo).get('testcachedfoobar', 'NOCACHE'))
-print('cached value ("visible" view):',
-    vars(visibleview).get('testcachedfoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedfoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):', vars(repo).get('testcachedfoobar', 'NOCACHE')
+)
+print(
+    'cached value ("visible" view):',
+    vars(visibleview).get('testcachedfoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedfoobar', 'NOCACHE'),
+)
 
 # unfiltered property cache test
 print('')
@@ -119,26 +145,36 @@
 print('=== unfiltered property cache ===')
 print('')
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
 
 print('')
 print('= first access on unfiltered, should do a call')
 print('access (unfiltered):', repo.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
 
 print('')
 print('= second access on unfiltered, should not do call')
 print('access (unfiltered):', repo.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
 
 print('')
 print('= access on view should use the unfiltered cache')
@@ -146,44 +182,74 @@
 print('access ("visible" view):  ', visibleview.testcachedunfifoobar)
 print('access ("immutable" view):', immutableview.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
 
 print('')
 print('= even if we clear the unfiltered cache')
 del repo.__dict__['testcachedunfifoobar']
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
 print('unficalllog:', unficalllog)
 print('access ("visible" view):  ', visibleview.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
 print('access ("immutable" view):', immutableview.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
 print('access (unfiltered):      ', repo.testcachedunfifoobar)
 print('unficalllog:', unficalllog)
-print('cached value (unfiltered):      ',
-    vars(repo).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("visible" view):  ',
-    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'))
-print('cached value ("immutable" view):',
-    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'))
+print(
+    'cached value (unfiltered):      ',
+    vars(repo).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("visible" view):  ',
+    vars(visibleview).get('testcachedunfifoobar', 'NOCACHE'),
+)
+print(
+    'cached value ("immutable" view):',
+    vars(immutableview).get('testcachedunfifoobar', 'NOCACHE'),
+)
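
The `assert subprocess.call(...) == 0` hunk shows how black splits a statement that has no bracket of its own: the expression is wrapped in new parentheses and broken before the comparison operator. A runnable sketch (shown pre-split for illustration; a line this short would be joined):

    import subprocess
    import sys

    assert (
        subprocess.call([sys.executable, '-c', 'pass'])
        == 0
    )
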
--- a/tests/test-remotefilelog-datapack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-remotefilelog-datapack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -27,6 +27,7 @@
     datapack,
 )
 
+
 class datapacktestsbase(object):
     def __init__(self, datapackreader, paramsavailable):
         self.datapackreader = datapackreader
@@ -48,8 +49,9 @@
         return hashlib.sha1(content).digest()
 
     def getFakeHash(self):
-        return b''.join(pycompat.bytechr(random.randint(0, 255))
-                        for _ in range(20))
+        return b''.join(
+            pycompat.bytechr(random.randint(0, 255)) for _ in range(20)
+        )
 
     def createPack(self, revisions=None, packdir=None):
         if revisions is None:
@@ -80,8 +82,9 @@
         revisions = [(filename, node, nullid, content)]
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEqual(pack.params.fanoutprefix,
-                             basepack.SMALLFANOUTPREFIX)
+            self.assertEqual(
+                pack.params.fanoutprefix, basepack.SMALLFANOUTPREFIX
+            )
 
         chain = pack.getdeltachain(filename, node)
         self.assertEqual(content, chain[0][4])
@@ -171,10 +174,12 @@
             filename = b'%d.txt' % i
             content = b'put-something-here \n' * i
             node = self.getHash(content)
-            meta = {constants.METAKEYFLAG: i ** 4,
-                    constants.METAKEYSIZE: len(content),
-                    b'Z': b'random_string',
-                    b'_': b'\0' * i}
+            meta = {
+                constants.METAKEYFLAG: i ** 4,
+                constants.METAKEYSIZE: len(content),
+                b'Z': b'random_string',
+                b'_': b'\0' * i,
+            }
             revisions.append((filename, node, nullid, content, meta))
         pack = self.createPack(revisions)
         for name, node, x, content, origmeta in revisions:
@@ -201,13 +206,15 @@
         missing = pack.getmissing([(b"foo", revisions[0][1])])
         self.assertFalse(missing)
 
-        missing = pack.getmissing([(b"foo", revisions[0][1]),
-                                   (b"foo", revisions[1][1])])
+        missing = pack.getmissing(
+            [(b"foo", revisions[0][1]), (b"foo", revisions[1][1])]
+        )
         self.assertFalse(missing)
 
         fakenode = self.getFakeHash()
-        missing = pack.getmissing([(b"foo", revisions[0][1]),
-                                   (b"foo", fakenode)])
+        missing = pack.getmissing(
+            [(b"foo", revisions[0][1]), (b"foo", fakenode)]
+        )
         self.assertEqual(missing, [(b"foo", fakenode)])
 
     def testAddThrows(self):
@@ -257,8 +264,9 @@
 
         pack = self.createPack(revisions)
         if self.paramsavailable:
-            self.assertEqual(pack.params.fanoutprefix,
-                             basepack.LARGEFANOUTPREFIX)
+            self.assertEqual(
+                pack.params.fanoutprefix, basepack.LARGEFANOUTPREFIX
+            )
 
         for (filename, node), content in blobs.items():
             actualcontent = pack.getdeltachain(filename, node)[0][4]
@@ -284,7 +292,7 @@
                     b'%d' % i,
                     self.getFakeHash(),
                     revision[1],
-                    self.getFakeHash()
+                    self.getFakeHash(),
                 )
 
             self.createPack(chain, packdir)
@@ -303,8 +311,7 @@
 
             mostrecentpack = next(iter(store.packs), None)
             self.assertEqual(
-                mostrecentpack.getdeltachain(revision[0], revision[1]),
-                chain
+                mostrecentpack.getdeltachain(revision[0], revision[1]), chain
             )
 
             self.assertEqual(randomchain.index(revision) + 1, len(chain))
@@ -341,6 +348,7 @@
 
             # Perf of large multi-get
             import gc
+
             gc.disable()
             pack = self.datapackreader(path)
             for lookupsize in lookupsizes:
@@ -352,10 +360,14 @@
                 start = time.time()
                 pack.getmissing(findnodes[:lookupsize])
                 elapsed = time.time() - start
-                print ("%s pack %d lookups = %0.04f" %
-                       (('%d' % packsize).rjust(7),
+                print(
+                    "%s pack %d lookups = %0.04f"
+                    % (
+                        ('%d' % packsize).rjust(7),
                         ('%d' % lookupsize).rjust(7),
-                        elapsed))
+                        elapsed,
+                    )
+                )
 
             print("")
             gc.enable()
@@ -364,11 +376,13 @@
         # so the user sees the output.
         raise RuntimeError("perf test always fails")
 
+
 class datapacktests(datapacktestsbase, unittest.TestCase):
     def __init__(self, *args, **kwargs):
         datapacktestsbase.__init__(self, datapack.datapack, True)
         unittest.TestCase.__init__(self, *args, **kwargs)
 
+
 # TODO:
 # datapack store:
 # - getmissing
@@ -376,5 +390,5 @@
 
 if __name__ == '__main__':
     if pycompat.iswindows:
-        sys.exit(80)    # Skip on Windows
+        sys.exit(80)  # Skip on Windows
     silenttestrunner.main(__name__)
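
The datapack hunks above only rewrap code; the contract the tests assert is
unchanged: getmissing() filters out the (name, node) pairs already present in
the pack and returns the unknown ones. A self-contained toy sketch of that
contract (plain Python, not Mercurial code; all names are illustrative):

class fakepack(object):
    def __init__(self, keys):
        self._keys = set(keys)

    def getmissing(self, keys):
        # keep only the pairs the pack does not contain
        return [k for k in keys if k not in self._keys]

pack = fakepack({(b'foo', b'node1')})
assert pack.getmissing([(b'foo', b'node1')]) == []
assert pack.getmissing([(b'foo', b'node1'), (b'foo', b'x')]) == [(b'foo', b'x')]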
--- a/tests/test-remotefilelog-histpack.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-remotefilelog-histpack.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,6 +18,7 @@
     pycompat,
     ui as uimod,
 )
+
 # Load the local remotefilelog, not the system one
 sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
 from hgext.remotefilelog import (
@@ -25,6 +26,7 @@
     historypack,
 )
 
+
 class histpacktests(unittest.TestCase):
     def setUp(self):
         self.tempdirs = []
@@ -42,8 +44,9 @@
         return hashlib.sha1(content).digest()
 
     def getFakeHash(self):
-        return b''.join(pycompat.bytechr(random.randint(0, 255))
-                        for _ in range(20))
+        return b''.join(
+            pycompat.bytechr(random.randint(0, 255)) for _ in range(20)
+        )
 
     def createPack(self, revisions=None):
         """Creates and returns a historypack containing the specified revisions.
@@ -52,12 +55,19 @@
         node, p1node, p2node, and linknode.
         """
         if revisions is None:
-            revisions = [(b"filename", self.getFakeHash(), nullid, nullid,
-                          self.getFakeHash(), None)]
+            revisions = [
+                (
+                    b"filename",
+                    self.getFakeHash(),
+                    nullid,
+                    nullid,
+                    self.getFakeHash(),
+                    None,
+                )
+            ]
 
         packdir = pycompat.fsencode(self.makeTempDir())
-        packer = historypack.mutablehistorypack(uimod.ui(), packdir,
-                                                version=2)
+        packer = historypack.mutablehistorypack(uimod.ui(), packdir, version=2)
 
         for filename, node, p1, p2, linknode, copyfrom in revisions:
             packer.add(filename, node, p1, p2, linknode, copyfrom)
@@ -163,8 +173,7 @@
         # Verify the pack contents
         for (filename, node) in allentries:
             ancestors = pack.getancestors(filename, node)
-            self.assertEqual(ancestorcounts[(filename, node)],
-                             len(ancestors))
+            self.assertEqual(ancestorcounts[(filename, node)], len(ancestors))
             for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.items():
                 ep1, ep2, elinknode = allentries[(filename, anode)]
                 self.assertEqual(ap1, ep1)
@@ -208,13 +217,15 @@
         missing = pack.getmissing([(filename, revisions[0][1])])
         self.assertFalse(missing)
 
-        missing = pack.getmissing([(filename, revisions[0][1]),
-                                   (filename, revisions[1][1])])
+        missing = pack.getmissing(
+            [(filename, revisions[0][1]), (filename, revisions[1][1])]
+        )
         self.assertFalse(missing)
 
         fakenode = self.getFakeHash()
-        missing = pack.getmissing([(filename, revisions[0][1]),
-                                   (filename, fakenode)])
+        missing = pack.getmissing(
+            [(filename, revisions[0][1]), (filename, fakenode)]
+        )
         self.assertEqual(missing, [(filename, fakenode)])
 
         # Test getmissing on a non-existent filename
@@ -268,11 +279,13 @@
             self.assertEqual(p2, actual[1])
             self.assertEqual(linknode, actual[2])
             self.assertEqual(copyfrom, actual[3])
+
+
 # TODO:
 # histpack store:
 # - repack two packs into one
 
 if __name__ == '__main__':
     if pycompat.iswindows:
-        sys.exit(80)    # Skip on Windows
+        sys.exit(80)  # Skip on Windows
     silenttestrunner.main(__name__)
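
The histpack tests above build a linear chain of revisions (each entry's p1
points at the previous node) and assert that getancestors() walks it back to
nullid. A toy model of that walk, simplified to the p1-only chains produced
by the test's createPack default (the real historypack also follows p2 and
records copy sources):

nullid = b'\0' * 20

def getancestors(entries, node):
    # entries maps node -> (p1, p2, linknode, copyfrom)
    found = {}
    while node != nullid:
        found[node] = entries[node]
        node = entries[node][0]  # follow p1
    return found

entries = {
    b'A' * 20: (nullid, nullid, b'link0', None),
    b'B' * 20: (b'A' * 20, nullid, b'link1', None),
}
assert sorted(getancestors(entries, b'B' * 20)) == [b'A' * 20, b'B' * 20]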
--- a/tests/test-revlog-ancestry.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-revlog-ancestry.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,9 +11,11 @@
 repo = hg.repository(u, b'test1', create=1)
 os.chdir('test1')
 
+
 def commit(text, time):
     repo.commit(text=text, date=b"%d 0" % time)
 
+
 def addcommit(name, time):
     f = open(name, 'wb')
     f.write(b'%s\n' % name)
@@ -21,12 +23,15 @@
     repo[None].add([name])
     commit(name, time)
 
+
 def update(rev):
     merge.update(repo, rev, branchmerge=False, force=True)
 
+
 def merge_(rev):
     merge.update(repo, rev, branchmerge=True, force=False)
 
+
 if __name__ == '__main__':
     addcommit(b"A", 0)
     addcommit(b"B", 1)
--- a/tests/test-revlog-raw.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-revlog-raw.py	Sun Oct 06 09:45:02 2019 -0400
@@ -23,18 +23,23 @@
 tvfs = vfs.vfs(encoding.environ.get(b'TESTTMP', b'/tmp'))
 
 # Enable generaldelta otherwise revlog won't use delta as expected by the test
-tvfs.options = {b'generaldelta': True, b'revlogv1': True,
-                b'sparse-revlog': True}
+tvfs.options = {
+    b'generaldelta': True,
+    b'revlogv1': True,
+    b'sparse-revlog': True,
+}
 
 # The test wants to control whether to use delta explicitly, based on
 # "storedeltachains".
 revlog.revlog._isgooddeltainfo = lambda self, d, textlen: self._storedeltachains
 
+
 def abort(msg):
     print('abort: %s' % msg)
     # Return 0 so run-tests.py could compare the output.
     sys.exit()
 
+
 # Register a revlog processor for flag EXTSTORED.
 #
 # It simply prepends a fixed header, and replaces '1' to 'i'. So it has
@@ -42,37 +47,45 @@
 # deltas.
 _extheader = b'E\n'
 
+
 def readprocessor(self, rawtext):
     # True: the returned text could be used to verify hash
-    text = rawtext[len(_extheader):].replace(b'i', b'1')
+    text = rawtext[len(_extheader) :].replace(b'i', b'1')
     return text, True, {}
 
+
 def writeprocessor(self, text, sidedata):
     # False: the returned rawtext shouldn't be used to verify hash
     rawtext = _extheader + text.replace(b'1', b'i')
     return rawtext, False
 
+
 def rawprocessor(self, rawtext):
     # False: do not verify hash. Only the content returned by "readprocessor"
     # can be used to verify hash.
     return False
 
-flagutil.addflagprocessor(revlog.REVIDX_EXTSTORED,
-                        (readprocessor, writeprocessor, rawprocessor))
+
+flagutil.addflagprocessor(
+    revlog.REVIDX_EXTSTORED, (readprocessor, writeprocessor, rawprocessor)
+)
 
 # Utilities about reading and appending revlog
 
+
 def newtransaction():
     # A transaction is required to write revlogs
     report = lambda msg: None
     return transaction.transaction(report, tvfs, {'plain': tvfs}, b'journal')
 
+
 def newrevlog(name=b'_testrevlog.i', recreate=False):
     if recreate:
         tvfs.tryunlink(name)
     rlog = revlog.revlog(tvfs, name)
     return rlog
 
+
 def appendrev(rlog, text, tr, isext=False, isdelta=True):
     '''Append a revision. If isext is True, set the EXTSTORED flag so flag
     processor will be used (and rawtext is different from text). If isdelta is
@@ -96,6 +109,7 @@
         # Restore storedeltachains. It is always True, see revlog.__init__
         rlog._storedeltachains = True
 
+
 def addgroupcopy(rlog, tr, destname=b'_destrevlog.i', optimaldelta=True):
     '''Copy revlog to destname using revlog.addgroup. Return the copied revlog.
 
@@ -109,6 +123,7 @@
     This exercises some revlog.addgroup (and revlog._addrevision(text=None))
     code path, which is not covered by "appendrev" alone.
     '''
+
     class dummychangegroup(object):
         @staticmethod
         def deltachunk(pnode):
@@ -124,10 +139,15 @@
                 deltaparent = min(0, parentrev)
             if not rlog.candelta(deltaparent, r):
                 deltaparent = -1
-            return {b'node': rlog.node(r), b'p1': pnode, b'p2': node.nullid,
-                    b'cs': rlog.node(rlog.linkrev(r)), b'flags': rlog.flags(r),
-                    b'deltabase': rlog.node(deltaparent),
-                    b'delta': rlog.revdiff(deltaparent, r)}
+            return {
+                b'node': rlog.node(r),
+                b'p1': pnode,
+                b'p2': node.nullid,
+                b'cs': rlog.node(rlog.linkrev(r)),
+                b'flags': rlog.flags(r),
+                b'deltabase': rlog.node(deltaparent),
+                b'delta': rlog.revdiff(deltaparent, r),
+            }
 
         def deltaiter(self):
             chain = None
@@ -152,6 +172,7 @@
     dlog.addgroup(dummydeltas, linkmap, tr)
     return dlog
 
+
 def lowlevelcopy(rlog, tr, destname=b'_destrevlog.i'):
     '''Like addgroupcopy, but use the low level revlog._addrevision directly.
 
@@ -166,9 +187,14 @@
             cachedelta = None
         else:
             # deltaparent cannot have EXTSTORED flag.
-            deltaparent = max([-1] +
-                              [p for p in range(r)
-                               if rlog.flags(p) & revlog.REVIDX_EXTSTORED == 0])
+            deltaparent = max(
+                [-1]
+                + [
+                    p
+                    for p in range(r)
+                    if rlog.flags(p) & revlog.REVIDX_EXTSTORED == 0
+                ]
+            )
             text = None
             cachedelta = (deltaparent, rlog.revdiff(deltaparent, r))
         flags = rlog.flags(r)
@@ -177,8 +203,9 @@
             ifh = dlog.opener(dlog.indexfile, b'a+')
             if not dlog._inline:
                 dfh = dlog.opener(dlog.datafile, b'a+')
-            dlog._addrevision(rlog.node(r), text, tr, r, p1, p2, flags,
-                              cachedelta, ifh, dfh)
+            dlog._addrevision(
+                rlog.node(r), text, tr, r, p1, p2, flags, cachedelta, ifh, dfh
+            )
         finally:
             if dfh is not None:
                 dfh.close()
@@ -186,8 +213,10 @@
                 ifh.close()
     return dlog
 
+
 # Utilities to generate revisions for testing
 
+
 def genbits(n):
     '''Given a number n, generate (2 ** (n * 2) + 1) numbers in range(2 ** n).
     i.e. the generated numbers have a width of n bits.
@@ -219,10 +248,12 @@
         x = y
         yield x
 
+
 def gentext(rev):
     '''Given a revision number, generate dummy text'''
     return b''.join(b'%d\n' % j for j in range(-1, rev % 5))
 
+
 def writecases(rlog, tr):
     '''Write some revisions interesting to the test.
 
@@ -281,8 +312,10 @@
             abort('rev %d: isext is ineffective' % rev)
     return result
 
+
 # Main test and checking
 
+
 def checkrevlog(rlog, expected):
     '''Check if revlog has expected contents. expected is [(text, rawtext)]'''
     # Test using different access orders. This could expose some issues
@@ -299,21 +332,23 @@
                         else:
                             t = nlog.revision(rev)
                         if t != expected[rev][int(raw)]:
-                            abort('rev %d: corrupted %stext'
-                                  % (rev, raw and 'raw' or ''))
+                            abort(
+                                'rev %d: corrupted %stext'
+                                % (rev, raw and 'raw' or '')
+                            )
+
 
 slicingdata = [
-    ([0, 1, 2, 3, 55, 56, 58, 59, 60],
-     [[0, 1], [2], [58], [59, 60]],
-     10),
-    ([0, 1, 2, 3, 55, 56, 58, 59, 60],
-     [[0, 1], [2], [58], [59, 60]],
-     10),
-    ([-1, 0, 1, 2, 3, 55, 56, 58, 59, 60],
-     [[-1, 0, 1], [2], [58], [59, 60]],
-     10),
+    ([0, 1, 2, 3, 55, 56, 58, 59, 60], [[0, 1], [2], [58], [59, 60]], 10),
+    ([0, 1, 2, 3, 55, 56, 58, 59, 60], [[0, 1], [2], [58], [59, 60]], 10),
+    (
+        [-1, 0, 1, 2, 3, 55, 56, 58, 59, 60],
+        [[-1, 0, 1], [2], [58], [59, 60]],
+        10,
+    ),
 ]
 
+
 def slicingtest(rlog):
     oldmin = rlog._srmingapsize
     try:
@@ -333,9 +368,11 @@
     finally:
         rlog._srmingapsize = oldmin
 
+
 def md5sum(s):
     return hashlib.md5(s).digest()
 
+
 def _maketext(*coord):
     """create piece of text according to range of integers
 
@@ -348,6 +385,7 @@
         pieces.append(b'\n'.join(p))
     return b'\n'.join(pieces) + b'\n'
 
+
 data = [
     _maketext((0, 120), (456, 60)),
     _maketext((0, 120), (345, 60)),
@@ -383,13 +421,17 @@
     _maketext((0, 120), (60, 60), (618, 30), (398, 40), (158, 10)),
 ]
 
+
 def makesnapshot(tr):
     rl = newrevlog(name=b'_snaprevlog3.i', recreate=True)
     for i in data:
         appendrev(rl, i, tr)
     return rl
 
+
 snapshots = [-1, 0, 6, 8, 11, 17, 19, 21, 25, 30]
+
+
 def issnapshottest(rlog):
     result = []
     if rlog.issnapshot(-1):
@@ -402,8 +444,11 @@
         print('  expected: %s' % snapshots)
         print('  got:      %s' % result)
 
+
 snapshotmapall = {0: [6, 8, 11, 17, 19, 25], 8: [21], -1: [0, 30]}
 snapshotmap15 = {0: [17, 19, 25], 8: [21], -1: [30]}
+
+
 def findsnapshottest(rlog):
     resultall = collections.defaultdict(list)
     deltas._findsnapshots(rlog, resultall, 0)
@@ -420,6 +465,7 @@
         print('  expected: %s' % snapshotmap15)
         print('  got:      %s' % result15)
 
+
 def maintest():
     with newtransaction() as tr:
         rl = newrevlog(recreate=True)
@@ -449,6 +495,7 @@
         findsnapshottest(rl5)
         print('findsnapshot test passed')
 
+
 try:
     maintest()
 except Exception as ex:
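
The EXTSTORED flag processors registered above are easiest to read as a
round-trip: writeprocessor stores text with a fixed header and '1' mapped to
'i', and readprocessor undoes both. A trimmed, self-contained sketch (the
real processors also take self, return hash-verification flags, and the
write side receives sidedata); note the scheme only round-trips for texts
with no literal 'i', which the test's gentext guarantees:

_extheader = b'E\n'

def writeprocessor(text):
    # stored form: fixed header, then '1' replaced by 'i'
    return _extheader + text.replace(b'1', b'i')

def readprocessor(rawtext):
    # strip the header and undo the substitution
    return rawtext[len(_extheader):].replace(b'i', b'1')

text = b'0\n1\n'
assert writeprocessor(text) == b'E\n0\ni\n'
assert readprocessor(writeprocessor(text)) == text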
--- a/tests/test-run-tests.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-run-tests.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,11 +8,13 @@
 import doctest
 import os
 import re
+
 # this is hack to make sure no escape characters are inserted into the output
 if 'TERM' in os.environ:
     del os.environ['TERM']
 run_tests = __import__('run-tests')
 
+
 def prn(ex):
     m = ex.args[0]
     if isinstance(m, str):
@@ -20,6 +22,7 @@
     else:
         print(m.decode('utf-8'))
 
+
 def lm(expected, output):
     r"""check if output matches expected
 
@@ -35,10 +38,12 @@
         ... except AssertionError as ex: prn(ex)
         single backslash or unknown char
     """
-    assert (expected.endswith(b'\n')
-            and output.endswith(b'\n')), 'missing newline'
-    assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), (
-           b'single backslash or unknown char')
+    assert expected.endswith(b'\n') and output.endswith(
+        b'\n'
+    ), 'missing newline'
+    assert not re.search(
+        br'[^ \w\\/\r\n()*?]', expected + output
+    ), b'single backslash or unknown char'
     test = run_tests.TTest(b'test-run-test.t', b'.', b'.')
     match, exact = test.linematch(expected, output)
     if isinstance(match, str):
@@ -46,7 +51,8 @@
     elif isinstance(match, bytes):
         return 'special: ' + match.decode('utf-8')
     else:
-        return bool(match) # do not return match object
+        return bool(match)  # do not return match object
+
 
 def wintests():
     r"""test matching like running on windows
@@ -77,6 +83,7 @@
     """
     pass
 
+
 def otherostests():
     r"""test matching like running on non-windows os
 
@@ -104,5 +111,6 @@
     """
     pass
 
+
 if __name__ == '__main__':
     doctest.testmod()
--- a/tests/test-rust-ancestor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-rust-ancestor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,6 +9,7 @@
 
 try:
     from mercurial import rustext
+
     rustext.__name__  # trigger immediate actual import
 except ImportError:
     rustext = None
@@ -45,12 +46,14 @@
     b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
     b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
     b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-    )
+)
 
 
-@unittest.skipIf(rustext is None or cparsers is None,
-                 "rustext or the C Extension parsers module "
-                 "ancestor relies on is not available")
+@unittest.skipIf(
+    rustext is None or cparsers is None,
+    "rustext or the C Extension parsers module "
+    "ancestor relies on is not available",
+)
 class rustancestorstest(unittest.TestCase):
     """Test the correctness of binding to Rust code.
 
@@ -70,11 +73,10 @@
     def testiteratorrevlist(self):
         idx = self.parseindex()
         # checking test assumption about the index binary data:
-        self.assertEqual({i: (r[5], r[6]) for i, r in enumerate(idx)},
-                         {0: (-1, -1),
-                          1: (0, -1),
-                          2: (1, -1),
-                          3: (2, -1)})
+        self.assertEqual(
+            {i: (r[5], r[6]) for i, r in enumerate(idx)},
+            {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
+        )
         ait = AncestorsIterator(idx, [3], 0, True)
         self.assertEqual([r for r in ait], [3, 2, 1, 0])
 
@@ -84,11 +86,10 @@
     def testlazyancestors(self):
         idx = self.parseindex()
         start_count = sys.getrefcount(idx)  # should be 2 (see Python doc)
-        self.assertEqual({i: (r[5], r[6]) for i, r in enumerate(idx)},
-                         {0: (-1, -1),
-                          1: (0, -1),
-                          2: (1, -1),
-                          3: (2, -1)})
+        self.assertEqual(
+            {i: (r[5], r[6]) for i, r in enumerate(idx)},
+            {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
+        )
         lazy = LazyAncestors(idx, [3], 0, True)
         # we have two more references to the index:
         # - in its inner iterator for __contains__ and __bool__
@@ -148,9 +149,9 @@
         self.assertEqual(list(ait), [3, 2, 1, 0])
 
     def testgrapherror(self):
-        data = (data_non_inlined[:64 + 27] +
-                b'\xf2' +
-                data_non_inlined[64 + 28:])
+        data = (
+            data_non_inlined[: 64 + 27] + b'\xf2' + data_non_inlined[64 + 28 :]
+        )
         idx = cparsers.parse_index2(data, False)[0]
         with self.assertRaises(rustext.GraphError) as arc:
             AncestorsIterator(idx, [1], -1, False)
@@ -170,6 +171,8 @@
         idx = self.parseindex()
         self.assertEqual(dagop.headrevs(idx, [1, 2, 3]), {3})
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
--- a/tests/test-rust-discovery.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-rust-discovery.py	Sun Oct 06 09:45:02 2019 -0400
@@ -29,20 +29,25 @@
     b'\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1'
     b'\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00'
     b'\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-    )
+)
+
 
 class fakechangelog(object):
     def __init__(self, idx):
         self.index = idx
 
+
 class fakerepo(object):
     def __init__(self, idx):
         """Just make so that self.changelog.index is the given idx."""
         self.changelog = fakechangelog(idx)
 
-@unittest.skipIf(PartialDiscovery is None or cparsers is None,
-                 "rustext or the C Extension parsers module "
-                 "discovery relies on is not available")
+
+@unittest.skipIf(
+    PartialDiscovery is None or cparsers is None,
+    "rustext or the C Extension parsers module "
+    "discovery relies on is not available",
+)
 class rustdiscoverytest(unittest.TestCase):
     """Test the correctness of binding to Rust code.
 
@@ -64,11 +69,10 @@
     def testindex(self):
         idx = self.parseindex()
         # checking our assumptions about the index binary data:
-        self.assertEqual({i: (r[5], r[6]) for i, r in enumerate(idx)},
-                         {0: (-1, -1),
-                          1: (0, -1),
-                          2: (1, -1),
-                          3: (2, -1)})
+        self.assertEqual(
+            {i: (r[5], r[6]) for i, r in enumerate(idx)},
+            {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (2, -1)},
+        )
 
     def testaddcommonsmissings(self):
         disco = PartialDiscovery(self.repo(), [3], True)
@@ -109,6 +113,8 @@
     def testinitnorandom(self):
         PartialDiscovery(self.repo(), [3], True, randomize=False)
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
--- a/tests/test-simplekeyvaluefile.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-simplekeyvaluefile.py	Sun Oct 06 09:45:02 2019 -0400
@@ -8,6 +8,7 @@
     scmutil,
 )
 
+
 class mockfile(object):
     def __init__(self, name, fs):
         self.name = name
@@ -25,6 +26,7 @@
     def read(self):
         return self.fs.contents[self.name]
 
+
 class mockvfs(object):
     def __init__(self):
         self.contents = {}
@@ -39,6 +41,7 @@
     def __call__(self, path, mode, atomictemp):
         return mockfile(path, self)
 
+
 class testsimplekeyvaluefile(unittest.TestCase):
     def setUp(self):
         self.vfs = mockvfs()
@@ -46,21 +49,25 @@
     def testbasicwritingiandreading(self):
         dw = {b'key1': b'value1', b'Key2': b'value2'}
         scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(dw)
-        self.assertEqual(sorted(self.vfs.read(b'kvfile').split(b'\n')),
-                         [b'', b'Key2=value2', b'key1=value1'])
+        self.assertEqual(
+            sorted(self.vfs.read(b'kvfile').split(b'\n')),
+            [b'', b'Key2=value2', b'key1=value1'],
+        )
         dr = scmutil.simplekeyvaluefile(self.vfs, b'kvfile').read()
         self.assertEqual(dr, dw)
 
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
 
     def testinvalidkeys(self):
         d = {b'0key1': b'value1', b'Key2': b'value2'}
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                     'keys must start with a letter.*'):
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'keys must start with a letter.*'
+        ):
             scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(d)
 
         d = {b'key1@': b'value1', b'Key2': b'value2'}
@@ -69,22 +76,25 @@
 
     def testinvalidvalues(self):
         d = {b'key1': b'value1', b'Key2': b'value2\n'}
-        with self.assertRaisesRegex(error.ProgrammingError,  'invalid val.*'):
+        with self.assertRaisesRegex(error.ProgrammingError, 'invalid val.*'):
             scmutil.simplekeyvaluefile(self.vfs, b'kvfile').write(d)
 
     def testcorruptedfile(self):
         self.vfs.contents[b'badfile'] = b'ababagalamaga\n'
-        with self.assertRaisesRegex(error.CorruptedState,
-                                     'dictionary.*element.*'):
+        with self.assertRaisesRegex(
+            error.CorruptedState, 'dictionary.*element.*'
+        ):
             scmutil.simplekeyvaluefile(self.vfs, b'badfile').read()
 
     def testfirstline(self):
         dw = {b'key1': b'value1'}
         scmutil.simplekeyvaluefile(self.vfs, b'fl').write(dw, firstline=b'1.0')
         self.assertEqual(self.vfs.read(b'fl'), b'1.0\nkey1=value1\n')
-        dr = scmutil.simplekeyvaluefile(
-            self.vfs, b'fl').read(firstlinenonkeyval=True)
+        dr = scmutil.simplekeyvaluefile(self.vfs, b'fl').read(
+            firstlinenonkeyval=True
+        )
         self.assertEqual(dr, {b'__firstline': b'1.0', b'key1': b'value1'})
 
+
 if __name__ == "__main__":
     silenttestrunner.main(__name__)
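
testfirstline above fixes the simplekeyvaluefile on-disk format exactly: an
optional free-form first line, then one key=value pair per line. A rough
stand-in encoder reproducing the asserted bytes (the real writer also
validates that keys start with a letter, per testinvalidkeys):

def encode(d, firstline=None):
    lines = [firstline] if firstline is not None else []
    lines += [b'%s=%s' % (k, v) for k, v in sorted(d.items())]
    return b'\n'.join(lines) + b'\n'

assert encode({b'key1': b'value1'}, firstline=b'1.0') == b'1.0\nkey1=value1\n'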
--- a/tests/test-simplemerge.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-simplemerge.py	Sun Oct 06 09:45:02 2019 -0400
@@ -22,9 +22,7 @@
     util,
 )
 
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 TestCase = unittest.TestCase
 # bzr compatible interface, for the tests
@@ -34,25 +32,34 @@
     Given BASE, OTHER, THIS, tries to produce a combined text
     incorporating the changes from both BASE->OTHER and BASE->THIS.
     All three will typically be sequences of lines."""
+
     def __init__(self, base, a, b):
         basetext = b'\n'.join([i.strip(b'\n') for i in base] + [b''])
         atext = b'\n'.join([i.strip(b'\n') for i in a] + [b''])
         btext = b'\n'.join([i.strip(b'\n') for i in b] + [b''])
-        if (stringutil.binary(basetext) or stringutil.binary(atext)
-            or stringutil.binary(btext)):
+        if (
+            stringutil.binary(basetext)
+            or stringutil.binary(atext)
+            or stringutil.binary(btext)
+        ):
             raise error.Abort(b"don't know how to merge binary files")
-        simplemerge.Merge3Text.__init__(self, basetext, atext, btext,
-                                        base, a, b)
+        simplemerge.Merge3Text.__init__(
+            self, basetext, atext, btext, base, a, b
+        )
+
 
 CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase
 
+
 def split_lines(t):
     return util.stringio(t).readlines()
 
+
 ############################################################
 # test case data from the gnu diffutils manual
 # common base
-TZU = split_lines(b"""     The Nameless is the origin of Heaven and Earth;
+TZU = split_lines(
+    b"""     The Nameless is the origin of Heaven and Earth;
      The named is the mother of all things.
 
      Therefore let there always be non-being,
@@ -65,9 +72,11 @@
      They both may be called deep and profound.
      Deeper and more profound,
      The door of all subtleties!
-""")
+"""
+)
 
-LAO = split_lines(b"""     The Way that can be told of is not the eternal Way;
+LAO = split_lines(
+    b"""     The Way that can be told of is not the eternal Way;
      The name that can be named is not the eternal name.
      The Nameless is the origin of Heaven and Earth;
      The Named is the mother of all things.
@@ -78,10 +87,12 @@
      The two are the same,
      But after they are produced,
        they have different names.
-""")
+"""
+)
 
 
-TAO = split_lines(b"""     The Way that can be told of is not the eternal Way;
+TAO = split_lines(
+    b"""     The Way that can be told of is not the eternal Way;
      The name that can be named is not the eternal name.
      The Nameless is the origin of Heaven and Earth;
      The named is the mother of all things.
@@ -96,9 +107,11 @@
 
        -- The Way of Lao-Tzu, tr. Wing-tsit Chan
 
-""")
+"""
+)
 
-MERGED_RESULT = split_lines(b"""\
+MERGED_RESULT = split_lines(
+    b"""\
      The Way that can be told of is not the eternal Way;
      The name that can be named is not the eternal name.
      The Nameless is the origin of Heaven and Earth;
@@ -116,7 +129,9 @@
        -- The Way of Lao-Tzu, tr. Wing-tsit Chan
 \
 \n>>>>>>> TAO
-""")
+"""
+)
+
 
 class TestMerge3(TestCase):
     def log(self, msg):
@@ -124,203 +139,211 @@
 
     def test_no_changes(self):
         """No conflicts because nothing changed"""
-        m3 = Merge3([b'aaa', b'bbb'],
-                    [b'aaa', b'bbb'],
-                    [b'aaa', b'bbb'])
+        m3 = Merge3([b'aaa', b'bbb'], [b'aaa', b'bbb'], [b'aaa', b'bbb'])
 
-        self.assertEqual(m3.find_unconflicted(),
-                         [(0, 2)])
+        self.assertEqual(m3.find_unconflicted(), [(0, 2)])
 
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 2,
-                           0, 2,
-                           0, 2),
-                          (2, 2,  2, 2,  2, 2)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)],
+        )
 
-        self.assertEqual(list(m3.merge_regions()),
-                         [(b'unchanged', 0, 2)])
+        self.assertEqual(list(m3.merge_regions()), [(b'unchanged', 0, 2)])
 
-        self.assertEqual(list(m3.merge_groups()),
-                         [(b'unchanged', [b'aaa', b'bbb'])])
+        self.assertEqual(
+            list(m3.merge_groups()), [(b'unchanged', [b'aaa', b'bbb'])]
+        )
 
     def test_front_insert(self):
-        m3 = Merge3([b'zz'],
-                    [b'aaa', b'bbb', b'zz'],
-                    [b'zz'])
+        m3 = Merge3([b'zz'], [b'aaa', b'bbb', b'zz'], [b'zz'])
 
         # todo: should use a sentinel at end as from get_matching_blocks
         # to match without zz
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 1,  2, 3,  0, 1),
-                          (1, 1,  3, 3,  1, 1)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)],
+        )
 
-        self.assertEqual(list(m3.merge_regions()),
-                         [(b'a', 0, 2),
-                          (b'unchanged', 0, 1)])
+        self.assertEqual(
+            list(m3.merge_regions()), [(b'a', 0, 2), (b'unchanged', 0, 1)]
+        )
 
-        self.assertEqual(list(m3.merge_groups()),
-                         [(b'a', [b'aaa', b'bbb']),
-                          (b'unchanged', [b'zz'])])
+        self.assertEqual(
+            list(m3.merge_groups()),
+            [(b'a', [b'aaa', b'bbb']), (b'unchanged', [b'zz'])],
+        )
 
     def test_null_insert(self):
-        m3 = Merge3([],
-                    [b'aaa', b'bbb'],
-                    [])
+        m3 = Merge3([], [b'aaa', b'bbb'], [])
         # todo: should use a sentinel at end as from get_matching_blocks
         # to match without zz
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 0,  2, 2,  0, 0)])
+        self.assertEqual(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)])
 
-        self.assertEqual(list(m3.merge_regions()),
-                         [(b'a', 0, 2)])
+        self.assertEqual(list(m3.merge_regions()), [(b'a', 0, 2)])
 
-        self.assertEqual(list(m3.merge_lines()),
-                         [b'aaa', b'bbb'])
+        self.assertEqual(list(m3.merge_lines()), [b'aaa', b'bbb'])
 
     def test_no_conflicts(self):
         """No conflicts because only one side changed"""
-        m3 = Merge3([b'aaa', b'bbb'],
-                    [b'aaa', b'111', b'bbb'],
-                    [b'aaa', b'bbb'])
+        m3 = Merge3(
+            [b'aaa', b'bbb'], [b'aaa', b'111', b'bbb'], [b'aaa', b'bbb']
+        )
 
-        self.assertEqual(m3.find_unconflicted(),
-                         [(0, 1), (1, 2)])
+        self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)])
 
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 1,  0, 1,  0, 1),
-                          (1, 2,  2, 3,  1, 2),
-                          (2, 2,  3, 3,  2, 2)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)],
+        )
 
-        self.assertEqual(list(m3.merge_regions()),
-                         [(b'unchanged', 0, 1),
-                          (b'a', 1, 2),
-                          (b'unchanged', 1, 2)])
+        self.assertEqual(
+            list(m3.merge_regions()),
+            [(b'unchanged', 0, 1), (b'a', 1, 2), (b'unchanged', 1, 2)],
+        )
 
     def test_append_a(self):
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'bbb\n', b'222\n'],
-                    [b'aaa\n', b'bbb\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'bbb\n', b'222\n'],
+            [b'aaa\n', b'bbb\n'],
+        )
 
-        self.assertEqual(b''.join(m3.merge_lines()),
-                         b'aaa\nbbb\n222\n')
+        self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
 
     def test_append_b(self):
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'bbb\n', b'222\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'bbb\n', b'222\n'],
+        )
 
-        self.assertEqual(b''.join(m3.merge_lines()),
-                         b'aaa\nbbb\n222\n')
+        self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
 
     def test_append_agreement(self):
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'bbb\n', b'222\n'],
-                    [b'aaa\n', b'bbb\n', b'222\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'bbb\n', b'222\n'],
+            [b'aaa\n', b'bbb\n', b'222\n'],
+        )
 
-        self.assertEqual(b''.join(m3.merge_lines()),
-                         b'aaa\nbbb\n222\n')
+        self.assertEqual(b''.join(m3.merge_lines()), b'aaa\nbbb\n222\n')
 
     def test_append_clash(self):
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'bbb\n', b'222\n'],
-                    [b'aaa\n', b'bbb\n', b'333\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'bbb\n', b'222\n'],
+            [b'aaa\n', b'bbb\n', b'333\n'],
+        )
 
-        ml = m3.merge_lines(name_a=b'a',
-                            name_b=b'b',
-                            start_marker=b'<<',
-                            mid_marker=b'--',
-                            end_marker=b'>>')
-        self.assertEqual(b''.join(ml),
-                         b'aaa\n'
-                         b'bbb\n'
-                         b'<< a\n'
-                         b'222\n'
-                         b'--\n'
-                         b'333\n'
-                         b'>> b\n'
-                         )
+        ml = m3.merge_lines(
+            name_a=b'a',
+            name_b=b'b',
+            start_marker=b'<<',
+            mid_marker=b'--',
+            end_marker=b'>>',
+        )
+        self.assertEqual(
+            b''.join(ml),
+            b'aaa\n' b'bbb\n' b'<< a\n' b'222\n' b'--\n' b'333\n' b'>> b\n',
+        )
 
     def test_insert_agreement(self):
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'222\n', b'bbb\n'],
-                    [b'aaa\n', b'222\n', b'bbb\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'222\n', b'bbb\n'],
+            [b'aaa\n', b'222\n', b'bbb\n'],
+        )
 
-        ml = m3.merge_lines(name_a=b'a',
-                            name_b=b'b',
-                            start_marker=b'<<',
-                            mid_marker=b'--',
-                            end_marker=b'>>')
+        ml = m3.merge_lines(
+            name_a=b'a',
+            name_b=b'b',
+            start_marker=b'<<',
+            mid_marker=b'--',
+            end_marker=b'>>',
+        )
         self.assertEqual(b''.join(ml), b'aaa\n222\nbbb\n')
 
-
     def test_insert_clash(self):
         """Both try to insert lines in the same place."""
-        m3 = Merge3([b'aaa\n', b'bbb\n'],
-                    [b'aaa\n', b'111\n', b'bbb\n'],
-                    [b'aaa\n', b'222\n', b'bbb\n'])
+        m3 = Merge3(
+            [b'aaa\n', b'bbb\n'],
+            [b'aaa\n', b'111\n', b'bbb\n'],
+            [b'aaa\n', b'222\n', b'bbb\n'],
+        )
 
-        self.assertEqual(m3.find_unconflicted(),
-                         [(0, 1), (1, 2)])
+        self.assertEqual(m3.find_unconflicted(), [(0, 1), (1, 2)])
 
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 1,  0, 1,  0, 1),
-                          (1, 2,  2, 3,  2, 3),
-                          (2, 2,  3, 3,  3, 3)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)],
+        )
 
-        self.assertEqual(list(m3.merge_regions()),
-                         [(b'unchanged', 0, 1),
-                          (b'conflict', 1, 1,  1, 2,  1, 2),
-                          (b'unchanged', 1, 2)])
+        self.assertEqual(
+            list(m3.merge_regions()),
+            [
+                (b'unchanged', 0, 1),
+                (b'conflict', 1, 1, 1, 2, 1, 2),
+                (b'unchanged', 1, 2),
+            ],
+        )
 
-        self.assertEqual(list(m3.merge_groups()),
-                         [(b'unchanged', [b'aaa\n']),
-                          (b'conflict', [], [b'111\n'], [b'222\n']),
-                          (b'unchanged', [b'bbb\n']),
-                          ])
+        self.assertEqual(
+            list(m3.merge_groups()),
+            [
+                (b'unchanged', [b'aaa\n']),
+                (b'conflict', [], [b'111\n'], [b'222\n']),
+                (b'unchanged', [b'bbb\n']),
+            ],
+        )
 
-        ml = m3.merge_lines(name_a=b'a',
-                            name_b=b'b',
-                            start_marker=b'<<',
-                            mid_marker=b'--',
-                            end_marker=b'>>')
-        self.assertEqual(b''.join(ml),
-b'''aaa
+        ml = m3.merge_lines(
+            name_a=b'a',
+            name_b=b'b',
+            start_marker=b'<<',
+            mid_marker=b'--',
+            end_marker=b'>>',
+        )
+        self.assertEqual(
+            b''.join(ml),
+            b'''aaa
 << a
 111
 --
 222
 >> b
 bbb
-''')
+''',
+        )
 
     def test_replace_clash(self):
         """Both try to insert lines in the same place."""
-        m3 = Merge3([b'aaa', b'000', b'bbb'],
-                    [b'aaa', b'111', b'bbb'],
-                    [b'aaa', b'222', b'bbb'])
+        m3 = Merge3(
+            [b'aaa', b'000', b'bbb'],
+            [b'aaa', b'111', b'bbb'],
+            [b'aaa', b'222', b'bbb'],
+        )
 
-        self.assertEqual(m3.find_unconflicted(),
-                         [(0, 1), (2, 3)])
+        self.assertEqual(m3.find_unconflicted(), [(0, 1), (2, 3)])
 
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 1,  0, 1,  0, 1),
-                           (2, 3,  2, 3,  2, 3),
-                           (3, 3,  3, 3,  3, 3)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)],
+        )
 
     def test_replace_multi(self):
         """Replacement with regions of different size."""
-        m3 = Merge3([b'aaa', b'000', b'000', b'bbb'],
-                    [b'aaa', b'111', b'111', b'111', b'bbb'],
-                    [b'aaa', b'222', b'222', b'222', b'222', b'bbb'])
+        m3 = Merge3(
+            [b'aaa', b'000', b'000', b'bbb'],
+            [b'aaa', b'111', b'111', b'111', b'bbb'],
+            [b'aaa', b'222', b'222', b'222', b'222', b'bbb'],
+        )
 
-        self.assertEqual(m3.find_unconflicted(),
-                         [(0, 1), (3, 4)])
-
+        self.assertEqual(m3.find_unconflicted(), [(0, 1), (3, 4)])
 
-        self.assertEqual(list(m3.find_sync_regions()),
-                         [(0, 1,  0, 1,  0, 1),
-                          (3, 4,  4, 5,  5, 6),
-                          (4, 4,  5, 5,  6, 6)])
+        self.assertEqual(
+            list(m3.find_sync_regions()),
+            [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)],
+        )
 
     def test_merge_poem(self):
         """Test case from diff3 manual"""
@@ -338,22 +361,36 @@
         base_text = b'a\r\n'
         this_text = b'b\r\n'
         other_text = b'c\r\n'
-        m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
-                    this_text.splitlines(True))
+        m3 = Merge3(
+            base_text.splitlines(True),
+            other_text.splitlines(True),
+            this_text.splitlines(True),
+        )
         m_lines = m3.merge_lines(b'OTHER', b'THIS')
-        self.assertEqual(b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
-                         b'>>>>>>> THIS\r\n'.splitlines(True), list(m_lines))
+        self.assertEqual(
+            b'<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n'
+            b'>>>>>>> THIS\r\n'.splitlines(True),
+            list(m_lines),
+        )
 
     def test_mac_text(self):
         base_text = b'a\r'
         this_text = b'b\r'
         other_text = b'c\r'
-        m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True),
-                    this_text.splitlines(True))
+        m3 = Merge3(
+            base_text.splitlines(True),
+            other_text.splitlines(True),
+            this_text.splitlines(True),
+        )
         m_lines = m3.merge_lines(b'OTHER', b'THIS')
-        self.assertEqual(b'<<<<<<< OTHER\rc\r=======\rb\r'
-                         b'>>>>>>> THIS\r'.splitlines(True), list(m_lines))
+        self.assertEqual(
+            b'<<<<<<< OTHER\rc\r=======\rb\r'
+            b'>>>>>>> THIS\r'.splitlines(True),
+            list(m_lines),
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
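
All of the Merge3 rewraps above funnel into one interface; a hedged usage
sketch, assuming a Mercurial checkout on sys.path and the Merge3Text /
merge_lines signatures exactly as the tests call them:

from mercurial import simplemerge

m3 = simplemerge.Merge3Text(
    b'aaa\nbbb\n',       # base
    b'aaa\nbbb\n222\n',  # this
    b'aaa\nbbb\n333\n',  # other
)
merged = b''.join(m3.merge_lines(name_a=b'this', name_b=b'other'))
# merged carries <<<<<<< this / ======= / >>>>>>> other conflict markers
# around the clashing 222/333 tails, as in test_append_clash.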
--- a/tests/test-sshserver.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-sshserver.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,37 +10,41 @@
     wireprotov1server,
 )
 
-from mercurial.utils import (
-    procutil,
-)
+from mercurial.utils import procutil
+
 
 class SSHServerGetArgsTests(unittest.TestCase):
     def testparseknown(self):
         tests = [
             (b'* 0\nnodes 0\n', [b'', {}]),
-            (b'* 0\nnodes 40\n1111111111111111111111111111111111111111\n',
-             [b'1111111111111111111111111111111111111111', {}]),
+            (
+                b'* 0\nnodes 40\n1111111111111111111111111111111111111111\n',
+                [b'1111111111111111111111111111111111111111', {}],
+            ),
         ]
         for input, expected in tests:
             self.assertparse(b'known', input, expected)
 
     def assertparse(self, cmd, input, expected):
         server = mockserver(input)
-        proto = wireprotoserver.sshv1protocolhandler(server._ui,
-                                                     server._fin,
-                                                     server._fout)
+        proto = wireprotoserver.sshv1protocolhandler(
+            server._ui, server._fin, server._fout
+        )
         _func, spec = wireprotov1server.commands[cmd]
         self.assertEqual(proto.getargs(spec), expected)
 
+
 def mockserver(inbytes):
     ui = mockui(inbytes)
     repo = mockrepo(ui)
     return wireprotoserver.sshserver(ui, repo)
 
+
 class mockrepo(object):
     def __init__(self, ui):
         self.ui = ui
 
+
 class mockui(object):
     def __init__(self, inbytes):
         self.fin = io.BytesIO(inbytes)
@@ -53,6 +57,7 @@
     def restorefinout(self, fin, fout):
         pass
 
+
 if __name__ == '__main__':
     # Don't call into msvcrt to set BytesIO to binary mode
     procutil.setbinary = lambda fp: True
--- a/tests/test-status-inprocess.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-status-inprocess.py	Sun Oct 06 09:45:02 2019 -0400
@@ -10,6 +10,8 @@
 )
 
 print_ = print
+
+
 def print(*args, **kwargs):
     """print() wrapper that flushes stdout buffers to avoid py3 buffer issues
 
@@ -19,6 +21,7 @@
     print_(*args, **kwargs)
     sys.stdout.flush()
 
+
 u = uimod.ui.load()
 
 print('% creating repo')
--- a/tests/test-storage.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-storage.py	Sun Oct 06 09:45:02 2019 -0400
@@ -13,19 +13,16 @@
     vfs as vfsmod,
 )
 
-from mercurial.testing import (
-    storage as storagetesting,
-)
+from mercurial.testing import storage as storagetesting
 
 try:
-    from hgext import (
-        sqlitestore,
-    )
+    from hgext import sqlitestore
 except ImportError:
     sqlitestore = None
 
 try:
     import sqlite3
+
     if sqlite3.sqlite_version_info < (3, 8, 3):
         # WITH clause not supported
         sqlitestore = None
@@ -34,6 +31,7 @@
 
 try:
     from mercurial import zstd
+
     zstd.__version__
 except ImportError:
     zstd = None
@@ -44,20 +42,36 @@
     'vfs': vfsmod.vfs(b'.', realpath=True),
 }
 
+
 def makefilefn(self):
     """Factory for filelog instances."""
     fl = filelog.filelog(STATE['vfs'], b'filelog-%d' % STATE['lastindex'])
     STATE['lastindex'] += 1
     return fl
 
+
 def maketransaction(self):
     vfsmap = {b'plain': STATE['vfs'], b'store': STATE['vfs']}
 
-    return transaction.transaction(STATE['ui'].warn, STATE['vfs'], vfsmap,
-                                   b'journal', b'undo')
+    return transaction.transaction(
+        STATE['ui'].warn, STATE['vfs'], vfsmap, b'journal', b'undo'
+    )
+
 
-def addrawrevision(self, fl, tr, node, p1, p2, linkrev, rawtext=None,
-                   delta=None, censored=False, ellipsis=False, extstored=False):
+def addrawrevision(
+    self,
+    fl,
+    tr,
+    node,
+    p1,
+    p2,
+    linkrev,
+    rawtext=None,
+    delta=None,
+    censored=False,
+    ellipsis=False,
+    extstored=False,
+):
     flags = 0
 
     if censored:
@@ -70,8 +84,9 @@
     if rawtext is not None:
         fl._revlog.addrawrevision(rawtext, tr, linkrev, p1, p2, node, flags)
     elif delta is not None:
-        fl._revlog.addrawrevision(rawtext, tr, linkrev, p1, p2, node, flags,
-                                  cachedelta=delta)
+        fl._revlog.addrawrevision(
+            rawtext, tr, linkrev, p1, p2, node, flags, cachedelta=delta
+        )
     else:
         raise error.Abort('must supply rawtext or delta arguments')
 
@@ -79,17 +94,19 @@
     # bypass hash verification.
     fl._revlog.clearcaches()
 
+
 # Assigning module-level attributes that inherit from unittest.TestCase
 # is all that is needed to register tests.
-filelogindextests = storagetesting.makeifileindextests(makefilefn,
-                                                       maketransaction,
-                                                       addrawrevision)
-filelogdatatests = storagetesting.makeifiledatatests(makefilefn,
-                                                     maketransaction,
-                                                     addrawrevision)
-filelogmutationtests = storagetesting.makeifilemutationtests(makefilefn,
-                                                             maketransaction,
-                                                             addrawrevision)
+filelogindextests = storagetesting.makeifileindextests(
+    makefilefn, maketransaction, addrawrevision
+)
+filelogdatatests = storagetesting.makeifiledatatests(
+    makefilefn, maketransaction, addrawrevision
+)
+filelogmutationtests = storagetesting.makeifilemutationtests(
+    makefilefn, maketransaction, addrawrevision
+)
+
 
 def makesqlitefile(self):
     path = STATE['vfs'].join(b'db-%d.db' % STATE['lastindex'])
@@ -101,33 +118,51 @@
 
     return sqlitestore.sqlitefilestore(db, b'dummy-path', compression)
 
-def addrawrevisionsqlite(self, fl, tr, node, p1, p2, linkrev, rawtext=None,
-                         delta=None, censored=False, ellipsis=False,
-                         extstored=False):
+
+def addrawrevisionsqlite(
+    self,
+    fl,
+    tr,
+    node,
+    p1,
+    p2,
+    linkrev,
+    rawtext=None,
+    delta=None,
+    censored=False,
+    ellipsis=False,
+    extstored=False,
+):
     flags = 0
 
     if censored:
         flags |= sqlitestore.FLAG_CENSORED
 
     if ellipsis | extstored:
-        raise error.Abort(b'support for ellipsis and extstored flags not '
-                          b'supported')
+        raise error.Abort(
+            b'support for ellipsis and extstored flags not ' b'supported'
+        )
 
     if rawtext is not None:
         fl._addrawrevision(node, rawtext, tr, linkrev, p1, p2, flags=flags)
     elif delta is not None:
-        fl._addrawrevision(node, rawtext, tr, linkrev, p1, p2,
-                           storedelta=delta, flags=flags)
+        fl._addrawrevision(
+            node, rawtext, tr, linkrev, p1, p2, storedelta=delta, flags=flags
+        )
     else:
         raise error.Abort(b'must supply rawtext or delta arguments')
 
+
 if sqlitestore is not None:
     sqlitefileindextests = storagetesting.makeifileindextests(
-        makesqlitefile, maketransaction, addrawrevisionsqlite)
+        makesqlitefile, maketransaction, addrawrevisionsqlite
+    )
     sqlitefiledatatests = storagetesting.makeifiledatatests(
-        makesqlitefile, maketransaction, addrawrevisionsqlite)
+        makesqlitefile, maketransaction, addrawrevisionsqlite
+    )
     sqlitefilemutationtests = storagetesting.makeifilemutationtests(
-        makesqlitefile, maketransaction, addrawrevisionsqlite)
+        makesqlitefile, maketransaction, addrawrevisionsqlite
+    )
 
 if __name__ == '__main__':
     silenttestrunner.main(__name__)
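
The module-level assignments above (filelogindextests and friends) lean on a
stdlib behavior worth spelling out: unittest discovers any TestCase subclass
bound to a module-level name, even one built by a factory function. A
minimal, self-contained illustration of the same pattern:

import unittest

def maketests(expected):
    class generated(unittest.TestCase):
        def testvalue(self):
            self.assertEqual(expected, 42)

    return generated

generatedtests = maketests(42)  # module-level name => collected and run

if __name__ == '__main__':
    unittest.main()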
--- a/tests/test-symlink-os-yes-fs-no.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-symlink-os-yes-fs-no.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,7 +16,7 @@
 
 # only makes sense to test on os which supports symlinks
 if not getattr(os, "symlink", False):
-    sys.exit(80) # SKIPPED_STATUS defined in run-tests.py
+    sys.exit(80)  # SKIPPED_STATUS defined in run-tests.py
 
 u = uimod.ui.load()
 # hide outer repo
@@ -36,9 +36,15 @@
 # non-symlink file system
 def symlink_failure(src, dst):
     raise OSError(1, "Operation not permitted")
+
+
 os.symlink = symlink_failure
+
+
 def islink_failure(path):
     return False
+
+
 os.path.islink = islink_failure
 
 # dereference links as if a Samba server has exported this to a
--- a/tests/test-trusted.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-trusted.py	Sun Oct 06 09:45:02 2019 -0400
@@ -20,20 +20,33 @@
 basehgrc = f.read()
 f.close()
 
+
 def _maybesysstr(v):
     if isinstance(v, bytes):
         return pycompat.sysstr(v)
     return pycompat.sysstr(stringutil.pprint(v))
 
+
 def bprint(*args, **kwargs):
-    print(*[_maybesysstr(a) for a in args],
-          **{k: _maybesysstr(v) for k, v in kwargs.items()})
+    print(
+        *[_maybesysstr(a) for a in args],
+        **{k: _maybesysstr(v) for k, v in kwargs.items()}
+    )
     # avoid awkward interleaving with ui object's output
     sys.stdout.flush()
 
-def testui(user=b'foo', group=b'bar', tusers=(), tgroups=(),
-           cuser=b'foo', cgroup=b'bar', debug=False, silent=False,
-           report=True):
+
+def testui(
+    user=b'foo',
+    group=b'bar',
+    tusers=(),
+    tgroups=(),
+    cuser=b'foo',
+    cgroup=b'bar',
+    debug=False,
+    silent=False,
+    report=True,
+):
     # user, group => owners of the file
     # tusers, tgroups => trusted users/groups
     # cuser, cgroup => user/group of the current process
@@ -58,29 +71,33 @@
         if uid is None:
             return cuser
         return user
+
     util.username = username
 
     def groupname(gid=None):
         if gid is None:
             return b'bar'
         return group
+
     util.groupname = groupname
 
     def isowner(st):
         return user == cuser
+
     util.isowner = isowner
 
     # try to read everything
-    #print '# File belongs to user %s, group %s' % (user, group)
-    #print '# trusted users = %s; trusted groups = %s' % (tusers, tgroups)
+    # print '# File belongs to user %s, group %s' % (user, group)
+    # print '# trusted users = %s; trusted groups = %s' % (tusers, tgroups)
     kind = (b'different', b'same')
     who = (b'', b'user', b'group', b'user and the group')
-    trusted = who[(user in tusers) + 2*(group in tgroups)]
+    trusted = who[(user in tusers) + 2 * (group in tgroups)]
     if trusted:
         trusted = b', but we trust the ' + trusted
-    bprint(b'# %s user, %s group%s' % (kind[user == cuser],
-                                       kind[group == cgroup],
-                                       trusted))
+    bprint(
+        b'# %s user, %s group%s'
+        % (kind[user == cuser], kind[group == cgroup], trusted)
+    )
 
     u = uimod.ui.load()
     # disable the configuration registration warning
@@ -101,14 +118,15 @@
     bprint(b'untrusted')
     for name, path in u.configitems(b'paths', untrusted=True):
         bprint(b'.', end=b' ')
-        u.config(b'paths', name) # warning with debug=True
+        u.config(b'paths', name)  # warning with debug=True
         bprint(b'.', end=b' ')
-        u.config(b'paths', name, untrusted=True) # no warnings
+        u.config(b'paths', name, untrusted=True)  # no warnings
         bprint(name, b'=', util.pconvert(path))
     print()
 
     return u
 
+
 os.mkdir(b'repo')
 os.chdir(b'repo')
 os.mkdir(b'.hg')
@@ -117,7 +135,7 @@
 f.write(b'local = /another/path\n\n')
 f.close()
 
-#print '# Everything is run by user foo, group bar\n'
+# print '# Everything is run by user foo, group bar\n'
 
 # same user, same group
 testui()
@@ -149,12 +167,20 @@
 testui(user=b'abc', group=b'def', tusers=[b'def'], tgroups=[b'abc'])
 # ... lists of user names work
 bprint(b"# list of user names")
-testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'abc', b'bleh'],
-       tgroups=[b'bar', b'baz', b'qux'])
+testui(
+    user=b'abc',
+    group=b'def',
+    tusers=[b'foo', b'xyz', b'abc', b'bleh'],
+    tgroups=[b'bar', b'baz', b'qux'],
+)
 # ... lists of group names work
 bprint(b"# list of group names")
-testui(user=b'abc', group=b'def', tusers=[b'foo', b'xyz', b'bleh'],
-       tgroups=[b'bar', b'def', b'baz', b'qux'])
+testui(
+    user=b'abc',
+    group=b'def',
+    tusers=[b'foo', b'xyz', b'bleh'],
+    tgroups=[b'bar', b'def', b'baz', b'qux'],
+)
 
 bprint(b"# Can't figure out the name of the user running this process")
 testui(user=b'abc', group=b'def', cuser=None)
@@ -190,8 +216,12 @@
 u.setconfig(b'ui', b'debug', b'on')
 u.readconfig(filename)
 u2 = u.copy()
+
+
 def username(uid=None):
     return b'foo'
+
+
 util.username = username
 u2.readconfig(b'.hg/hgrc')
 bprint(b'trusted:')
@@ -202,6 +232,7 @@
 print()
 bprint(b"# error handling")
 
+
 def assertraises(f, exc=error.Abort):
     try:
         f()
@@ -210,6 +241,7 @@
     else:
         bprint(b'no exception?!')
 
+
 bprint(b"# file doesn't exist")
 os.unlink(b'.hg/hgrc')
 assert not os.path.exists(b'.hg/hgrc')
@@ -232,6 +264,7 @@
 
     return error.ParseError(*args)
 
+
 try:
     testui(user=b'abc', group=b'def', silent=True)
 except error.ParseError as inst:
@@ -245,7 +278,8 @@
 print()
 bprint(b'# access typed information')
 with open(b'.hg/hgrc', 'wb') as f:
-    f.write(b'''\
+    f.write(
+        b'''\
 [foo]
 sub=main
 sub:one=one
@@ -255,34 +289,43 @@
 int=42
 bytes=81mb
 list=spam,ham,eggs
-''')
+'''
+    )
 u = testui(user=b'abc', group=b'def', cuser=b'foo', silent=True)
+
+
 def configpath(section, name, default=None, untrusted=False):
     path = u.configpath(section, name, default, untrusted)
     if path is None:
         return None
     return util.pconvert(path)
 
+
 bprint(b'# suboptions, trusted and untrusted')
 trusted = u.configsuboptions(b'foo', b'sub')
 untrusted = u.configsuboptions(b'foo', b'sub', untrusted=True)
 bprint(
     (trusted[0], sorted(trusted[1].items())),
-    (untrusted[0], sorted(untrusted[1].items())))
+    (untrusted[0], sorted(untrusted[1].items())),
+)
 bprint(b'# path, trusted and untrusted')
 bprint(configpath(b'foo', b'path'), configpath(b'foo', b'path', untrusted=True))
 bprint(b'# bool, trusted and untrusted')
-bprint(u.configbool(b'foo', b'bool'),
-       u.configbool(b'foo', b'bool', untrusted=True))
+bprint(
+    u.configbool(b'foo', b'bool'), u.configbool(b'foo', b'bool', untrusted=True)
+)
 bprint(b'# int, trusted and untrusted')
 bprint(
     u.configint(b'foo', b'int', 0),
-    u.configint(b'foo', b'int', 0, untrusted=True))
+    u.configint(b'foo', b'int', 0, untrusted=True),
+)
 bprint(b'# bytes, trusted and untrusted')
 bprint(
     u.configbytes(b'foo', b'bytes', 0),
-    u.configbytes(b'foo', b'bytes', 0, untrusted=True))
+    u.configbytes(b'foo', b'bytes', 0, untrusted=True),
+)
 bprint(b'# list, trusted and untrusted')
 bprint(
     u.configlist(b'foo', b'list', []),
-    u.configlist(b'foo', b'list', [], untrusted=True))
+    u.configlist(b'foo', b'list', [], untrusted=True),
+)
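
The reformatted testui above selects its trust message by treating two
booleans as bits: bool subclasses int, so (user in tusers) + 2 * (group in
tgroups) yields an index of 0-3. A minimal standalone sketch of the same
indexing trick (the helper name is illustrative, not from the test):

def describe_trust(user_trusted, group_trusted):
    # True + 2 * True == 3, so two flags select one of four messages
    who = ('', 'user', 'group', 'user and the group')
    return who[user_trusted + 2 * group_trusted]

assert describe_trust(True, False) == 'user'
assert describe_trust(True, True) == 'user and the group'
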
--- a/tests/test-ui-color.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-ui-color.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,15 +5,13 @@
     dispatch,
     ui as uimod,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 # ensure errors aren't buffered
 testui = uimod.ui()
 testui.pushbuffer()
-testui.write((b'buffered\n'))
-testui.warn((b'warning\n'))
+testui.write(b'buffered\n')
+testui.warn(b'warning\n')
 testui.write_err(b'error\n')
 print(stringutil.pprint(testui.popbuffer(), bprefix=True).decode('ascii'))
 
@@ -34,6 +32,7 @@
 def runcmd():
     dispatch.dispatch(dispatch.request([b'version', b'-q'], ui_))
 
+
 runcmd()
 print("colored? %s" % (ui_._colormode is not None))
 runcmd()
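
The hunk above exercises ui output buffering: pushbuffer() diverts writes
into a buffer and popbuffer() returns what accumulated. A minimal sketch of
that capture pattern, assuming a Mercurial checkout is importable:

from mercurial import ui as uimod

u = uimod.ui()
u.pushbuffer()            # writes now accumulate instead of printing
u.write(b'captured\n')
assert u.popbuffer() == b'captured\n'  # capture ends, buffer returned
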
--- a/tests/test-ui-config.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-ui-config.py	Sun Oct 06 09:45:02 2019 -0400
@@ -5,9 +5,7 @@
     pycompat,
     ui as uimod,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 testui = uimod.ui.load()
 
@@ -19,40 +17,45 @@
 testui.setconfig(b'devel', b'warn-config-unknown', False, b'test')
 testui.setconfig(b'devel', b'all-warnings', False, b'test')
 
-parsed = dispatch._parseconfig(testui, [
-    b'values.string=string value',
-    b'values.bool1=true',
-    b'values.bool2=false',
-    b'values.boolinvalid=foo',
-    b'values.int1=42',
-    b'values.int2=-42',
-    b'values.intinvalid=foo',
-    b'lists.list1=foo',
-    b'lists.list2=foo bar baz',
-    b'lists.list3=alice, bob',
-    b'lists.list4=foo bar baz alice, bob',
-    b'lists.list5=abc d"ef"g "hij def"',
-    b'lists.list6="hello world", "how are you?"',
-    b'lists.list7=Do"Not"Separate',
-    b'lists.list8="Do"Separate',
-    b'lists.list9="Do\\"NotSeparate"',
-    b'lists.list10=string "with extraneous" quotation mark"',
-    b'lists.list11=x, y',
-    b'lists.list12="x", "y"',
-    b'lists.list13=""" key = "x", "y" """',
-    b'lists.list14=,,,,     ',
-    b'lists.list15=" just with starting quotation',
-    b'lists.list16="longer quotation" with "no ending quotation',
-    b'lists.list17=this is \\" "not a quotation mark"',
-    b'lists.list18=\n \n\nding\ndong',
-    b'date.epoch=0 0',
-    b'date.birth=2005-04-19T00:00:00',
-    b'date.invalid=0'
-    ])
+parsed = dispatch._parseconfig(
+    testui,
+    [
+        b'values.string=string value',
+        b'values.bool1=true',
+        b'values.bool2=false',
+        b'values.boolinvalid=foo',
+        b'values.int1=42',
+        b'values.int2=-42',
+        b'values.intinvalid=foo',
+        b'lists.list1=foo',
+        b'lists.list2=foo bar baz',
+        b'lists.list3=alice, bob',
+        b'lists.list4=foo bar baz alice, bob',
+        b'lists.list5=abc d"ef"g "hij def"',
+        b'lists.list6="hello world", "how are you?"',
+        b'lists.list7=Do"Not"Separate',
+        b'lists.list8="Do"Separate',
+        b'lists.list9="Do\\"NotSeparate"',
+        b'lists.list10=string "with extraneous" quotation mark"',
+        b'lists.list11=x, y',
+        b'lists.list12="x", "y"',
+        b'lists.list13=""" key = "x", "y" """',
+        b'lists.list14=,,,,     ',
+        b'lists.list15=" just with starting quotation',
+        b'lists.list16="longer quotation" with "no ending quotation',
+        b'lists.list17=this is \\" "not a quotation mark"',
+        b'lists.list18=\n \n\nding\ndong',
+        b'date.epoch=0 0',
+        b'date.birth=2005-04-19T00:00:00',
+        b'date.invalid=0',
+    ],
+)
+
 
 def pprint(obj):
     return stringutil.pprint(obj).decode('ascii')
 
+
 print(pprint(testui.configitems(b'values')))
 print(pprint(testui.configitems(b'lists')))
 print("---")
@@ -107,9 +110,11 @@
 
 print(pprint(testui.config(b'values', b'String')))
 
+
 def function():
     pass
 
+
 # values that aren't strings should work
 testui.setconfig(b'hook', b'commit', function)
 print(function == testui.config(b'hook', b'commit'))
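
The test above feeds --config-style overrides through dispatch._parseconfig,
which accepts section.name=value byte strings. A minimal round-trip sketch,
assuming a Mercurial checkout is importable (the key names are illustrative):

from mercurial import dispatch, ui as uimod

u = uimod.ui.load()
dispatch._parseconfig(u, [b'values.string=string value', b'values.int1=42'])
assert u.config(b'values', b'string') == b'string value'
assert u.configint(b'values', b'int1', 0) == 42
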
--- a/tests/test-ui-verbosity.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-ui-verbosity.py	Sun Oct 06 09:45:02 2019 -0400
@@ -18,12 +18,12 @@
 print('    quiet verbo debug   quiet verbo debug      quiet verbo debug')
 
 for i in xrange(64):
-    hgrc_quiet   = bool(i & 1<<0)
-    hgrc_verbose = bool(i & 1<<1)
-    hgrc_debug   = bool(i & 1<<2)
-    cmd_quiet    = bool(i & 1<<3)
-    cmd_verbose  = bool(i & 1<<4)
-    cmd_debug    = bool(i & 1<<5)
+    hgrc_quiet = bool(i & 1 << 0)
+    hgrc_verbose = bool(i & 1 << 1)
+    hgrc_debug = bool(i & 1 << 2)
+    cmd_quiet = bool(i & 1 << 3)
+    cmd_verbose = bool(i & 1 << 4)
+    cmd_debug = bool(i & 1 << 5)
 
     f = open(hgrc, 'w')
     f.write(basehgrc)
@@ -49,7 +49,21 @@
     elif u.verbose and u.quiet:
         check = ' +'
 
-    print(('%2d  %5s %5s %5s   %5s %5s %5s  ->  %5s %5s %5s%s'
-           % (i, hgrc_quiet, hgrc_verbose, hgrc_debug,
-              cmd_quiet, cmd_verbose, cmd_debug,
-              u.quiet, u.verbose, u.debugflag, check)))
+    print(
+        (
+            '%2d  %5s %5s %5s   %5s %5s %5s  ->  %5s %5s %5s%s'
+            % (
+                i,
+                hgrc_quiet,
+                hgrc_verbose,
+                hgrc_debug,
+                cmd_quiet,
+                cmd_verbose,
+                cmd_debug,
+                u.quiet,
+                u.verbose,
+                u.debugflag,
+                check,
+            )
+        )
+    )
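
Black's spacing of i & 1 << 0 above can read ambiguously, but << binds
tighter than &, so each expression still tests a single bit of the loop
counter. A standalone sketch of the same six-flag decoding:

i = 0b101001
flags = [bool(i & 1 << n) for n in range(6)]
assert flags == [True, False, False, True, False, True]
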
--- a/tests/test-url.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-url.py	Sun Oct 06 09:45:02 2019 -0400
@@ -4,146 +4,170 @@
 import doctest
 import os
 
+
 def check(a, b):
     if a != b:
         print((a, b))
 
+
 def cert(cn):
     return {'subject': ((('commonName', cn),),)}
 
-from mercurial import (
-    sslutil,
-)
+
+from mercurial import sslutil
 
 _verifycert = sslutil._verifycert
 # Test non-wildcard certificates
-check(_verifycert(cert('example.com'), 'example.com'),
-      None)
-check(_verifycert(cert('example.com'), 'www.example.com'),
-      b'certificate is for example.com')
-check(_verifycert(cert('www.example.com'), 'example.com'),
-      b'certificate is for www.example.com')
+check(_verifycert(cert('example.com'), 'example.com'), None)
+check(
+    _verifycert(cert('example.com'), 'www.example.com'),
+    b'certificate is for example.com',
+)
+check(
+    _verifycert(cert('www.example.com'), 'example.com'),
+    b'certificate is for www.example.com',
+)
 
 # Test wildcard certificates
-check(_verifycert(cert('*.example.com'), 'www.example.com'),
-      None)
-check(_verifycert(cert('*.example.com'), 'example.com'),
-      b'certificate is for *.example.com')
-check(_verifycert(cert('*.example.com'), 'w.w.example.com'),
-      b'certificate is for *.example.com')
+check(_verifycert(cert('*.example.com'), 'www.example.com'), None)
+check(
+    _verifycert(cert('*.example.com'), 'example.com'),
+    b'certificate is for *.example.com',
+)
+check(
+    _verifycert(cert('*.example.com'), 'w.w.example.com'),
+    b'certificate is for *.example.com',
+)
 
 # Test subjectAltName
-san_cert = {'subject': ((('commonName', 'example.com'),),),
-            'subjectAltName': (('DNS', '*.example.net'),
-                               ('DNS', 'example.net'))}
-check(_verifycert(san_cert, 'example.net'),
-      None)
-check(_verifycert(san_cert, 'foo.example.net'),
-      None)
+san_cert = {
+    'subject': ((('commonName', 'example.com'),),),
+    'subjectAltName': (('DNS', '*.example.net'), ('DNS', 'example.net')),
+}
+check(_verifycert(san_cert, 'example.net'), None)
+check(_verifycert(san_cert, 'foo.example.net'), None)
 # no fallback to subject commonName when subjectAltName has DNS
-check(_verifycert(san_cert, 'example.com'),
-      b'certificate is for *.example.net, example.net')
+check(
+    _verifycert(san_cert, 'example.com'),
+    b'certificate is for *.example.net, example.net',
+)
 # fallback to subject commonName when no DNS in subjectAltName
-san_cert = {'subject': ((('commonName', 'example.com'),),),
-            'subjectAltName': (('IP Address', '8.8.8.8'),)}
+san_cert = {
+    'subject': ((('commonName', 'example.com'),),),
+    'subjectAltName': (('IP Address', '8.8.8.8'),),
+}
 check(_verifycert(san_cert, 'example.com'), None)
 
 # Avoid some pitfalls
-check(_verifycert(cert('*.foo'), 'foo'),
-      b'certificate is for *.foo')
+check(_verifycert(cert('*.foo'), 'foo'), b'certificate is for *.foo')
 check(_verifycert(cert('*o'), 'foo'), None)
 
-check(_verifycert({'subject': ()},
-                  'example.com'),
-      b'no commonName or subjectAltName found in certificate')
-check(_verifycert(None, 'example.com'),
-      b'no certificate received')
+check(
+    _verifycert({'subject': ()}, 'example.com'),
+    b'no commonName or subjectAltName found in certificate',
+)
+check(_verifycert(None, 'example.com'), b'no certificate received')
 
 # Unicode (IDN) certname isn't supported
-check(_verifycert(cert(u'\u4f8b.jp'), 'example.jp'),
-      b'IDN in certificate not supported')
+check(
+    _verifycert(cert(u'\u4f8b.jp'), 'example.jp'),
+    b'IDN in certificate not supported',
+)
 
 # The following tests are from CPython's test_ssl.py.
 check(_verifycert(cert('example.com'), 'example.com'), None)
 check(_verifycert(cert('example.com'), 'ExAmple.cOm'), None)
-check(_verifycert(cert('example.com'), 'www.example.com'),
-      b'certificate is for example.com')
-check(_verifycert(cert('example.com'), '.example.com'),
-      b'certificate is for example.com')
-check(_verifycert(cert('example.com'), 'example.org'),
-      b'certificate is for example.com')
-check(_verifycert(cert('example.com'), 'exampleXcom'),
-      b'certificate is for example.com')
+check(
+    _verifycert(cert('example.com'), 'www.example.com'),
+    b'certificate is for example.com',
+)
+check(
+    _verifycert(cert('example.com'), '.example.com'),
+    b'certificate is for example.com',
+)
+check(
+    _verifycert(cert('example.com'), 'example.org'),
+    b'certificate is for example.com',
+)
+check(
+    _verifycert(cert('example.com'), 'exampleXcom'),
+    b'certificate is for example.com',
+)
 check(_verifycert(cert('*.a.com'), 'foo.a.com'), None)
-check(_verifycert(cert('*.a.com'), 'bar.foo.a.com'),
-      b'certificate is for *.a.com')
-check(_verifycert(cert('*.a.com'), 'a.com'),
-      b'certificate is for *.a.com')
-check(_verifycert(cert('*.a.com'), 'Xa.com'),
-      b'certificate is for *.a.com')
-check(_verifycert(cert('*.a.com'), '.a.com'),
-      b'certificate is for *.a.com')
+check(
+    _verifycert(cert('*.a.com'), 'bar.foo.a.com'), b'certificate is for *.a.com'
+)
+check(_verifycert(cert('*.a.com'), 'a.com'), b'certificate is for *.a.com')
+check(_verifycert(cert('*.a.com'), 'Xa.com'), b'certificate is for *.a.com')
+check(_verifycert(cert('*.a.com'), '.a.com'), b'certificate is for *.a.com')
 
 # only match one left-most wildcard
 check(_verifycert(cert('f*.com'), 'foo.com'), None)
 check(_verifycert(cert('f*.com'), 'f.com'), None)
-check(_verifycert(cert('f*.com'), 'bar.com'),
-      b'certificate is for f*.com')
-check(_verifycert(cert('f*.com'), 'foo.a.com'),
-      b'certificate is for f*.com')
-check(_verifycert(cert('f*.com'), 'bar.foo.com'),
-      b'certificate is for f*.com')
+check(_verifycert(cert('f*.com'), 'bar.com'), b'certificate is for f*.com')
+check(_verifycert(cert('f*.com'), 'foo.a.com'), b'certificate is for f*.com')
+check(_verifycert(cert('f*.com'), 'bar.foo.com'), b'certificate is for f*.com')
 
 # NULL bytes are bad, CVE-2013-4073
-check(_verifycert(cert('null.python.org\x00example.org'),
-                  'null.python.org\x00example.org'), None)
-check(_verifycert(cert('null.python.org\x00example.org'),
-                  'example.org'),
-      b'certificate is for null.python.org\x00example.org')
-check(_verifycert(cert('null.python.org\x00example.org'),
-                  'null.python.org'),
-      b'certificate is for null.python.org\x00example.org')
+check(
+    _verifycert(
+        cert('null.python.org\x00example.org'), 'null.python.org\x00example.org'
+    ),
+    None,
+)
+check(
+    _verifycert(cert('null.python.org\x00example.org'), 'example.org'),
+    b'certificate is for null.python.org\x00example.org',
+)
+check(
+    _verifycert(cert('null.python.org\x00example.org'), 'null.python.org'),
+    b'certificate is for null.python.org\x00example.org',
+)
 
 # error cases with wildcards
-check(_verifycert(cert('*.*.a.com'), 'bar.foo.a.com'),
-      b'certificate is for *.*.a.com')
-check(_verifycert(cert('*.*.a.com'), 'a.com'),
-      b'certificate is for *.*.a.com')
-check(_verifycert(cert('*.*.a.com'), 'Xa.com'),
-      b'certificate is for *.*.a.com')
-check(_verifycert(cert('*.*.a.com'), '.a.com'),
-      b'certificate is for *.*.a.com')
+check(
+    _verifycert(cert('*.*.a.com'), 'bar.foo.a.com'),
+    b'certificate is for *.*.a.com',
+)
+check(_verifycert(cert('*.*.a.com'), 'a.com'), b'certificate is for *.*.a.com')
+check(_verifycert(cert('*.*.a.com'), 'Xa.com'), b'certificate is for *.*.a.com')
+check(_verifycert(cert('*.*.a.com'), '.a.com'), b'certificate is for *.*.a.com')
 
-check(_verifycert(cert('a.*.com'), 'a.foo.com'),
-      b'certificate is for a.*.com')
-check(_verifycert(cert('a.*.com'), 'a..com'),
-      b'certificate is for a.*.com')
-check(_verifycert(cert('a.*.com'), 'a.com'),
-      b'certificate is for a.*.com')
+check(_verifycert(cert('a.*.com'), 'a.foo.com'), b'certificate is for a.*.com')
+check(_verifycert(cert('a.*.com'), 'a..com'), b'certificate is for a.*.com')
+check(_verifycert(cert('a.*.com'), 'a.com'), b'certificate is for a.*.com')
 
 # wildcard doesn't match IDNA prefix 'xn--'
 idna = u'püthon.python.org'.encode('idna').decode('ascii')
 check(_verifycert(cert(idna), idna), None)
-check(_verifycert(cert('x*.python.org'), idna),
-      b'certificate is for x*.python.org')
-check(_verifycert(cert('xn--p*.python.org'), idna),
-      b'certificate is for xn--p*.python.org')
+check(
+    _verifycert(cert('x*.python.org'), idna),
+    b'certificate is for x*.python.org',
+)
+check(
+    _verifycert(cert('xn--p*.python.org'), idna),
+    b'certificate is for xn--p*.python.org',
+)
 
 # wildcard in the first fragment and IDNA A-labels in subsequent
 # fragments are supported.
 idna = u'www*.pythön.org'.encode('idna').decode('ascii')
-check(_verifycert(cert(idna),
-                  u'www.pythön.org'.encode('idna').decode('ascii')),
-      None)
-check(_verifycert(cert(idna),
-                  u'www1.pythön.org'.encode('idna').decode('ascii')),
-      None)
-check(_verifycert(cert(idna),
-                  u'ftp.pythön.org'.encode('idna').decode('ascii')),
-      b'certificate is for www*.xn--pythn-mua.org')
-check(_verifycert(cert(idna),
-                  u'pythön.org'.encode('idna').decode('ascii')),
-      b'certificate is for www*.xn--pythn-mua.org')
+check(
+    _verifycert(cert(idna), u'www.pythön.org'.encode('idna').decode('ascii')),
+    None,
+)
+check(
+    _verifycert(cert(idna), u'www1.pythön.org'.encode('idna').decode('ascii')),
+    None,
+)
+check(
+    _verifycert(cert(idna), u'ftp.pythön.org'.encode('idna').decode('ascii')),
+    b'certificate is for www*.xn--pythn-mua.org',
+)
+check(
+    _verifycert(cert(idna), u'pythön.org'.encode('idna').decode('ascii')),
+    b'certificate is for www*.xn--pythn-mua.org',
+)
 
 c = {
     'notAfter': 'Jun 26 21:41:46 2011 GMT',
@@ -152,16 +176,20 @@
         ('DNS', 'linuxfr.org'),
         ('DNS', 'linuxfr.com'),
         ('othername', '<unsupported>'),
-    )
+    ),
 }
 check(_verifycert(c, 'linuxfr.org'), None)
 check(_verifycert(c, 'linuxfr.com'), None)
 # Not a "DNS" entry
-check(_verifycert(c, '<unsupported>'),
-      b'certificate is for linuxfr.org, linuxfr.com')
+check(
+    _verifycert(c, '<unsupported>'),
+    b'certificate is for linuxfr.org, linuxfr.com',
+)
 # When there is a subjectAltName, commonName isn't used
-check(_verifycert(c, 'linuxfrz.org'),
-      b'certificate is for linuxfr.org, linuxfr.com')
+check(
+    _verifycert(c, 'linuxfrz.org'),
+    b'certificate is for linuxfr.org, linuxfr.com',
+)
 
 # A pristine real-world example
 c = {
@@ -190,8 +218,10 @@
         ((u'organizationName', u'Google Inc'),),
     ),
 }
-check(_verifycert(c, 'mail.google.com'),
-      b'no commonName or subjectAltName found in certificate')
+check(
+    _verifycert(c, 'mail.google.com'),
+    b'no commonName or subjectAltName found in certificate',
+)
 
 # No DNS entry in subjectAltName but a commonName
 c = {
@@ -217,8 +247,10 @@
     ),
     'subjectAltName': (('othername', 'blabla'),),
 }
-check(_verifycert(c, 'google.com'),
-      b'no commonName or subjectAltName found in certificate')
+check(
+    _verifycert(c, 'google.com'),
+    b'no commonName or subjectAltName found in certificate',
+)
 
 # Empty cert / no cert
 check(_verifycert(None, 'example.com'), b'no certificate received')
@@ -226,13 +258,19 @@
 
 # avoid denials of service by refusing more than one
 # wildcard per fragment.
-check(_verifycert({'subject': (((u'commonName', u'a*b.com'),),)},
-                  'axxb.com'), None)
-check(_verifycert({'subject': (((u'commonName', u'a*b.co*'),),)},
-                  'axxb.com'), b'certificate is for a*b.co*')
-check(_verifycert({'subject': (((u'commonName', u'a*b*.com'),),)},
-                  'axxbxxc.com'),
-      b'too many wildcards in certificate DNS name: a*b*.com')
+check(
+    _verifycert({'subject': (((u'commonName', u'a*b.com'),),)}, 'axxb.com'),
+    None,
+)
+check(
+    _verifycert({'subject': (((u'commonName', u'a*b.co*'),),)}, 'axxb.com'),
+    b'certificate is for a*b.co*',
+)
+check(
+    _verifycert({'subject': (((u'commonName', u'a*b*.com'),),)}, 'axxbxxc.com'),
+    b'too many wildcards in certificate DNS name: a*b*.com',
+)
+
 
 def test_url():
     """
@@ -418,6 +456,7 @@
     'foo/bar/baz'
     """
 
+
 if 'TERM' in os.environ:
     del os.environ['TERM']
 
--- a/tests/test-util.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-util.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,6 +7,7 @@
 
 from mercurial import pycompat, util, utils
 
+
 @contextlib.contextmanager
 def mocktimer(incr=0.1, *additional_targets):
     """Replaces util.timer and additional_targets with a mock
@@ -46,10 +47,12 @@
         for args in additional_origs:
             setattr(*args)
 
+
 # attr.s default factory for util.timedcmstats.start binds the timer we
 # need to mock out.
 _start_default = (util.timedcmstats.start.default, 'factory')
 
+
 @contextlib.contextmanager
 def capturestderr():
     """Replace utils.procutil.stderr with a pycompat.bytesio instance
@@ -66,6 +69,7 @@
     finally:
         utils.procutil.stderr = orig
 
+
 class timedtests(unittest.TestCase):
     def testtimedcmstatsstr(self):
         stats = util.timedcmstats()
@@ -127,11 +131,13 @@
             with capturestderr() as out:
                 testfunc(2)
 
-        self.assertEqual(out.getvalue(), (
-            b'    testfunc: 1.000 s\n'
-            b'  testfunc: 3.000 s\n'
-        ))
+        self.assertEqual(
+            out.getvalue(),
+            (b'    testfunc: 1.000 s\n' b'  testfunc: 3.000 s\n'),
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
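
mocktimer above is a save/replace/restore context manager over module
attributes. A generic, self-contained sketch of that pattern (the helper
name is illustrative):

import contextlib
import math


@contextlib.contextmanager
def patched(obj, name, replacement):
    orig = getattr(obj, name)
    setattr(obj, name, replacement)
    try:
        yield
    finally:
        setattr(obj, name, orig)  # restore even if the body raised


with patched(math, 'pi', 3.0):
    assert math.pi == 3.0
assert math.pi > 3.0
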
--- a/tests/test-verify-repo-operations.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-verify-repo-operations.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,8 +11,7 @@
 import sys
 
 # Only run if slow tests are allowed
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
-                    'slow']):
+if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'slow']):
     sys.exit(80)
 
 # These tests require Hypothesis and pytz to be installed.
@@ -29,6 +28,7 @@
 # fix this problem.
 try:
     import enum
+
     assert enum  # Silence pyflakes
 except ImportError:
     sys.stderr.write("skipped: enum34 not installed" + os.linesep)
@@ -44,7 +44,11 @@
 
 from hypothesis.errors import HypothesisException
 from hypothesis.stateful import (
-    rule, RuleBasedStateMachine, Bundle, precondition)
+    rule,
+    RuleBasedStateMachine,
+    Bundle,
+    precondition,
+)
 from hypothesis import settings, note, strategies as st
 from hypothesis.configuration import set_hypothesis_home_dir
 from hypothesis.database import ExampleDatabase
@@ -76,9 +80,9 @@
 file_index = 0
 while True:
     file_index += 1
-    savefile = os.path.join(generatedtests, "test-generated-%d.t" % (
-        file_index,
-    ))
+    savefile = os.path.join(
+        generatedtests, "test-generated-%d.t" % (file_index,)
+    )
     try:
         os.close(os.open(savefile, os.O_CREAT | os.O_EXCL | os.O_WRONLY))
         break
@@ -94,17 +98,23 @@
     "[]^_`;=@{}~ !#$%&'()+,-"
 )
 
-files = st.text(filecharacters, min_size=1).map(lambda x: x.strip()).filter(
-    bool).map(lambda s: s.encode('ascii'))
-
-safetext = st.text(st.characters(
-    min_codepoint=1, max_codepoint=127,
-    blacklist_categories=('Cc', 'Cs')), min_size=1).map(
-    lambda s: s.encode('utf-8')
+files = (
+    st.text(filecharacters, min_size=1)
+    .map(lambda x: x.strip())
+    .filter(bool)
+    .map(lambda s: s.encode('ascii'))
 )
 
+safetext = st.text(
+    st.characters(
+        min_codepoint=1, max_codepoint=127, blacklist_categories=('Cc', 'Cs')
+    ),
+    min_size=1,
+).map(lambda s: s.encode('utf-8'))
+
 extensions = st.sampled_from(('shelve', 'mq', 'blackbox',))
 
+
 @contextmanager
 def acceptableerrors(*args):
     """Sometimes we know an operation we're about to perform might fail, and
@@ -118,10 +128,12 @@
             note(e.output)
             raise
 
+
 reponames = st.text("abcdefghijklmnopqrstuvwxyz01234556789", min_size=1).map(
     lambda s: s.encode('ascii')
 )
 
+
 class verifyingstatemachine(RuleBasedStateMachine):
     """This defines the set of acceptable operations on a Mercurial repository
     using Hypothesis's RuleBasedStateMachine.
@@ -188,8 +200,10 @@
             o.write(ttest + os.linesep)
         with open(os.devnull, "w") as devnull:
             rewriter = subprocess.Popen(
-                [runtests, "--local", "-i", path], stdin=subprocess.PIPE,
-                stdout=devnull, stderr=devnull,
+                [runtests, "--local", "-i", path],
+                stdin=subprocess.PIPE,
+                stdout=devnull,
+                stderr=devnull,
             )
             rewriter.communicate("yes")
             with open(path, 'r') as i:
@@ -198,29 +212,29 @@
         e = None
         if not self.failed:
             try:
-                output = subprocess.check_output([
-                    runtests, path, "--local", "--pure"
-                ], stderr=subprocess.STDOUT)
+                output = subprocess.check_output(
+                    [runtests, path, "--local", "--pure"],
+                    stderr=subprocess.STDOUT,
+                )
                 assert "Ran 1 test" in output, output
-                for ext in (
-                    self.all_extensions - self.non_skippable_extensions
-                ):
-                    tf = os.path.join(testtmp, "test-generated-no-%s.t" % (
-                        ext,
-                    ))
+                for ext in self.all_extensions - self.non_skippable_extensions:
+                    tf = os.path.join(
+                        testtmp, "test-generated-no-%s.t" % (ext,)
+                    )
                     with open(tf, 'w') as o:
                         for l in ttest.splitlines():
                             if l.startswith("  $ hg"):
                                 l = l.replace(
-                                    "--config %s=" % (
-                                        extensionconfigkey(ext),), "")
+                                    "--config %s=" % (extensionconfigkey(ext),),
+                                    "",
+                                )
                             o.write(l + os.linesep)
                     with open(tf, 'r') as r:
                         t = r.read()
                         assert ext not in t, t
-                    output = subprocess.check_output([
-                        runtests, tf, "--local",
-                    ], stderr=subprocess.STDOUT)
+                    output = subprocess.check_output(
+                        [runtests, tf, "--local",], stderr=subprocess.STDOUT
+                    )
                     assert "Ran 1 test" in output, output
             except subprocess.CalledProcessError as e:
                 note(e.output)
@@ -244,7 +258,8 @@
         if os.path.exists(path):
             return
         self.log.append(
-            "$ mkdir -p -- %s" % (pipes.quote(os.path.relpath(path)),))
+            "$ mkdir -p -- %s" % (pipes.quote(os.path.relpath(path)),)
+        )
         os.makedirs(path)
 
     def cd(self, path):
@@ -270,28 +285,29 @@
     # to use later.
     @rule(
         target=paths,
-        source=st.lists(files, min_size=1).map(lambda l: os.path.join(*l)))
+        source=st.lists(files, min_size=1).map(lambda l: os.path.join(*l)),
+    )
     def genpath(self, source):
         return source
 
     @rule(
         target=committimes,
-        when=datetimes(min_year=1970, max_year=2038) | st.none())
+        when=datetimes(min_year=1970, max_year=2038) | st.none(),
+    )
     def gentime(self, when):
         return when
 
     @rule(
         target=contents,
         content=st.one_of(
-            st.binary(),
-            st.text().map(lambda x: x.encode('utf-8'))
-        ))
+            st.binary(), st.text().map(lambda x: x.encode('utf-8'))
+        ),
+    )
     def gencontent(self, content):
         return content
 
     @rule(
-        target=branches,
-        name=safetext,
+        target=branches, name=safetext,
     )
     def genbranch(self, name):
         return name
@@ -322,12 +338,13 @@
                 return
         with open(path, 'wb') as o:
             o.write(content)
-        self.log.append((
-            "$ python -c 'import binascii; "
-            "print(binascii.unhexlify(\"%s\"))' > %s") % (
-                binascii.hexlify(content),
-                pipes.quote(path),
-            ))
+        self.log.append(
+            (
+                "$ python -c 'import binascii; "
+                "print(binascii.unhexlify(\"%s\"))' > %s"
+            )
+            % (binascii.hexlify(content), pipes.quote(path),)
+        )
 
     @rule(path=paths)
     def addpath(self, path):
@@ -337,9 +354,7 @@
     @rule(path=paths)
     def forgetpath(self, path):
         if os.path.exists(path):
-            with acceptableerrors(
-                "file is already untracked",
-            ):
+            with acceptableerrors("file is already untracked",):
                 self.hg("forget", "--", path)
 
     @rule(s=st.none() | st.integers(0, 100))
@@ -388,8 +403,9 @@
                 errors.append('negative date value')
             if when.year == 2038:
                 errors.append('exceeds 32 bits')
-            command.append("--date=%s" % (
-                when.strftime('%Y-%m-%d %H:%M:%S %z'),))
+            command.append(
+                "--date=%s" % (when.strftime('%Y-%m-%d %H:%M:%S %z'),)
+            )
 
         with acceptableerrors(*errors):
             self.hg(*command)
@@ -404,9 +420,7 @@
         return self.configperrepo.setdefault(self.currentrepo, {})
 
     @rule(
-        target=repos,
-        source=repos,
-        name=reponames,
+        target=repos, source=repos, name=reponames,
     )
     def clone(self, source, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -416,8 +430,7 @@
         return name
 
     @rule(
-        target=repos,
-        name=reponames,
+        target=repos, name=reponames,
     )
     def fresh(self, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -440,23 +453,19 @@
     @rule()
     def pull(self, repo=repos):
         with acceptableerrors(
-            "repository default not found",
-            "repository is unrelated",
+            "repository default not found", "repository is unrelated",
         ):
             self.hg("pull")
 
     @rule(newbranch=st.booleans())
     def push(self, newbranch):
         with acceptableerrors(
-            "default repository not configured",
-            "no changes found",
+            "default repository not configured", "no changes found",
         ):
             if newbranch:
                 self.hg("push", "--new-branch")
             else:
-                with acceptableerrors(
-                    "creates new branches"
-                ):
+                with acceptableerrors("creates new branches"):
                     self.hg("push")
 
     # Section: Simple side effect free "check" operations
@@ -498,8 +507,7 @@
     @rule(branch=branches, clean=st.booleans())
     def update(self, branch, clean):
         with acceptableerrors(
-            'unknown revision',
-            'parse error',
+            'unknown revision', 'parse error',
         ):
             if clean:
                 self.hg("update", "-C", "--", branch)
@@ -538,6 +546,7 @@
         with acceptableerrors("no shelved changes to apply"):
             self.hg("unshelve")
 
+
 class writeonlydatabase(ExampleDatabase):
     def __init__(self, underlying):
         super(ExampleDatabase, self).__init__()
@@ -555,35 +564,35 @@
     def close(self):
         self.underlying.close()
 
+
 def extensionconfigkey(extension):
     return "extensions." + extension
 
+
 settings.register_profile(
-    'default',  settings(
-        timeout=300,
-        stateful_step_count=50,
-        max_examples=10,
-    )
+    'default', settings(timeout=300, stateful_step_count=50, max_examples=10,)
 )
 
 settings.register_profile(
-    'fast',  settings(
+    'fast',
+    settings(
         timeout=10,
         stateful_step_count=20,
         max_examples=5,
         min_satisfying_examples=1,
         max_shrinks=0,
-    )
+    ),
 )
 
 settings.register_profile(
-    'continuous', settings(
+    'continuous',
+    settings(
         timeout=-1,
         stateful_step_count=1000,
         max_examples=10 ** 8,
         max_iterations=10 ** 8,
-        database=writeonlydatabase(settings.default.database)
-    )
+        database=writeonlydatabase(settings.default.database),
+    ),
 )
 
 settings.load_profile(os.getenv('HYPOTHESIS_PROFILE', 'default'))
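
The class above is a Hypothesis RuleBasedStateMachine: each @rule becomes a
randomly sequenced operation, and Hypothesis shrinks failing sequences. A
minimal sketch, assuming hypothesis is installed (the counter machine is
illustrative, not from the test):

from hypothesis import strategies as st
from hypothesis.stateful import RuleBasedStateMachine, rule


class CounterMachine(RuleBasedStateMachine):
    def __init__(self):
        super(CounterMachine, self).__init__()
        self.count = 0

    @rule(n=st.integers(min_value=1, max_value=10))
    def add(self, n):
        self.count += n
        assert self.count > 0  # invariant re-checked after every step


CounterTest = CounterMachine.TestCase  # collectable by unittest runners
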
--- a/tests/test-walkrepo.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-walkrepo.py	Sun Oct 06 09:45:02 2019 -0400
@@ -31,24 +31,35 @@
     os.symlink(os.path.pardir, b'circle')
     os.symlink(pjoin(b'subsubdir', b'subsub1'), b'subsub1')
 
+
 def runtest():
     reposet = frozenset(walkrepos(b'.', followsym=True))
     if sym and (len(reposet) != 3):
         print("reposet = %r" % (reposet,))
-        print(("Found %d repositories when I should have found 3"
-               % (len(reposet),)))
+        print(
+            (
+                "Found %d repositories when I should have found 3"
+                % (len(reposet),)
+            )
+        )
     if (not sym) and (len(reposet) != 2):
         print("reposet = %r" % (reposet,))
-        print(("Found %d repositories when I should have found 2"
-               % (len(reposet),)))
-    sub1set = frozenset((pjoin(b'.', b'sub1'),
-                         pjoin(b'.', b'circle', b'subdir', b'sub1')))
+        print(
+            (
+                "Found %d repositories when I should have found 2"
+                % (len(reposet),)
+            )
+        )
+    sub1set = frozenset(
+        (pjoin(b'.', b'sub1'), pjoin(b'.', b'circle', b'subdir', b'sub1'))
+    )
     if len(sub1set & reposet) != 1:
         print("sub1set = %r" % (sub1set,))
         print("reposet = %r" % (reposet,))
         print("sub1set and reposet should have exactly one path in common.")
-    sub2set = frozenset((pjoin(b'.', b'subsub1'),
-                         pjoin(b'.', b'subsubdir', b'subsub1')))
+    sub2set = frozenset(
+        (pjoin(b'.', b'subsub1'), pjoin(b'.', b'subsubdir', b'subsub1'))
+    )
     if len(sub2set & reposet) != 1:
         print("sub2set = %r" % (sub2set,))
         print("reposet = %r" % (reposet,))
@@ -58,6 +69,7 @@
         print("reposet = %r" % (reposet,))
         print("Symbolic links are supported and %s is not in reposet" % (sub3,))
 
+
 runtest()
 if sym:
     # Simulate not having symlinks.
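
runtest above compares the set of repositories found by walkrepos against
expectations, with and without symlink following. A minimal sketch of the
same call; the scmutil import path is an assumption, since this test's
import block sits above the hunk shown here:

from mercurial.scmutil import walkrepos

repos = frozenset(walkrepos(b'.', followsym=True))
print('found %d repositories' % len(repos))
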
--- a/tests/test-wireproto-clientreactor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-wireproto-clientreactor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,12 +9,11 @@
     ui as uimod,
     wireprotoframing as framing,
 )
-from mercurial.utils import (
-    cborutil,
-)
+from mercurial.utils import cborutil
 
 try:
     from mercurial import zstd
+
     zstd.__version__
 except ImportError:
     zstd = None
@@ -23,18 +22,24 @@
 
 globalui = uimod.ui()
 
+
 def sendframe(reactor, frame):
     """Send a frame bytearray to a reactor."""
     header = framing.parseheader(frame)
-    payload = frame[framing.FRAME_HEADER_SIZE:]
+    payload = frame[framing.FRAME_HEADER_SIZE :]
     assert len(payload) == header.length
 
-    return reactor.onframerecv(framing.frame(header.requestid,
-                                             header.streamid,
-                                             header.streamflags,
-                                             header.typeid,
-                                             header.flags,
-                                             payload))
+    return reactor.onframerecv(
+        framing.frame(
+            header.requestid,
+            header.streamid,
+            header.streamflags,
+            header.typeid,
+            header.flags,
+            payload,
+        )
+    )
+
 
 class SingleSendTests(unittest.TestCase):
     """A reactor that can only send once rejects subsequent sends."""
@@ -42,13 +47,14 @@
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
 
     def testbasic(self):
-        reactor = framing.clientreactor(globalui,
-                                        hasmultiplesend=False,
-                                        buffersends=True)
+        reactor = framing.clientreactor(
+            globalui, hasmultiplesend=False, buffersends=True
+        )
 
         request, action, meta = reactor.callcommand(b'foo', {})
         self.assertEqual(request.state, b'pending')
@@ -62,20 +68,24 @@
 
         self.assertEqual(request.state, b'sent')
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                     'cannot issue new commands'):
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'cannot issue new commands'
+        ):
             reactor.callcommand(b'foo', {})
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                     'cannot issue new commands'):
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'cannot issue new commands'
+        ):
             reactor.callcommand(b'foo', {})
 
+
 class NoBufferTests(unittest.TestCase):
     """A reactor without send buffering sends requests immediately."""
+
     def testbasic(self):
-        reactor = framing.clientreactor(globalui,
-                                        hasmultiplesend=True,
-                                        buffersends=False)
+        reactor = framing.clientreactor(
+            globalui, hasmultiplesend=True, buffersends=False
+        )
 
         request, action, meta = reactor.callcommand(b'command1', {})
         self.assertEqual(request.requestid, 1)
@@ -101,29 +111,34 @@
 
         self.assertEqual(request.state, b'sent')
 
+
 class BadFrameRecvTests(unittest.TestCase):
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
 
     def testoddstream(self):
         reactor = framing.clientreactor(globalui)
 
         action, meta = sendframe(reactor, ffs(b'1 1 0 1 0 foo'))
         self.assertEqual(action, b'error')
-        self.assertEqual(meta[b'message'],
-                         b'received frame with odd numbered stream ID: 1')
+        self.assertEqual(
+            meta[b'message'], b'received frame with odd numbered stream ID: 1'
+        )
 
     def testunknownstream(self):
         reactor = framing.clientreactor(globalui)
 
         action, meta = sendframe(reactor, ffs(b'1 0 0 1 0 foo'))
         self.assertEqual(action, b'error')
-        self.assertEqual(meta[b'message'],
-                         b'received frame on unknown stream without beginning '
-                         b'of stream flag set')
+        self.assertEqual(
+            meta[b'message'],
+            b'received frame on unknown stream without beginning '
+            b'of stream flag set',
+        )
 
     def testunhandledframetype(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -132,10 +147,12 @@
         for frame in meta[b'framegen']:
             pass
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                     'unhandled frame type'):
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'unhandled frame type'
+        ):
             sendframe(reactor, ffs(b'1 0 stream-begin text-output 0 foo'))
 
+
 class StreamTests(unittest.TestCase):
     def testmultipleresponseframes(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -148,15 +165,18 @@
 
         action, meta = sendframe(
             reactor,
-            ffs(b'%d 0 stream-begin command-response 0 foo' %
-                request.requestid))
+            ffs(
+                b'%d 0 stream-begin command-response 0 foo' % request.requestid
+            ),
+        )
         self.assertEqual(action, b'responsedata')
 
         action, meta = sendframe(
-            reactor,
-            ffs(b'%d 0 0 command-response eos bar' % request.requestid))
+            reactor, ffs(b'%d 0 0 command-response eos bar' % request.requestid)
+        )
         self.assertEqual(action, b'responsedata')
 
+
 class RedirectTests(unittest.TestCase):
     def testredirect(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -167,18 +187,24 @@
         }
 
         request, action, meta = reactor.callcommand(
-            b'foo', {}, redirect=redirect)
+            b'foo', {}, redirect=redirect
+        )
 
         self.assertEqual(action, b'sendframes')
 
         frames = list(meta[b'framegen'])
         self.assertEqual(len(frames), 1)
 
-        self.assertEqual(frames[0],
-                         ffs(b'1 1 stream-begin command-request new '
-                             b"cbor:{b'name': b'foo', "
-                             b"b'redirect': {b'targets': [b'a', b'b'], "
-                             b"b'hashes': [b'sha256']}}"))
+        self.assertEqual(
+            frames[0],
+            ffs(
+                b'1 1 stream-begin command-request new '
+                b"cbor:{b'name': b'foo', "
+                b"b'redirect': {b'targets': [b'a', b'b'], "
+                b"b'hashes': [b'sha256']}}"
+            ),
+        )
+
 
 class StreamSettingsTests(unittest.TestCase):
     def testnoflags(self):
@@ -188,14 +214,18 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings 0 '))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 stream-begin stream-settings 0 ')
+        )
 
         self.assertEqual(action, b'error')
-        self.assertEqual(meta, {
-            b'message': b'stream encoding settings frame must have '
-                        b'continuation or end of stream flag set',
-        })
+        self.assertEqual(
+            meta,
+            {
+                b'message': b'stream encoding settings frame must have '
+                b'continuation or end of stream flag set',
+            },
+        )
 
     def testconflictflags(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -204,14 +234,18 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings continuation|eos '))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 stream-begin stream-settings continuation|eos ')
+        )
 
         self.assertEqual(action, b'error')
-        self.assertEqual(meta, {
-            b'message': b'stream encoding settings frame cannot have both '
-                        b'continuation and end of stream flags set',
-        })
+        self.assertEqual(
+            meta,
+            {
+                b'message': b'stream encoding settings frame cannot have both '
+                b'continuation and end of stream flags set',
+            },
+        )
 
     def testemptypayload(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -220,14 +254,18 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings eos '))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 stream-begin stream-settings eos ')
+        )
 
         self.assertEqual(action, b'error')
-        self.assertEqual(meta, {
-            b'message': b'stream encoding settings frame did not contain '
-                        b'CBOR data'
-        })
+        self.assertEqual(
+            meta,
+            {
+                b'message': b'stream encoding settings frame did not contain '
+                b'CBOR data'
+            },
+        )
 
     def testbadcbor(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -236,8 +274,9 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings eos badvalue'))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 stream-begin stream-settings eos badvalue')
+        )
 
         self.assertEqual(action, b'error')
 
@@ -248,8 +287,10 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings eos cbor:b"identity"'))
+        action, meta = sendframe(
+            reactor,
+            ffs(b'1 2 stream-begin stream-settings eos cbor:b"identity"'),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -261,19 +302,25 @@
         for f in meta[b'framegen']:
             pass
 
-        data = b''.join([
-            b''.join(cborutil.streamencode(b'identity')),
-            b''.join(cborutil.streamencode({b'foo', b'bar'})),
-        ])
+        data = b''.join(
+            [
+                b''.join(cborutil.streamencode(b'identity')),
+                b''.join(cborutil.streamencode({b'foo', b'bar'})),
+            ]
+        )
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings eos %s' % data))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 stream-begin stream-settings eos %s' % data)
+        )
 
         self.assertEqual(action, b'error')
-        self.assertEqual(meta, {
-            b'message': b'error setting stream decoder: identity decoder '
-                        b'received unexpected additional values',
-        })
+        self.assertEqual(
+            meta,
+            {
+                b'message': b'error setting stream decoder: identity decoder '
+                b'received unexpected additional values',
+            },
+        )
 
     def testmultipleframes(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -284,15 +331,19 @@
 
         data = b''.join(cborutil.streamencode(b'identity'))
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings continuation %s' %
-                data[0:3]))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'1 2 stream-begin stream-settings continuation %s' % data[0:3]
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 0 stream-settings eos %s' % data[3:]))
+        action, meta = sendframe(
+            reactor, ffs(b'1 2 0 stream-settings eos %s' % data[3:])
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -304,14 +355,19 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'1 2 stream-begin stream-settings eos cbor:b"badvalue"'))
+        action, meta = sendframe(
+            reactor,
+            ffs(b'1 2 stream-begin stream-settings eos cbor:b"badvalue"'),
+        )
 
         self.assertEqual(action, b'error')
-        self.assertEqual(meta, {
-            b'message': b'error setting stream decoder: unknown stream '
-                        b'decoder: badvalue',
-        })
+        self.assertEqual(
+            meta,
+            {
+                b'message': b'error setting stream decoder: unknown stream '
+                b'decoder: badvalue',
+            },
+        )
 
     def testzlibencoding(self):
         reactor = framing.clientreactor(globalui, buffersends=False)
@@ -320,9 +376,13 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zlib"' %
-                request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zlib"'
+                % request.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -335,9 +395,13 @@
         compressed = zlib.compress(encoded)
         self.assertEqual(zlib.decompress(compressed), encoded)
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' %
-                (request.requestid, compressed)))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request.requestid, compressed)
+            ),
+        )
 
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], encoded)
@@ -349,9 +413,13 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zlib"' %
-                request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zlib"'
+                % request.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -367,12 +435,16 @@
         chunks = []
 
         for i in range(len(compressed)):
-            char = compressed[i:i + 1]
+            char = compressed[i : i + 1]
             if char == b'\\':
                 char = b'\\\\'
-            action, meta = sendframe(reactor,
-                ffs(b'%d 2 encoded command-response continuation %s' %
-                    (request.requestid, char)))
+            action, meta = sendframe(
+                reactor,
+                ffs(
+                    b'%d 2 encoded command-response continuation %s'
+                    % (request.requestid, char)
+                ),
+            )
 
             self.assertEqual(action, b'responsedata')
             chunks.append(meta[b'data'])
@@ -384,8 +456,10 @@
         self.assertEqual(b''.join(chunks), encoded)
 
         # End the stream for good measure.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-end command-response eos ' % request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(b'%d 2 stream-end command-response eos ' % request.requestid),
+        )
 
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
@@ -395,8 +469,9 @@
     def testzlibmultipleresponses(self):
         # We feed in zlib compressed data on the same stream but belonging to
         # 2 different requests. This tests our flushing behavior.
-        reactor = framing.clientreactor(globalui, buffersends=False,
-                                        hasmultiplesend=True)
+        reactor = framing.clientreactor(
+            globalui, buffersends=False, hasmultiplesend=True
+        )
 
         request1, action, meta = reactor.callcommand(b'foo', {})
         for f in meta[b'framegen']:
@@ -409,48 +484,70 @@
         outstream = framing.outputstream(2)
         outstream.setencoder(globalui, b'zlib')
 
-        response1 = b''.join(cborutil.streamencode({
-            b'status': b'ok',
-            b'extra': b'response1' * 10,
-        }))
+        response1 = b''.join(
+            cborutil.streamencode(
+                {b'status': b'ok', b'extra': b'response1' * 10,}
+            )
+        )
 
-        response2 = b''.join(cborutil.streamencode({
-            b'status': b'error',
-            b'extra': b'response2' * 10,
-        }))
+        response2 = b''.join(
+            cborutil.streamencode(
+                {b'status': b'error', b'extra': b'response2' * 10,}
+            )
+        )
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zlib"' %
-                request1.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zlib"'
+                % request1.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
 
         # Feeding partial data in won't get anything useful out.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response continuation %s' % (
-                request1.requestid, outstream.encode(response1))))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response continuation %s'
+                % (request1.requestid, outstream.encode(response1))
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
 
         # But flushing data at both ends will get our original data.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' % (
-                request1.requestid, outstream.flush())))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request1.requestid, outstream.flush())
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], response1)
 
         # We should be able to reuse the compressor/decompressor for the
         # 2nd response.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response continuation %s' % (
-                request2.requestid, outstream.encode(response2))))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response continuation %s'
+                % (request2.requestid, outstream.encode(response2))
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' % (
-                request2.requestid, outstream.flush())))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request2.requestid, outstream.flush())
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], response2)
 
@@ -462,9 +559,13 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"' %
-                request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"'
+                % request.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -476,12 +577,20 @@
 
         encoder = framing.zstd8mbencoder(globalui)
         compressed = encoder.encode(encoded) + encoder.finish()
-        self.assertEqual(zstd.ZstdDecompressor().decompress(
-            compressed, max_output_size=len(encoded)), encoded)
+        self.assertEqual(
+            zstd.ZstdDecompressor().decompress(
+                compressed, max_output_size=len(encoded)
+            ),
+            encoded,
+        )
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' %
-                (request.requestid, compressed)))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request.requestid, compressed)
+            ),
+        )
 
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], encoded)
@@ -494,9 +603,13 @@
         for f in meta[b'framegen']:
             pass
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"' %
-                request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"'
+                % request.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
@@ -507,18 +620,23 @@
         encoded = b''.join(cborutil.streamencode(result))
 
         compressed = zstd.ZstdCompressor().compress(encoded)
-        self.assertEqual(zstd.ZstdDecompressor().decompress(compressed),
-                         encoded)
+        self.assertEqual(
+            zstd.ZstdDecompressor().decompress(compressed), encoded
+        )
 
         chunks = []
 
         for i in range(len(compressed)):
-            char = compressed[i:i + 1]
+            char = compressed[i : i + 1]
             if char == b'\\':
                 char = b'\\\\'
-            action, meta = sendframe(reactor,
-                ffs(b'%d 2 encoded command-response continuation %s' %
-                    (request.requestid, char)))
+            action, meta = sendframe(
+                reactor,
+                ffs(
+                    b'%d 2 encoded command-response continuation %s'
+                    % (request.requestid, char)
+                ),
+            )
 
             self.assertEqual(action, b'responsedata')
             chunks.append(meta[b'data'])
@@ -529,8 +647,10 @@
         self.assertEqual(b''.join(chunks), encoded)
 
         # End the stream for good measure.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-end command-response eos ' % request.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(b'%d 2 stream-end command-response eos ' % request.requestid),
+        )
 
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
@@ -541,8 +661,9 @@
     def testzstd8mbmultipleresponses(self):
         # We feed in zstd compressed data on the same stream but belonging to
         # 2 different requests. This tests our flushing behavior.
-        reactor = framing.clientreactor(globalui, buffersends=False,
-                                        hasmultiplesend=True)
+        reactor = framing.clientreactor(
+            globalui, buffersends=False, hasmultiplesend=True
+        )
 
         request1, action, meta = reactor.callcommand(b'foo', {})
         for f in meta[b'framegen']:
@@ -555,51 +676,74 @@
         outstream = framing.outputstream(2)
         outstream.setencoder(globalui, b'zstd-8mb')
 
-        response1 = b''.join(cborutil.streamencode({
-            b'status': b'ok',
-            b'extra': b'response1' * 10,
-        }))
+        response1 = b''.join(
+            cborutil.streamencode(
+                {b'status': b'ok', b'extra': b'response1' * 10,}
+            )
+        )
 
-        response2 = b''.join(cborutil.streamencode({
-            b'status': b'error',
-            b'extra': b'response2' * 10,
-        }))
+        response2 = b''.join(
+            cborutil.streamencode(
+                {b'status': b'error', b'extra': b'response2' * 10,}
+            )
+        )
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"' %
-                request1.requestid))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 stream-begin stream-settings eos cbor:b"zstd-8mb"'
+                % request1.requestid
+            ),
+        )
 
         self.assertEqual(action, b'noop')
         self.assertEqual(meta, {})
 
         # Feeding partial data in won't get anything useful out.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response continuation %s' % (
-                request1.requestid, outstream.encode(response1))))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response continuation %s'
+                % (request1.requestid, outstream.encode(response1))
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
 
         # But flushing data at both ends will get our original data.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' % (
-                request1.requestid, outstream.flush())))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request1.requestid, outstream.flush())
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], response1)
 
         # We should be able to reuse the compressor/decompressor for the
         # 2nd response.
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response continuation %s' % (
-                request2.requestid, outstream.encode(response2))))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response continuation %s'
+                % (request2.requestid, outstream.encode(response2))
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], b'')
 
-        action, meta = sendframe(reactor,
-            ffs(b'%d 2 encoded command-response eos %s' % (
-                request2.requestid, outstream.flush())))
+        action, meta = sendframe(
+            reactor,
+            ffs(
+                b'%d 2 encoded command-response eos %s'
+                % (request2.requestid, outstream.flush())
+            ),
+        )
         self.assertEqual(action, b'responsedata')
         self.assertEqual(meta[b'data'], response2)
 
+
 if __name__ == '__main__':
     if (3, 6, 0) <= sys.version_info < (3, 6, 4):
         # Python 3.6.0 through 3.6.3 inclusive shipped with
@@ -607,4 +751,5 @@
         # tests on those specific versions of Python. Sigh.
         sys.exit(80)
     import silenttestrunner
+
     silenttestrunner.main(__name__)
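
The zstd-8mb tests above lean on a property of zstandard's streaming API: partially fed input may produce no output until the compressor flushes, and a block flush makes everything fed so far decodable on the receiving side. A minimal sketch of that behavior against the third-party python-zstandard package directly (the variable names are illustrative; the patch's tests go through Mercurial's framing.outputstream wrapper instead):

import zstandard as zstd

cctx = zstd.ZstdCompressor()
cobj = cctx.compressobj()
dobj = zstd.ZstdDecompressor().decompressobj()

# Feeding data in typically yields little or nothing until a flush boundary.
partial = cobj.compress(b'response1' * 10)

# Flushing a block makes everything fed so far decodable downstream, which
# is what the outstream.flush() calls in the tests above rely on.
flushed = cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)
assert dobj.decompress(partial + flushed) == b'response1' * 10
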
--- a/tests/test-wireproto-framing.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-wireproto-framing.py	Sun Oct 06 09:45:02 2019 -0400
@@ -9,186 +9,278 @@
 
 ffs = framing.makeframefromhumanstring
 
+
 class FrameHumanStringTests(unittest.TestCase):
     def testbasic(self):
-        self.assertEqual(ffs(b'1 1 0 1 0 '),
-                         b'\x00\x00\x00\x01\x00\x01\x00\x10')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 '), b'\x00\x00\x00\x01\x00\x01\x00\x10'
+        )
 
-        self.assertEqual(ffs(b'2 4 0 1 0 '),
-                         b'\x00\x00\x00\x02\x00\x04\x00\x10')
+        self.assertEqual(
+            ffs(b'2 4 0 1 0 '), b'\x00\x00\x00\x02\x00\x04\x00\x10'
+        )
 
-        self.assertEqual(ffs(b'2 4 0 1 0 foo'),
-                         b'\x03\x00\x00\x02\x00\x04\x00\x10foo')
+        self.assertEqual(
+            ffs(b'2 4 0 1 0 foo'), b'\x03\x00\x00\x02\x00\x04\x00\x10foo'
+        )
 
     def testcborint(self):
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:15'),
-                         b'\x01\x00\x00\x01\x00\x01\x00\x10\x0f')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:15'), b'\x01\x00\x00\x01\x00\x01\x00\x10\x0f'
+        )
 
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:42'),
-                         b'\x02\x00\x00\x01\x00\x01\x00\x10\x18*')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:42'), b'\x02\x00\x00\x01\x00\x01\x00\x10\x18*'
+        )
+
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:1048576'),
+            b'\x05\x00\x00\x01\x00\x01\x00\x10\x1a' b'\x00\x10\x00\x00',
+        )
 
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:1048576'),
-                         b'\x05\x00\x00\x01\x00\x01\x00\x10\x1a'
-                         b'\x00\x10\x00\x00')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:0'), b'\x01\x00\x00\x01\x00\x01\x00\x10\x00'
+        )
 
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:0'),
-                         b'\x01\x00\x00\x01\x00\x01\x00\x10\x00')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:-1'), b'\x01\x00\x00\x01\x00\x01\x00\x10 '
+        )
 
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:-1'),
-                         b'\x01\x00\x00\x01\x00\x01\x00\x10 ')
-
-        self.assertEqual(ffs(b'1 1 0 1 0 cbor:-342542'),
-                         b'\x05\x00\x00\x01\x00\x01\x00\x10:\x00\x05:\r')
+        self.assertEqual(
+            ffs(b'1 1 0 1 0 cbor:-342542'),
+            b'\x05\x00\x00\x01\x00\x01\x00\x10:\x00\x05:\r',
+        )
 
     def testcborstrings(self):
-        self.assertEqual(ffs(b"1 1 0 1 0 cbor:b'foo'"),
-                         b'\x04\x00\x00\x01\x00\x01\x00\x10Cfoo')
+        self.assertEqual(
+            ffs(b"1 1 0 1 0 cbor:b'foo'"),
+            b'\x04\x00\x00\x01\x00\x01\x00\x10Cfoo',
+        )
 
     def testcborlists(self):
-        self.assertEqual(ffs(b"1 1 0 1 0 cbor:[None, True, False, 42, b'foo']"),
-                         b'\n\x00\x00\x01\x00\x01\x00\x10\x85\xf6\xf5\xf4'
-                         b'\x18*Cfoo')
+        self.assertEqual(
+            ffs(b"1 1 0 1 0 cbor:[None, True, False, 42, b'foo']"),
+            b'\n\x00\x00\x01\x00\x01\x00\x10\x85\xf6\xf5\xf4' b'\x18*Cfoo',
+        )
 
     def testcbordicts(self):
-        self.assertEqual(ffs(b"1 1 0 1 0 "
-                             b"cbor:{b'foo': b'val1', b'bar': b'val2'}"),
-                         b'\x13\x00\x00\x01\x00\x01\x00\x10\xa2'
-                         b'CbarDval2CfooDval1')
+        self.assertEqual(
+            ffs(b"1 1 0 1 0 " b"cbor:{b'foo': b'val1', b'bar': b'val2'}"),
+            b'\x13\x00\x00\x01\x00\x01\x00\x10\xa2' b'CbarDval2CfooDval1',
+        )
+
 
 class FrameTests(unittest.TestCase):
     def testdataexactframesize(self):
         data = util.bytesio(b'x' * framing.DEFAULT_MAX_FRAME_SIZE)
 
         stream = framing.stream(1)
-        frames = list(framing.createcommandframes(stream, 1, b'command',
-                                                  {}, data))
-        self.assertEqual(frames, [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'command'}"),
-            ffs(b'1 1 0 command-data continuation %s' % data.getvalue()),
-            ffs(b'1 1 0 command-data eos ')
-        ])
+        frames = list(
+            framing.createcommandframes(stream, 1, b'command', {}, data)
+        )
+        self.assertEqual(
+            frames,
+            [
+                ffs(
+                    b'1 1 stream-begin command-request new|have-data '
+                    b"cbor:{b'name': b'command'}"
+                ),
+                ffs(b'1 1 0 command-data continuation %s' % data.getvalue()),
+                ffs(b'1 1 0 command-data eos '),
+            ],
+        )
 
     def testdatamultipleframes(self):
         data = util.bytesio(b'x' * (framing.DEFAULT_MAX_FRAME_SIZE + 1))
 
         stream = framing.stream(1)
-        frames = list(framing.createcommandframes(stream, 1, b'command', {},
-                                                  data))
-        self.assertEqual(frames, [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'command'}"),
-            ffs(b'1 1 0 command-data continuation %s' % (
-                b'x' * framing.DEFAULT_MAX_FRAME_SIZE)),
-            ffs(b'1 1 0 command-data eos x'),
-        ])
+        frames = list(
+            framing.createcommandframes(stream, 1, b'command', {}, data)
+        )
+        self.assertEqual(
+            frames,
+            [
+                ffs(
+                    b'1 1 stream-begin command-request new|have-data '
+                    b"cbor:{b'name': b'command'}"
+                ),
+                ffs(
+                    b'1 1 0 command-data continuation %s'
+                    % (b'x' * framing.DEFAULT_MAX_FRAME_SIZE)
+                ),
+                ffs(b'1 1 0 command-data eos x'),
+            ],
+        )
 
     def testargsanddata(self):
         data = util.bytesio(b'x' * 100)
 
         stream = framing.stream(1)
-        frames = list(framing.createcommandframes(stream, 1, b'command', {
-            b'key1': b'key1value',
-            b'key2': b'key2value',
-            b'key3': b'key3value',
-        }, data))
+        frames = list(
+            framing.createcommandframes(
+                stream,
+                1,
+                b'command',
+                {
+                    b'key1': b'key1value',
+                    b'key2': b'key2value',
+                    b'key3': b'key3value',
+                },
+                data,
+            )
+        )
 
-        self.assertEqual(frames, [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'command', b'args': {b'key1': b'key1value', "
-                b"b'key2': b'key2value', b'key3': b'key3value'}}"),
-            ffs(b'1 1 0 command-data eos %s' % data.getvalue()),
-        ])
+        self.assertEqual(
+            frames,
+            [
+                ffs(
+                    b'1 1 stream-begin command-request new|have-data '
+                    b"cbor:{b'name': b'command', b'args': {b'key1': b'key1value', "
+                    b"b'key2': b'key2value', b'key3': b'key3value'}}"
+                ),
+                ffs(b'1 1 0 command-data eos %s' % data.getvalue()),
+            ],
+        )
 
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
 
     def testtextoutputformattingstringtype(self):
         """Formatting string must be bytes."""
         with self.assertRaisesRegex(ValueError, 'must use bytes formatting '):
-            list(framing.createtextoutputframe(None, 1, [
-                (b'foo'.decode('ascii'), [], [])]))
+            list(
+                framing.createtextoutputframe(
+                    None, 1, [(b'foo'.decode('ascii'), [], [])]
+                )
+            )
 
     def testtextoutputargumentbytes(self):
         with self.assertRaisesRegex(ValueError, 'must use bytes for argument'):
-            list(framing.createtextoutputframe(None, 1, [
-                (b'foo', [b'foo'.decode('ascii')], [])]))
+            list(
+                framing.createtextoutputframe(
+                    None, 1, [(b'foo', [b'foo'.decode('ascii')], [])]
+                )
+            )
 
     def testtextoutputlabelbytes(self):
         with self.assertRaisesRegex(ValueError, 'must use bytes for labels'):
-            list(framing.createtextoutputframe(None, 1, [
-                (b'foo', [], [b'foo'.decode('ascii')])]))
+            list(
+                framing.createtextoutputframe(
+                    None, 1, [(b'foo', [], [b'foo'.decode('ascii')])]
+                )
+            )
 
     def testtextoutput1simpleatom(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo', [], [])]))
+        val = list(framing.createtextoutputframe(stream, 1, [(b'foo', [], [])]))
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo'}]"),
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo'}]"
+                ),
+            ],
+        )
 
     def testtextoutput2simpleatoms(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo', [], []),
-            (b'bar', [], []),
-        ]))
+        val = list(
+            framing.createtextoutputframe(
+                stream, 1, [(b'foo', [], []), (b'bar', [], []),]
+            )
+        )
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo'}, {b'msg': b'bar'}]")
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo'}, {b'msg': b'bar'}]"
+                )
+            ],
+        )
 
     def testtextoutput1arg(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo %s', [b'val1'], []),
-        ]))
+        val = list(
+            framing.createtextoutputframe(
+                stream, 1, [(b'foo %s', [b'val1'], []),]
+            )
+        )
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo %s', b'args': [b'val1']}]")
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo %s', b'args': [b'val1']}]"
+                )
+            ],
+        )
 
     def testtextoutput2arg(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo %s %s', [b'val', b'value'], []),
-        ]))
+        val = list(
+            framing.createtextoutputframe(
+                stream, 1, [(b'foo %s %s', [b'val', b'value'], []),]
+            )
+        )
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo %s %s', b'args': [b'val', b'value']}]")
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo %s %s', b'args': [b'val', b'value']}]"
+                )
+            ],
+        )
 
     def testtextoutput1label(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo', [], [b'label']),
-        ]))
+        val = list(
+            framing.createtextoutputframe(
+                stream, 1, [(b'foo', [], [b'label']),]
+            )
+        )
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo', b'labels': [b'label']}]")
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo', b'labels': [b'label']}]"
+                )
+            ],
+        )
 
     def testargandlabel(self):
         stream = framing.stream(1)
-        val = list(framing.createtextoutputframe(stream, 1, [
-            (b'foo %s', [b'arg'], [b'label']),
-        ]))
+        val = list(
+            framing.createtextoutputframe(
+                stream, 1, [(b'foo %s', [b'arg'], [b'label']),]
+            )
+        )
 
-        self.assertEqual(val, [
-            ffs(b'1 1 stream-begin text-output 0 '
-                b"cbor:[{b'msg': b'foo %s', b'args': [b'arg'], "
-                b"b'labels': [b'label']}]")
-        ])
+        self.assertEqual(
+            val,
+            [
+                ffs(
+                    b'1 1 stream-begin text-output 0 '
+                    b"cbor:[{b'msg': b'foo %s', b'args': [b'arg'], "
+                    b"b'labels': [b'label']}]"
+                )
+            ],
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
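
For readers decoding the expected byte strings in FrameHumanStringTests: the 8-byte header that makeframefromhumanstring emits can be re-derived from the vectors above. A short sketch (an illustrative reconstruction from those test vectors, not the mercurial.wireprotoframing implementation):

import struct

def packframe(requestid, streamid, streamflags, frametype, frameflags, payload):
    header = struct.pack('<I', len(payload))[:3]  # 3-byte LE payload length
    header += struct.pack('<H', requestid)        # 16-bit LE request ID
    header += struct.pack('<BB', streamid, streamflags)
    # Final byte: frame type in the high nibble, frame flags in the low.
    header += struct.pack('<B', (frametype << 4) | frameflags)
    return header + payload

# Matches the vectors asserted in testbasic above.
assert packframe(1, 1, 0, 1, 0, b'') == b'\x00\x00\x00\x01\x00\x01\x00\x10'
assert packframe(2, 4, 0, 1, 0, b'foo') == b'\x03\x00\x00\x02\x00\x04\x00\x10foo'
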
--- a/tests/test-wireproto-serverreactor.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-wireproto-serverreactor.py	Sun Oct 06 09:45:02 2019 -0400
@@ -7,18 +7,18 @@
     util,
     wireprotoframing as framing,
 )
-from mercurial.utils import (
-    cborutil,
-)
+from mercurial.utils import cborutil
 
 ffs = framing.makeframefromhumanstring
 
 OK = b''.join(cborutil.streamencode({b'status': b'ok'}))
 
+
 def makereactor(deferoutput=False):
     ui = uimod.ui()
     return framing.serverreactor(ui, deferoutput=deferoutput)
 
+
 def sendframes(reactor, gen):
     """Send a generator of frame bytearray to a reactor.
 
@@ -26,21 +26,26 @@
     """
     for frame in gen:
         header = framing.parseheader(frame)
-        payload = frame[framing.FRAME_HEADER_SIZE:]
+        payload = frame[framing.FRAME_HEADER_SIZE :]
         assert len(payload) == header.length
 
-        yield reactor.onframerecv(framing.frame(header.requestid,
-                                                header.streamid,
-                                                header.streamflags,
-                                                header.typeid,
-                                                header.flags,
-                                                payload))
+        yield reactor.onframerecv(
+            framing.frame(
+                header.requestid,
+                header.streamid,
+                header.streamflags,
+                header.typeid,
+                header.flags,
+                payload,
+            )
+        )
+
 
 def sendcommandframes(reactor, stream, rid, cmd, args, datafh=None):
     """Generate frames to run a command and send them to a reactor."""
-    return sendframes(reactor,
-                      framing.createcommandframes(stream, rid, cmd, args,
-                                                  datafh))
+    return sendframes(
+        reactor, framing.createcommandframes(stream, rid, cmd, args, datafh)
+    )
 
 
 class ServerReactorTests(unittest.TestCase):
@@ -67,13 +72,16 @@
         results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {}))
         self.assertEqual(len(results), 1)
         self.assertaction(results[0], b'runcommand')
-        self.assertEqual(results[0][1], {
-            b'requestid': 1,
-            b'command': b'mycommand',
-            b'args': {},
-            b'redirect': None,
-            b'data': None,
-        })
+        self.assertEqual(
+            results[0][1],
+            {
+                b'requestid': 1,
+                b'command': b'mycommand',
+                b'args': {},
+                b'redirect': None,
+                b'data': None,
+            },
+        )
 
         result = reactor.oninputeof()
         self.assertaction(result, b'noop')
@@ -81,53 +89,77 @@
     def test1argument(self):
         reactor = makereactor()
         stream = framing.stream(1)
-        results = list(sendcommandframes(reactor, stream, 41, b'mycommand',
-                                         {b'foo': b'bar'}))
+        results = list(
+            sendcommandframes(
+                reactor, stream, 41, b'mycommand', {b'foo': b'bar'}
+            )
+        )
         self.assertEqual(len(results), 1)
         self.assertaction(results[0], b'runcommand')
-        self.assertEqual(results[0][1], {
-            b'requestid': 41,
-            b'command': b'mycommand',
-            b'args': {b'foo': b'bar'},
-            b'redirect': None,
-            b'data': None,
-        })
+        self.assertEqual(
+            results[0][1],
+            {
+                b'requestid': 41,
+                b'command': b'mycommand',
+                b'args': {b'foo': b'bar'},
+                b'redirect': None,
+                b'data': None,
+            },
+        )
 
     def testmultiarguments(self):
         reactor = makereactor()
         stream = framing.stream(1)
-        results = list(sendcommandframes(reactor, stream, 1, b'mycommand',
-                                         {b'foo': b'bar', b'biz': b'baz'}))
+        results = list(
+            sendcommandframes(
+                reactor,
+                stream,
+                1,
+                b'mycommand',
+                {b'foo': b'bar', b'biz': b'baz'},
+            )
+        )
         self.assertEqual(len(results), 1)
         self.assertaction(results[0], b'runcommand')
-        self.assertEqual(results[0][1], {
-            b'requestid': 1,
-            b'command': b'mycommand',
-            b'args': {b'foo': b'bar', b'biz': b'baz'},
-            b'redirect': None,
-            b'data': None,
-        })
+        self.assertEqual(
+            results[0][1],
+            {
+                b'requestid': 1,
+                b'command': b'mycommand',
+                b'args': {b'foo': b'bar', b'biz': b'baz'},
+                b'redirect': None,
+                b'data': None,
+            },
+        )
 
     def testsimplecommanddata(self):
         reactor = makereactor()
         stream = framing.stream(1)
-        results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {},
-                                         util.bytesio(b'data!')))
+        results = list(
+            sendcommandframes(
+                reactor, stream, 1, b'mycommand', {}, util.bytesio(b'data!')
+            )
+        )
         self.assertEqual(len(results), 2)
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'runcommand')
-        self.assertEqual(results[1][1], {
-            b'requestid': 1,
-            b'command': b'mycommand',
-            b'args': {},
-            b'redirect': None,
-            b'data': b'data!',
-        })
+        self.assertEqual(
+            results[1][1],
+            {
+                b'requestid': 1,
+                b'command': b'mycommand',
+                b'args': {},
+                b'redirect': None,
+                b'data': b'data!',
+            },
+        )
 
     def testmultipledataframes(self):
         frames = [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'mycommand'}"),
+            ffs(
+                b'1 1 stream-begin command-request new|have-data '
+                b"cbor:{b'name': b'mycommand'}"
+            ),
             ffs(b'1 1 0 command-data continuation data1'),
             ffs(b'1 1 0 command-data continuation data2'),
             ffs(b'1 1 0 command-data eos data3'),
@@ -139,19 +171,24 @@
         for i in range(3):
             self.assertaction(results[i], b'wantframe')
         self.assertaction(results[3], b'runcommand')
-        self.assertEqual(results[3][1], {
-            b'requestid': 1,
-            b'command': b'mycommand',
-            b'args': {},
-            b'redirect': None,
-            b'data': b'data1data2data3',
-        })
+        self.assertEqual(
+            results[3][1],
+            {
+                b'requestid': 1,
+                b'command': b'mycommand',
+                b'args': {},
+                b'redirect': None,
+                b'data': b'data1data2data3',
+            },
+        )
 
     def testargumentanddata(self):
         frames = [
-            ffs(b'1 1 stream-begin command-request new|have-data '
+            ffs(
+                b'1 1 stream-begin command-request new|have-data '
                 b"cbor:{b'name': b'command', b'args': {b'key': b'val',"
-                b"b'foo': b'bar'}}"),
+                b"b'foo': b'bar'}}"
+            ),
             ffs(b'1 1 0 command-data continuation value1'),
             ffs(b'1 1 0 command-data eos value2'),
         ]
@@ -160,169 +197,251 @@
         results = list(sendframes(reactor, frames))
 
         self.assertaction(results[-1], b'runcommand')
-        self.assertEqual(results[-1][1], {
-            b'requestid': 1,
-            b'command': b'command',
-            b'args': {
-                b'key': b'val',
-                b'foo': b'bar',
+        self.assertEqual(
+            results[-1][1],
+            {
+                b'requestid': 1,
+                b'command': b'command',
+                b'args': {b'key': b'val', b'foo': b'bar',},
+                b'redirect': None,
+                b'data': b'value1value2',
             },
-            b'redirect': None,
-            b'data': b'value1value2',
-        })
+        )
 
     def testnewandcontinuation(self):
-        result = self._sendsingleframe(makereactor(),
-            ffs(b'1 1 stream-begin command-request new|continuation '))
+        result = self._sendsingleframe(
+            makereactor(),
+            ffs(b'1 1 stream-begin command-request new|continuation '),
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'received command request frame with both new and '
-                        b'continuation flags set',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'received command request frame with both new and '
+                b'continuation flags set',
+            },
+        )
 
     def testneithernewnorcontinuation(self):
-        result = self._sendsingleframe(makereactor(),
-            ffs(b'1 1 stream-begin command-request 0 '))
+        result = self._sendsingleframe(
+            makereactor(), ffs(b'1 1 stream-begin command-request 0 ')
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'received command request frame with neither new nor '
-                        b'continuation flags set',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'received command request frame with neither new nor '
+                b'continuation flags set',
+            },
+        )
 
     def testunexpectedcommanddata(self):
         """Command data frame when not running a command is an error."""
-        result = self._sendsingleframe(makereactor(),
-            ffs(b'1 1 stream-begin command-data 0 ignored'))
+        result = self._sendsingleframe(
+            makereactor(), ffs(b'1 1 stream-begin command-data 0 ignored')
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'expected sender protocol settings or command request '
-                        b'frame; got 2',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'expected sender protocol settings or command request '
+                b'frame; got 2',
+            },
+        )
 
     def testunexpectedcommanddatareceiving(self):
         """Same as above except the command is receiving."""
-        results = list(sendframes(makereactor(), [
-            ffs(b'1 1 stream-begin command-request new|more '
-                b"cbor:{b'name': b'ignored'}"),
-            ffs(b'1 1 0 command-data eos ignored'),
-        ]))
+        results = list(
+            sendframes(
+                makereactor(),
+                [
+                    ffs(
+                        b'1 1 stream-begin command-request new|more '
+                        b"cbor:{b'name': b'ignored'}"
+                    ),
+                    ffs(b'1 1 0 command-data eos ignored'),
+                ],
+            )
+        )
 
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
-        self.assertEqual(results[1][1], {
-            b'message': b'received command data frame for request that is not '
-                        b'expecting data: 1',
-        })
+        self.assertEqual(
+            results[1][1],
+            {
+                b'message': b'received command data frame for request that is not '
+                b'expecting data: 1',
+            },
+        )
 
     def testconflictingrequestidallowed(self):
         """Multiple fully serviced commands with same request ID is allowed."""
         reactor = makereactor()
         results = []
         outstream = reactor.makeoutputstream()
-        results.append(self._sendsingleframe(
-            reactor, ffs(b'1 1 stream-begin command-request new '
-                         b"cbor:{b'name': b'command'}")))
+        results.append(
+            self._sendsingleframe(
+                reactor,
+                ffs(
+                    b'1 1 stream-begin command-request new '
+                    b"cbor:{b'name': b'command'}"
+                ),
+            )
+        )
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response1'])
+            outstream, 1, [b'response1']
+        )
         self.assertaction(result, b'sendframes')
         list(result[1][b'framegen'])
-        results.append(self._sendsingleframe(
-            reactor, ffs(b'1 1 stream-begin command-request new '
-                         b"cbor:{b'name': b'command'}")))
+        results.append(
+            self._sendsingleframe(
+                reactor,
+                ffs(
+                    b'1 1 stream-begin command-request new '
+                    b"cbor:{b'name': b'command'}"
+                ),
+            )
+        )
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response2'])
+            outstream, 1, [b'response2']
+        )
         self.assertaction(result, b'sendframes')
         list(result[1][b'framegen'])
-        results.append(self._sendsingleframe(
-            reactor, ffs(b'1 1 stream-begin command-request new '
-                         b"cbor:{b'name': b'command'}")))
+        results.append(
+            self._sendsingleframe(
+                reactor,
+                ffs(
+                    b'1 1 stream-begin command-request new '
+                    b"cbor:{b'name': b'command'}"
+                ),
+            )
+        )
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response3'])
+            outstream, 1, [b'response3']
+        )
         self.assertaction(result, b'sendframes')
         list(result[1][b'framegen'])
 
         for i in range(3):
             self.assertaction(results[i], b'runcommand')
-            self.assertEqual(results[i][1], {
-                b'requestid': 1,
-                b'command': b'command',
-                b'args': {},
-                b'redirect': None,
-                b'data': None,
-            })
+            self.assertEqual(
+                results[i][1],
+                {
+                    b'requestid': 1,
+                    b'command': b'command',
+                    b'args': {},
+                    b'redirect': None,
+                    b'data': None,
+                },
+            )
 
     def testconflictingrequestid(self):
         """Request ID for new command matching in-flight command is illegal."""
-        results = list(sendframes(makereactor(), [
-            ffs(b'1 1 stream-begin command-request new|more '
-                b"cbor:{b'name': b'command'}"),
-            ffs(b'1 1 0 command-request new '
-                b"cbor:{b'name': b'command1'}"),
-        ]))
+        results = list(
+            sendframes(
+                makereactor(),
+                [
+                    ffs(
+                        b'1 1 stream-begin command-request new|more '
+                        b"cbor:{b'name': b'command'}"
+                    ),
+                    ffs(
+                        b'1 1 0 command-request new '
+                        b"cbor:{b'name': b'command1'}"
+                    ),
+                ],
+            )
+        )
 
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
-        self.assertEqual(results[1][1], {
-            b'message': b'request with ID 1 already received',
-        })
+        self.assertEqual(
+            results[1][1], {b'message': b'request with ID 1 already received',}
+        )
 
     def testinterleavedcommands(self):
-        cbor1 = b''.join(cborutil.streamencode({
-            b'name': b'command1',
-            b'args': {
-                b'foo': b'bar',
-                b'key1': b'val',
-            }
-        }))
-        cbor3 = b''.join(cborutil.streamencode({
-            b'name': b'command3',
-            b'args': {
-                b'biz': b'baz',
-                b'key': b'val',
-            },
-        }))
-
-        results = list(sendframes(makereactor(), [
-            ffs(b'1 1 stream-begin command-request new|more %s' % cbor1[0:6]),
-            ffs(b'3 1 0 command-request new|more %s' % cbor3[0:10]),
-            ffs(b'1 1 0 command-request continuation|more %s' % cbor1[6:9]),
-            ffs(b'3 1 0 command-request continuation|more %s' % cbor3[10:13]),
-            ffs(b'3 1 0 command-request continuation %s' % cbor3[13:]),
-            ffs(b'1 1 0 command-request continuation %s' % cbor1[9:]),
-        ]))
+        cbor1 = b''.join(
+            cborutil.streamencode(
+                {
+                    b'name': b'command1',
+                    b'args': {b'foo': b'bar', b'key1': b'val',},
+                }
+            )
+        )
+        cbor3 = b''.join(
+            cborutil.streamencode(
+                {
+                    b'name': b'command3',
+                    b'args': {b'biz': b'baz', b'key': b'val',},
+                }
+            )
+        )
 
-        self.assertEqual([t[0] for t in results], [
-            b'wantframe',
-            b'wantframe',
-            b'wantframe',
-            b'wantframe',
-            b'runcommand',
-            b'runcommand',
-        ])
+        results = list(
+            sendframes(
+                makereactor(),
+                [
+                    ffs(
+                        b'1 1 stream-begin command-request new|more %s'
+                        % cbor1[0:6]
+                    ),
+                    ffs(b'3 1 0 command-request new|more %s' % cbor3[0:10]),
+                    ffs(
+                        b'1 1 0 command-request continuation|more %s'
+                        % cbor1[6:9]
+                    ),
+                    ffs(
+                        b'3 1 0 command-request continuation|more %s'
+                        % cbor3[10:13]
+                    ),
+                    ffs(b'3 1 0 command-request continuation %s' % cbor3[13:]),
+                    ffs(b'1 1 0 command-request continuation %s' % cbor1[9:]),
+                ],
+            )
+        )
 
-        self.assertEqual(results[4][1], {
-            b'requestid': 3,
-            b'command': b'command3',
-            b'args': {b'biz': b'baz', b'key': b'val'},
-            b'redirect': None,
-            b'data': None,
-        })
-        self.assertEqual(results[5][1], {
-            b'requestid': 1,
-            b'command': b'command1',
-            b'args': {b'foo': b'bar', b'key1': b'val'},
-            b'redirect': None,
-            b'data': None,
-        })
+        self.assertEqual(
+            [t[0] for t in results],
+            [
+                b'wantframe',
+                b'wantframe',
+                b'wantframe',
+                b'wantframe',
+                b'runcommand',
+                b'runcommand',
+            ],
+        )
+
+        self.assertEqual(
+            results[4][1],
+            {
+                b'requestid': 3,
+                b'command': b'command3',
+                b'args': {b'biz': b'baz', b'key': b'val'},
+                b'redirect': None,
+                b'data': None,
+            },
+        )
+        self.assertEqual(
+            results[5][1],
+            {
+                b'requestid': 1,
+                b'command': b'command1',
+                b'args': {b'foo': b'bar', b'key1': b'val'},
+                b'redirect': None,
+                b'data': None,
+            },
+        )
 
     def testmissingcommanddataframe(self):
         # The reactor doesn't currently handle partially received commands.
         # So this test is failing to do anything with request 1.
         frames = [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'command1'}"),
-            ffs(b'3 1 0 command-request new '
-                b"cbor:{b'name': b'command2'}"),
+            ffs(
+                b'1 1 stream-begin command-request new|have-data '
+                b"cbor:{b'name': b'command1'}"
+            ),
+            ffs(b'3 1 0 command-request new ' b"cbor:{b'name': b'command2'}"),
         ]
         results = list(sendframes(makereactor(), frames))
         self.assertEqual(len(results), 2)
@@ -331,31 +450,45 @@
 
     def testmissingcommanddataframeflags(self):
         frames = [
-            ffs(b'1 1 stream-begin command-request new|have-data '
-                b"cbor:{b'name': b'command1'}"),
+            ffs(
+                b'1 1 stream-begin command-request new|have-data '
+                b"cbor:{b'name': b'command1'}"
+            ),
             ffs(b'1 1 0 command-data 0 data'),
         ]
         results = list(sendframes(makereactor(), frames))
         self.assertEqual(len(results), 2)
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
-        self.assertEqual(results[1][1], {
-            b'message': b'command data frame without flags',
-        })
+        self.assertEqual(
+            results[1][1], {b'message': b'command data frame without flags',}
+        )
 
     def testframefornonreceivingrequest(self):
         """Receiving a frame for a command that is not receiving is illegal."""
-        results = list(sendframes(makereactor(), [
-            ffs(b'1 1 stream-begin command-request new '
-                b"cbor:{b'name': b'command1'}"),
-            ffs(b'3 1 0 command-request new|have-data '
-                b"cbor:{b'name': b'command3'}"),
-            ffs(b'5 1 0 command-data eos ignored'),
-        ]))
+        results = list(
+            sendframes(
+                makereactor(),
+                [
+                    ffs(
+                        b'1 1 stream-begin command-request new '
+                        b"cbor:{b'name': b'command1'}"
+                    ),
+                    ffs(
+                        b'3 1 0 command-request new|have-data '
+                        b"cbor:{b'name': b'command3'}"
+                    ),
+                    ffs(b'5 1 0 command-data eos ignored'),
+                ],
+            )
+        )
         self.assertaction(results[2], b'error')
-        self.assertEqual(results[2][1], {
-            b'message': b'received frame for request that is not receiving: 5',
-        })
+        self.assertEqual(
+            results[2][1],
+            {
+                b'message': b'received frame for request that is not receiving: 5',
+            },
+        )
 
     def testsimpleresponse(self):
         """Bytes response to command sends result frames."""
@@ -365,14 +498,18 @@
 
         outstream = reactor.makeoutputstream()
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response'])
+            outstream, 1, [b'response']
+        )
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b'1 2 stream-begin stream-settings eos cbor:b"identity"',
-            b'1 2 encoded command-response continuation %s' % OK,
-            b'1 2 encoded command-response continuation cbor:b"response"',
-            b'1 2 0 command-response eos ',
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b'1 2 stream-begin stream-settings eos cbor:b"identity"',
+                b'1 2 encoded command-response continuation %s' % OK,
+                b'1 2 encoded command-response continuation cbor:b"response"',
+                b'1 2 0 command-response eos ',
+            ],
+        )
 
     def testmultiframeresponse(self):
         """Bytes response spanning multiple frames is handled."""
@@ -385,16 +522,20 @@
 
         outstream = reactor.makeoutputstream()
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [first + second])
+            outstream, 1, [first + second]
+        )
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b'1 2 stream-begin stream-settings eos cbor:b"identity"',
-            b'1 2 encoded command-response continuation %s' % OK,
-            b'1 2 encoded command-response continuation Y\x80d',
-            b'1 2 encoded command-response continuation %s' % first,
-            b'1 2 encoded command-response continuation %s' % second,
-            b'1 2 0 command-response eos '
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b'1 2 stream-begin stream-settings eos cbor:b"identity"',
+                b'1 2 encoded command-response continuation %s' % OK,
+                b'1 2 encoded command-response continuation Y\x80d',
+                b'1 2 encoded command-response continuation %s' % first,
+                b'1 2 encoded command-response continuation %s' % second,
+                b'1 2 0 command-response eos ',
+            ],
+        )
 
     def testservererror(self):
         reactor = makereactor()
@@ -404,33 +545,41 @@
         outstream = reactor.makeoutputstream()
         result = reactor.onservererror(outstream, 1, b'some message')
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b"1 2 stream-begin error-response 0 "
-            b"cbor:{b'type': b'server', "
-            b"b'message': [{b'msg': b'some message'}]}",
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b"1 2 stream-begin error-response 0 "
+                b"cbor:{b'type': b'server', "
+                b"b'message': [{b'msg': b'some message'}]}",
+            ],
+        )
 
     def test1commanddeferresponse(self):
         """Responses when in deferred output mode are delayed until EOF."""
         reactor = makereactor(deferoutput=True)
         instream = framing.stream(1)
-        results = list(sendcommandframes(reactor, instream, 1, b'mycommand',
-                                         {}))
+        results = list(
+            sendcommandframes(reactor, instream, 1, b'mycommand', {})
+        )
         self.assertEqual(len(results), 1)
         self.assertaction(results[0], b'runcommand')
 
         outstream = reactor.makeoutputstream()
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response'])
+            outstream, 1, [b'response']
+        )
         self.assertaction(result, b'noop')
         result = reactor.oninputeof()
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b'1 2 stream-begin stream-settings eos cbor:b"identity"',
-            b'1 2 encoded command-response continuation %s' % OK,
-            b'1 2 encoded command-response continuation cbor:b"response"',
-            b'1 2 0 command-response eos ',
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b'1 2 stream-begin stream-settings eos cbor:b"identity"',
+                b'1 2 encoded command-response continuation %s' % OK,
+                b'1 2 encoded command-response continuation cbor:b"response"',
+                b'1 2 0 command-response eos ',
+            ],
+        )
 
     def testmultiplecommanddeferresponse(self):
         reactor = makereactor(deferoutput=True)
@@ -440,22 +589,27 @@
 
         outstream = reactor.makeoutputstream()
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 1, [b'response1'])
+            outstream, 1, [b'response1']
+        )
         self.assertaction(result, b'noop')
         result = reactor.oncommandresponsereadyobjects(
-            outstream, 3, [b'response2'])
+            outstream, 3, [b'response2']
+        )
         self.assertaction(result, b'noop')
         result = reactor.oninputeof()
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b'1 2 stream-begin stream-settings eos cbor:b"identity"',
-            b'1 2 encoded command-response continuation %s' % OK,
-            b'1 2 encoded command-response continuation cbor:b"response1"',
-            b'1 2 0 command-response eos ',
-            b'3 2 encoded command-response continuation %s' % OK,
-            b'3 2 encoded command-response continuation cbor:b"response2"',
-            b'3 2 0 command-response eos ',
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b'1 2 stream-begin stream-settings eos cbor:b"identity"',
+                b'1 2 encoded command-response continuation %s' % OK,
+                b'1 2 encoded command-response continuation cbor:b"response1"',
+                b'1 2 0 command-response eos ',
+                b'3 2 encoded command-response continuation %s' % OK,
+                b'3 2 encoded command-response continuation cbor:b"response2"',
+                b'3 2 0 command-response eos ',
+            ],
+        )
 
     def testrequestidtracking(self):
         reactor = makereactor(deferoutput=True)
@@ -472,18 +626,21 @@
 
         result = reactor.oninputeof()
         self.assertaction(result, b'sendframes')
-        self.assertframesequal(result[1][b'framegen'], [
-            b'3 2 stream-begin stream-settings eos cbor:b"identity"',
-            b'3 2 encoded command-response continuation %s' % OK,
-            b'3 2 encoded command-response continuation cbor:b"response3"',
-            b'3 2 0 command-response eos ',
-            b'1 2 encoded command-response continuation %s' % OK,
-            b'1 2 encoded command-response continuation cbor:b"response1"',
-            b'1 2 0 command-response eos ',
-            b'5 2 encoded command-response continuation %s' % OK,
-            b'5 2 encoded command-response continuation cbor:b"response5"',
-            b'5 2 0 command-response eos ',
-        ])
+        self.assertframesequal(
+            result[1][b'framegen'],
+            [
+                b'3 2 stream-begin stream-settings eos cbor:b"identity"',
+                b'3 2 encoded command-response continuation %s' % OK,
+                b'3 2 encoded command-response continuation cbor:b"response3"',
+                b'3 2 0 command-response eos ',
+                b'1 2 encoded command-response continuation %s' % OK,
+                b'1 2 encoded command-response continuation cbor:b"response1"',
+                b'1 2 0 command-response eos ',
+                b'5 2 encoded command-response continuation %s' % OK,
+                b'5 2 encoded command-response continuation cbor:b"response5"',
+                b'5 2 0 command-response eos ',
+            ],
+        )
 
     def testduplicaterequestonactivecommand(self):
         """Receiving a request ID that matches a request that isn't finished."""
@@ -493,9 +650,9 @@
         results = list(sendcommandframes(reactor, stream, 1, b'command1', {}))
 
         self.assertaction(results[0], b'error')
-        self.assertEqual(results[0][1], {
-            b'message': b'request with ID 1 is already active',
-        })
+        self.assertEqual(
+            results[0][1], {b'message': b'request with ID 1 is already active',}
+        )
 
     def testduplicaterequestonactivecommandnosend(self):
         """Same as above but we've registered a response but haven't sent it."""
@@ -510,9 +667,9 @@
 
         results = list(sendcommandframes(reactor, instream, 1, b'command1', {}))
         self.assertaction(results[0], b'error')
-        self.assertEqual(results[0][1], {
-            b'message': b'request with ID 1 is already active',
-        })
+        self.assertEqual(
+            results[0][1], {b'message': b'request with ID 1 is already active',}
+        )
 
     def testduplicaterequestaftersend(self):
         """We can use a duplicate request ID after we've sent the response."""
@@ -528,70 +685,100 @@
 
     def testprotocolsettingsnoflags(self):
         result = self._sendsingleframe(
-            makereactor(),
-            ffs(b'0 1 stream-begin sender-protocol-settings 0 '))
+            makereactor(), ffs(b'0 1 stream-begin sender-protocol-settings 0 ')
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'sender protocol settings frame must have '
-                        b'continuation or end of stream flag set',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'sender protocol settings frame must have '
+                b'continuation or end of stream flag set',
+            },
+        )
 
     def testprotocolsettingsconflictflags(self):
         result = self._sendsingleframe(
             makereactor(),
-            ffs(b'0 1 stream-begin sender-protocol-settings continuation|eos '))
+            ffs(b'0 1 stream-begin sender-protocol-settings continuation|eos '),
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'sender protocol settings frame cannot have both '
-                        b'continuation and end of stream flags set',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'sender protocol settings frame cannot have both '
+                b'continuation and end of stream flags set',
+            },
+        )
 
     def testprotocolsettingsemptypayload(self):
         result = self._sendsingleframe(
             makereactor(),
-            ffs(b'0 1 stream-begin sender-protocol-settings eos '))
+            ffs(b'0 1 stream-begin sender-protocol-settings eos '),
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'sender protocol settings frame did not contain CBOR '
-                        b'data',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'sender protocol settings frame did not contain CBOR '
+                b'data',
+            },
+        )
 
     def testprotocolsettingsmultipleobjects(self):
         result = self._sendsingleframe(
             makereactor(),
-            ffs(b'0 1 stream-begin sender-protocol-settings eos '
-                b'\x46foobar\x43foo'))
+            ffs(
+                b'0 1 stream-begin sender-protocol-settings eos '
+                b'\x46foobar\x43foo'
+            ),
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'sender protocol settings frame contained multiple '
-                        b'CBOR values',
-        })
+        self.assertEqual(
+            result[1],
+            {
+                b'message': b'sender protocol settings frame contained multiple '
+                b'CBOR values',
+            },
+        )
 
     def testprotocolsettingscontentencodings(self):
         reactor = makereactor()
 
         result = self._sendsingleframe(
             reactor,
-            ffs(b'0 1 stream-begin sender-protocol-settings eos '
-                b'cbor:{b"contentencodings": [b"a", b"b"]}'))
+            ffs(
+                b'0 1 stream-begin sender-protocol-settings eos '
+                b'cbor:{b"contentencodings": [b"a", b"b"]}'
+            ),
+        )
         self.assertaction(result, b'wantframe')
 
         self.assertEqual(reactor._state, b'idle')
-        self.assertEqual(reactor._sendersettings[b'contentencodings'],
-                         [b'a', b'b'])
+        self.assertEqual(
+            reactor._sendersettings[b'contentencodings'], [b'a', b'b']
+        )
 
     def testprotocolsettingsmultipleframes(self):
         reactor = makereactor()
 
-        data = b''.join(cborutil.streamencode({
-            b'contentencodings': [b'value1', b'value2'],
-        }))
+        data = b''.join(
+            cborutil.streamencode(
+                {b'contentencodings': [b'value1', b'value2'],}
+            )
+        )
 
-        results = list(sendframes(reactor, [
-            ffs(b'0 1 stream-begin sender-protocol-settings continuation %s' %
-                data[0:5]),
-            ffs(b'0 1 0 sender-protocol-settings eos %s' % data[5:]),
-        ]))
+        results = list(
+            sendframes(
+                reactor,
+                [
+                    ffs(
+                        b'0 1 stream-begin sender-protocol-settings continuation %s'
+                        % data[0:5]
+                    ),
+                    ffs(b'0 1 0 sender-protocol-settings eos %s' % data[5:]),
+                ],
+            )
+        )
 
         self.assertEqual(len(results), 2)
 
@@ -599,13 +786,15 @@
         self.assertaction(results[1], b'wantframe')
 
         self.assertEqual(reactor._state, b'idle')
-        self.assertEqual(reactor._sendersettings[b'contentencodings'],
-                         [b'value1', b'value2'])
+        self.assertEqual(
+            reactor._sendersettings[b'contentencodings'], [b'value1', b'value2']
+        )
 
     def testprotocolsettingsbadcbor(self):
         result = self._sendsingleframe(
             makereactor(),
-            ffs(b'0 1 stream-begin sender-protocol-settings eos badvalue'))
+            ffs(b'0 1 stream-begin sender-protocol-settings eos badvalue'),
+        )
         self.assertaction(result, b'error')
 
     def testprotocolsettingsnoninitial(self):
@@ -618,13 +807,15 @@
         self.assertaction(results[0], b'runcommand')
 
         result = self._sendsingleframe(
-            reactor,
-            ffs(b'0 1 0 sender-protocol-settings eos '))
+            reactor, ffs(b'0 1 0 sender-protocol-settings eos ')
+        )
         self.assertaction(result, b'error')
-        self.assertEqual(result[1], {
-            b'message': b'expected command request frame; got 8',
-        })
+        self.assertEqual(
+            result[1], {b'message': b'expected command request frame; got 8',}
+        )
+
 
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
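
The hunks above all show black's core rewrapping rule: a call that overflows 88 columns gets one argument per line, the closing parenthesis is dedented back to the opening line's indentation, and adjacent byte-string fragments stay adjacent but are re-indented to the block level instead of being hand-aligned under the opening quote. A minimal runnable sketch of that shape (the test class, values, and message here are hypothetical, not taken from the diff):

    import unittest


    class WrappingDemo(unittest.TestCase):
        def test_wrapped_call(self):
            # Stand-in for the (action, metadata) result checked above.
            result = (b'error', {b'message': b'first half second half'})
            # One argument per line; the two adjacent byte-string
            # fragments concatenate at compile time, just as in the
            # reformatted assertions above.
            self.assertEqual(
                result[1],
                {b'message': b'first half ' b'second half'},
            )


    if __name__ == '__main__':
        unittest.main()
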
--- a/tests/test-wireproto.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-wireproto.py	Sun Oct 06 09:45:02 2019 -0400
@@ -11,11 +11,11 @@
     wireprotov1peer,
     wireprotov1server,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
+
 stringio = util.stringio
 
+
 class proto(object):
     def __init__(self, args):
         self.args = args
@@ -30,11 +30,13 @@
     def checkperm(self, perm):
         pass
 
+
 wireprototypes.TRANSPORTS['dummyproto'] = {
     'transport': 'dummy',
     'version': 1,
 }
 
+
 class clientpeer(wireprotov1peer.wirepeer):
     def __init__(self, serverrepo, ui):
         self.serverrepo = serverrepo
@@ -77,6 +79,7 @@
         yield {b'name': mangle(name)}, f
         yield unmangle(f.value)
 
+
 class serverrepo(object):
     def __init__(self, ui):
         self.ui = ui
@@ -87,29 +90,37 @@
     def filtered(self, name):
         return self
 
+
 def mangle(s):
     return b''.join(pycompat.bytechr(ord(c) + 1) for c in pycompat.bytestr(s))
+
+
 def unmangle(s):
     return b''.join(pycompat.bytechr(ord(c) - 1) for c in pycompat.bytestr(s))
 
+
 def greet(repo, proto, name):
     return mangle(repo.greet(unmangle(name)))
 
+
 wireprotov1server.commands[b'greet'] = (greet, b'name')
 
 srv = serverrepo(uimod.ui())
 clt = clientpeer(srv, uimod.ui())
 
+
 def printb(data, end=b'\n'):
     out = getattr(sys.stdout, 'buffer', sys.stdout)
     out.write(data + end)
     out.flush()
 
+
 printb(clt.greet(b"Foobar"))
 
 with clt.commandexecutor() as e:
     fgreet1 = e.callcommand(b'greet', {b'name': b'Fo, =;:<o'})
     fgreet2 = e.callcommand(b'greet', {b'name': b'Bar'})
 
-printb(stringutil.pprint([f.result() for f in (fgreet1, fgreet2)],
-                         bprefix=True))
+printb(
+    stringutil.pprint([f.result() for f in (fgreet1, fgreet2)], bprefix=True)
+)
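
In test-wireproto.py the changes are purely mechanical: a parenthesized import of a single name collapses onto one line, and top-level definitions gain the two blank lines PEP 8 asks for. A Python 3-only re-statement of the mangle/unmangle round trip above, showing the two-blank-line separation (the originals use pycompat for Python 2 compatibility, so this is a sketch, not the shipped code):

    def mangle(s):
        # Shift every byte up by one, mirroring the helper above.
        return bytes(c + 1 for c in s)


    def unmangle(s):
        # Shift every byte back down; inverse of mangle().
        return bytes(c - 1 for c in s)


    assert unmangle(mangle(b'Foobar')) == b'Foobar'
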
--- a/tests/test-wsgirequest.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/test-wsgirequest.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,12 +2,8 @@
 
 import unittest
 
-from mercurial.hgweb import (
-    request as requestmod,
-)
-from mercurial import (
-    error,
-)
+from mercurial.hgweb import request as requestmod
+from mercurial import error
 
 DEFAULT_ENV = {
     r'REQUEST_METHOD': r'GET',
@@ -23,12 +19,15 @@
     r'wsgi.run_once': False,
 }
 
+
 def parse(env, reponame=None, altbaseurl=None, extra=None):
     env = dict(env)
     env.update(extra or {})
 
-    return requestmod.parserequestfromenv(env, reponame=reponame,
-                                          altbaseurl=altbaseurl)
+    return requestmod.parserequestfromenv(
+        env, reponame=reponame, altbaseurl=altbaseurl
+    )
+
 
 class ParseRequestTests(unittest.TestCase):
     def testdefault(self):
@@ -50,19 +49,17 @@
         self.assertEqual(len(r.headers), 0)
 
     def testcustomport(self):
-        r = parse(DEFAULT_ENV, extra={
-            r'SERVER_PORT': r'8000',
-        })
+        r = parse(DEFAULT_ENV, extra={r'SERVER_PORT': r'8000',})
 
         self.assertEqual(r.url, b'http://testserver:8000')
         self.assertEqual(r.baseurl, b'http://testserver:8000')
         self.assertEqual(r.advertisedurl, b'http://testserver:8000')
         self.assertEqual(r.advertisedbaseurl, b'http://testserver:8000')
 
-        r = parse(DEFAULT_ENV, extra={
-            r'SERVER_PORT': r'4000',
-            r'wsgi.url_scheme': r'https',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            extra={r'SERVER_PORT': r'4000', r'wsgi.url_scheme': r'https',},
+        )
 
         self.assertEqual(r.url, b'https://testserver:4000')
         self.assertEqual(r.baseurl, b'https://testserver:4000')
@@ -70,9 +67,7 @@
         self.assertEqual(r.advertisedbaseurl, b'https://testserver:4000')
 
     def testhttphost(self):
-        r = parse(DEFAULT_ENV, extra={
-            r'HTTP_HOST': r'altserver',
-        })
+        r = parse(DEFAULT_ENV, extra={r'HTTP_HOST': r'altserver',})
 
         self.assertEqual(r.url, b'http://altserver')
         self.assertEqual(r.baseurl, b'http://altserver')
@@ -80,9 +75,7 @@
         self.assertEqual(r.advertisedbaseurl, b'http://testserver')
 
     def testscriptname(self):
-        r = parse(DEFAULT_ENV, extra={
-            r'SCRIPT_NAME': r'',
-        })
+        r = parse(DEFAULT_ENV, extra={r'SCRIPT_NAME': r'',})
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -92,9 +85,7 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={
-            r'SCRIPT_NAME': r'/script',
-        })
+        r = parse(DEFAULT_ENV, extra={r'SCRIPT_NAME': r'/script',})
 
         self.assertEqual(r.url, b'http://testserver/script')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -104,9 +95,7 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={
-            r'SCRIPT_NAME': r'/multiple words',
-        })
+        r = parse(DEFAULT_ENV, extra={r'SCRIPT_NAME': r'/multiple words',})
 
         self.assertEqual(r.url, b'http://testserver/multiple%20words')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -117,9 +106,7 @@
         self.assertIsNone(r.dispatchpath)
 
     def testpathinfo(self):
-        r = parse(DEFAULT_ENV, extra={
-            r'PATH_INFO': r'',
-        })
+        r = parse(DEFAULT_ENV, extra={r'PATH_INFO': r'',})
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -129,9 +116,7 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertEqual(r.dispatchpath, b'')
 
-        r = parse(DEFAULT_ENV, extra={
-            r'PATH_INFO': r'/pathinfo',
-        })
+        r = parse(DEFAULT_ENV, extra={r'PATH_INFO': r'/pathinfo',})
 
         self.assertEqual(r.url, b'http://testserver/pathinfo')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -141,9 +126,7 @@
         self.assertEqual(r.dispatchparts, [b'pathinfo'])
         self.assertEqual(r.dispatchpath, b'pathinfo')
 
-        r = parse(DEFAULT_ENV, extra={
-            r'PATH_INFO': r'/one/two/',
-        })
+        r = parse(DEFAULT_ENV, extra={r'PATH_INFO': r'/one/two/',})
 
         self.assertEqual(r.url, b'http://testserver/one/two/')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -154,10 +137,10 @@
         self.assertEqual(r.dispatchpath, b'one/two')
 
     def testscriptandpathinfo(self):
-        r = parse(DEFAULT_ENV, extra={
-            r'SCRIPT_NAME': r'/script',
-            r'PATH_INFO': r'/pathinfo',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            extra={r'SCRIPT_NAME': r'/script', r'PATH_INFO': r'/pathinfo',},
+        )
 
         self.assertEqual(r.url, b'http://testserver/script/pathinfo')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -167,26 +150,34 @@
         self.assertEqual(r.dispatchparts, [b'pathinfo'])
         self.assertEqual(r.dispatchpath, b'pathinfo')
 
-        r = parse(DEFAULT_ENV, extra={
-            r'SCRIPT_NAME': r'/script1/script2',
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                r'SCRIPT_NAME': r'/script1/script2',
+                r'PATH_INFO': r'/path1/path2',
+            },
+        )
 
-        self.assertEqual(r.url,
-                         b'http://testserver/script1/script2/path1/path2')
+        self.assertEqual(
+            r.url, b'http://testserver/script1/script2/path1/path2'
+        )
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://testserver/script1/script2/path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://testserver/script1/script2/path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://testserver')
         self.assertEqual(r.apppath, b'/script1/script2')
         self.assertEqual(r.dispatchparts, [b'path1', b'path2'])
         self.assertEqual(r.dispatchpath, b'path1/path2')
 
-        r = parse(DEFAULT_ENV, extra={
-            r'HTTP_HOST': r'hostserver',
-            r'SCRIPT_NAME': r'/script',
-            r'PATH_INFO': r'/pathinfo',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                r'HTTP_HOST': r'hostserver',
+                r'SCRIPT_NAME': r'/script',
+                r'PATH_INFO': r'/pathinfo',
+            },
+        )
 
         self.assertEqual(r.url, b'http://hostserver/script/pathinfo')
         self.assertEqual(r.baseurl, b'http://hostserver')
@@ -199,32 +190,41 @@
     if not getattr(unittest.TestCase, 'assertRaisesRegex', False):
         # Python 3.7 deprecates the regex*p* version, but 2.7 lacks
         # the regex version.
-        assertRaisesRegex = (# camelcase-required
-            unittest.TestCase.assertRaisesRegexp)
+        assertRaisesRegex = (  # camelcase-required
+            unittest.TestCase.assertRaisesRegexp
+        )
 
     def testreponame(self):
         """repository path components get stripped from URL."""
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                    'reponame requires PATH_INFO'):
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'reponame requires PATH_INFO'
+        ):
             parse(DEFAULT_ENV, reponame=b'repo')
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                    'PATH_INFO does not begin with repo '
-                                    'name'):
-            parse(DEFAULT_ENV, reponame=b'repo', extra={
-                r'PATH_INFO': r'/pathinfo',
-            })
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'PATH_INFO does not begin with repo ' 'name'
+        ):
+            parse(
+                DEFAULT_ENV,
+                reponame=b'repo',
+                extra={r'PATH_INFO': r'/pathinfo',},
+            )
 
-        with self.assertRaisesRegex(error.ProgrammingError,
-                                    'reponame prefix of PATH_INFO'):
-            parse(DEFAULT_ENV, reponame=b'repo', extra={
-                r'PATH_INFO': r'/repoextra/path',
-            })
+        with self.assertRaisesRegex(
+            error.ProgrammingError, 'reponame prefix of PATH_INFO'
+        ):
+            parse(
+                DEFAULT_ENV,
+                reponame=b'repo',
+                extra={r'PATH_INFO': r'/repoextra/path',},
+            )
 
-        r = parse(DEFAULT_ENV, reponame=b'repo', extra={
-            r'PATH_INFO': r'/repo/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            reponame=b'repo',
+            extra={r'PATH_INFO': r'/repo/path1/path2',},
+        )
 
         self.assertEqual(r.url, b'http://testserver/repo/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -235,14 +235,17 @@
         self.assertEqual(r.dispatchpath, b'path1/path2')
         self.assertEqual(r.reponame, b'repo')
 
-        r = parse(DEFAULT_ENV, reponame=b'prefix/repo', extra={
-            r'PATH_INFO': r'/prefix/repo/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            reponame=b'prefix/repo',
+            extra={r'PATH_INFO': r'/prefix/repo/path1/path2',},
+        )
 
         self.assertEqual(r.url, b'http://testserver/prefix/repo/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://testserver/prefix/repo/path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://testserver/prefix/repo/path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://testserver')
         self.assertEqual(r.apppath, b'/prefix/repo')
         self.assertEqual(r.dispatchparts, [b'path1', b'path2'])
@@ -301,9 +304,11 @@
         self.assertIsNone(r.reponame)
 
         # With only PATH_INFO defined.
-        r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver', extra={
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            altbaseurl=b'http://altserver',
+            extra={r'PATH_INFO': r'/path1/path2',},
+        )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
         self.assertEqual(r.advertisedurl, b'http://altserver/path1/path2')
@@ -339,13 +344,16 @@
         self.assertIsNone(r.reponame)
 
         # PATH_INFO + path on alt URL.
-        r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath', extra={
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            altbaseurl=b'http://altserver/altpath',
+            extra={r'PATH_INFO': r'/path1/path2',},
+        )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://altserver/altpath/path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://altserver/altpath/path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://altserver')
         self.assertEqual(r.urlscheme, b'http')
         self.assertEqual(r.apppath, b'/altpath')
@@ -354,13 +362,16 @@
         self.assertIsNone(r.reponame)
 
         # PATH_INFO + path on alt URL with trailing slash.
-        r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altpath/', extra={
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            altbaseurl=b'http://altserver/altpath/',
+            extra={r'PATH_INFO': r'/path1/path2',},
+        )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://altserver/altpath//path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://altserver/altpath//path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://altserver')
         self.assertEqual(r.urlscheme, b'http')
         self.assertEqual(r.apppath, b'/altpath/')
@@ -369,10 +380,11 @@
         self.assertIsNone(r.reponame)
 
         # Local SCRIPT_NAME is ignored.
-        r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver', extra={
-            r'SCRIPT_NAME': r'/script',
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            altbaseurl=b'http://altserver',
+            extra={r'SCRIPT_NAME': r'/script', r'PATH_INFO': r'/path1/path2',},
+        )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
         self.assertEqual(r.advertisedurl, b'http://altserver/path1/path2')
@@ -384,14 +396,16 @@
         self.assertIsNone(r.reponame)
 
         # Use remote's path for script name, app path
-        r = parse(DEFAULT_ENV, altbaseurl=b'http://altserver/altroot', extra={
-            r'SCRIPT_NAME': r'/script',
-            r'PATH_INFO': r'/path1/path2',
-        })
+        r = parse(
+            DEFAULT_ENV,
+            altbaseurl=b'http://altserver/altroot',
+            extra={r'SCRIPT_NAME': r'/script', r'PATH_INFO': r'/path1/path2',},
+        )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://altserver/altroot/path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://altserver/altroot/path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://altserver')
         self.assertEqual(r.urlscheme, b'http')
         self.assertEqual(r.apppath, b'/altroot')
@@ -400,23 +414,29 @@
         self.assertIsNone(r.reponame)
 
         # reponame is factored in properly.
-        r = parse(DEFAULT_ENV, reponame=b'repo',
-                  altbaseurl=b'http://altserver/altroot',
-                  extra={
+        r = parse(
+            DEFAULT_ENV,
+            reponame=b'repo',
+            altbaseurl=b'http://altserver/altroot',
+            extra={
                 r'SCRIPT_NAME': r'/script',
                 r'PATH_INFO': r'/repo/path1/path2',
-            })
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/script/repo/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
-        self.assertEqual(r.advertisedurl,
-                         b'http://altserver/altroot/repo/path1/path2')
+        self.assertEqual(
+            r.advertisedurl, b'http://altserver/altroot/repo/path1/path2'
+        )
         self.assertEqual(r.advertisedbaseurl, b'http://altserver')
         self.assertEqual(r.apppath, b'/altroot/repo')
         self.assertEqual(r.dispatchparts, [b'path1', b'path2'])
         self.assertEqual(r.dispatchpath, b'path1/path2')
         self.assertEqual(r.reponame, b'repo')
 
+
 if __name__ == '__main__':
     import silenttestrunner
+
     silenttestrunner.main(__name__)
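
Note the dict literals in this file, e.g. extra={r'SERVER_PORT': r'8000',}: the trailing comma from the multi-line original survives the join onto a single line, presumably an artifact of the modified black mentioned in the commit message. A self-contained sketch of the resulting call shape (parse() here is a simplified stand-in for the test helper, not the real one):

    DEFAULT_ENV = {'REQUEST_METHOD': 'GET', 'SERVER_NAME': 'testserver'}


    def parse(env, extra=None):
        # Simplified stand-in: merge the WSGI environ with overrides.
        merged = dict(env)
        merged.update(extra or {})
        return merged


    r = parse(DEFAULT_ENV, extra={'SERVER_PORT': '8000',})
    assert r['SERVER_PORT'] == '8000'
    assert r['REQUEST_METHOD'] == 'GET'
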
--- a/tests/testlib/ext-phase-report.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/testlib/ext-phase-report.py	Sun Oct 06 09:45:02 2019 -0400
@@ -2,16 +2,24 @@
 
 from __future__ import absolute_import
 
+
 def reposetup(ui, repo):
-
     def reportphasemove(tr):
         for rev, move in sorted(tr.changes[b'phases'].items()):
             if move[0] is None:
-                ui.write((b'test-debug-phase: new rev %d:  x -> %d\n'
-                          % (rev, move[1])))
+                ui.write(
+                    (
+                        b'test-debug-phase: new rev %d:  x -> %d\n'
+                        % (rev, move[1])
+                    )
+                )
             else:
-                ui.write((b'test-debug-phase: move rev %d: %d -> %d\n'
-                          % (rev, move[0], move[1])))
+                ui.write(
+                    (
+                        b'test-debug-phase: move rev %d: %d -> %d\n'
+                        % (rev, move[0], move[1])
+                    )
+                )
 
     class reportphaserepo(repo.__class__):
         def transaction(self, *args, **kwargs):
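
The ext-phase-report.py hunk shows how black splits a long %-formatting expression: the break lands before the operator, so the continuation line starts with %. A runnable sketch reusing the message format from the hunk above (the variable names are illustrative):

    rev, new_phase = 0, 1
    # The continuation line starts with '%', matching the style above.
    line = (
        b'test-debug-phase: new rev %d:  x -> %d\n'
        % (rev, new_phase)
    )
    assert line == b'test-debug-phase: new rev 0:  x -> 1\n'
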
--- a/tests/testlib/ext-sidedata.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/testlib/ext-sidedata.py	Sun Oct 06 09:45:02 2019 -0400
@@ -16,12 +16,12 @@
     revlog,
 )
 
-from mercurial.revlogutils import (
-    sidedata,
-)
+from mercurial.revlogutils import sidedata
+
 
-def wrapaddrevision(orig, self, text, transaction, link, p1, p2, *args,
-                    **kwargs):
+def wrapaddrevision(
+    orig, self, text, transaction, link, p1, p2, *args, **kwargs
+):
     if kwargs.get('sidedata') is None:
         kwargs['sidedata'] = {}
     sd = kwargs['sidedata']
@@ -33,6 +33,7 @@
     sd[sidedata.SD_TEST2] = struct.pack('>32s', sha256)
     return orig(self, text, transaction, link, p1, p2, *args, **kwargs)
 
+
 def wraprevision(orig, self, nodeorrev, *args, **kwargs):
     text = orig(self, nodeorrev, *args, **kwargs)
     if nodeorrev != node.nullrev and nodeorrev != node.nullid:
@@ -45,6 +46,7 @@
             raise RuntimeError('sha256 mismatch')
     return text
 
+
 def extsetup(ui):
     extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision)
     extensions.wrapfunction(revlog.revlog, 'revision', wraprevision)
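
ext-sidedata.py demonstrates signature wrapping: when a def line overflows, the whole parameter list moves to its own indented line and the closing parenthesis returns to the def's indentation. A hypothetical sketch (the function and argument names are illustrative only, not from the extension):

    def wrapped_signature_example(
        first_argument, second_argument, third_argument, *args, **kwargs
    ):
        # Echo the positional arguments back, purely for demonstration.
        return (first_argument, second_argument, third_argument)


    assert wrapped_signature_example(1, 2, 3) == (1, 2, 3)
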
--- a/tests/tinyproxy.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/tinyproxy.py	Sun Oct 06 09:45:02 2019 -0400
@@ -34,12 +34,13 @@
 else:
     family = socket.AF_INET
 
-class ProxyHandler (httpserver.basehttprequesthandler):
+
+class ProxyHandler(httpserver.basehttprequesthandler):
     __base = httpserver.basehttprequesthandler
     __base_handle = __base.handle
 
     server_version = "TinyHTTPProxy/" + __version__
-    rbufsize = 0                        # self.rfile Be unbuffered
+    rbufsize = 0  # self.rfile Be unbuffered
 
     def handle(self):
         (ip, port) = self.client_address
@@ -53,9 +54,13 @@
 
     def log_request(self, code='-', size='-'):
         xheaders = [h for h in self.headers.items() if h[0].startswith('x-')]
-        self.log_message('"%s" %s %s%s',
-                         self.requestline, str(code), str(size),
-                         ''.join([' %s:%s' % h for h in sorted(xheaders)]))
+        self.log_message(
+            '"%s" %s %s%s',
+            self.requestline,
+            str(code),
+            str(size),
+            ''.join([' %s:%s' % h for h in sorted(xheaders)]),
+        )
         # Flush for Windows, so output isn't lost on TerminateProcess()
         sys.stdout.flush()
         sys.stderr.flush()
@@ -63,14 +68,17 @@
     def _connect_to(self, netloc, soc):
         i = netloc.find(':')
         if i >= 0:
-            host_port = netloc[:i], int(netloc[i + 1:])
+            host_port = netloc[:i], int(netloc[i + 1 :])
         else:
             host_port = netloc, 80
         print("\t" "connect to %s:%d" % host_port)
-        try: soc.connect(host_port)
+        try:
+            soc.connect(host_port)
         except socket.error as arg:
-            try: msg = arg[1]
-            except (IndexError, TypeError): msg = arg
+            try:
+                msg = arg[1]
+            except (IndexError, TypeError):
+                msg = arg
             self.send_error(404, msg)
             return 0
         return 1
@@ -80,10 +88,14 @@
         try:
             if self._connect_to(self.path, soc):
                 self.log_request(200)
-                self.wfile.write(pycompat.bytestr(self.protocol_version) +
-                                 b" 200 Connection established\r\n")
-                self.wfile.write(b"Proxy-agent: %s\r\n" %
-                                 pycompat.bytestr(self.version_string()))
+                self.wfile.write(
+                    pycompat.bytestr(self.protocol_version)
+                    + b" 200 Connection established\r\n"
+                )
+                self.wfile.write(
+                    b"Proxy-agent: %s\r\n"
+                    % pycompat.bytestr(self.version_string())
+                )
                 self.wfile.write(b"\r\n")
                 self._read_write(soc, 300)
         finally:
@@ -93,7 +105,8 @@
 
     def do_GET(self):
         (scm, netloc, path, params, query, fragment) = urlreq.urlparse(
-            self.path, 'http')
+            self.path, 'http'
+        )
         if scm != 'http' or fragment or not netloc:
             self.send_error(400, "bad url %s" % self.path)
             return
@@ -102,15 +115,21 @@
             if self._connect_to(netloc, soc):
                 self.log_request()
                 url = urlreq.urlunparse(('', '', path, params, query, ''))
-                soc.send(b"%s %s %s\r\n" % (
-                    pycompat.bytestr(self.command),
-                    pycompat.bytestr(url),
-                    pycompat.bytestr(self.request_version)))
+                soc.send(
+                    b"%s %s %s\r\n"
+                    % (
+                        pycompat.bytestr(self.command),
+                        pycompat.bytestr(url),
+                        pycompat.bytestr(self.request_version),
+                    )
+                )
                 self.headers['Connection'] = 'close'
                 del self.headers['Proxy-Connection']
                 for key, val in self.headers.items():
-                    soc.send(b"%s: %s\r\n" % (pycompat.bytestr(key),
-                                              pycompat.bytestr(val)))
+                    soc.send(
+                        b"%s: %s\r\n"
+                        % (pycompat.bytestr(key), pycompat.bytestr(val))
+                    )
                 soc.send(b"\r\n")
                 self._read_write(soc)
         finally:
@@ -147,17 +166,18 @@
 
     do_HEAD = do_GET
     do_POST = do_GET
-    do_PUT  = do_GET
+    do_PUT = do_GET
     do_DELETE = do_GET
 
-class ThreadingHTTPServer (socketserver.ThreadingMixIn,
-                           httpserver.httpserver):
+
+class ThreadingHTTPServer(socketserver.ThreadingMixIn, httpserver.httpserver):
     def __init__(self, *args, **kwargs):
         httpserver.httpserver.__init__(self, *args, **kwargs)
         a = open("proxy.pid", "w")
         a.write(str(os.getpid()) + "\n")
         a.close()
 
+
 def runserver(port=8000, bind=""):
     server_address = (bind, port)
     ProxyHandler.protocol_version = "HTTP/1.0"
@@ -171,6 +191,7 @@
         httpd.server_close()
         sys.exit(0)
 
+
 if __name__ == '__main__':
     argv = sys.argv
     if argv[1:] and argv[1] in ('-h', '--help'):
@@ -188,9 +209,13 @@
             print("Any clients will be served...")
 
         parser = optparse.OptionParser()
-        parser.add_option('-b', '--bind', metavar='ADDRESS',
-                          help='Specify alternate bind address '
-                               '[default: all interfaces]', default='')
+        parser.add_option(
+            '-b',
+            '--bind',
+            metavar='ADDRESS',
+            help='Specify alternate bind address ' '[default: all interfaces]',
+            default='',
+        )
         (options, args) = parser.parse_args()
         port = 8000
         if len(args) == 1:
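
The tinyproxy.py hunks collect three smaller mechanical fixes: PEP 8 slice spacing when a bound is a compound expression (netloc[i + 1 :]), one-line try/except bodies moved onto their own lines, and inline comments padded to two spaces before the #. A self-contained sketch of the host:port split from _connect_to, under those conventions:

    netloc = 'example.com:8080'
    i = netloc.find(':')
    host_port = netloc[:i], int(netloc[i + 1 :])  # spaced slice bound
    try:
        reachable = 0 < host_port[1] < 65536
    except (IndexError, TypeError):
        reachable = False
    assert host_port == ('example.com', 8080) and reachable
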
--- a/tests/wireprotosimplecache.py	Sat Oct 05 10:29:34 2019 -0400
+++ b/tests/wireprotosimplecache.py	Sun Oct 06 09:45:02 2019 -0400
@@ -19,21 +19,16 @@
     repository,
     util as interfaceutil,
 )
-from mercurial.utils import (
-    stringutil,
-)
+from mercurial.utils import stringutil
 
 CACHE = None
 
 configtable = {}
 configitem = registrar.configitem(configtable)
 
-configitem(b'simplecache', b'cacheapi',
-           default=False)
-configitem(b'simplecache', b'cacheobjects',
-           default=False)
-configitem(b'simplecache', b'redirectsfile',
-           default=None)
+configitem(b'simplecache', b'cacheapi', default=False)
+configitem(b'simplecache', b'cacheobjects', default=False)
+configitem(b'simplecache', b'redirectsfile', default=None)
 
 # API handler that makes cached keys available.
 def handlecacherequest(rctx, req, res, checkperm, urlparts):
@@ -60,19 +55,23 @@
     res.headers[b'Content-Type'] = b'application/mercurial-cbor'
     res.setbodybytes(CACHE[key])
 
+
 def cachedescriptor(req, repo):
     return {}
 
+
 wireprotoserver.API_HANDLERS[b'simplecache'] = {
     b'config': (b'simplecache', b'cacheapi'),
     b'handler': handlecacherequest,
     b'apidescriptor': cachedescriptor,
 }
 
+
 @interfaceutil.implementer(repository.iwireprotocolcommandcacher)
 class memorycacher(object):
-    def __init__(self, ui, command, encodefn, redirecttargets, redirecthashes,
-                 req):
+    def __init__(
+        self, ui, command, encodefn, redirecttargets, redirecthashes, req
+    ):
         self.ui = ui
         self.encodefn = encodefn
         self.redirecttargets = redirecttargets
@@ -131,12 +130,16 @@
 
             url = b'%s/%s' % (self.req.baseurl, b'/'.join(paths))
 
-            #url = b'http://example.com/%s' % self.key
-            self.ui.log(b'simplecache', b'sending content redirect for %s to '
-                                        b'%s\n', self.key, url)
+            # url = b'http://example.com/%s' % self.key
+            self.ui.log(
+                b'simplecache',
+                b'sending content redirect for %s to ' b'%s\n',
+                self.key,
+                url,
+            )
             response = wireprototypes.alternatelocationresponse(
-                url=url,
-                mediatype=b'application/mercurial-cbor')
+                url=url, mediatype=b'application/mercurial-cbor'
+            )
 
             return {b'objs': [response]}
 
@@ -166,10 +169,26 @@
 
         return []
 
-def makeresponsecacher(orig, repo, proto, command, args, objencoderfn,
-                       redirecttargets, redirecthashes):
-    return memorycacher(repo.ui, command, objencoderfn, redirecttargets,
-                        redirecthashes, proto._req)
+
+def makeresponsecacher(
+    orig,
+    repo,
+    proto,
+    command,
+    args,
+    objencoderfn,
+    redirecttargets,
+    redirecthashes,
+):
+    return memorycacher(
+        repo.ui,
+        command,
+        objencoderfn,
+        redirecttargets,
+        redirecthashes,
+        proto._req,
+    )
+
 
 def loadredirecttargets(ui):
     path = ui.config(b'simplecache', b'redirectsfile')
@@ -181,15 +200,21 @@
 
     return stringutil.evalpythonliteral(s)
 
+
 def getadvertisedredirecttargets(orig, repo, proto):
     return loadredirecttargets(repo.ui)
 
+
 def extsetup(ui):
     global CACHE
 
     CACHE = util.lrucachedict(10000)
 
-    extensions.wrapfunction(wireprotov2server, b'makeresponsecacher',
-                            makeresponsecacher)
-    extensions.wrapfunction(wireprotov2server, b'getadvertisedredirecttargets',
-                            getadvertisedredirecttargets)
+    extensions.wrapfunction(
+        wireprotov2server, b'makeresponsecacher', makeresponsecacher
+    )
+    extensions.wrapfunction(
+        wireprotov2server,
+        b'getadvertisedredirecttargets',
+        getadvertisedredirecttargets,
+    )
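
The closing hunk shows both of black's call-wrapping tiers side by side: arguments share a single indented line when they fit within the limit, and get one line each (with trailing comma) when they do not. A hypothetical stand-in showing the two shapes (wrapfunction() here is a simplified shim, not the mercurial.extensions API):

    def wrapfunction(container, name, wrapper):
        # Simplified shim: install the wrapper as an attribute.
        setattr(container, name, wrapper)


    class container(object):
        pass


    wrapfunction(
        container, 'makeresponsecacher', lambda *args: None
    )
    wrapfunction(
        container,
        'getadvertisedredirecttargets',
        lambda *args: None,
    )
    assert callable(container.makeresponsecacher)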