formatting: upgrade to black 20.8b1
author Augie Fackler <raf@durin42.com>
date Fri, 27 Nov 2020 17:03:29 -0500
changeset 45942 89a2afe31e82
parent 45941 346af7687c6f
child 45943 a9797b49fb69
formatting: upgrade to black 20.8b1

This required a couple of small tweaks to un-confuse black, but now it works.
Big formatting changes come from:

* Dramatically improved collection-splitting logic upstream
* Black having a strong (correct IMO) opinion that """ is better than '''

Differential Revision: https://phab.mercurial-scm.org/D9430
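
For readers skimming the hunks, here is a minimal, self-contained Python sketch
(not part of the changeset) of the two black 20.8b1 behaviors named above. The
configitem stub is hypothetical and exists only so the snippet runs standalone;
the before/after shapes mirror the hgext/blackbox.py hunk further down, and the
single-quoted byte strings assume black is run with string normalization
disabled, as these hunks suggest.

    # Hypothetical stand-in for mercurial.registrar.configitem, included only
    # so this illustration runs on its own.
    def configitem(section, name, default=None, generic=False):
        return (section, name, default, generic)


    # Shape of the "-" lines below: arguments packed onto one line inside the
    # parentheses, ending with a trailing comma.
    configitem(
        b'blackbox', b'dirty', default=False,
    )

    # Shape of the "+" lines: black 20.8b1 treats that pre-existing trailing
    # comma (the "magic trailing comma") as a request to keep the call
    # exploded, one argument per line, which is where most of the churn in
    # this patch comes from.
    configitem(
        b'blackbox',
        b'dirty',
        default=False,
    )


    def docstring_example():
        """black 20.8b1 also rewrites '''-quoted docstrings to use triple
        double quotes, producing the many quote-only hunks in this patch."""

The changed-file list and hunks that follow are essentially the mechanical
result of running that version of black over the tree, plus the small tweaks
mentioned in the message.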
contrib/automation/hgautomation/aws.py
contrib/automation/hgautomation/cli.py
contrib/automation/hgautomation/windows.py
contrib/byteify-strings.py
contrib/check-code.py
contrib/packaging/hgpackaging/cli.py
contrib/packaging/hgpackaging/inno.py
contrib/packaging/hgpackaging/wix.py
contrib/perf.py
contrib/python-hook-examples.py
contrib/python-zstandard/make_cffi.py
contrib/python3-ratchet.py
contrib/synthrepo.py
contrib/testparseutil.py
doc/hgmanpage.py
hgext/acl.py
hgext/automv.py
hgext/blackbox.py
hgext/bugzilla.py
hgext/churn.py
hgext/convert/__init__.py
hgext/convert/bzr.py
hgext/convert/common.py
hgext/convert/convcmd.py
hgext/convert/cvsps.py
hgext/convert/filemap.py
hgext/eol.py
hgext/extdiff.py
hgext/factotum.py
hgext/fetch.py
hgext/fix.py
hgext/fsmonitor/__init__.py
hgext/fsmonitor/pywatchman/__init__.py
hgext/fsmonitor/pywatchman/capabilities.py
hgext/git/__init__.py
hgext/git/manifest.py
hgext/githelp.py
hgext/gpg.py
hgext/hgk.py
hgext/histedit.py
hgext/hooklib/changeset_obsoleted.py
hgext/hooklib/changeset_published.py
hgext/infinitepush/__init__.py
hgext/infinitepush/bundleparts.py
hgext/infinitepush/indexapi.py
hgext/infinitepush/sqlindexapi.py
hgext/keyword.py
hgext/largefiles/__init__.py
hgext/largefiles/basestore.py
hgext/largefiles/lfcommands.py
hgext/largefiles/lfutil.py
hgext/largefiles/localstore.py
hgext/largefiles/overrides.py
hgext/largefiles/proto.py
hgext/largefiles/remotestore.py
hgext/largefiles/reposetup.py
hgext/largefiles/wirestore.py
hgext/lfs/__init__.py
hgext/lfs/blobstore.py
hgext/lfs/wrapper.py
hgext/mq.py
hgext/narrow/narrowbundle2.py
hgext/narrow/narrowwirepeer.py
hgext/notify.py
hgext/pager.py
hgext/patchbomb.py
hgext/phabricator.py
hgext/purge.py
hgext/rebase.py
hgext/record.py
hgext/remotefilelog/__init__.py
hgext/remotefilelog/basestore.py
hgext/remotefilelog/contentstore.py
hgext/remotefilelog/fileserverclient.py
hgext/remotefilelog/remotefilectx.py
hgext/remotefilelog/remotefilelogserver.py
hgext/remotefilelog/repack.py
hgext/remotefilelog/shallowrepo.py
hgext/remotenames.py
hgext/schemes.py
hgext/share.py
hgext/transplant.py
hgext/uncommit.py
hgext/win32mbcs.py
hgext/win32text.py
i18n/check-translation.py
mercurial/ancestor.py
mercurial/archival.py
mercurial/bookmarks.py
mercurial/branchmap.py
mercurial/bundle2.py
mercurial/bundlerepo.py
mercurial/changelog.py
mercurial/cmdutil.py
mercurial/commands.py
mercurial/commandserver.py
mercurial/commit.py
mercurial/config.py
mercurial/configitems.py
mercurial/context.py
mercurial/copies.py
mercurial/crecord.py
mercurial/dagop.py
mercurial/dagparser.py
mercurial/debugcommands.py
mercurial/diffutil.py
mercurial/dirstate.py
mercurial/dirstateguard.py
mercurial/discovery.py
mercurial/dispatch.py
mercurial/encoding.py
mercurial/error.py
mercurial/exchange.py
mercurial/exchangev2.py
mercurial/extensions.py
mercurial/filemerge.py
mercurial/fileset.py
mercurial/help.py
mercurial/hg.py
mercurial/hgweb/__init__.py
mercurial/hgweb/common.py
mercurial/hgweb/hgweb_mod.py
mercurial/hgweb/request.py
mercurial/hgweb/webutil.py
mercurial/hook.py
mercurial/httppeer.py
mercurial/interfaces/dirstate.py
mercurial/interfaces/repository.py
mercurial/keepalive.py
mercurial/localrepo.py
mercurial/lock.py
mercurial/logcmdutil.py
mercurial/logexchange.py
mercurial/mail.py
mercurial/manifest.py
mercurial/match.py
mercurial/mdiff.py
mercurial/merge.py
mercurial/mergestate.py
mercurial/metadata.py
mercurial/minirst.py
mercurial/narrowspec.py
mercurial/obsolete.py
mercurial/obsutil.py
mercurial/parser.py
mercurial/patch.py
mercurial/pathutil.py
mercurial/posix.py
mercurial/progress.py
mercurial/pure/charencode.py
mercurial/pure/mpatch.py
mercurial/pure/osutil.py
mercurial/pure/parsers.py
mercurial/rcutil.py
mercurial/registrar.py
mercurial/repoview.py
mercurial/revlog.py
mercurial/revlogutils/nodemap.py
mercurial/revset.py
mercurial/revsetlang.py
mercurial/scmutil.py
mercurial/setdiscovery.py
mercurial/shelve.py
mercurial/similar.py
mercurial/simplemerge.py
mercurial/sshpeer.py
mercurial/sslutil.py
mercurial/state.py
mercurial/statprof.py
mercurial/store.py
mercurial/strip.py
mercurial/subrepo.py
mercurial/tagmerge.py
mercurial/tags.py
mercurial/templatefilters.py
mercurial/templatefuncs.py
mercurial/templatekw.py
mercurial/templater.py
mercurial/testing/storage.py
mercurial/transaction.py
mercurial/treediscovery.py
mercurial/txnutil.py
mercurial/ui.py
mercurial/url.py
mercurial/urllibcompat.py
mercurial/util.py
mercurial/utils/cborutil.py
mercurial/utils/compression.py
mercurial/utils/dateutil.py
mercurial/utils/procutil.py
mercurial/utils/stringutil.py
mercurial/vfs.py
mercurial/win32.py
mercurial/windows.py
mercurial/wireprotoframing.py
mercurial/wireprototypes.py
mercurial/wireprotov1peer.py
mercurial/wireprotov1server.py
mercurial/wireprotov2server.py
mercurial/worker.py
setup.py
tests/badserverext.py
tests/fakedirstatewritetime.py
tests/fakepatchtime.py
tests/flagprocessorext.py
tests/hghave.py
tests/hypothesishelpers.py
tests/run-tests.py
tests/test-absorb-filefixupstate.py
tests/test-ancestor.py
tests/test-batching.py
tests/test-cbor.py
tests/test-doctest.py
tests/test-linelog.py
tests/test-lock.py
tests/test-manifest.py
tests/test-match.py
tests/test-pathencode.py
tests/test-remotefilelog-datapack.py
tests/test-remotefilelog-histpack.py
tests/test-revlog-raw.py
tests/test-rust-revlog.py
tests/test-verify-repo-operations.py
tests/test-wireproto-clientreactor.py
tests/test-wireproto-framing.py
tests/test-wireproto-serverreactor.py
tests/test-wsgirequest.py
--- a/contrib/automation/hgautomation/aws.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/automation/hgautomation/aws.py	Fri Nov 27 17:03:29 2020 -0500
@@ -152,7 +152,11 @@
 
 
 IAM_INSTANCE_PROFILES = {
-    'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
+    'ephemeral-ec2-1': {
+        'roles': [
+            'ephemeral-ec2-role-1',
+        ],
+    }
 }
 
 
@@ -469,10 +473,22 @@
 
     images = ec2resource.images.filter(
         Filters=[
-            {'Name': 'owner-id', 'Values': [owner_id],},
-            {'Name': 'state', 'Values': ['available'],},
-            {'Name': 'image-type', 'Values': ['machine'],},
-            {'Name': 'name', 'Values': [name],},
+            {
+                'Name': 'owner-id',
+                'Values': [owner_id],
+            },
+            {
+                'Name': 'state',
+                'Values': ['available'],
+            },
+            {
+                'Name': 'image-type',
+                'Values': ['machine'],
+            },
+            {
+                'Name': 'name',
+                'Values': [name],
+            },
         ]
     )
 
@@ -519,10 +535,13 @@
         print('adding security group %s' % actual)
 
         group_res = ec2resource.create_security_group(
-            Description=group['description'], GroupName=actual,
+            Description=group['description'],
+            GroupName=actual,
         )
 
-        group_res.authorize_ingress(IpPermissions=group['ingress'],)
+        group_res.authorize_ingress(
+            IpPermissions=group['ingress'],
+        )
 
         security_groups[name] = group_res
 
@@ -614,7 +633,10 @@
     while True:
         res = ssmclient.describe_instance_information(
             Filters=[
-                {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
+                {
+                    'Key': 'InstanceIds',
+                    'Values': [i.id for i in instances],
+                },
             ],
         )
 
@@ -636,7 +658,9 @@
         InstanceIds=[i.id for i in instances],
         DocumentName=document_name,
         Parameters=parameters,
-        CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
+        CloudWatchOutputConfig={
+            'CloudWatchOutputEnabled': True,
+        },
     )
 
     command_id = res['Command']['CommandId']
@@ -645,7 +669,8 @@
         while True:
             try:
                 res = ssmclient.get_command_invocation(
-                    CommandId=command_id, InstanceId=instance.id,
+                    CommandId=command_id,
+                    InstanceId=instance.id,
                 )
             except botocore.exceptions.ClientError as e:
                 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -799,19 +824,32 @@
     instance.stop()
 
     ec2client.get_waiter('instance_stopped').wait(
-        InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+        InstanceIds=[instance.id],
+        WaiterConfig={
+            'Delay': 5,
+        },
     )
     print('%s is stopped' % instance.id)
 
-    image = instance.create_image(Name=name, Description=description,)
+    image = instance.create_image(
+        Name=name,
+        Description=description,
+    )
 
     image.create_tags(
-        Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
+        Tags=[
+            {
+                'Key': 'HGIMAGEFINGERPRINT',
+                'Value': fingerprint,
+            },
+        ]
     )
 
     print('waiting for image %s' % image.id)
 
-    ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
+    ec2client.get_waiter('image_available').wait(
+        ImageIds=[image.id],
+    )
 
     print('image %s available as %s' % (image.id, image.name))
 
@@ -837,7 +875,9 @@
         ssh_username = 'admin'
     elif distro == 'debian10':
         image = find_image(
-            ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
+            ec2resource,
+            DEBIAN_ACCOUNT_ID_2,
+            'debian-10-amd64-20190909-10',
         )
         ssh_username = 'admin'
     elif distro == 'ubuntu18.04':
@@ -1066,7 +1106,9 @@
 
 
 def ensure_windows_dev_ami(
-    c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME,
+    c: AWSConnection,
+    prefix='hg-',
+    base_image_name=WINDOWS_BASE_IMAGE_NAME,
 ):
     """Ensure Windows Development AMI is available and up-to-date.
 
@@ -1190,7 +1232,9 @@
             ssmclient,
             [instance],
             'AWS-RunPowerShellScript',
-            {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
+            {
+                'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
+            },
         )
 
         # Reboot so all updates are fully applied.
@@ -1202,7 +1246,10 @@
         print('rebooting instance %s' % instance.id)
         instance.stop()
         ec2client.get_waiter('instance_stopped').wait(
-            InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+            InstanceIds=[instance.id],
+            WaiterConfig={
+                'Delay': 5,
+            },
         )
 
         instance.start()
--- a/contrib/automation/hgautomation/cli.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/automation/hgautomation/cli.py	Fri Nov 27 17:03:29 2020 -0500
@@ -282,16 +282,20 @@
         help='Path for local state files',
     )
     parser.add_argument(
-        '--aws-region', help='AWS region to use', default='us-west-2',
+        '--aws-region',
+        help='AWS region to use',
+        default='us-west-2',
     )
 
     subparsers = parser.add_subparsers()
 
     sp = subparsers.add_parser(
-        'bootstrap-linux-dev', help='Bootstrap Linux development environments',
+        'bootstrap-linux-dev',
+        help='Bootstrap Linux development environments',
     )
     sp.add_argument(
-        '--distros', help='Comma delimited list of distros to bootstrap',
+        '--distros',
+        help='Comma delimited list of distros to bootstrap',
     )
     sp.add_argument(
         '--parallel',
@@ -312,13 +316,17 @@
     sp.set_defaults(func=bootstrap_windows_dev)
 
     sp = subparsers.add_parser(
-        'build-all-windows-packages', help='Build all Windows packages',
+        'build-all-windows-packages',
+        help='Build all Windows packages',
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use',
+        '--version',
+        help='Mercurial version string to use',
     )
     sp.add_argument(
         '--base-image-name',
@@ -328,7 +336,8 @@
     sp.set_defaults(func=build_all_windows_packages)
 
     sp = subparsers.add_parser(
-        'build-inno', help='Build Inno Setup installer(s)',
+        'build-inno',
+        help='Build Inno Setup installer(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -346,10 +355,13 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -359,7 +371,8 @@
     sp.set_defaults(func=build_inno)
 
     sp = subparsers.add_parser(
-        'build-windows-wheel', help='Build Windows wheel(s)',
+        'build-windows-wheel',
+        help='Build Windows wheel(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -376,7 +389,9 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
         '--base-image-name',
@@ -402,10 +417,13 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -421,11 +439,15 @@
     sp.set_defaults(func=terminate_ec2_instances)
 
     sp = subparsers.add_parser(
-        'purge-ec2-resources', help='Purge all EC2 resources managed by us',
+        'purge-ec2-resources',
+        help='Purge all EC2 resources managed by us',
     )
     sp.set_defaults(func=purge_ec2_resources)
 
-    sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
+    sp = subparsers.add_parser(
+        'run-tests-linux',
+        help='Run tests on Linux',
+    )
     sp.add_argument(
         '--distro',
         help='Linux distribution to run tests on',
@@ -468,10 +490,13 @@
     sp.set_defaults(func=run_tests_linux)
 
     sp = subparsers.add_parser(
-        'run-tests-windows', help='Run tests on Windows',
+        'run-tests-windows',
+        help='Run tests on Windows',
     )
     sp.add_argument(
-        '--instance-type', help='EC2 instance type to use', default='t3.medium',
+        '--instance-type',
+        help='EC2 instance type to use',
+        default='t3.medium',
     )
     sp.add_argument(
         '--python-version',
@@ -486,7 +511,8 @@
         default='x64',
     )
     sp.add_argument(
-        '--test-flags', help='Extra command line flags to pass to run-tests.py',
+        '--test-flags',
+        help='Extra command line flags to pass to run-tests.py',
     )
     sp.add_argument(
         '--base-image-name',
@@ -514,10 +540,12 @@
         help='Skip uploading to www.mercurial-scm.org',
     )
     sp.add_argument(
-        '--ssh-username', help='SSH username for mercurial-scm.org',
+        '--ssh-username',
+        help='SSH username for mercurial-scm.org',
     )
     sp.add_argument(
-        'version', help='Mercurial version string to locate local packages',
+        'version',
+        help='Mercurial version string to locate local packages',
     )
     sp.set_defaults(func=publish_windows_artifacts)
 
--- a/contrib/automation/hgautomation/windows.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/automation/hgautomation/windows.py	Fri Nov 27 17:03:29 2020 -0500
@@ -362,7 +362,8 @@
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_INNO_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -427,7 +428,8 @@
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_WIX_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -460,7 +462,10 @@
 
     python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
 
-    ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
+    ps = RUN_TESTS.format(
+        python_path=python_path,
+        test_flags=test_flags or '',
+    )
 
     run_powershell(winrm_client, ps)
 
--- a/contrib/byteify-strings.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/byteify-strings.py	Fri Nov 27 17:03:29 2020 -0500
@@ -213,15 +213,19 @@
             fn = t.string
 
             # *attr() builtins don't accept byte strings to 2nd argument.
-            if fn in (
-                'getattr',
-                'setattr',
-                'hasattr',
-                'safehasattr',
-                'wrapfunction',
-                'wrapclass',
-                'addattr',
-            ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
+            if (
+                fn
+                in (
+                    'getattr',
+                    'setattr',
+                    'hasattr',
+                    'safehasattr',
+                    'wrapfunction',
+                    'wrapclass',
+                    'addattr',
+                )
+                and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
+            ):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
                     _ensuresysstr(arg1idx)
--- a/contrib/check-code.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/check-code.py	Fri Nov 27 17:03:29 2020 -0500
@@ -620,13 +620,17 @@
 ]
 
 inutilpats = [
-    [(r'\bui\.', "don't use ui in util"),],
+    [
+        (r'\bui\.', "don't use ui in util"),
+    ],
     # warnings
     [],
 ]
 
 inrevlogpats = [
-    [(r'\brepo\.', "don't use repo in revlog"),],
+    [
+        (r'\brepo\.', "don't use repo in revlog"),
+    ],
     # warnings
     [],
 ]
--- a/contrib/packaging/hgpackaging/cli.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/packaging/hgpackaging/cli.py	Fri Nov 27 17:03:29 2020 -0500
@@ -44,7 +44,11 @@
         )
     else:
         inno.build_with_py2exe(
-            SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
+            SOURCE_DIR,
+            build_dir,
+            pathlib.Path(python),
+            iscc,
+            version=version,
         )
 
 
--- a/contrib/packaging/hgpackaging/inno.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/packaging/hgpackaging/inno.py	Fri Nov 27 17:03:29 2020 -0500
@@ -198,7 +198,11 @@
     except jinja2.TemplateSyntaxError as e:
         raise Exception(
             'template syntax error at %s:%d: %s'
-            % (e.name, e.lineno, e.message,)
+            % (
+                e.name,
+                e.lineno,
+                e.message,
+            )
         )
 
     content = template.render(package_files=package_files)
--- a/contrib/packaging/hgpackaging/wix.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/packaging/hgpackaging/wix.py	Fri Nov 27 17:03:29 2020 -0500
@@ -517,7 +517,10 @@
         args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
 
     args.extend(
-        [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
+        [
+            str(build_dir / 'stage.wixobj'),
+            str(build_dir / 'mercurial.wixobj'),
+        ]
     )
 
     subprocess.run(args, cwd=str(source_dir), check=True)
--- a/contrib/perf.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/perf.py	Fri Nov 27 17:03:29 2020 -0500
@@ -291,7 +291,9 @@
         experimental=True,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -310,19 +312,29 @@
     # compatibility fix for a11fd395e83f
     # hg version: 5.2
     configitem(
-        b'perf', b'presleep', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'presleep',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'stub', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'stub',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'parentscount', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'parentscount',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'all-timing', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'all-timing',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -330,7 +342,9 @@
         default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'run-limits', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'run-limits',
+        default=mercurial.configitems.dynamicdefault,
     )
 
 
@@ -385,8 +399,7 @@
         from mercurial import node
 
         class defaultformatter(object):
-            """Minimized composition of baseformatter and plainformatter
-            """
+            """Minimized composition of baseformatter and plainformatter"""
 
             def __init__(self, ui, topic, opts):
                 self._ui = ui
@@ -658,8 +671,7 @@
 
 
 def getsvfs(repo):
-    """Return appropriate object to access files under .hg/store
-    """
+    """Return appropriate object to access files under .hg/store"""
     # for "historical portability":
     # repo.svfs has been available since 2.3 (or 7034365089bf)
     svfs = getattr(repo, 'svfs', None)
@@ -670,8 +682,7 @@
 
 
 def getvfs(repo):
-    """Return appropriate object to access files under .hg
-    """
+    """Return appropriate object to access files under .hg"""
     # for "historical portability":
     # repo.vfs has been available since 2.3 (or 7034365089bf)
     vfs = getattr(repo, 'vfs', None)
@@ -682,8 +693,7 @@
 
 
 def repocleartagscachefunc(repo):
-    """Return the function to clear tags cache according to repo internal API
-    """
+    """Return the function to clear tags cache according to repo internal API"""
     if util.safehasattr(repo, b'_tagscache'):  # since 2.0 (or 9dca7653b525)
         # in this case, setattr(repo, '_tagscache', None) or so isn't
         # correct way to clear tags cache, because existing code paths
@@ -847,7 +857,9 @@
 @command(
     b'perftags',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perftags(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -900,8 +912,7 @@
 
 @command(b'perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
-    """benchmark discovery between local repo and the peer at given path
-    """
+    """benchmark discovery between local repo and the peer at given path"""
     repos = [repo, None]
     timer, fm = gettimer(ui, opts)
     path = ui.expandpath(path)
@@ -919,7 +930,9 @@
 @command(
     b'perfbookmarks',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perfbookmarks(ui, repo, **opts):
     """benchmark parsing bookmarks from disk to memory"""
@@ -1184,8 +1197,7 @@
 
 @command(b'perfdirstatedirs', formatteropts)
 def perfdirstatedirs(ui, repo, **opts):
-    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache
-    """
+    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     repo.dirstate.hasdir(b"a")
@@ -1245,8 +1257,7 @@
 
 @command(b'perfdirstatewrite', formatteropts)
 def perfdirstatewrite(ui, repo, **opts):
-    """benchmap the time it take to write a dirstate on disk
-    """
+    """benchmap the time it take to write a dirstate on disk"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     ds = repo.dirstate
@@ -1359,7 +1370,9 @@
 
 @command(
     b'perfphases',
-    [(b'', b'full', False, b'include file reading time too'),],
+    [
+        (b'', b'full', False, b'include file reading time too'),
+    ],
     b"",
 )
 def perfphases(ui, repo, **opts):
@@ -1839,7 +1852,10 @@
 
 @command(
     b'perftemplating',
-    [(b'r', b'rev', [], b'revisions to run the template on'),] + formatteropts,
+    [
+        (b'r', b'rev', [], b'revisions to run the template on'),
+    ]
+    + formatteropts,
 )
 def perftemplating(ui, repo, testedtemplate=None, **opts):
     """test the rendering time of a given template"""
@@ -2193,10 +2209,18 @@
                 }
                 if dostats:
                     alldata['nbrevs'].append(
-                        (data['nbrevs'], base.hex(), parent.hex(),)
+                        (
+                            data['nbrevs'],
+                            base.hex(),
+                            parent.hex(),
+                        )
                     )
                     alldata['nbmissingfiles'].append(
-                        (data['nbmissingfiles'], base.hex(), parent.hex(),)
+                        (
+                            data['nbmissingfiles'],
+                            base.hex(),
+                            parent.hex(),
+                        )
                     )
                 if dotiming:
                     begin = util.timer()
@@ -2207,10 +2231,18 @@
                     data['nbrenamedfiles'] = len(renames)
                     if dostats:
                         alldata['time'].append(
-                            (data['time'], base.hex(), parent.hex(),)
+                            (
+                                data['time'],
+                                base.hex(),
+                                parent.hex(),
+                            )
                         )
                         alldata['nbrenames'].append(
-                            (data['nbrenamedfiles'], base.hex(), parent.hex(),)
+                            (
+                                data['nbrenamedfiles'],
+                                base.hex(),
+                                parent.hex(),
+                            )
                         )
                 fm.startitem()
                 fm.data(**data)
@@ -3321,7 +3353,9 @@
 
 @command(
     b'perfvolatilesets',
-    [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),]
+    [
+        (b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
+    ]
     + formatteropts,
 )
 def perfvolatilesets(ui, repo, *names, **opts):
@@ -3807,8 +3841,7 @@
     ],
 )
 def perfwrite(ui, repo, **opts):
-    """microbenchmark ui.write (and others)
-    """
+    """microbenchmark ui.write (and others)"""
     opts = _byteskwargs(opts)
 
     write = getattr(ui, _sysstr(opts[b'write_method']))
--- a/contrib/python-hook-examples.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/python-hook-examples.py	Fri Nov 27 17:03:29 2020 -0500
@@ -9,12 +9,12 @@
 
 
 def diffstat(ui, repo, **kwargs):
-    '''Example usage:
+    """Example usage:
 
     [hooks]
     commit.diffstat = python:/path/to/this/file.py:diffstat
     changegroup.diffstat = python:/path/to/this/file.py:diffstat
-    '''
+    """
     if kwargs.get('parent2'):
         return
     node = kwargs['node']
--- a/contrib/python-zstandard/make_cffi.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/python-zstandard/make_cffi.py	Fri Nov 27 17:03:29 2020 -0500
@@ -53,7 +53,10 @@
 # Headers whose preprocessed output will be fed into cdef().
 HEADERS = [
     os.path.join(HERE, "zstd", *p)
-    for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
+    for p in (
+        ("zstd.h",),
+        ("dictBuilder", "zdict.h"),
+    )
 ]
 
 INCLUDE_DIRS = [
@@ -80,12 +83,20 @@
 if compiler.compiler_type == "unix":
     args = list(compiler.executables["compiler"])
     args.extend(
-        ["-E", "-DZSTD_STATIC_LINKING_ONLY", "-DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "-E",
+            "-DZSTD_STATIC_LINKING_ONLY",
+            "-DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 elif compiler.compiler_type == "msvc":
     args = [compiler.cc]
     args.extend(
-        ["/EP", "/DZSTD_STATIC_LINKING_ONLY", "/DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "/EP",
+            "/DZSTD_STATIC_LINKING_ONLY",
+            "/DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 else:
     raise Exception("unsupported compiler type: %s" % compiler.compiler_type)
--- a/contrib/python3-ratchet.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/python3-ratchet.py	Fri Nov 27 17:03:29 2020 -0500
@@ -26,7 +26,9 @@
 
 _hgenv = dict(os.environ)
 _hgenv.update(
-    {'HGPLAIN': '1',}
+    {
+        'HGPLAIN': '1',
+    }
 )
 
 _HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
--- a/contrib/synthrepo.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/synthrepo.py	Fri Nov 27 17:03:29 2020 -0500
@@ -122,7 +122,7 @@
     optionalrepo=True,
 )
 def analyze(ui, repo, *revs, **opts):
-    '''create a simple model of a repository to use for later synthesis
+    """create a simple model of a repository to use for later synthesis
 
     This command examines every changeset in the given range (or all
     of history if none are specified) and creates a simple statistical
@@ -133,7 +133,7 @@
     :hg:`synthesize` to create or augment a repository with synthetic
     commits that have a structure that is statistically similar to the
     analyzed repository.
-    '''
+    """
     root = repo.root
     if not root.endswith(os.path.sep):
         root += os.path.sep
@@ -281,7 +281,7 @@
     _('hg synthesize [OPTION].. DESCFILE'),
 )
 def synthesize(ui, repo, descpath, **opts):
-    '''synthesize commits based on a model of an existing repository
+    """synthesize commits based on a model of an existing repository
 
     The model must have been generated by :hg:`analyze`. Commits will
     be generated randomly according to the probabilities described in
@@ -293,7 +293,7 @@
     names, words will be chosen randomly from a dictionary that is
     presumed to contain one word per line. Use --dict to specify the
     path to an alternate dictionary to use.
-    '''
+    """
     try:
         fp = hg.openpath(ui, descpath)
     except Exception as err:
@@ -542,12 +542,12 @@
     replacements = {'': ''}
 
     def rename(dirpath):
-        '''Recursively rename the directory and all path prefixes.
+        """Recursively rename the directory and all path prefixes.
 
         The mapping from path to renamed path is stored for all path prefixes
         as in dynamic programming, ensuring linear runtime and consistent
         renaming regardless of iteration order through the model.
-        '''
+        """
         if dirpath in replacements:
             return replacements[dirpath]
         head, _ = os.path.split(dirpath)
--- a/contrib/testparseutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/contrib/testparseutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -81,8 +81,7 @@
 
 
 class embeddedmatcher(object):  # pytype: disable=ignored-metaclass
-    """Base class to detect embedded code fragments in *.t test script
-    """
+    """Base class to detect embedded code fragments in *.t test script"""
 
     __metaclass__ = abc.ABCMeta
 
@@ -103,8 +102,7 @@
 
     @abc.abstractmethod
     def isinside(self, ctx, line):
-        """Examine whether line is inside embedded code, if not yet endsat
-        """
+        """Examine whether line is inside embedded code, if not yet endsat"""
 
     @abc.abstractmethod
     def ignores(self, ctx):
--- a/doc/hgmanpage.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/doc/hgmanpage.py	Fri Nov 27 17:03:29 2020 -0500
@@ -822,7 +822,10 @@
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append(
             '.IP %s %d\n'
-            % (next(self._list_char[-1]), self._list_char[-1].get_width(),)
+            % (
+                next(self._list_char[-1]),
+                self._list_char[-1].get_width(),
+            )
         )
 
     def depart_list_item(self, node):
--- a/hgext/acl.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/acl.py	Fri Nov 27 17:03:29 2020 -0500
@@ -239,25 +239,44 @@
 
 # deprecated config: acl.config
 configitem(
-    b'acl', b'config', default=None,
+    b'acl',
+    b'config',
+    default=None,
 )
 configitem(
-    b'acl.groups', b'.*', default=None, generic=True,
+    b'acl.groups',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny.branches', b'.*', default=None, generic=True,
+    b'acl.deny.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow.branches', b'.*', default=None, generic=True,
+    b'acl.allow.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny', b'.*', default=None, generic=True,
+    b'acl.deny',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow', b'.*', default=None, generic=True,
+    b'acl.allow',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl', b'sources', default=lambda: [b'serve'],
+    b'acl',
+    b'sources',
+    default=lambda: [b'serve'],
 )
 
 
--- a/hgext/automv.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/automv.py	Fri Nov 27 17:03:29 2020 -0500
@@ -42,7 +42,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'automv', b'similarity', default=95,
+    b'automv',
+    b'similarity',
+    default=95,
 )
 
 
--- a/hgext/blackbox.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/blackbox.py	Fri Nov 27 17:03:29 2020 -0500
@@ -72,19 +72,29 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'blackbox', b'dirty', default=False,
+    b'blackbox',
+    b'dirty',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxsize', default=b'1 MB',
+    b'blackbox',
+    b'maxsize',
+    default=b'1 MB',
 )
 configitem(
-    b'blackbox', b'logsource', default=False,
+    b'blackbox',
+    b'logsource',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxfiles', default=7,
+    b'blackbox',
+    b'maxfiles',
+    default=7,
 )
 configitem(
-    b'blackbox', b'track', default=lambda: [b'*'],
+    b'blackbox',
+    b'track',
+    default=lambda: [b'*'],
 )
 configitem(
     b'blackbox',
@@ -92,7 +102,9 @@
     default=lambda: [b'chgserver', b'cmdserver', b'extension'],
 )
 configitem(
-    b'blackbox', b'date-format', default=b'%Y/%m/%d %H:%M:%S',
+    b'blackbox',
+    b'date-format',
+    default=b'%Y/%m/%d %H:%M:%S',
 )
 
 _lastlogger = loggingutil.proxylogger()
@@ -189,14 +201,15 @@
 
 @command(
     b'blackbox',
-    [(b'l', b'limit', 10, _(b'the number of events to show')),],
+    [
+        (b'l', b'limit', 10, _(b'the number of events to show')),
+    ],
     _(b'hg blackbox [OPTION]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
     helpbasic=True,
 )
 def blackbox(ui, repo, *revs, **opts):
-    '''view the recent repository events
-    '''
+    """view the recent repository events"""
 
     if not repo.vfs.exists(b'blackbox.log'):
         return
--- a/hgext/bugzilla.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/bugzilla.py	Fri Nov 27 17:03:29 2020 -0500
@@ -325,22 +325,34 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'bugzilla', b'apikey', default=b'',
+    b'bugzilla',
+    b'apikey',
+    default=b'',
 )
 configitem(
-    b'bugzilla', b'bzdir', default=b'/var/www/html/bugzilla',
+    b'bugzilla',
+    b'bzdir',
+    default=b'/var/www/html/bugzilla',
 )
 configitem(
-    b'bugzilla', b'bzemail', default=None,
+    b'bugzilla',
+    b'bzemail',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'bzurl', default=b'http://localhost/bugzilla/',
+    b'bugzilla',
+    b'bzurl',
+    default=b'http://localhost/bugzilla/',
 )
 configitem(
-    b'bugzilla', b'bzuser', default=None,
+    b'bugzilla',
+    b'bzuser',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'db', default=b'bugs',
+    b'bugzilla',
+    b'db',
+    default=b'bugs',
 )
 configitem(
     b'bugzilla',
@@ -353,19 +365,29 @@
     ),
 )
 configitem(
-    b'bugzilla', b'fixresolution', default=b'FIXED',
+    b'bugzilla',
+    b'fixresolution',
+    default=b'FIXED',
 )
 configitem(
-    b'bugzilla', b'fixstatus', default=b'RESOLVED',
+    b'bugzilla',
+    b'fixstatus',
+    default=b'RESOLVED',
 )
 configitem(
-    b'bugzilla', b'host', default=b'localhost',
+    b'bugzilla',
+    b'host',
+    default=b'localhost',
 )
 configitem(
-    b'bugzilla', b'notify', default=configitem.dynamicdefault,
+    b'bugzilla',
+    b'notify',
+    default=configitem.dynamicdefault,
 )
 configitem(
-    b'bugzilla', b'password', default=None,
+    b'bugzilla',
+    b'password',
+    default=None,
 )
 configitem(
     b'bugzilla',
@@ -377,25 +399,39 @@
     ),
 )
 configitem(
-    b'bugzilla', b'strip', default=0,
+    b'bugzilla',
+    b'strip',
+    default=0,
 )
 configitem(
-    b'bugzilla', b'style', default=None,
+    b'bugzilla',
+    b'style',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'template', default=None,
+    b'bugzilla',
+    b'template',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'timeout', default=5,
+    b'bugzilla',
+    b'timeout',
+    default=5,
 )
 configitem(
-    b'bugzilla', b'user', default=b'bugs',
+    b'bugzilla',
+    b'user',
+    default=b'bugs',
 )
 configitem(
-    b'bugzilla', b'usermap', default=None,
+    b'bugzilla',
+    b'usermap',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'version', default=None,
+    b'bugzilla',
+    b'version',
+    default=None,
 )
 
 
@@ -430,29 +466,29 @@
         '''remove bug IDs where node occurs in comment text from bugs.'''
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
 
 
 # Bugzilla via direct access to MySQL database.
 class bzmysql(bzaccess):
-    '''Support for direct MySQL access to Bugzilla.
+    """Support for direct MySQL access to Bugzilla.
 
     The earliest Bugzilla version this is tested with is version 2.16.
 
     If your Bugzilla is version 3.4 or above, you are strongly
     recommended to use the XMLRPC access method instead.
-    '''
+    """
 
     @staticmethod
     def sql_buglist(ids):
@@ -581,9 +617,9 @@
             return userid
 
     def get_bugzilla_user(self, committer):
-        '''See if committer is a registered bugzilla user. Return
+        """See if committer is a registered bugzilla user. Return
         bugzilla username and userid if so. If not, return default
-        bugzilla username and userid.'''
+        bugzilla username and userid."""
         user = self.map_committer(committer)
         try:
             userid = self.get_user_id(user)
@@ -604,10 +640,10 @@
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update bug state with comment text.
+        """update bug state with comment text.
 
         Try adding comment as committer of changeset, otherwise as
-        default bugzilla user.'''
+        default bugzilla user."""
         if len(newstate) > 0:
             self.ui.warn(_(b"Bugzilla/MySQL cannot update bug state\n"))
 
@@ -869,7 +905,7 @@
             return b"@%s = %s" % (fieldname, pycompat.bytestr(value))
 
     def send_bug_modify_email(self, bugid, commands, comment, committer):
-        '''send modification message to Bugzilla bug via email.
+        """send modification message to Bugzilla bug via email.
 
         The message format is documented in the Bugzilla email_in.pl
         specification. commands is a list of command lines, comment is the
@@ -878,7 +914,7 @@
         To stop users from crafting commit comments with
         Bugzilla commands, specify the bug ID via the message body, rather
         than the subject line, and leave a blank line after it.
-        '''
+        """
         user = self.map_committer(committer)
         matches = self.bzproxy.User.get(
             {b'match': [user], b'token': self.bztoken}
@@ -1016,11 +1052,11 @@
                 del bugs[bugid]
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
         bugmod = {}
         if b'hours' in newstate:
             bugmod[b'work_time'] = newstate[b'hours']
@@ -1050,11 +1086,11 @@
             self.ui.debug(b'added comment to bug %s\n' % bugid)
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
         pass
 
 
@@ -1092,12 +1128,12 @@
         self.split_re = re.compile(br'\D+')
 
     def find_bugs(self, ctx):
-        '''return bugs dictionary created from commit comment.
+        """return bugs dictionary created from commit comment.
 
         Extract bug info from changeset comments. Filter out any that are
         not known to Bugzilla, and any that already have a reference to
         the given changeset in their comments.
-        '''
+        """
         start = 0
         bugs = {}
         bugmatch = self.bug_re.search(ctx.description(), start)
@@ -1152,8 +1188,8 @@
         '''update bugzilla bug with reference to changeset.'''
 
         def webroot(root):
-            '''strip leading prefix of repo root and turn into
-            url-safe path.'''
+            """strip leading prefix of repo root and turn into
+            url-safe path."""
             count = int(self.ui.config(b'bugzilla', b'strip'))
             root = util.pconvert(root)
             while count > 0:
@@ -1195,9 +1231,9 @@
 
 
 def hook(ui, repo, hooktype, node=None, **kwargs):
-    '''add comment to bugzilla for each changeset that refers to a
+    """add comment to bugzilla for each changeset that refers to a
     bugzilla bug id. only add a comment once per bug, so same change
-    seen multiple times does not fill bug with duplicate data.'''
+    seen multiple times does not fill bug with duplicate data."""
     if node is None:
         raise error.Abort(
             _(b'hook type %s does not pass a changeset id') % hooktype
--- a/hgext/churn.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/churn.py	Fri Nov 27 17:03:29 2020 -0500
@@ -156,7 +156,7 @@
     inferrepo=True,
 )
 def churn(ui, repo, *pats, **opts):
-    '''histogram of changes to the repository
+    """histogram of changes to the repository
 
     This command will display a histogram representing the number
     of changed lines or revisions, grouped according to the given
@@ -193,7 +193,7 @@
     Such a file may be specified with the --aliases option, otherwise
     a .hgchurn file will be looked for in the working directory root.
     Aliases will be split from the rightmost "=".
-    '''
+    """
 
     def pad(s, l):
         return s + b" " * (l - encoding.colwidth(s))
--- a/hgext/convert/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -536,7 +536,7 @@
     norepo=True,
 )
 def debugcvsps(ui, *args, **opts):
-    '''create changeset information from CVS
+    """create changeset information from CVS
 
     This command is intended as a debugging tool for the CVS to
     Mercurial converter, and can be used as a direct replacement for
@@ -545,7 +545,7 @@
     Hg debugcvsps reads the CVS rlog for current directory (or any
     named directory) in the CVS repository, and converts the log to a
     series of changesets based on matching commit log entries and
-    dates.'''
+    dates."""
     return cvsps.debugcvsps(ui, *args, **opts)
 
 
--- a/hgext/convert/bzr.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/bzr.py	Fri Nov 27 17:03:29 2020 -0500
@@ -21,7 +21,11 @@
 
 # these do not work with demandimport, blacklist
 demandimport.IGNORES.update(
-    [b'bzrlib.transactions', b'bzrlib.urlutils', b'ElementPath',]
+    [
+        b'bzrlib.transactions',
+        b'bzrlib.urlutils',
+        b'ElementPath',
+    ]
 )
 
 try:
--- a/hgext/convert/common.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/common.py	Fri Nov 27 17:03:29 2020 -0500
@@ -172,8 +172,8 @@
         self.encoding = b'utf-8'
 
     def checkhexformat(self, revstr, mapname=b'splicemap'):
-        """ fails if revstr is not a 40 byte hex. mercurial and git both uses
-            such format for their revision numbering
+        """fails if revstr is not a 40 byte hex. mercurial and git both uses
+        such format for their revision numbering
         """
         if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
             raise error.Abort(
@@ -283,8 +283,7 @@
         return False
 
     def hasnativeclose(self):
-        """Return true if this source has ability to close branch.
-        """
+        """Return true if this source has ability to close branch."""
         return False
 
     def lookuprev(self, rev):
@@ -303,8 +302,8 @@
 
     def checkrevformat(self, revstr, mapname=b'splicemap'):
         """revstr is a string that describes a revision in the given
-           source control system.  Return true if revstr has correct
-           format.
+        source control system.  Return true if revstr has correct
+        format.
         """
         return True
 
--- a/hgext/convert/convcmd.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/convcmd.py	Fri Nov 27 17:03:29 2020 -0500
@@ -96,7 +96,7 @@
 
 
 def mapbranch(branch, branchmap):
-    '''
+    """
     >>> bmap = {b'default': b'branch1'}
     >>> for i in [b'', None]:
     ...     mapbranch(i, bmap)
@@ -115,7 +115,7 @@
     'branch4'
     'branch4'
     'branch5'
-    '''
+    """
     # If branch is None or empty, this commit is coming from the source
     # repository's default branch and destined for the default branch in the
     # destination repository. For such commits, using a literal "default"
@@ -228,14 +228,14 @@
         self.branchmap = mapfile(ui, opts.get(b'branchmap'))
 
     def parsesplicemap(self, path):
-        """ check and validate the splicemap format and
-            return a child/parents dictionary.
-            Format checking has two parts.
-            1. generic format which is same across all source types
-            2. specific format checking which may be different for
-               different source type.  This logic is implemented in
-               checkrevformat function in source files like
-               hg.py, subversion.py etc.
+        """check and validate the splicemap format and
+        return a child/parents dictionary.
+        Format checking has two parts.
+        1. generic format which is same across all source types
+        2. specific format checking which may be different for
+           different source type.  This logic is implemented in
+           checkrevformat function in source files like
+           hg.py, subversion.py etc.
         """
 
         if not path:
@@ -275,8 +275,8 @@
         return m
 
     def walktree(self, heads):
-        '''Return a mapping that identifies the uncommitted parents of every
-        uncommitted changeset.'''
+        """Return a mapping that identifies the uncommitted parents of every
+        uncommitted changeset."""
         visit = list(heads)
         known = set()
         parents = {}
@@ -332,8 +332,8 @@
             parents[c] = pc
 
     def toposort(self, parents, sortmode):
-        '''Return an ordering such that every uncommitted changeset is
-        preceded by all its uncommitted ancestors.'''
+        """Return an ordering such that every uncommitted changeset is
+        preceded by all its uncommitted ancestors."""
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
--- a/hgext/convert/cvsps.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/cvsps.py	Fri Nov 27 17:03:29 2020 -0500
@@ -29,25 +29,25 @@
 
 
 class logentry(object):
-    '''Class logentry has the following attributes:
-        .author    - author name as CVS knows it
-        .branch    - name of branch this revision is on
-        .branches  - revision tuple of branches starting at this revision
-        .comment   - commit message
-        .commitid  - CVS commitid or None
-        .date      - the commit date as a (time, tz) tuple
-        .dead      - true if file revision is dead
-        .file      - Name of file
-        .lines     - a tuple (+lines, -lines) or None
-        .parent    - Previous revision of this entry
-        .rcs       - name of file as returned from CVS
-        .revision  - revision number as tuple
-        .tags      - list of tags on the file
-        .synthetic - is this a synthetic "file ... added on ..." revision?
-        .mergepoint - the branch that has been merged from (if present in
-                      rlog output) or None
-        .branchpoints - the branches that start at the current entry or empty
-    '''
+    """Class logentry has the following attributes:
+    .author    - author name as CVS knows it
+    .branch    - name of branch this revision is on
+    .branches  - revision tuple of branches starting at this revision
+    .comment   - commit message
+    .commitid  - CVS commitid or None
+    .date      - the commit date as a (time, tz) tuple
+    .dead      - true if file revision is dead
+    .file      - Name of file
+    .lines     - a tuple (+lines, -lines) or None
+    .parent    - Previous revision of this entry
+    .rcs       - name of file as returned from CVS
+    .revision  - revision number as tuple
+    .tags      - list of tags on the file
+    .synthetic - is this a synthetic "file ... added on ..." revision?
+    .mergepoint - the branch that has been merged from (if present in
+                  rlog output) or None
+    .branchpoints - the branches that start at the current entry or empty
+    """
 
     def __init__(self, **entries):
         self.synthetic = False
@@ -580,20 +580,20 @@
 
 
 class changeset(object):
-    '''Class changeset has the following attributes:
-        .id        - integer identifying this changeset (list index)
-        .author    - author name as CVS knows it
-        .branch    - name of branch this changeset is on, or None
-        .comment   - commit message
-        .commitid  - CVS commitid or None
-        .date      - the commit date as a (time,tz) tuple
-        .entries   - list of logentry objects in this changeset
-        .parents   - list of one or two parent changesets
-        .tags      - list of tags on this changeset
-        .synthetic - from synthetic revision "file ... added on branch ..."
-        .mergepoint- the branch that has been merged from or None
-        .branchpoints- the branches that start at the current entry or empty
-    '''
+    """Class changeset has the following attributes:
+    .id        - integer identifying this changeset (list index)
+    .author    - author name as CVS knows it
+    .branch    - name of branch this changeset is on, or None
+    .comment   - commit message
+    .commitid  - CVS commitid or None
+    .date      - the commit date as a (time,tz) tuple
+    .entries   - list of logentry objects in this changeset
+    .parents   - list of one or two parent changesets
+    .tags      - list of tags on this changeset
+    .synthetic - from synthetic revision "file ... added on branch ..."
+    .mergepoint- the branch that has been merged from or None
+    .branchpoints- the branches that start at the current entry or empty
+    """
 
     def __init__(self, **entries):
         self.id = None
@@ -945,10 +945,10 @@
 
 
 def debugcvsps(ui, *args, **opts):
-    '''Read CVS rlog for current directory or named path in
+    """Read CVS rlog for current directory or named path in
     repository, and convert the log to changesets based on matching
     commit log entries and dates.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     if opts[b"new_cache"]:
         cache = b"write"
--- a/hgext/convert/filemap.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/convert/filemap.py	Fri Nov 27 17:03:29 2020 -0500
@@ -19,14 +19,14 @@
 
 
 def rpairs(path):
-    '''Yield tuples with path split at '/', starting with the full path.
+    """Yield tuples with path split at '/', starting with the full path.
     No leading, trailing or double '/', please.
     >>> for x in rpairs(b'foo/bar/baz'): print(x)
     ('foo/bar/baz', '')
     ('foo/bar', 'baz')
     ('foo', 'bar/baz')
     ('.', 'foo/bar/baz')
-    '''
+    """
     i = len(path)
     while i != -1:
         yield path[:i], path[i + 1 :]
@@ -35,17 +35,17 @@
 
 
 def normalize(path):
-    ''' We use posixpath.normpath to support cross-platform path format.
-    However, it doesn't handle None input. So we wrap it up. '''
+    """We use posixpath.normpath to support cross-platform path format.
+    However, it doesn't handle None input. So we wrap it up."""
     if path is None:
         return None
     return posixpath.normpath(path)
 
 
 class filemapper(object):
-    '''Map and filter filenames when importing.
+    """Map and filter filenames when importing.
     A name can be mapped to itself, a new name, or None (omit from new
-    repository).'''
+    repository)."""
 
     def __init__(self, ui, path=None):
         self.ui = ui
--- a/hgext/eol.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/eol.py	Fri Nov 27 17:03:29 2020 -0500
@@ -118,13 +118,19 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'eol', b'fix-trailing-newline', default=False,
+    b'eol',
+    b'fix-trailing-newline',
+    default=False,
 )
 configitem(
-    b'eol', b'native', default=pycompat.oslinesep,
+    b'eol',
+    b'native',
+    default=pycompat.oslinesep,
 )
 configitem(
-    b'eol', b'only-consistent', default=True,
+    b'eol',
+    b'only-consistent',
+    default=True,
 )
 
 # Matches a lone LF, i.e., one that is not part of CRLF.
--- a/hgext/extdiff.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/extdiff.py	Fri Nov 27 17:03:29 2020 -0500
@@ -118,19 +118,29 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'extdiff', br'opts\..*', default=b'', generic=True,
+    b'extdiff',
+    br'opts\..*',
+    default=b'',
+    generic=True,
 )
 
 configitem(
-    b'extdiff', br'gui\..*', generic=True,
+    b'extdiff',
+    br'gui\..*',
+    generic=True,
 )
 
 configitem(
-    b'diff-tools', br'.*\.diffargs$', default=None, generic=True,
+    b'diff-tools',
+    br'.*\.diffargs$',
+    default=None,
+    generic=True,
 )
 
 configitem(
-    b'diff-tools', br'.*\.gui$', generic=True,
+    b'diff-tools',
+    br'.*\.gui$',
+    generic=True,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -141,9 +151,9 @@
 
 
 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
-    '''snapshot files as of some revision
+    """snapshot files as of some revision
     if not using snapshot, -I/-X does not work and recursive diff
-    in tools like kdiff3 and meld displays too many files.'''
+    in tools like kdiff3 and meld displays too many files."""
     dirname = os.path.basename(repo.root)
     if dirname == b"":
         dirname = b"root"
@@ -230,9 +240,9 @@
 
 
 def _systembackground(cmd, environ=None, cwd=None):
-    ''' like 'procutil.system', but returns the Popen object directly
-        so we don't have to wait on it.
-    '''
+    """like 'procutil.system', but returns the Popen object directly
+    so we don't have to wait on it.
+    """
     env = procutil.shellenviron(environ)
     proc = subprocess.Popen(
         procutil.tonativestr(cmd),
@@ -530,13 +540,13 @@
 
 
 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
-    '''Do the actual diff:
+    """Do the actual diff:
 
     - copy to a temp structure if diffing 2 internal revisions
     - copy to a temp structure if diffing working revision with
       another one and more than 1 file is changed
     - just invoke the diff for a single file in the working dir
-    '''
+    """
 
     cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
     revs = opts.get(b'rev')
@@ -628,14 +638,16 @@
 
 @command(
     b'extdiff',
-    [(b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),]
+    [
+        (b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),
+    ]
     + extdiffopts,
     _(b'hg extdiff [OPT]... [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
 )
 def extdiff(ui, repo, *pats, **opts):
-    '''use external program to diff repository (or selected files)
+    """use external program to diff repository (or selected files)
 
     Show differences between revisions for the specified files, using
     an external program. The default program used is diff, with
@@ -664,7 +676,7 @@
 
     The --confirm option will prompt the user before each invocation of
     the external program. It is ignored if --per-file isn't specified.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     program = opts.get(b'program')
     option = opts.get(b'option')
--- a/hgext/factotum.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/factotum.py	Fri Nov 27 17:03:29 2020 -0500
@@ -70,13 +70,19 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'factotum', b'executable', default=b'/bin/auth/factotum',
+    b'factotum',
+    b'executable',
+    default=b'/bin/auth/factotum',
 )
 configitem(
-    b'factotum', b'mountpoint', default=b'/mnt/factotum',
+    b'factotum',
+    b'mountpoint',
+    default=b'/mnt/factotum',
 )
 configitem(
-    b'factotum', b'service', default=b'hg',
+    b'factotum',
+    b'service',
+    default=b'hg',
 )
 
 
--- a/hgext/fetch.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/fetch.py	Fri Nov 27 17:03:29 2020 -0500
@@ -54,7 +54,7 @@
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
 )
 def fetch(ui, repo, source=b'default', **opts):
-    '''pull changes from a remote repository, merge new changes if needed.
+    """pull changes from a remote repository, merge new changes if needed.
 
     This finds all changes from the repository at the specified path
     or URL and adds them to the local repository.
@@ -71,7 +71,7 @@
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
     Returns 0 on success.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
     date = opts.get(b'date')
--- a/hgext/fix.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/fix.py	Fri Nov 27 17:03:29 2020 -0500
@@ -372,7 +372,7 @@
 
 
 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
-    """"Constructs the list of files to be fixed at specific revisions
+    """ "Constructs the list of files to be fixed at specific revisions
 
     It is up to the caller how to consume the work items, and the only
     dependence between them is that replacement revisions must be committed in
--- a/hgext/fsmonitor/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/fsmonitor/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -154,25 +154,40 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fsmonitor', b'mode', default=b'on',
+    b'fsmonitor',
+    b'mode',
+    default=b'on',
 )
 configitem(
-    b'fsmonitor', b'walk_on_invalidate', default=False,
+    b'fsmonitor',
+    b'walk_on_invalidate',
+    default=False,
 )
 configitem(
-    b'fsmonitor', b'timeout', default=b'2',
+    b'fsmonitor',
+    b'timeout',
+    default=b'2',
 )
 configitem(
-    b'fsmonitor', b'blacklistusers', default=list,
+    b'fsmonitor',
+    b'blacklistusers',
+    default=list,
+)
+configitem(
+    b'fsmonitor',
+    b'watchman_exe',
+    default=b'watchman',
 )
 configitem(
-    b'fsmonitor', b'watchman_exe', default=b'watchman',
+    b'fsmonitor',
+    b'verbose',
+    default=True,
+    experimental=True,
 )
 configitem(
-    b'fsmonitor', b'verbose', default=True, experimental=True,
-)
-configitem(
-    b'experimental', b'fsmonitor.transaction_notify', default=False,
+    b'experimental',
+    b'fsmonitor.transaction_notify',
+    default=False,
 )
 
 # This extension is incompatible with the following blacklisted extensions
@@ -271,11 +286,11 @@
 
 
 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
-    '''Replacement for dirstate.walk, hooking into Watchman.
+    """Replacement for dirstate.walk, hooking into Watchman.
 
     Whenever full is False, ignored is False, and the Watchman client is
     available, use Watchman combined with saved state to possibly return only a
-    subset of files.'''
+    subset of files."""
 
     def bail(reason):
         self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
@@ -731,8 +746,8 @@
 
 
 def wrapsymlink(orig, source, link_name):
-    ''' if we create a dangling symlink, also touch the parent dir
-    to encourage fsevents notifications to work more correctly '''
+    """if we create a dangling symlink, also touch the parent dir
+    to encourage fsevents notifications to work more correctly"""
     try:
         return orig(source, link_name)
     finally:
@@ -743,13 +758,13 @@
 
 
 class state_update(object):
-    ''' This context manager is responsible for dispatching the state-enter
-        and state-leave signals to the watchman service. The enter and leave
-        methods can be invoked manually (for scenarios where context manager
-        semantics are not possible). If parameters oldnode and newnode are None,
-        they will be populated based on current working copy in enter and
-        leave, respectively. Similarly, if the distance is none, it will be
-        calculated based on the oldnode and newnode in the leave method.'''
+    """This context manager is responsible for dispatching the state-enter
+    and state-leave signals to the watchman service. The enter and leave
+    methods can be invoked manually (for scenarios where context manager
+    semantics are not possible). If parameters oldnode and newnode are None,
+    they will be populated based on current working copy in enter and
+    leave, respectively. Similarly, if the distance is none, it will be
+    calculated based on the oldnode and newnode in the leave method."""
 
     def __init__(
         self,
--- a/hgext/fsmonitor/pywatchman/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/fsmonitor/pywatchman/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -282,11 +282,11 @@
 
 class SocketTimeout(WatchmanError):
     """A specialized exception raised for socket timeouts during communication to/from watchman.
-       This makes it easier to implement non-blocking loops as callers can easily distinguish
-       between a routine timeout and an actual error condition.
+    This makes it easier to implement non-blocking loops as callers can easily distinguish
+    between a routine timeout and an actual error condition.
 
-       Note that catching WatchmanError will also catch this as it is a super-class, so backwards
-       compatibility in exception handling is preserved.
+    Note that catching WatchmanError will also catch this as it is a super-class, so backwards
+    compatibility in exception handling is preserved.
     """
 
 
@@ -323,7 +323,7 @@
         pass
 
     def readLine(self):
-        """ read a line
+        """read a line
         Maintains its own buffer, callers of the transport should not mix
         calls to readBytes and readLine.
         """
@@ -409,7 +409,7 @@
 
 
 def _get_overlapped_result_ex_impl(pipe, olap, nbytes, millis, alertable):
-    """ Windows 7 and earlier does not support GetOverlappedResultEx. The
+    """Windows 7 and earlier does not support GetOverlappedResultEx. The
     alternative is to use GetOverlappedResult and wait for read or write
     operation to complete. This is done by using CreateEvent and
     WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
@@ -510,9 +510,9 @@
         self.timeout = int(value * 1000)
 
     def readBytes(self, size):
-        """ A read can block for an unbounded amount of time, even if the
-            kernel reports that the pipe handle is signalled, so we need to
-            always perform our reads asynchronously
+        """A read can block for an unbounded amount of time, even if the
+        kernel reports that the pipe handle is signalled, so we need to
+        always perform our reads asynchronously
         """
 
         # try to satisfy the read from any buffered data
@@ -627,7 +627,7 @@
 
 
 class CLIProcessTransport(Transport):
-    """ open a pipe to the cli to talk to the service
+    """open a pipe to the cli to talk to the service
     This is intended to be used only in the test harness!
 
     The CLI is an oddball because we only support JSON input
@@ -739,8 +739,8 @@
 
 
 class ImmutableBserCodec(BserCodec):
-    """ use the BSER encoding, decoding values using the newer
-        immutable object support """
+    """use the BSER encoding, decoding values using the newer
+    immutable object support"""
 
     def _loads(self, response):
         return bser.loads(
@@ -817,8 +817,8 @@
 
 
 class ImmutableBser2Codec(Bser2WithFallbackCodec, ImmutableBserCodec):
-    """ use the BSER encoding, decoding values using the newer
-        immutable object support """
+    """use the BSER encoding, decoding values using the newer
+    immutable object support"""
 
     pass
 
@@ -1050,7 +1050,7 @@
             self.sendConn = None
 
     def receive(self):
-        """ receive the next PDU from the watchman service
+        """receive the next PDU from the watchman service
 
         If the client has activated subscriptions or logs then
         this PDU may be a unilateral PDU sent by the service to
@@ -1098,7 +1098,7 @@
         return False
 
     def getLog(self, remove=True):
-        """ Retrieve buffered log data
+        """Retrieve buffered log data
 
         If remove is true the data will be removed from the buffer.
         Otherwise it will be left in the buffer
@@ -1109,7 +1109,7 @@
         return res
 
     def getSubscription(self, name, remove=True, root=None):
-        """ Retrieve the data associated with a named subscription
+        """Retrieve the data associated with a named subscription
 
         If remove is True (the default), the subscription data is removed
         from the buffer.  Otherwise the data is returned but left in
@@ -1144,7 +1144,7 @@
         return sub
 
     def query(self, *args):
-        """ Send a query to the watchman service and return the response
+        """Send a query to the watchman service and return the response
 
         This call will block until the response is returned.
         If any unilateral responses are sent by the service in between
--- a/hgext/fsmonitor/pywatchman/capabilities.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/fsmonitor/pywatchman/capabilities.py	Fri Nov 27 17:03:29 2020 -0500
@@ -55,8 +55,8 @@
 
 
 def synthesize(vers, opts):
-    """ Synthesize a capability enabled version response
-        This is a very limited emulation for relatively recent feature sets
+    """Synthesize a capability enabled version response
+    This is a very limited emulation for relatively recent feature sets
     """
     parsed_version = parse_version(vers["version"])
     vers["capabilities"] = {}
--- a/hgext/git/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/git/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -33,7 +33,9 @@
 configitem = registrar.configitem(configtable)
 # git.log-index-cache-miss: internal knob for testing
 configitem(
-    b"git", b"log-index-cache-miss", default=False,
+    b"git",
+    b"log-index-cache-miss",
+    default=False,
 )
 
 # TODO: extract an interface for this in core
@@ -224,8 +226,7 @@
         return bname
 
     def applychanges(self, repo, tr, changes):
-        """Apply a list of changes to bookmarks
-        """
+        """Apply a list of changes to bookmarks"""
         # TODO: this should respect transactions, but that's going to
         # require enlarging the gitbmstore to know how to do in-memory
         # temporary writes and read those back prior to transaction
--- a/hgext/git/manifest.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/git/manifest.py	Fri Nov 27 17:03:29 2020 -0500
@@ -127,7 +127,7 @@
         return dir in self._dirs
 
     def diff(self, other, match=lambda x: True, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         The result is returned as a dict with filename as key and
         values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
@@ -135,7 +135,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         result = {}
 
         def _iterativediff(t1, t2, subdir):
--- a/hgext/githelp.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/githelp.py	Fri Nov 27 17:03:29 2020 -0500
@@ -59,10 +59,10 @@
     helpbasic=True,
 )
 def githelp(ui, repo, *args, **kwargs):
-    '''suggests the Mercurial equivalent of the given git command
+    """suggests the Mercurial equivalent of the given git command
 
     Usage: hg githelp -- <git command>
-    '''
+    """
 
     if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
         raise error.Abort(
--- a/hgext/gpg.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/gpg.py	Fri Nov 27 17:03:29 2020 -0500
@@ -37,13 +37,20 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'gpg', b'cmd', default=b'gpg',
+    b'gpg',
+    b'cmd',
+    default=b'gpg',
 )
 configitem(
-    b'gpg', b'key', default=None,
+    b'gpg',
+    b'key',
+    default=None,
 )
 configitem(
-    b'gpg', b'.*', default=None, generic=True,
+    b'gpg',
+    b'.*',
+    default=None,
+    generic=True,
 )
 
 # Custom help category
@@ -78,7 +85,11 @@
             fp.close()
             gpgcmd = (
                 b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
-                % (self.path, sigfile, datafile,)
+                % (
+                    self.path,
+                    sigfile,
+                    datafile,
+                )
             )
             ret = procutil.filter(b"", gpgcmd)
         finally:
--- a/hgext/hgk.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/hgk.py	Fri Nov 27 17:03:29 2020 -0500
@@ -65,7 +65,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'hgk', b'path', default=b'hgk',
+    b'hgk',
+    b'path',
+    default=b'hgk',
 )
 
 
--- a/hgext/histedit.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/histedit.py	Fri Nov 27 17:03:29 2020 -0500
@@ -247,22 +247,34 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 configitem(
-    b'experimental', b'histedit.autoverb', default=False,
+    b'experimental',
+    b'histedit.autoverb',
+    default=False,
 )
 configitem(
-    b'histedit', b'defaultrev', default=None,
+    b'histedit',
+    b'defaultrev',
+    default=None,
 )
 configitem(
-    b'histedit', b'dropmissing', default=False,
+    b'histedit',
+    b'dropmissing',
+    default=False,
 )
 configitem(
-    b'histedit', b'linelen', default=80,
+    b'histedit',
+    b'linelen',
+    default=80,
 )
 configitem(
-    b'histedit', b'singletransaction', default=False,
+    b'histedit',
+    b'singletransaction',
+    default=False,
 )
 configitem(
-    b'ui', b'interface.histedit', default=None,
+    b'ui',
+    b'interface.histedit',
+    default=None,
 )
 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
 
@@ -280,7 +292,7 @@
 
 
 def geteditcomment(ui, first, last):
-    """ construct the editor comment
+    """construct the editor comment
     The comment includes::
      - an intro
      - sorted primary commands
@@ -477,8 +489,7 @@
 
     @classmethod
     def fromrule(cls, state, rule):
-        """Parses the given rule, returning an instance of the histeditaction.
-        """
+        """Parses the given rule, returning an instance of the histeditaction."""
         ruleid = rule.strip().split(b' ', 1)[0]
         # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
         # Check for validation of rule ids and get the rulehash
@@ -544,7 +555,7 @@
 
     def tostate(self):
         """Print an action in format used by histedit state files
-           (the first line is a verb, the remainder is the second)
+        (the first line is a verb, the remainder is the second)
         """
         return b"%s\n%s" % (self.verb, node.hex(self.node))
 
@@ -1178,8 +1189,8 @@
 
 # ============ EVENTS ===============
 def movecursor(state, oldpos, newpos):
-    '''Change the rule/changeset that the cursor is pointing to, regardless of
-    current mode (you can switch between patches from the view patch window).'''
+    """Change the rule/changeset that the cursor is pointing to, regardless of
+    current mode (you can switch between patches from the view patch window)."""
     state[b'pos'] = newpos
 
     mode, _ = state[b'mode']
@@ -1256,8 +1267,8 @@
 
 
 def changeview(state, delta, unit):
-    '''Change the region of whatever is being viewed (a patch or the list of
-    changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
+    """Change the region of whatever is being viewed (a patch or the list of
+    changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
     mode, _ = state[b'mode']
     if mode != MODE_PATCH:
         return
@@ -1582,8 +1593,12 @@
         b'mode': (MODE_INIT, MODE_INIT),
         b'page_height': None,
         b'modes': {
-            MODE_RULES: {b'line_offset': 0,},
-            MODE_PATCH: {b'line_offset': 0,},
+            MODE_RULES: {
+                b'line_offset': 0,
+            },
+            MODE_PATCH: {
+                b'line_offset': 0,
+            },
         },
         b'repo': repo,
     }
--- a/hgext/hooklib/changeset_obsoleted.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/hooklib/changeset_obsoleted.py	Fri Nov 27 17:03:29 2020 -0500
@@ -40,10 +40,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify_obsoleted', b'domain', default=None,
+    b'notify_obsoleted',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify_obsoleted', b'messageidseed', default=None,
+    b'notify_obsoleted',
+    b'messageidseed',
+    default=None,
 )
 configitem(
     b'notify_obsoleted',
--- a/hgext/hooklib/changeset_published.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/hooklib/changeset_published.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,10 +39,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify_published', b'domain', default=None,
+    b'notify_published',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify_published', b'messageidseed', default=None,
+    b'notify_published',
+    b'messageidseed',
+    default=None,
 )
 configitem(
     b'notify_published',
--- a/hgext/infinitepush/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/infinitepush/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -154,37 +154,59 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'infinitepush', b'server', default=False,
+    b'infinitepush',
+    b'server',
+    default=False,
 )
 configitem(
-    b'infinitepush', b'storetype', default=b'',
+    b'infinitepush',
+    b'storetype',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'indextype', default=b'',
+    b'infinitepush',
+    b'indextype',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'indexpath', default=b'',
+    b'infinitepush',
+    b'indexpath',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'storeallparts', default=False,
+    b'infinitepush',
+    b'storeallparts',
+    default=False,
 )
 configitem(
-    b'infinitepush', b'reponame', default=b'',
+    b'infinitepush',
+    b'reponame',
+    default=b'',
 )
 configitem(
-    b'scratchbranch', b'storepath', default=b'',
+    b'scratchbranch',
+    b'storepath',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'branchpattern', default=b'',
+    b'infinitepush',
+    b'branchpattern',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'pushtobundlestore', default=False,
+    b'infinitepush',
+    b'pushtobundlestore',
+    default=False,
 )
 configitem(
-    b'experimental', b'server-bundlestore-bookmark', default=b'',
+    b'experimental',
+    b'server-bundlestore-bookmark',
+    default=b'',
 )
 configitem(
-    b'experimental', b'infinitepush-scratchpush', default=False,
+    b'experimental',
+    b'infinitepush-scratchpush',
+    default=False,
 )
 
 experimental = b'experimental'
@@ -249,13 +271,13 @@
 
 
 def _tryhoist(ui, remotebookmark):
-    '''returns a bookmarks with hoisted part removed
+    """returns a bookmarks with hoisted part removed
 
     The remotenames extension has a 'hoist' config that allows using remote
     bookmarks without specifying the remote path. For example, 'hg update master'
     works as well as 'hg update remote/master'. We want to allow the same in
     infinitepush.
-    '''
+    """
 
     if common.isremotebooksenabled(ui):
         hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
@@ -427,11 +449,11 @@
 
 
 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
-    '''Tells remotefilelog to include all changed files to the changegroup
+    """Tells remotefilelog to include all changed files to the changegroup
 
     By default remotefilelog doesn't include file content to the changegroup.
     But we need to include it if we are fetching from bundlestore.
-    '''
+    """
     changedfiles = set()
     cl = bundlerepo.changelog
     for r in bundlerevs:
@@ -457,11 +479,11 @@
 
 
 def _rebundle(bundlerepo, bundleroots, unknownhead):
-    '''
+    """
     The bundle may include more revisions than the user requested. For example,
     if the user asks for a revision, the bundle may also contain its descendants.
     This function filters out all revisions that the user did not request.
-    '''
+    """
     parts = []
 
     version = b'02'
@@ -499,10 +521,10 @@
 
 
 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
-    '''generates bundle that will be send to the user
+    """generates bundle that will be send to the user
 
     returns tuple with raw bundle string and bundle type
-    '''
+    """
     parts = []
     if not _needsrebundling(head, bundlerepo):
         with util.posixfile(bundlefile, b"rb") as f:
@@ -1022,7 +1044,12 @@
                         )
                         rpart.addparam(b'return', b'1', mandatory=False)
 
-            op.records.add(part.type, {b'return': 1,})
+            op.records.add(
+                part.type,
+                {
+                    b'return': 1,
+                },
+            )
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1112,7 +1139,12 @@
                     bundle2._processpart(op, part)
 
             if handleallparts:
-                op.records.add(part.type, {b'return': 1,})
+                op.records.add(
+                    part.type,
+                    {
+                        b'return': 1,
+                    },
+                )
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1284,11 +1316,11 @@
 
 
 def bundle2pushkey(orig, op, part):
-    '''Wrapper of bundle2.handlepushkey()
+    """Wrapper of bundle2.handlepushkey()
 
     The only goal is to skip calling the original function if flag is set.
     It's set if infinitepush push is happening.
-    '''
+    """
     if op.records[scratchbranchparttype + b'_skippushkey']:
         if op.reply is not None:
             rpart = op.reply.newpart(b'reply:pushkey')
@@ -1300,11 +1332,11 @@
 
 
 def bundle2handlephases(orig, op, part):
-    '''Wrapper of bundle2.handlephases()
+    """Wrapper of bundle2.handlephases()
 
     The only goal is to skip calling the original function if flag is set.
     It's set if infinitepush push is happening.
-    '''
+    """
 
     if op.records[scratchbranchparttype + b'_skipphaseheads']:
         return
@@ -1313,11 +1345,11 @@
 
 
 def _asyncsavemetadata(root, nodes):
-    '''starts a separate process that fills metadata for the nodes
+    """starts a separate process that fills metadata for the nodes
 
     This function creates a separate process and doesn't wait for its
     completion. This was done to avoid slowing down pushes
-    '''
+    """
 
     maxnodes = 50
     if len(nodes) > maxnodes:
--- a/hgext/infinitepush/bundleparts.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/infinitepush/bundleparts.py	Fri Nov 27 17:03:29 2020 -0500
@@ -90,11 +90,11 @@
 
 
 def _handlelfs(repo, missing):
-    '''Special case if lfs is enabled
+    """Special case if lfs is enabled
 
     If lfs is enabled then we need to call prepush hook
     to make sure large files are uploaded to lfs
-    '''
+    """
     try:
         lfsmod = extensions.find(b'lfs')
         lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
--- a/hgext/infinitepush/indexapi.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/infinitepush/indexapi.py	Fri Nov 27 17:03:29 2020 -0500
@@ -47,8 +47,7 @@
         raise NotImplementedError()
 
     def deletebookmarks(self, patterns):
-        """Accepts list of bookmarks and deletes them.
-        """
+        """Accepts list of bookmarks and deletes them."""
         raise NotImplementedError()
 
     def getbundle(self, node):
--- a/hgext/infinitepush/sqlindexapi.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/infinitepush/sqlindexapi.py	Fri Nov 27 17:03:29 2020 -0500
@@ -28,9 +28,9 @@
 
 
 class sqlindexapi(indexapi.indexapi):
-    '''
+    """
     Sql backend for infinitepush index. See schema.sql
-    '''
+    """
 
     def __init__(
         self,
--- a/hgext/keyword.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/keyword.py	Fri Nov 27 17:03:29 2020 -0500
@@ -158,13 +158,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'keywordset', b'svn', default=False,
+    b'keywordset',
+    b'svn',
+    default=False,
 )
 # date like in cvs' $Date
 @templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
-    '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
-    '''
+    """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
     dateformat = b'%Y/%m/%d %H:%M:%S'
     return dateutil.datestr((date[0], 0), dateformat)
 
@@ -172,18 +173,18 @@
 # date like in svn's $Date
 @templatefilter(b'svnisodate', intype=templateutil.date)
 def svnisodate(date):
-    '''Date. Returns a date in this format: "2009-08-18 13:00:13
+    """Date. Returns a date in this format: "2009-08-18 13:00:13
     +0200 (Tue, 18 Aug 2009)".
-    '''
+    """
     return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
 
 
 # date like in svn's $Id
 @templatefilter(b'svnutcdate', intype=templateutil.date)
 def svnutcdate(date):
-    '''Date. Returns a UTC-date in this format: "2009-08-18
+    """Date. Returns a UTC-date in this format: "2009-08-18
     11:00:13Z".
-    '''
+    """
     dateformat = b'%Y-%m-%d %H:%M:%SZ'
     return dateutil.datestr((date[0], 0), dateformat)
 
@@ -221,25 +222,25 @@
 
 
 def _shrinktext(text, subfunc):
-    '''Helper for keyword expansion removal in text.
-    Depending on subfunc also returns number of substitutions.'''
+    """Helper for keyword expansion removal in text.
+    Depending on subfunc also returns number of substitutions."""
     return subfunc(br'$\1$', text)
 
 
 def _preselect(wstatus, changed):
-    '''Retrieves modified and added files from a working directory state
+    """Retrieves modified and added files from a working directory state
     and returns the subset of each contained in given changed files
-    retrieved from a change context.'''
+    retrieved from a change context."""
     modified = [f for f in wstatus.modified if f in changed]
     added = [f for f in wstatus.added if f in changed]
     return modified, added
 
 
 class kwtemplater(object):
-    '''
+    """
     Sets up keyword templates, corresponding keyword regex, and
     provides keyword substitution functions.
-    '''
+    """
 
     def __init__(self, ui, repo, inc, exc):
         self.ui = ui
@@ -304,8 +305,8 @@
         return data
 
     def iskwfile(self, cand, ctx):
-        '''Returns subset of candidates which are configured for keyword
-        expansion but are not symbolic links.'''
+        """Returns subset of candidates which are configured for keyword
+        expansion but are not symbolic links."""
         return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
@@ -374,18 +375,18 @@
         return lines
 
     def wread(self, fname, data):
-        '''If in restricted mode returns data read from wdir with
-        keyword substitutions removed.'''
+        """If in restricted mode returns data read from wdir with
+        keyword substitutions removed."""
         if self.restrict:
             return self.shrink(fname, data)
         return data
 
 
 class kwfilelog(filelog.filelog):
-    '''
+    """
     Subclass of filelog to hook into its read, add, cmp methods.
     Keywords are "stored" unexpanded, and processed on reading.
-    '''
+    """
 
     def __init__(self, opener, kwt, path):
         super(kwfilelog, self).__init__(opener, path)
@@ -411,8 +412,8 @@
 
 
 def _status(ui, repo, wctx, kwt, *pats, **opts):
-    '''Bails out if [keyword] configuration is not active.
-    Returns status of working directory.'''
+    """Bails out if [keyword] configuration is not active.
+    Returns status of working directory."""
     if kwt:
         opts = pycompat.byteskwargs(opts)
         return repo.status(
@@ -448,7 +449,7 @@
     optionalrepo=True,
 )
 def demo(ui, repo, *args, **opts):
-    '''print [keywordmaps] configuration and an expansion example
+    """print [keywordmaps] configuration and an expansion example
 
     Show current, custom, or default keyword template maps and their
     expansions.
@@ -459,7 +460,7 @@
     Use -d/--default to disable current configuration.
 
     See :hg:`help templates` for information on templates and filters.
-    '''
+    """
 
     def demoitems(section, items):
         ui.write(b'[%s]\n' % section)
@@ -547,12 +548,12 @@
     inferrepo=True,
 )
 def expand(ui, repo, *pats, **opts):
-    '''expand keywords in the working directory
+    """expand keywords in the working directory
 
     Run after (re)enabling keyword expansion.
 
     kwexpand refuses to run if given files contain local changes.
-    '''
+    """
     # 3rd argument sets expansion to True
     _kwfwrite(ui, repo, True, *pats, **opts)
 
@@ -569,7 +570,7 @@
     inferrepo=True,
 )
 def files(ui, repo, *pats, **opts):
-    '''show files configured for keyword expansion
+    """show files configured for keyword expansion
 
     List which files in the working directory are matched by the
     [keyword] configuration patterns.
@@ -588,7 +589,7 @@
       k = keyword expansion candidate (not tracked)
       I = ignored
       i = ignored (not tracked)
-    '''
+    """
     kwt = getattr(repo, '_keywordkwt', None)
     wctx = repo[None]
     status = _status(ui, repo, wctx, kwt, *pats, **opts)
@@ -634,12 +635,12 @@
     inferrepo=True,
 )
 def shrink(ui, repo, *pats, **opts):
-    '''revert expanded keywords in the working directory
+    """revert expanded keywords in the working directory
 
     Must be run before changing/disabling active keywords.
 
     kwshrink refuses to run if given files contain local changes.
-    '''
+    """
     # 3rd argument sets expansion to False
     _kwfwrite(ui, repo, False, *pats, **opts)
 
@@ -648,8 +649,8 @@
 
 
 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
-    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
-    rejects or conflicts due to expanded keywords in working dir.'''
+    """Monkeypatch/wrap patch.patchfile.__init__ to avoid
+    rejects or conflicts due to expanded keywords in working dir."""
     orig(self, ui, gp, backend, store, eolmode)
     kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
     if kwt:
@@ -702,7 +703,7 @@
 
 
 def kw_copy(orig, ui, repo, pats, opts, rename=False):
-    '''Wraps cmdutil.copy so that copy/rename destinations do not
+    """Wraps cmdutil.copy so that copy/rename destinations do not
     contain expanded keywords.
     Note that the source of a regular file destination may also be a
     symlink:
@@ -710,7 +711,7 @@
     cp sym x; hg cp -A sym x   -> x is file (maybe expanded keywords)
     For the latter we have to follow the symlink to find out whether its
     target is configured for expansion and we therefore must unexpand the
-    keywords in the destination.'''
+    keywords in the destination."""
     kwt = getattr(repo, '_keywordkwt', None)
     if kwt is None:
         return orig(ui, repo, pats, opts, rename)
@@ -722,9 +723,9 @@
         cwd = repo.getcwd()
 
         def haskwsource(dest):
-            '''Returns true if dest is a regular file and configured for
+            """Returns true if dest is a regular file and configured for
             expansion or a symlink which points to a file configured for
-            expansion. '''
+            expansion."""
             source = repo.dirstate.copied(dest)
             if b'l' in wctx.flags(source):
                 source = pathutil.canonpath(
@@ -785,12 +786,12 @@
 
 
 def uisetup(ui):
-    ''' Monkeypatches dispatch._parse to retrieve user command.
+    """Monkeypatches dispatch._parse to retrieve user command.
     Overrides file method to return kwfilelog instead of filelog
     if file matches user configuration.
     Wraps commit to overwrite configured files with updated
     keyword substitutions.
-    Monkeypatches patch and webcommands.'''
+    Monkeypatches patch and webcommands."""
 
     def kwdispatch_parse(orig, ui, args):
         '''Monkeypatch dispatch._parse to obtain running hg command.'''
--- a/hgext/largefiles/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -136,13 +136,19 @@
 eh.merge(proto.eh)
 
 eh.configitem(
-    b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
+    b'largefiles',
+    b'minsize',
+    default=eh.configitem.dynamicdefault,
 )
 eh.configitem(
-    b'largefiles', b'patterns', default=list,
+    b'largefiles',
+    b'patterns',
+    default=list,
 )
 eh.configitem(
-    b'largefiles', b'usercache', default=None,
+    b'largefiles',
+    b'usercache',
+    default=None,
 )
 
 cmdtable = eh.cmdtable
--- a/hgext/largefiles/basestore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/basestore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -17,8 +17,8 @@
 
 
 class StoreError(Exception):
-    '''Raised when there is a problem getting files from or putting
-    files to a central store.'''
+    """Raised when there is a problem getting files from or putting
+    files to a central store."""
 
     def __init__(self, filename, hash, url, detail):
         self.filename = filename
@@ -49,19 +49,19 @@
         raise NotImplementedError(b'abstract method')
 
     def exists(self, hashes):
-        '''Check to see if the store contains the given hashes. Given an
-        iterable of hashes it returns a mapping from hash to bool.'''
+        """Check to see if the store contains the given hashes. Given an
+        iterable of hashes it returns a mapping from hash to bool."""
         raise NotImplementedError(b'abstract method')
 
     def get(self, files):
-        '''Get the specified largefiles from the store and write to local
+        """Get the specified largefiles from the store and write to local
         files under repo.root.  files is a list of (filename, hash)
         tuples.  Return (success, missing), lists of files successfully
         downloaded and those not found in the store.  success is a list
         of (filename, hash) tuples; missing is a list of filenames that
         we could not get.  (The detailed error message will already have
         been presented to the user, so missing is just supplied as a
-        summary.)'''
+        summary.)"""
         success = []
         missing = []
         ui = self.ui
@@ -123,9 +123,9 @@
         return True
 
     def verify(self, revs, contents=False):
-        '''Verify the existence (and, optionally, contents) of every big
+        """Verify the existence (and, optionally, contents) of every big
         file revision referenced by every changeset in revs.
-        Return 0 if all is well, non-zero on any errors.'''
+        Return 0 if all is well, non-zero on any errors."""
 
         self.ui.status(
             _(b'searching %d changesets for largefiles\n') % len(revs)
@@ -163,17 +163,17 @@
         return int(failed)
 
     def _getfile(self, tmpfile, filename, hash):
-        '''Fetch one revision of one file from the store and write it
+        """Fetch one revision of one file from the store and write it
         to tmpfile.  Compute the hash of the file on-the-fly as it
         downloads and return the hash.  Close tmpfile.  Raise
         StoreError if unable to download the file (e.g. it does not
-        exist in the store).'''
+        exist in the store)."""
         raise NotImplementedError(b'abstract method')
 
     def _verifyfiles(self, contents, filestocheck):
-        '''Perform the actual verification of files in the store.
+        """Perform the actual verification of files in the store.
         'contents' controls verification of content hash.
         'filestocheck' is list of files to check.
         Returns _true_ if any problems are found!
-        '''
+        """
         raise NotImplementedError(b'abstract method')
--- a/hgext/largefiles/lfcommands.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/lfcommands.py	Fri Nov 27 17:03:29 2020 -0500
@@ -66,7 +66,7 @@
     inferrepo=True,
 )
 def lfconvert(ui, src, dest, *pats, **opts):
-    '''convert a normal repository to a largefiles repository
+    """convert a normal repository to a largefiles repository
 
     Convert repository SOURCE to a new repository DEST, identical to
     SOURCE except that certain files will be converted as largefiles:
@@ -82,7 +82,7 @@
     repository.
 
     Use --to-normal to convert largefiles back to normal files; after
-    this, the DEST repository can be used without largefiles at all.'''
+    this, the DEST repository can be used without largefiles at all."""
 
     opts = pycompat.byteskwargs(opts)
     if opts[b'to_normal']:
@@ -393,8 +393,8 @@
 
 
 def _islfile(file, ctx, matcher, size):
-    '''Return true if file should be considered a largefile, i.e.
-    matcher matches it or it is larger than size.'''
+    """Return true if file should be considered a largefile, i.e.
+    matcher matches it or it is larger than size."""
     # never store special .hg* files as largefiles
     if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
         return False
@@ -440,11 +440,11 @@
 
 
 def verifylfiles(ui, repo, all=False, contents=False):
-    '''Verify that every largefile revision in the current changeset
+    """Verify that every largefile revision in the current changeset
     exists in the central store.  With --contents, also verify that
     the contents of each local largefile file revision are correct (SHA-1 hash
     matches the revision ID).  With --all, check every changeset in
-    this repository.'''
+    this repository."""
     if all:
         revs = repo.revs(b'all()')
     else:
@@ -455,12 +455,12 @@
 
 
 def cachelfiles(ui, repo, node, filelist=None):
-    '''cachelfiles ensures that all largefiles needed by the specified revision
+    """cachelfiles ensures that all largefiles needed by the specified revision
     are present in the repository's largefile cache.
 
     returns a tuple (cached, missing).  cached is the list of files downloaded
     by this operation; missing is the list of files that were needed but could
-    not be found.'''
+    not be found."""
     lfiles = lfutil.listlfiles(repo, node)
     if filelist:
         lfiles = set(lfiles) & set(filelist)
@@ -502,11 +502,11 @@
 def updatelfiles(
     ui, repo, filelist=None, printmessage=None, normallookup=False
 ):
-    '''Update largefiles according to standins in the working directory
+    """Update largefiles according to standins in the working directory
 
     If ``printmessage`` is other than ``None``, it means "print (or
     ignore, for false) message forcibly".
-    '''
+    """
     statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
     with repo.wlock():
         lfdirstate = lfutil.openlfdirstate(ui, repo)
--- a/hgext/largefiles/lfutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/lfutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -80,10 +80,10 @@
 
 
 def usercachepath(ui, hash):
-    '''Return the correct location in the "global" largefiles cache for a file
+    """Return the correct location in the "global" largefiles cache for a file
     with the given hash.
     This cache is used for sharing of largefiles across repositories - both
-    to preserve download bandwidth and storage space.'''
+    to preserve download bandwidth and storage space."""
     return os.path.join(_usercachedir(ui), hash)
 
 
@@ -143,9 +143,9 @@
 
 
 def findfile(repo, hash):
-    '''Return store path of the largefile with the specified hash.
+    """Return store path of the largefile with the specified hash.
     As a side effect, the file might be linked from user cache.
-    Return None if the file can't be found locally.'''
+    Return None if the file can't be found locally."""
     path, exists = findstorepath(repo, hash)
     if exists:
         repo.ui.note(_(b'found %s in store\n') % hash)
@@ -191,10 +191,10 @@
 
 
 def openlfdirstate(ui, repo, create=True):
-    '''
+    """
     Return a dirstate object that tracks largefiles: i.e. its root is
     the repo root, but it is saved in .hg/largefiles/dirstate.
-    '''
+    """
     vfs = repo.vfs
     lfstoredir = longname
     opener = vfsmod.vfs(vfs.join(lfstoredir))
@@ -245,8 +245,8 @@
 
 
 def listlfiles(repo, rev=None, matcher=None):
-    '''return a list of largefiles in the working copy or the
-    specified changeset'''
+    """return a list of largefiles in the working copy or the
+    specified changeset"""
 
     if matcher is None:
         matcher = getstandinmatcher(repo)
@@ -265,18 +265,18 @@
 
 
 def storepath(repo, hash, forcelocal=False):
-    '''Return the correct location in the repository largefiles store for a
-    file with the given hash.'''
+    """Return the correct location in the repository largefiles store for a
+    file with the given hash."""
     if not forcelocal and repo.shared():
         return repo.vfs.reljoin(repo.sharedpath, longname, hash)
     return repo.vfs.join(longname, hash)
 
 
 def findstorepath(repo, hash):
-    '''Search through the local store path(s) to find the file for the given
+    """Search through the local store path(s) to find the file for the given
     hash.  If the file is not found, its path in the primary store is returned.
     The return value is a tuple of (path, exists(path)).
-    '''
+    """
     # For shared repos, the primary store is in the share source.  But for
     # backward compatibility, force a lookup in the local store if it wasn't
     # found in the share source.
@@ -291,11 +291,11 @@
 
 
 def copyfromcache(repo, hash, filename):
-    '''Copy the specified largefile from the repo or system cache to
+    """Copy the specified largefile from the repo or system cache to
     filename in the repository. Return true on success or false if the
     file was not found in either cache (which should not happen:
     this is meant to be called only after ensuring that the needed
-    largefile exists in the cache).'''
+    largefile exists in the cache)."""
     wvfs = repo.wvfs
     path = findfile(repo, hash)
     if path is None:
@@ -354,8 +354,8 @@
 
 
 def linktousercache(repo, hash):
-    '''Link / copy the largefile with the specified hash from the store
-    to the cache.'''
+    """Link / copy the largefile with the specified hash from the store
+    to the cache."""
     path = usercachepath(repo.ui, hash)
     link(storepath(repo, hash), path)
 
@@ -380,9 +380,9 @@
 
 
 def composestandinmatcher(repo, rmatcher):
-    '''Return a matcher that accepts standins corresponding to the
+    """Return a matcher that accepts standins corresponding to the
     files accepted by rmatcher. Pass the list of files in the matcher
-    as the paths specified by the user.'''
+    as the paths specified by the user."""
     smatcher = getstandinmatcher(repo, rmatcher)
     isstandin = smatcher.matchfn
 
@@ -395,8 +395,8 @@
 
 
 def standin(filename):
-    '''Return the repo-relative path to the standin for the specified big
-    file.'''
+    """Return the repo-relative path to the standin for the specified big
+    file."""
     # Notes:
     # 1) Some callers want an absolute path, but for instance addlargefiles
     #    needs it repo-relative so it can be passed to repo[None].add().  So
@@ -408,8 +408,8 @@
 
 
 def isstandin(filename):
-    '''Return true if filename is a big file standin. filename must be
-    in Mercurial's internal form (slash-separated).'''
+    """Return true if filename is a big file standin. filename must be
+    in Mercurial's internal form (slash-separated)."""
     return filename.startswith(shortnameslash)
 
 
@@ -439,9 +439,9 @@
 
 
 def readasstandin(fctx):
-    '''read hex hash from given filectx of standin file
+    """read hex hash from given filectx of standin file
 
-    This encapsulates how "standin" data is stored into storage layer.'''
+    This encapsulates how "standin" data is stored into storage layer."""
     return fctx.data().strip()
 
 
@@ -451,8 +451,8 @@
 
 
 def copyandhash(instream, outfile):
-    '''Read bytes from instream (iterable) and write them to outfile,
-    computing the SHA-1 hash of the data along the way. Return the hash.'''
+    """Read bytes from instream (iterable) and write them to outfile,
+    computing the SHA-1 hash of the data along the way. Return the hash."""
     hasher = hashutil.sha1(b'')
     for data in instream:
         hasher.update(data)
@@ -635,11 +635,11 @@
 
 
 def updatestandinsbymatch(repo, match):
-    '''Update standins in the working directory according to specified match
+    """Update standins in the working directory according to specified match
 
     This returns (possibly modified) ``match`` object to be used for
     subsequent commit process.
-    '''
+    """
 
     ui = repo.ui
 
@@ -741,7 +741,7 @@
 
 
 class automatedcommithook(object):
-    '''Stateful hook to update standins at the 1st commit of resuming
+    """Stateful hook to update standins at the 1st commit of resuming
 
     For efficiency, updating standins in the working directory should
     be avoided while automated committing (like rebase, transplant and
@@ -750,7 +750,7 @@
     But the 1st commit of resuming automated committing (e.g. ``rebase
     --continue``) should update them, because largefiles may be
     modified manually.
-    '''
+    """
 
     def __init__(self, resuming):
         self.resuming = resuming
@@ -764,14 +764,14 @@
 
 
 def getstatuswriter(ui, repo, forcibly=None):
-    '''Return the function to write largefiles specific status out
+    """Return the function to write largefiles specific status out
 
     If ``forcibly`` is ``None``, this returns the last element of
     ``repo._lfstatuswriters`` as "default" writer function.
 
     Otherwise, this returns the function to always write out (or
     ignore if ``not forcibly``) status.
-    '''
+    """
     if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
         return repo._lfstatuswriters[-1]
     else:
--- a/hgext/largefiles/localstore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/localstore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -20,9 +20,9 @@
 
 
 class localstore(basestore.basestore):
-    '''localstore first attempts to grab files out of the store in the remote
+    """localstore first attempts to grab files out of the store in the remote
     Mercurial repository.  Failing that, it attempts to grab the files from
-    the user cache.'''
+    the user cache."""
 
     def __init__(self, ui, repo, remote):
         self.remote = remote.local()
--- a/hgext/largefiles/overrides.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/overrides.py	Fri Nov 27 17:03:29 2020 -0500
@@ -58,8 +58,8 @@
 
 
 def composelargefilematcher(match, manifest):
-    '''create a matcher that matches only the largefiles in the original
-    matcher'''
+    """create a matcher that matches only the largefiles in the original
+    matcher"""
     m = copy.copy(match)
     lfile = lambda f: lfutil.standin(f) in manifest
     m._files = [lf for lf in m._files if lfile(lf)]
@@ -586,11 +586,17 @@
                 mresult.addfile(lfile, b'k', None, b'replaces standin')
                 if branchmerge:
                     mresult.addfile(
-                        standin, b'k', None, b'replaced by non-standin',
+                        standin,
+                        b'k',
+                        None,
+                        b'replaced by non-standin',
                     )
                 else:
                     mresult.addfile(
-                        standin, b'r', None, b'replaced by non-standin',
+                        standin,
+                        b'r',
+                        None,
+                        b'replaced by non-standin',
                     )
         elif lm in (b'g', b'dc') and sm != b'r':
             if lm == b'dc':
@@ -610,7 +616,10 @@
                 if branchmerge:
                     # largefile can be restored from standin safely
                     mresult.addfile(
-                        lfile, b'k', None, b'replaced by standin',
+                        lfile,
+                        b'k',
+                        None,
+                        b'replaced by standin',
                     )
                     mresult.addfile(standin, b'k', None, b'replaces standin')
                 else:
@@ -628,7 +637,10 @@
             else:  # pick remote normal file
                 mresult.addfile(lfile, b'g', largs, b'replaces standin')
                 mresult.addfile(
-                    standin, b'r', None, b'replaced by non-standin',
+                    standin,
+                    b'r',
+                    None,
+                    b'replaced by non-standin',
                 )
 
     return mresult
--- a/hgext/largefiles/proto.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/proto.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,8 +39,8 @@
 
 
 def putlfile(repo, proto, sha):
-    '''Server command for putting a largefile into a repository's local store
-    and into the user cache.'''
+    """Server command for putting a largefile into a repository's local store
+    and into the user cache."""
     with proto.mayberedirectstdio() as output:
         path = lfutil.storepath(repo, sha)
         util.makedirs(os.path.dirname(path))
@@ -69,8 +69,8 @@
 
 
 def getlfile(repo, proto, sha):
-    '''Server command for retrieving a largefile from the repository-local
-    cache or user cache.'''
+    """Server command for retrieving a largefile from the repository-local
+    cache or user cache."""
     filename = lfutil.findfile(repo, sha)
     if not filename:
         raise error.Abort(
@@ -93,12 +93,12 @@
 
 
 def statlfile(repo, proto, sha):
-    '''Server command for checking if a largefile is present - returns '2\n' if
+    """Server command for checking if a largefile is present - returns '2\n' if
     the largefile is missing, '0\n' if it seems to be in good condition.
 
     The value 1 is reserved for mismatched checksum, but that is too expensive
     to be verified on every stat and must be caught by running 'hg verify'
-    server side.'''
+    server side."""
     filename = lfutil.findfile(repo, sha)
     if not filename:
         return wireprototypes.bytesresponse(b'2\n')
@@ -194,8 +194,8 @@
 
 
 def heads(orig, repo, proto):
-    '''Wrap server command - largefile capable clients will know to call
-    lheads instead'''
+    """Wrap server command - largefile capable clients will know to call
+    lheads instead"""
     if lfutil.islfilesrepo(repo):
         return wireprototypes.ooberror(LARGEFILES_REQUIRED_MSG)
 
--- a/hgext/largefiles/remotestore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/remotestore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -146,8 +146,8 @@
         raise NotImplementedError(b'abstract method')
 
     def _stat(self, hashes):
-        '''Get information about availability of files specified by
+        """Get information about availability of files specified by
         hashes in the remote store. Return dictionary mapping hashes
         to return code where 0 means that file is available, other
-        values if not.'''
+        values if not."""
         raise NotImplementedError(b'abstract method')
--- a/hgext/largefiles/reposetup.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/reposetup.py	Fri Nov 27 17:03:29 2020 -0500
@@ -360,7 +360,7 @@
         # TODO: _subdirlfs should be moved into "lfutil.py", because
         # it is referred only from "lfutil.updatestandinsbymatch"
         def _subdirlfs(self, files, lfiles):
-            '''
+            """
             Adjust matched file list
             If we pass a directory to commit whose only committable files
             are largefiles, the core commit code aborts before finding
@@ -370,7 +370,7 @@
             we explicitly add the largefiles to the match list and remove
             the directory.
             In other cases, we leave the match list unmodified.
-            '''
+            """
             actualfiles = []
             dirs = []
             regulars = []
--- a/hgext/largefiles/wirestore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/largefiles/wirestore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -30,13 +30,23 @@
         return self.remote.getlfile(hash)
 
     def _stat(self, hashes):
-        '''For each hash, return 0 if it is available, other values if not.
+        """For each hash, return 0 if it is available, other values if not.
         It is usually 2 if the largefile is missing, but might be 1 if the server
-        has a corrupted copy.'''
+        has a corrupted copy."""
 
         with self.remote.commandexecutor() as e:
             fs = []
             for hash in hashes:
-                fs.append((hash, e.callcommand(b'statlfile', {b'sha': hash,})))
+                fs.append(
+                    (
+                        hash,
+                        e.callcommand(
+                            b'statlfile',
+                            {
+                                b'sha': hash,
+                            },
+                        ),
+                    )
+                )
 
             return {hash: f.result() for hash, f in fs}
--- a/hgext/lfs/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/lfs/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -172,33 +172,51 @@
 templatekeyword = eh.templatekeyword
 
 eh.configitem(
-    b'experimental', b'lfs.serve', default=True,
+    b'experimental',
+    b'lfs.serve',
+    default=True,
 )
 eh.configitem(
-    b'experimental', b'lfs.user-agent', default=None,
+    b'experimental',
+    b'lfs.user-agent',
+    default=None,
 )
 eh.configitem(
-    b'experimental', b'lfs.disableusercache', default=False,
+    b'experimental',
+    b'lfs.disableusercache',
+    default=False,
 )
 eh.configitem(
-    b'experimental', b'lfs.worker-enable', default=True,
+    b'experimental',
+    b'lfs.worker-enable',
+    default=True,
 )
 
 eh.configitem(
-    b'lfs', b'url', default=None,
+    b'lfs',
+    b'url',
+    default=None,
 )
 eh.configitem(
-    b'lfs', b'usercache', default=None,
+    b'lfs',
+    b'usercache',
+    default=None,
 )
 # Deprecated
 eh.configitem(
-    b'lfs', b'threshold', default=None,
+    b'lfs',
+    b'threshold',
+    default=None,
 )
 eh.configitem(
-    b'lfs', b'track', default=b'none()',
+    b'lfs',
+    b'track',
+    default=b'none()',
 )
 eh.configitem(
-    b'lfs', b'retry', default=5,
+    b'lfs',
+    b'retry',
+    default=5,
 )
 
 lfsprocessor = (
--- a/hgext/lfs/blobstore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/lfs/blobstore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -96,8 +96,7 @@
 
 
 class lfsuploadfile(httpconnectionmod.httpsendfile):
-    """a file-like object that supports keepalive.
-    """
+    """a file-like object that supports keepalive."""
 
     def __init__(self, ui, filename):
         super(lfsuploadfile, self).__init__(ui, filename, b'rb')
@@ -258,9 +257,9 @@
 
 
 def _urlerrorreason(urlerror):
-    '''Create a friendly message for the given URLError to be used in an
+    """Create a friendly message for the given URLError to be used in an
     LfsRemoteError message.
-    '''
+    """
     inst = urlerror
 
     if isinstance(urlerror.reason, Exception):
@@ -338,7 +337,10 @@
         ]
         requestdata = pycompat.bytesurl(
             json.dumps(
-                {'objects': objects, 'operation': pycompat.strurl(action),}
+                {
+                    'objects': objects,
+                    'operation': pycompat.strurl(action),
+                }
             )
         )
         url = b'%s/objects/batch' % self.baseurl
--- a/hgext/lfs/wrapper.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/lfs/wrapper.py	Fri Nov 27 17:03:29 2020 -0500
@@ -381,10 +381,10 @@
 
 
 def uploadblobsfromrevs(repo, revs):
-    '''upload lfs blobs introduced by revs
+    """upload lfs blobs introduced by revs
 
     Note: also used by other extensions e. g. infinitepush. avoid renaming.
-    '''
+    """
     if _canskipupload(repo):
         return
     pointers = extractpointers(repo, revs)
--- a/hgext/mq.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/mq.py	Fri Nov 27 17:03:29 2020 -0500
@@ -125,16 +125,24 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'mq', b'git', default=b'auto',
+    b'mq',
+    b'git',
+    default=b'auto',
 )
 configitem(
-    b'mq', b'keepchanges', default=False,
+    b'mq',
+    b'keepchanges',
+    default=False,
 )
 configitem(
-    b'mq', b'plain', default=False,
+    b'mq',
+    b'plain',
+    default=False,
 )
 configitem(
-    b'mq', b'secret', default=False,
+    b'mq',
+    b'secret',
+    default=False,
 )
 
 # force load strip extension formerly included in mq and import some utility
@@ -156,8 +164,8 @@
 
 
 def checksubstate(repo, baserev=None):
-    '''return list of subrepos at a different revision than substate.
-    Abort if any subrepos have uncommitted changes.'''
+    """return list of subrepos at a different revision than substate.
+    Abort if any subrepos have uncommitted changes."""
     inclsubs = []
     wctx = repo[None]
     if baserev:
@@ -449,9 +457,9 @@
     __str__ = encoding.strmethod(__bytes__)
 
     def _delmsg(self):
-        '''Remove existing message, keeping the rest of the comments fields.
+        """Remove existing message, keeping the rest of the comments fields.
         If comments contains 'subject: ', message will prepend
-        the field and a blank line.'''
+        the field and a blank line."""
         if self.message:
             subj = b'subject: ' + self.message[0].lower()
             for i in pycompat.xrange(len(self.comments)):
@@ -949,8 +957,8 @@
         return (0, head)
 
     def patch(self, repo, patchfile):
-        '''Apply patchfile  to the working directory.
-        patchfile: name of patch file'''
+        """Apply patchfile  to the working directory.
+        patchfile: name of patch file"""
         files = set()
         try:
             fuzz = patchmod.patch(
@@ -1363,7 +1371,7 @@
 
     def new(self, repo, patchfn, *pats, **opts):
         """options:
-           msg: a string or a no-argument function returning a string
+        msg: a string or a no-argument function returning a string
         """
         opts = pycompat.byteskwargs(opts)
         msg = opts.get(b'msg')
@@ -1718,7 +1726,10 @@
             except:  # re-raises
                 self.ui.warn(_(b'cleaning up working directory...\n'))
                 cmdutil.revert(
-                    self.ui, repo, repo[b'.'], no_backup=True,
+                    self.ui,
+                    repo,
+                    repo[b'.'],
+                    no_backup=True,
                 )
                 # only remove unknown files that we know we touched or
                 # created while patching
@@ -2823,7 +2834,7 @@
     norepo=True,
 )
 def clone(ui, source, dest=None, **opts):
-    '''clone main and patch repository at same time
+    """clone main and patch repository at same time
 
     If source is local, destination will have no patches applied. If
     source is remote, this command can not check if patches are
@@ -2838,7 +2849,7 @@
     would be created by :hg:`init --mq`.
 
     Return 0 on success.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     def patchdir(repo):
@@ -2937,7 +2948,10 @@
 
 @command(
     b"qseries",
-    [(b'm', b'missing', None, _(b'print patches not in series')),] + seriesopts,
+    [
+        (b'm', b'missing', None, _(b'print patches not in series')),
+    ]
+    + seriesopts,
     _(b'hg qseries [-ms]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
@@ -3282,9 +3296,9 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def goto(ui, repo, patch, **opts):
-    '''push or pop patches until named patch is at top of stack
-
-    Returns 0 on success.'''
+    """push or pop patches until named patch is at top of stack
+
+    Returns 0 on success."""
     opts = pycompat.byteskwargs(opts)
     opts = fixkeepchangesopts(ui, opts)
     q = repo.mq
@@ -3321,7 +3335,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def guard(ui, repo, *args, **opts):
-    '''set or print guards for a patch
+    """set or print guards for a patch
 
     Guards control whether a patch can be pushed. A patch with no
     guards is always pushed. A patch with a positive guard ("+foo") is
@@ -3341,7 +3355,7 @@
       hg qguard other.patch -- +2.6.17 -stable
 
     Returns 0 on success.
-    '''
+    """
 
     def status(idx):
         guards = q.seriesguards[idx] or [b'unguarded']
@@ -3712,7 +3726,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def select(ui, repo, *args, **opts):
-    '''set or print guarded patches to push
+    """set or print guarded patches to push
 
     Use the :hg:`qguard` command to set or print guards on patch, then use
     qselect to tell mq which guards to use. A patch will be pushed if
@@ -3744,7 +3758,7 @@
     Use -s/--series to print a list of all guards in the series file
     (no other arguments needed). Use -v for more information.
 
-    Returns 0 on success.'''
+    Returns 0 on success."""
 
     q = repo.mq
     opts = pycompat.byteskwargs(opts)
@@ -3888,7 +3902,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def qqueue(ui, repo, name=None, **opts):
-    '''manage multiple patch queues
+    """manage multiple patch queues
 
     Supports switching between different patch queues, as well as creating
     new patch queues and deleting existing ones.
@@ -3907,7 +3921,7 @@
     active queue.
 
     Returns 0 on success.
-    '''
+    """
     q = repo.mq
     _defaultqueue = b'patches'
     _allqueues = b'patches.queues'
@@ -4250,8 +4264,7 @@
 
 @revsetpredicate(b'mq()')
 def revsetmq(repo, subset, x):
-    """Changesets managed by MQ.
-    """
+    """Changesets managed by MQ."""
     revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
     applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])
--- a/hgext/narrow/narrowbundle2.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/narrow/narrowbundle2.py	Fri Nov 27 17:03:29 2020 -0500
@@ -78,7 +78,14 @@
 
 
 def generateellipsesbundle2(
-    bundler, repo, include, exclude, version, common, heads, depth,
+    bundler,
+    repo,
+    include,
+    exclude,
+    version,
+    common,
+    heads,
+    depth,
 ):
     match = narrowspec.match(repo.root, include=include, exclude=exclude)
     if depth is not None:
@@ -113,7 +120,13 @@
 
 
 def generate_ellipses_bundle2_for_widening(
-    bundler, repo, oldmatch, newmatch, version, common, known,
+    bundler,
+    repo,
+    oldmatch,
+    newmatch,
+    version,
+    common,
+    known,
 ):
     common = set(common or [nullid])
     # Steps:
--- a/hgext/narrow/narrowwirepeer.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/narrow/narrowwirepeer.py	Fri Nov 27 17:03:29 2020 -0500
@@ -120,7 +120,13 @@
             )
         else:
             narrowbundle2.generate_ellipses_bundle2_for_widening(
-                bundler, repo, oldmatch, newmatch, cgversion, common, known,
+                bundler,
+                repo,
+                oldmatch,
+                newmatch,
+                cgversion,
+                common,
+                known,
             )
     except error.Abort as exc:
         bundler = bundle2.bundle20(repo.ui)
--- a/hgext/notify.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/notify.py	Fri Nov 27 17:03:29 2020 -0500
@@ -190,64 +190,104 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify', b'changegroup', default=None,
+    b'notify',
+    b'changegroup',
+    default=None,
 )
 configitem(
-    b'notify', b'config', default=None,
+    b'notify',
+    b'config',
+    default=None,
 )
 configitem(
-    b'notify', b'diffstat', default=True,
+    b'notify',
+    b'diffstat',
+    default=True,
 )
 configitem(
-    b'notify', b'domain', default=None,
+    b'notify',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify', b'messageidseed', default=None,
+    b'notify',
+    b'messageidseed',
+    default=None,
 )
 configitem(
-    b'notify', b'fromauthor', default=None,
+    b'notify',
+    b'fromauthor',
+    default=None,
 )
 configitem(
-    b'notify', b'incoming', default=None,
+    b'notify',
+    b'incoming',
+    default=None,
 )
 configitem(
-    b'notify', b'maxdiff', default=300,
+    b'notify',
+    b'maxdiff',
+    default=300,
 )
 configitem(
-    b'notify', b'maxdiffstat', default=-1,
+    b'notify',
+    b'maxdiffstat',
+    default=-1,
 )
 configitem(
-    b'notify', b'maxsubject', default=67,
+    b'notify',
+    b'maxsubject',
+    default=67,
 )
 configitem(
-    b'notify', b'mbox', default=None,
+    b'notify',
+    b'mbox',
+    default=None,
 )
 configitem(
-    b'notify', b'merge', default=True,
+    b'notify',
+    b'merge',
+    default=True,
 )
 configitem(
-    b'notify', b'outgoing', default=None,
+    b'notify',
+    b'outgoing',
+    default=None,
 )
 configitem(
-    b'notify', b'reply-to-predecessor', default=False,
+    b'notify',
+    b'reply-to-predecessor',
+    default=False,
 )
 configitem(
-    b'notify', b'sources', default=b'serve',
+    b'notify',
+    b'sources',
+    default=b'serve',
 )
 configitem(
-    b'notify', b'showfunc', default=None,
+    b'notify',
+    b'showfunc',
+    default=None,
 )
 configitem(
-    b'notify', b'strip', default=0,
+    b'notify',
+    b'strip',
+    default=0,
 )
 configitem(
-    b'notify', b'style', default=None,
+    b'notify',
+    b'style',
+    default=None,
 )
 configitem(
-    b'notify', b'template', default=None,
+    b'notify',
+    b'template',
+    default=None,
 )
 configitem(
-    b'notify', b'test', default=True,
+    b'notify',
+    b'test',
+    default=True,
 )
 
 # template for single changeset can include email headers.
@@ -539,10 +579,10 @@
 
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
-    '''send email notifications to interested subscribers.
+    """send email notifications to interested subscribers.
 
     if used as changegroup hook, send one email for all changesets in
-    changegroup. else send one email per changeset.'''
+    changegroup. else send one email per changeset."""
 
     n = notifier(ui, repo, hooktype)
     ctx = repo.unfiltered()[node]
--- a/hgext/pager.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/pager.py	Fri Nov 27 17:03:29 2020 -0500
@@ -41,7 +41,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'pager', b'attend', default=lambda: attended,
+    b'pager',
+    b'attend',
+    default=lambda: attended,
 )
 
 
--- a/hgext/patchbomb.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/patchbomb.py	Fri Nov 27 17:03:29 2020 -0500
@@ -110,34 +110,54 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'patchbomb', b'bundletype', default=None,
+    b'patchbomb',
+    b'bundletype',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'bcc', default=None,
+    b'patchbomb',
+    b'bcc',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'cc', default=None,
+    b'patchbomb',
+    b'cc',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'confirm', default=False,
+    b'patchbomb',
+    b'confirm',
+    default=False,
 )
 configitem(
-    b'patchbomb', b'flagtemplate', default=None,
+    b'patchbomb',
+    b'flagtemplate',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'from', default=None,
+    b'patchbomb',
+    b'from',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'intro', default=b'auto',
+    b'patchbomb',
+    b'intro',
+    default=b'auto',
 )
 configitem(
-    b'patchbomb', b'publicurl', default=None,
+    b'patchbomb',
+    b'publicurl',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'reply-to', default=None,
+    b'patchbomb',
+    b'reply-to',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'to', default=None,
+    b'patchbomb',
+    b'to',
+    default=None,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -148,8 +168,7 @@
 
 
 def _addpullheader(seq, ctx):
-    """Add a header pointing to a public URL where the changeset is available
-    """
+    """Add a header pointing to a public URL where the changeset is available"""
     repo = ctx.repo()
     # experimental config: patchbomb.publicurl
     # waiting for some logic that check that the changeset are available on the
@@ -656,7 +675,7 @@
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def email(ui, repo, *revs, **opts):
-    '''send changesets by email
+    """send changesets by email
 
     By default, diffs are sent in the format generated by
     :hg:`export`, one per message. The series starts with a "[PATCH 0
@@ -739,7 +758,7 @@
 
     Before using this command, you will need to enable email in your
     hgrc. See the [email] section in hgrc(5) for details.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     _charsets = mail._charsets(ui)
--- a/hgext/phabricator.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/phabricator.py	Fri Nov 27 17:03:29 2020 -0500
@@ -108,33 +108,51 @@
 
 # developer config: phabricator.batchsize
 eh.configitem(
-    b'phabricator', b'batchsize', default=12,
+    b'phabricator',
+    b'batchsize',
+    default=12,
 )
 eh.configitem(
-    b'phabricator', b'callsign', default=None,
+    b'phabricator',
+    b'callsign',
+    default=None,
 )
 eh.configitem(
-    b'phabricator', b'curlcmd', default=None,
+    b'phabricator',
+    b'curlcmd',
+    default=None,
 )
 # developer config: phabricator.debug
 eh.configitem(
-    b'phabricator', b'debug', default=False,
+    b'phabricator',
+    b'debug',
+    default=False,
 )
 # developer config: phabricator.repophid
 eh.configitem(
-    b'phabricator', b'repophid', default=None,
+    b'phabricator',
+    b'repophid',
+    default=None,
 )
 eh.configitem(
-    b'phabricator', b'url', default=None,
+    b'phabricator',
+    b'url',
+    default=None,
 )
 eh.configitem(
-    b'phabsend', b'confirm', default=False,
+    b'phabsend',
+    b'confirm',
+    default=False,
 )
 eh.configitem(
-    b'phabimport', b'secret', default=False,
+    b'phabimport',
+    b'secret',
+    default=False,
 )
 eh.configitem(
-    b'phabimport', b'obsolete', default=False,
+    b'phabimport',
+    b'obsolete',
+    default=False,
 )
 
 colortable = {
@@ -166,8 +184,7 @@
 
 @eh.wrapfunction(localrepo, "loadhgrc")
 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
-    """Load ``.arcconfig`` content into a ui instance on repository open.
-    """
+    """Load ``.arcconfig`` content into a ui instance on repository open."""
     result = False
     arcconfig = {}
 
@@ -633,8 +650,7 @@
 
 @attr.s
 class phabhunk(dict):
-    """Represents a Differential hunk, which is owned by a Differential change
-    """
+    """Represents a Differential hunk, which is owned by a Differential change"""
 
     oldOffset = attr.ib(default=0)  # camelcase-required
     oldLength = attr.ib(default=0)  # camelcase-required
@@ -1512,7 +1528,9 @@
                         mapping.get(old.p2().node(), (old.p2(),))[0],
                     ]
                     newdesc = rewriteutil.update_hash_refs(
-                        repo, newdesc, mapping,
+                        repo,
+                        newdesc,
+                        mapping,
                     )
                     new = context.metadataonlyctx(
                         repo,
@@ -2227,7 +2245,10 @@
     m = _differentialrevisiondescre.search(ctx.description())
     if m:
         return templateutil.hybriddict(
-            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
+            {
+                b'url': m.group('url'),
+                b'id': b"D%s" % m.group('id'),
+            }
         )
     else:
         tags = ctx.repo().nodetags(ctx.node())
@@ -2238,14 +2259,18 @@
                     url += b'/'
                 url += t
 
-                return templateutil.hybriddict({b'url': url, b'id': t,})
+                return templateutil.hybriddict(
+                    {
+                        b'url': url,
+                        b'id': t,
+                    }
+                )
     return None
 
 
 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
 def template_status(context, mapping):
-    """:phabstatus: String. Status of Phabricator differential.
-    """
+    """:phabstatus: String. Status of Phabricator differential."""
     ctx = context.resource(mapping, b'ctx')
     repo = context.resource(mapping, b'repo')
     ui = context.resource(mapping, b'ui')
@@ -2259,7 +2284,10 @@
     for drev in drevs:
         if int(drev[b'id']) == drevid:
             return templateutil.hybriddict(
-                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
+                {
+                    b'url': drev[b'uri'],
+                    b'status': drev[b'statusName'],
+                }
             )
     return None
 
--- a/hgext/purge.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/purge.py	Fri Nov 27 17:03:29 2020 -0500
@@ -67,7 +67,7 @@
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def purge(ui, repo, *dirs, **opts):
-    '''removes files not tracked by Mercurial
+    """removes files not tracked by Mercurial
 
     Delete files not known to Mercurial. This is useful to test local
     and uncommitted changes in an otherwise-clean source tree.
@@ -95,7 +95,7 @@
     you forgot to add to the repository. If you only want to print the
     list of files that this program would delete, use the --print
     option.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
 
--- a/hgext/rebase.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/rebase.py	Fri Nov 27 17:03:29 2020 -0500
@@ -507,10 +507,10 @@
         ui.note(_(b'rebase merging completed\n'))
 
     def _concludenode(self, rev, editor, commitmsg=None):
-        '''Commit the wd changes with parents p1 and p2.
+        """Commit the wd changes with parents p1 and p2.
 
         Reuse commit info from rev but also store useful information in extra.
-        Return node of committed revision.'''
+        Return node of committed revision."""
         repo = self.repo
         ctx = repo[rev]
         if commitmsg is None:
@@ -1135,7 +1135,11 @@
             overrides = {(b'rebase', b'singletransaction'): True}
             with ui.configoverride(overrides, b'rebase'):
                 _origrebase(
-                    ui, repo, action, opts, rbsrt,
+                    ui,
+                    repo,
+                    action,
+                    opts,
+                    rbsrt,
                 )
         except error.ConflictResolutionRequired:
             ui.status(_(b'hit a merge conflict\n'))
@@ -1447,8 +1451,8 @@
 
 
 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
-    '''Commit the memory changes with parents p1 and p2.
-    Return node of committed revision.'''
+    """Commit the memory changes with parents p1 and p2.
+    Return node of committed revision."""
     # By convention, ``extra['branch']`` (set by extrafn) clobbers
     # ``branch`` (used when passing ``--keepbranches``).
     branch = None
@@ -1475,8 +1479,8 @@
 
 
 def commitnode(repo, editor, extra, user, date, commitmsg):
-    '''Commit the wd changes with parents p1 and p2.
-    Return node of committed revision.'''
+    """Commit the wd changes with parents p1 and p2.
+    Return node of committed revision."""
     dsguard = util.nullcontextmanager()
     if not repo.ui.configbool(b'rebase', b'singletransaction'):
         dsguard = dirstateguard.dirstateguard(repo, b'rebase')
@@ -1965,11 +1969,11 @@
 
 
 def buildstate(repo, destmap, collapse):
-    '''Define which revisions are going to be rebased and where
+    """Define which revisions are going to be rebased and where
 
     repo: repo
     destmap: {srcrev: destrev}
-    '''
+    """
     rebaseset = destmap.keys()
     originalwd = repo[b'.'].rev()
 
--- a/hgext/record.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/record.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,7 +39,7 @@
     helpcategory=command.CATEGORY_COMMITTING,
 )
 def record(ui, repo, *pats, **opts):
-    '''interactively select changes to commit
+    """interactively select changes to commit
 
     If a list of files is omitted, all changes reported by :hg:`status`
     will be candidates for recording.
@@ -65,7 +65,7 @@
 
       ? - display help
 
-    This command is not available when committing a merge.'''
+    This command is not available when committing a merge."""
 
     if not ui.interactive():
         raise error.Abort(
@@ -106,11 +106,11 @@
     inferrepo=True,
 )
 def qrecord(ui, repo, patch, *pats, **opts):
-    '''interactively record a new patch
+    """interactively record a new patch
 
     See :hg:`help qnew` & :hg:`help record` for more information and
     usage.
-    '''
+    """
     return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
 
 
--- a/hgext/remotefilelog/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -758,10 +758,10 @@
     rcache = {}
 
     def getrenamed(fn, rev):
-        '''looks up all renames for a file (up to endrev) the first
+        """looks up all renames for a file (up to endrev) the first
         time the file is given. It indexes on the changerev and only
         parses the manifest if linkrev != changerev.
-        Returns rename info for fn at changerev rev.'''
+        Returns rename info for fn at changerev rev."""
         if rev in rcache.setdefault(fn, {}):
             return rcache[fn][rev]
 
@@ -822,8 +822,7 @@
 
 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
-    '''garbage collect the client and server filelog caches
-    '''
+    """garbage collect the client and server filelog caches"""
     cachepaths = set()
 
     # get the system client cache
@@ -1105,7 +1104,9 @@
 
 @command(
     b'debugremotefilelog',
-    [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
+    [
+        (b'd', b'decompress', None, _(b'decompress the filelog first')),
+    ],
     _(b'hg debugremotefilelog <path>'),
     norepo=True,
 )
@@ -1115,7 +1116,9 @@
 
 @command(
     b'verifyremotefilelog',
-    [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
+    [
+        (b'd', b'decompress', None, _(b'decompress the filelogs first')),
+    ],
     _(b'hg verifyremotefilelogs <directory>'),
     norepo=True,
 )
--- a/hgext/remotefilelog/basestore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/basestore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -103,7 +103,7 @@
     def _cleanupdirectory(self, rootdir):
         """Removes the empty directories and unnecessary files within the root
         directory recursively. Note that this method does not remove the root
-        directory itself. """
+        directory itself."""
 
         oldfiles = set()
         otherfiles = set()
--- a/hgext/remotefilelog/contentstore.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/contentstore.py	Fri Nov 27 17:03:29 2020 -0500
@@ -17,8 +17,7 @@
 
 
 class ChainIndicies(object):
-    """A static class for easy reference to the delta chain indicies.
-    """
+    """A static class for easy reference to the delta chain indicies."""
 
     # The filename of this revision delta
     NAME = 0
@@ -73,8 +72,7 @@
 
     @basestore.baseunionstore.retriable
     def getdelta(self, name, node):
-        """Return the single delta entry for the given name/node pair.
-        """
+        """Return the single delta entry for the given name/node pair."""
         for store in self.stores:
             try:
                 return store.getdelta(name, node)
--- a/hgext/remotefilelog/fileserverclient.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/fileserverclient.py	Fri Nov 27 17:03:29 2020 -0500
@@ -302,8 +302,7 @@
 
 
 class fileserverclient(object):
-    """A client for requesting files from the remote file server.
-    """
+    """A client for requesting files from the remote file server."""
 
     def __init__(self, repo):
         ui = repo.ui
@@ -568,8 +567,7 @@
     def prefetch(
         self, fileids, force=False, fetchdata=True, fetchhistory=False
     ):
-        """downloads the given file versions to the cache
-        """
+        """downloads the given file versions to the cache"""
         repo = self.repo
         idstocheck = []
         for file, id in fileids:
--- a/hgext/remotefilelog/remotefilectx.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/remotefilectx.py	Fri Nov 27 17:03:29 2020 -0500
@@ -63,8 +63,8 @@
             return self.linkrev()
 
     def filectx(self, fileid, changeid=None):
-        '''opens an arbitrary revision of the file without
-        opening a new filelog'''
+        """opens an arbitrary revision of the file without
+        opening a new filelog"""
         return remotefilectx(
             self._repo,
             self._path,
--- a/hgext/remotefilelog/remotefilelogserver.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/remotefilelogserver.py	Fri Nov 27 17:03:29 2020 -0500
@@ -40,8 +40,7 @@
 
 
 def setupserver(ui, repo):
-    """Sets up a normal Mercurial repo so it can serve files to shallow repos.
-    """
+    """Sets up a normal Mercurial repo so it can serve files to shallow repos."""
     onetimesetup(ui)
 
     # don't send files to shallow clients during pulls
@@ -79,8 +78,7 @@
 
 
 def onetimesetup(ui):
-    """Configures the wireprotocol for both clients and servers.
-    """
+    """Configures the wireprotocol for both clients and servers."""
     global onetime
     if onetime:
         return
@@ -281,8 +279,7 @@
 
 
 def getflogheads(repo, proto, path):
-    """A server api for requesting a filelog's heads
-    """
+    """A server api for requesting a filelog's heads"""
     flog = repo.file(path)
     heads = flog.heads()
     return b'\n'.join((hex(head) for head in heads if head != nullid))
@@ -309,8 +306,7 @@
 
 
 def getfiles(repo, proto):
-    """A server api for requesting particular versions of particular files.
-    """
+    """A server api for requesting particular versions of particular files."""
     if shallowutil.isenabled(repo):
         raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
     if not isinstance(proto, _sshv1server):
--- a/hgext/remotefilelog/repack.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/repack.py	Fri Nov 27 17:03:29 2020 -0500
@@ -54,8 +54,7 @@
 
 
 def fullrepack(repo, options=None):
-    """If ``packsonly`` is True, stores creating only loose objects are skipped.
-    """
+    """If ``packsonly`` is True, stores creating only loose objects are skipped."""
     if util.safehasattr(repo, 'shareddatastores'):
         datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
@@ -874,8 +873,7 @@
 
 
 class repackentry(object):
-    """Simple class representing a single revision entry in the repackledger.
-    """
+    """Simple class representing a single revision entry in the repackledger."""
 
     __slots__ = (
         'filename',
--- a/hgext/remotefilelog/shallowrepo.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotefilelog/shallowrepo.py	Fri Nov 27 17:03:29 2020 -0500
@@ -161,11 +161,11 @@
             return path
 
         def maybesparsematch(self, *revs, **kwargs):
-            '''
+            """
             A wrapper that allows the remotefilelog to invoke sparsematch() if
             this is a sparse repository, or returns None if this is not a
             sparse repository.
-            '''
+            """
             if revs:
                 ret = sparse.matcher(repo, revs=revs)
             else:
@@ -217,8 +217,7 @@
         def backgroundprefetch(
             self, revs, base=None, repack=False, pats=None, opts=None
         ):
-            """Runs prefetch in background with optional repack
-            """
+            """Runs prefetch in background with optional repack"""
             cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
             if repack:
                 cmd.append(b'--repack')
--- a/hgext/remotenames.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/remotenames.py	Fri Nov 27 17:03:29 2020 -0500
@@ -66,13 +66,19 @@
 revsetpredicate = registrar.revsetpredicate()
 
 configitem(
-    b'remotenames', b'bookmarks', default=True,
+    b'remotenames',
+    b'bookmarks',
+    default=True,
 )
 configitem(
-    b'remotenames', b'branches', default=True,
+    b'remotenames',
+    b'branches',
+    default=True,
 )
 configitem(
-    b'remotenames', b'hoistedpeer', default=b'default',
+    b'remotenames',
+    b'hoistedpeer',
+    default=b'default',
 )
 
 
--- a/hgext/schemes.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/schemes.py	Fri Nov 27 17:03:29 2020 -0500
@@ -142,8 +142,7 @@
 
 @command(b'debugexpandscheme', norepo=True)
 def expandscheme(ui, url, **opts):
-    """given a repo path, provide the scheme-expanded path
-    """
+    """given a repo path, provide the scheme-expanded path"""
     repo = hg._peerlookup(url)
     if isinstance(repo, ShortRepository):
         url = repo.resolve(url)
--- a/hgext/share.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/share.py	Fri Nov 27 17:03:29 2020 -0500
@@ -75,7 +75,12 @@
     [
         (b'U', b'noupdate', None, _(b'do not create a working directory')),
         (b'B', b'bookmarks', None, _(b'also share bookmarks')),
-        (b'', b'relative', None, _(b'point to source using a relative path'),),
+        (
+            b'',
+            b'relative',
+            None,
+            _(b'point to source using a relative path'),
+        ),
     ],
     _(b'[-U] [-B] SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
--- a/hgext/transplant.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/transplant.py	Fri Nov 27 17:03:29 2020 -0500
@@ -62,10 +62,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'transplant', b'filter', default=None,
+    b'transplant',
+    b'filter',
+    default=None,
 )
 configitem(
-    b'transplant', b'log', default=None,
+    b'transplant',
+    b'log',
+    default=None,
 )
 
 
@@ -140,8 +144,8 @@
         self.getcommiteditor = getcommiteditor
 
     def applied(self, repo, node, parent):
-        '''returns True if a node is already an ancestor of parent
-        or is parent or has already been transplanted'''
+        """returns True if a node is already an ancestor of parent
+        or is parent or has already been transplanted"""
         if hasnode(repo, parent):
             parentrev = repo.changelog.rev(parent)
         if hasnode(repo, node):
@@ -682,7 +686,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def transplant(ui, repo, *revs, **opts):
-    '''transplant changesets from another branch
+    """transplant changesets from another branch
 
     Selected changesets will be applied on top of the current working
     directory with the log of the original changeset. The changesets
@@ -731,7 +735,7 @@
     If a changeset application fails, you can fix the merge by hand
     and then resume where you left off by calling :hg:`transplant
     --continue/-c`.
-    '''
+    """
     with repo.wlock():
         return _dotransplant(ui, repo, *revs, **opts)
 
@@ -743,9 +747,9 @@
                 yield node
 
     def transplantwalk(repo, dest, heads, match=util.always):
-        '''Yield all nodes that are ancestors of a head but not ancestors
+        """Yield all nodes that are ancestors of a head but not ancestors
         of dest.
-        If no heads are specified, the heads of repo will be used.'''
+        If no heads are specified, the heads of repo will be used."""
         if not heads:
             heads = repo.heads()
         ancestors = []
@@ -886,8 +890,7 @@
 
 @revsetpredicate(b'transplanted([set])')
 def revsettransplanted(repo, subset, x):
-    """Transplanted changesets in set, or all transplanted changesets.
-    """
+    """Transplanted changesets in set, or all transplanted changesets."""
     if x:
         s = revset.getset(repo, subset, x)
     else:
--- a/hgext/uncommit.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/uncommit.py	Fri Nov 27 17:03:29 2020 -0500
@@ -43,10 +43,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'experimental', b'uncommitondirtywdir', default=False,
+    b'experimental',
+    b'uncommitondirtywdir',
+    default=False,
 )
 configitem(
-    b'experimental', b'uncommit.keep', default=False,
+    b'experimental',
+    b'uncommit.keep',
+    default=False,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
--- a/hgext/win32mbcs.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/win32mbcs.py	Fri Nov 27 17:03:29 2020 -0500
@@ -70,7 +70,9 @@
 # Encoding.encoding may be updated by --encoding option.
 # Use a lambda do delay the resolution.
 configitem(
-    b'win32mbcs', b'encoding', default=lambda: encoding.encoding,
+    b'win32mbcs',
+    b'encoding',
+    default=lambda: encoding.encoding,
 )
 
 _encoding = None  # see extsetup
--- a/hgext/win32text.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/hgext/win32text.py	Fri Nov 27 17:03:29 2020 -0500
@@ -62,7 +62,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'win32text', b'warn', default=True,
+    b'win32text',
+    b'warn',
+    default=True,
 )
 
 # regexp for single LF without CR preceding.
--- a/i18n/check-translation.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/i18n/check-translation.py	Fri Nov 27 17:03:29 2020 -0500
@@ -33,8 +33,7 @@
 
 
 def match(checker, pe):
-    """Examine whether POEntry "pe" is target of specified checker or not
-    """
+    """Examine whether POEntry "pe" is target of specified checker or not"""
     if not checker.match(pe.msgid):
         return
     # examine suppression by translator comment
--- a/mercurial/ancestor.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/ancestor.py	Fri Nov 27 17:03:29 2020 -0500
@@ -148,11 +148,11 @@
 
 
 class incrementalmissingancestors(object):
-    '''persistent state used to calculate missing ancestors incrementally
+    """persistent state used to calculate missing ancestors incrementally
 
     Although similar in spirit to lazyancestors below, this is a separate class
     because trying to support contains and missingancestors operations with the
-    same internal data structures adds needless complexity.'''
+    same internal data structures adds needless complexity."""
 
     def __init__(self, pfunc, bases):
         self.bases = set(bases)
@@ -198,12 +198,12 @@
                 break
 
     def missingancestors(self, revs):
-        '''return all the ancestors of revs that are not ancestors of self.bases
+        """return all the ancestors of revs that are not ancestors of self.bases
 
         This may include elements from revs.
 
         Equivalent to the revset (::revs - ::self.bases). Revs are returned in
-        revision number order, which is a topological order.'''
+        revision number order, which is a topological order."""
         revsvisit = set(revs)
         basesvisit = self.bases
         pfunc = self.pfunc
--- a/mercurial/archival.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/archival.py	Fri Nov 27 17:03:29 2020 -0500
@@ -37,8 +37,8 @@
 
 
 def tidyprefix(dest, kind, prefix):
-    '''choose prefix to use for names in archive.  make sure prefix is
-    safe for consumers.'''
+    """choose prefix to use for names in archive.  make sure prefix is
+    safe for consumers."""
 
     if prefix:
         prefix = util.normpath(prefix)
@@ -132,8 +132,8 @@
 
 
 class tarit(object):
-    '''write archive to tar file or stream.  can write uncompressed,
-    or compress with gzip or bzip2.'''
+    """write archive to tar file or stream.  can write uncompressed,
+    or compress with gzip or bzip2."""
 
     if pycompat.ispy3:
         GzipFileWithTime = gzip.GzipFile  # camelcase-required
@@ -185,8 +185,10 @@
                     mtime=mtime,
                 )
                 self.fileobj = gzfileobj
-                return tarfile.TarFile.taropen(  # pytype: disable=attribute-error
-                    name, pycompat.sysstr(mode), gzfileobj
+                return (
+                    tarfile.TarFile.taropen(  # pytype: disable=attribute-error
+                        name, pycompat.sysstr(mode), gzfileobj
+                    )
                 )
             else:
                 try:
@@ -224,8 +226,8 @@
 
 
 class zipit(object):
-    '''write archive to zip file or stream.  can write uncompressed,
-    or compressed with deflate.'''
+    """write archive to zip file or stream.  can write uncompressed,
+    or compressed with deflate."""
 
     def __init__(self, dest, mtime, compress=True):
         if isinstance(dest, bytes):
@@ -316,7 +318,7 @@
     mtime=None,
     subrepos=False,
 ):
-    '''create archive of repo as it was at node.
+    """create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
@@ -333,7 +335,7 @@
     mtime is the modified time, in seconds, or None to use the changeset time.
 
     subrepos tells whether to include subrepos.
-    '''
+    """
 
     if kind == b'txz' and not pycompat.ispy3:
         raise error.Abort(_(b'xz compression is only available in Python 3'))
--- a/mercurial/bookmarks.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/bookmarks.py	Fri Nov 27 17:03:29 2020 -0500
@@ -189,8 +189,7 @@
         return self._nodemap.get(node, [])
 
     def applychanges(self, repo, tr, changes):
-        """Apply a list of changes to bookmarks
-        """
+        """Apply a list of changes to bookmarks"""
         bmchanges = tr.changes.get(b'bookmarks')
         for name, node in changes:
             old = self._refmap.get(name)
@@ -422,8 +421,8 @@
 
 
 def calculateupdate(ui, repo):
-    '''Return a tuple (activemark, movemarkfrom) indicating the active bookmark
-    and where to move the active bookmark from, if needed.'''
+    """Return a tuple (activemark, movemarkfrom) indicating the active bookmark
+    and where to move the active bookmark from, if needed."""
     checkout, movemarkfrom = None, None
     activemark = repo._activebookmark
     if isactivewdirparent(repo):
@@ -509,7 +508,7 @@
 
 
 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
-    '''Compare bookmarks between srcmarks and dstmarks
+    """Compare bookmarks between srcmarks and dstmarks
 
     This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
     differ, invalid)", each are list of bookmarks below:
@@ -532,7 +531,7 @@
 
     If "targets" is specified, only bookmarks listed in it are
     examined.
-    '''
+    """
 
     if targets:
         bset = set(targets)
@@ -585,14 +584,14 @@
 
 
 def _diverge(ui, b, path, localmarks, remotenode):
-    '''Return appropriate diverged bookmark for specified ``path``
+    """Return appropriate diverged bookmark for specified ``path``
 
     This returns None, if it is failed to assign any divergent
     bookmark name.
 
     This reuses already existing one with "@number" suffix, if it
     refers ``remotenode``.
-    '''
+    """
     if b == b'@':
         b = b''
     # try to use an @pathalias suffix
@@ -762,13 +761,17 @@
 
 
 def incoming(ui, repo, peer):
-    '''Show bookmarks incoming from other to repo
-    '''
+    """Show bookmarks incoming from other to repo"""
     ui.status(_(b"searching for changed bookmarks\n"))
 
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
+            e.callcommand(
+                b'listkeys',
+                {
+                    b'namespace': b'bookmarks',
+                },
+            ).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -813,8 +816,7 @@
 
 
 def outgoing(ui, repo, other):
-    '''Show bookmarks outgoing from repo to other
-    '''
+    """Show bookmarks outgoing from repo to other"""
     ui.status(_(b"searching for changed bookmarks\n"))
 
     remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
@@ -863,13 +865,18 @@
 
 
 def summary(repo, peer):
-    '''Compare bookmarks between repo and other for "hg summary" output
+    """Compare bookmarks between repo and other for "hg summary" output
 
     This returns "(# of incoming, # of outgoing)" tuple.
-    '''
+    """
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
+            e.callcommand(
+                b'listkeys',
+                {
+                    b'namespace': b'bookmarks',
+                },
+            ).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
--- a/mercurial/branchmap.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/branchmap.py	Fri Nov 27 17:03:29 2020 -0500
@@ -41,7 +41,17 @@
     )
 
     assert any(
-        (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union,)
+        (
+            Any,
+            Callable,
+            Dict,
+            Iterable,
+            List,
+            Optional,
+            Set,
+            Tuple,
+            Union,
+        )
     )
 
 subsettable = repoviewutil.subsettable
@@ -139,8 +149,7 @@
 
 
 def _unknownnode(node):
-    """ raises ValueError when branchcache found a node which does not exists
-    """
+    """raises ValueError when branchcache found a node which does not exists"""
     raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
 
 
@@ -183,9 +192,9 @@
         hasnode=None,
     ):
         # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes,  int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
-        """ hasnode is a function which can be used to verify whether changelog
+        """hasnode is a function which can be used to verify whether changelog
         has a given node or not. If it's not provided, we assume that every node
-        we have exists in changelog """
+        we have exists in changelog"""
         self.tipnode = tipnode
         self.tiprev = tiprev
         self.filteredhash = filteredhash
@@ -304,7 +313,7 @@
         return bcache
 
     def load(self, repo, lineiter):
-        """ fully loads the branchcache by reading from the file using the line
+        """fully loads the branchcache by reading from the file using the line
         iterator passed"""
         for line in lineiter:
             line = line.rstrip(b'\n')
@@ -340,8 +349,8 @@
             return False
 
     def _branchtip(self, heads):
-        '''Return tuple with last open head in heads and false,
-        otherwise return last closed head and true.'''
+        """Return tuple with last open head in heads and false,
+        otherwise return last closed head and true."""
         tip = heads[-1]
         closed = True
         for h in reversed(heads):
@@ -352,9 +361,9 @@
         return tip, closed
 
     def branchtip(self, branch):
-        '''Return the tipmost open head on branch head, otherwise return the
+        """Return the tipmost open head on branch head, otherwise return the
         tipmost closed head on branch.
-        Raise KeyError for unknown branch.'''
+        Raise KeyError for unknown branch."""
         return self._branchtip(self[branch])[0]
 
     def iteropen(self, nodes):
--- a/mercurial/bundle2.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/bundle2.py	Fri Nov 27 17:03:29 2020 -0500
@@ -489,7 +489,12 @@
 
 def _processchangegroup(op, cg, tr, source, url, **kwargs):
     ret = cg.apply(op.repo, tr, source, url, **kwargs)
-    op.records.add(b'changegroup', {b'return': ret,})
+    op.records.add(
+        b'changegroup',
+        {
+            b'return': ret,
+        },
+    )
     return ret
 
 
@@ -1647,8 +1652,7 @@
 
 
 def obsmarkersversion(caps):
-    """extract the list of supported obsmarkers versions from a bundle2caps dict
-    """
+    """extract the list of supported obsmarkers versions from a bundle2caps dict"""
     obscaps = caps.get(b'obsmarkers', ())
     return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
 
--- a/mercurial/bundlerepo.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/bundlerepo.py	Fri Nov 27 17:03:29 2020 -0500
@@ -328,8 +328,7 @@
         self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
 
     def _writetempbundle(self, readfn, suffix, header=b''):
-        """Write a temporary file to disk
-        """
+        """Write a temporary file to disk"""
         fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
         self.tempfile = temp
 
@@ -530,7 +529,7 @@
 def getremotechanges(
     ui, repo, peer, onlyheads=None, bundlename=None, force=False
 ):
-    '''obtains a bundle of changes incoming from peer
+    """obtains a bundle of changes incoming from peer
 
     "onlyheads" restricts the returned changes to those reachable from the
       specified heads.
@@ -548,7 +547,7 @@
     "cleanupfn" must be called without arguments when you're done processing
       the changes; it closes both the original "peer" and the one returned
       here.
-    '''
+    """
     tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
     common, incoming, rheads = tmp
     if not incoming:
@@ -611,7 +610,10 @@
                 with peer.commandexecutor() as e:
                     cg = e.callcommand(
                         b'changegroup',
-                        {b'nodes': incoming, b'source': b'incoming',},
+                        {
+                            b'nodes': incoming,
+                            b'source': b'incoming',
+                        },
                     ).result()
 
                 rheads = None
@@ -655,7 +657,10 @@
 
         with peer.commandexecutor() as e:
             remotephases = e.callcommand(
-                b'listkeys', {b'namespace': b'phases',}
+                b'listkeys',
+                {
+                    b'namespace': b'phases',
+                },
             ).result()
 
         pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
--- a/mercurial/changelog.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/changelog.py	Fri Nov 27 17:03:29 2020 -0500
@@ -91,8 +91,8 @@
 
 
 class appender(object):
-    '''the changelog index must be updated last on disk, so we use this class
-    to delay writes to it'''
+    """the changelog index must be updated last on disk, so we use this class
+    to delay writes to it"""
 
     def __init__(self, vfs, name, mode, buf):
         self.data = buf
--- a/mercurial/cmdutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/cmdutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -399,7 +399,7 @@
 
 
 def recordfilter(ui, originalhunks, match, operation=None):
-    """ Prompts the user to filter the originalhunks and return a list of
+    """Prompts the user to filter the originalhunks and return a list of
     selected hunks.
     *operation* is used for to build ui messages to indicate the user what
     kind of filtering they are doing: reverting, committing, shelving, etc.
@@ -1078,7 +1078,7 @@
 
 
 def bailifchanged(repo, merge=True, hint=None):
-    """ enforce the precondition that working directory must be clean.
+    """enforce the precondition that working directory must be clean.
 
     'merge' can be set to false if a pending uncommitted merge should be
     ignored (such as when 'update --check' runs).
@@ -2184,7 +2184,7 @@
     opts=None,
     match=None,
 ):
-    '''export changesets as hg patches
+    """export changesets as hg patches
 
     Args:
       repo: The repository from which we're exporting revisions.
@@ -2205,7 +2205,7 @@
         fntemplate specified: Each rev is written to a unique file named using
                             the given template.
         Otherwise: All revs will be written to basefm.
-    '''
+    """
     _prefetchchangedfiles(repo, revs, match)
 
     if not fntemplate:
@@ -3476,7 +3476,8 @@
                 repo, [f for sublist in oplist for f in sublist]
             )
             prefetch(
-                repo, [(ctx.rev(), matchfiles)],
+                repo,
+                [(ctx.rev(), matchfiles)],
             )
             match = scmutil.match(repo[None], pats)
             _performrevert(
@@ -3724,10 +3725,10 @@
 
 
 def checkunfinished(repo, commit=False, skipmerge=False):
-    '''Look for an unfinished multistep operation, like graft, and abort
+    """Look for an unfinished multistep operation, like graft, and abort
     if found. It's probably good to check this right before
     bailifchanged().
-    '''
+    """
     # Check for non-clearable states first, so things like rebase will take
     # precedence over update.
     for state in statemod._unfinishedstates:
@@ -3753,9 +3754,9 @@
 
 
 def clearunfinished(repo):
-    '''Check for unfinished operations (as above), and clear the ones
+    """Check for unfinished operations (as above), and clear the ones
     that are clearable.
-    '''
+    """
     for state in statemod._unfinishedstates:
         if state._reportonly:
             continue
@@ -3770,8 +3771,8 @@
 
 
 def getunfinishedstate(repo):
-    ''' Checks for unfinished operations and returns statecheck object
-        for it'''
+    """Checks for unfinished operations and returns statecheck object
+    for it"""
     for state in statemod._unfinishedstates:
         if state.isunfinished(repo):
             return state
@@ -3779,7 +3780,7 @@
 
 
 def howtocontinue(repo):
-    '''Check for an unfinished operation and return the command to finish
+    """Check for an unfinished operation and return the command to finish
     it.
 
     statemod._unfinishedstates list is checked for an unfinished operation
@@ -3788,7 +3789,7 @@
 
     Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
     a boolean.
-    '''
+    """
     contmsg = _(b"continue: %s")
     for state in statemod._unfinishedstates:
         if not state._continueflag:
@@ -3801,13 +3802,13 @@
 
 
 def checkafterresolved(repo):
-    '''Inform the user about the next action after completing hg resolve
+    """Inform the user about the next action after completing hg resolve
 
     If there's a an unfinished operation that supports continue flag,
     howtocontinue will yield repo.ui.warn as the reporter.
 
     Otherwise, it will yield repo.ui.note.
-    '''
+    """
     msg, warning = howtocontinue(repo)
     if msg is not None:
         if warning:
@@ -3817,14 +3818,14 @@
 
 
 def wrongtooltocontinue(repo, task):
-    '''Raise an abort suggesting how to properly continue if there is an
+    """Raise an abort suggesting how to properly continue if there is an
     active task.
 
     Uses howtocontinue() to find the active task.
 
     If there's no task (repo.ui.note for 'hg commit'), it does not offer
     a hint.
-    '''
+    """
     after = howtocontinue(repo)
     hint = None
     if after[1]:
--- a/mercurial/commands.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/commands.py	Fri Nov 27 17:03:29 2020 -0500
@@ -605,7 +605,7 @@
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def archive(ui, repo, dest, **opts):
-    '''create an unversioned archive of a repository revision
+    """create an unversioned archive of a repository revision
 
     By default, the revision used is the parent of the working
     directory; use -r/--rev to specify a different revision.
@@ -644,7 +644,7 @@
     removed.
 
     Returns 0 on success.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
     rev = opts.get(b'rev')
@@ -718,7 +718,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def backout(ui, repo, node=None, rev=None, **opts):
-    '''reverse effect of earlier changeset
+    """reverse effect of earlier changeset
 
     Prepare a new changeset with the effect of REV undone in the
     current working directory. If no conflicts were encountered,
@@ -768,7 +768,7 @@
 
     Returns 0 on success, 1 if nothing to backout or there are unresolved
     files.
-    '''
+    """
     with repo.wlock(), repo.lock():
         return _dobackout(ui, repo, node, rev, **opts)
 
@@ -1166,7 +1166,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def bookmark(ui, repo, *names, **opts):
-    '''create a new bookmark or list existing bookmarks
+    """create a new bookmark or list existing bookmarks
 
     Bookmarks are labels on changesets to help track lines of development.
     Bookmarks are unversioned and can be moved, renamed and deleted.
@@ -1224,7 +1224,7 @@
       - print only the active bookmark name::
 
           hg book -ql .
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     force = opts.get(b'force')
     rev = opts.get(b'rev')
@@ -2804,7 +2804,9 @@
 
 @command(
     b'forget',
-    [(b'i', b'interactive', None, _(b'use interactive mode')),]
+    [
+        (b'i', b'interactive', None, _(b'use interactive mode')),
+    ]
     + walkopts
     + dryrunopts,
     _(b'[OPTION]... FILE...'),
@@ -2904,7 +2906,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def graft(ui, repo, *revs, **opts):
-    '''copy changes from other branches onto the current branch
+    """copy changes from other branches onto the current branch
 
     This command uses Mercurial's merge logic to copy individual
     changes from other branches without merging branches in the
@@ -2997,7 +2999,7 @@
     See :hg:`help revisions` for more about specifying revisions.
 
     Returns 0 on successful completion, 1 if there are unresolved files.
-    '''
+    """
     with repo.wlock():
         return _dograft(ui, repo, *revs, **opts)
 
@@ -5261,7 +5263,12 @@
             None,
             _(b'run even when remote repository is unrelated'),
         ),
-        (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
+        (
+            b'',
+            b'confirm',
+            None,
+            _(b'confirm pull before applying changes'),
+        ),
         (
             b'r',
             b'rev',
@@ -5518,7 +5525,9 @@
 
     if opts.get(b'all_bookmarks'):
         cmdutil.check_incompatible_arguments(
-            opts, b'all_bookmarks', [b'bookmark', b'rev'],
+            opts,
+            b'all_bookmarks',
+            [b'bookmark', b'rev'],
         )
         opts[b'bookmark'] = list(repo._bookmarks)
 
@@ -5608,7 +5617,9 @@
 
 @command(
     b'recover',
-    [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
+    [
+        (b'', b'verify', False, b"run `hg verify` after successful recover"),
+    ],
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def recover(ui, repo, **opts):
@@ -6448,7 +6459,7 @@
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def shelve(ui, repo, *pats, **opts):
-    '''save and set aside changes from the working directory
+    """save and set aside changes from the working directory
 
     Shelving takes files that "hg status" reports as not clean, saves
     the modifications to a bundle (a shelved change), and reverts the
@@ -6479,7 +6490,7 @@
 
     To delete specific shelved changes, use ``--delete``. To delete
     all shelved changes, use ``--cleanup``.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     allowables = [
         (b'addremove', {b'create'}),  # 'create' is pseudo action
@@ -7707,8 +7718,7 @@
 
 
 def loadcmdtable(ui, name, cmdtable):
-    """Load command functions from specified cmdtable
-    """
+    """Load command functions from specified cmdtable"""
     overrides = [cmd for cmd in cmdtable if cmd in table]
     if overrides:
         ui.warn(
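
Most of the remaining churn in commands.py comes from the upgraded collection-splitting logic: when a bracketed collection or argument list is written with a trailing comma (the "magic trailing comma"), black 20.8b1 keeps it expanded with one element per line rather than packing it back onto a single line. A rough before/after sketch modelled on the forget hunk above, simplified so it runs on its own (the enclosing @command decorator and the _() translation wrapper are dropped):

    # before: a one-element option list written with a trailing comma
    flags = [(b'i', b'interactive', None, b'use interactive mode'),]

    # after black 20.8b1: the trailing comma keeps the list expanded,
    # one element per line
    flags = [
        (b'i', b'interactive', None, b'use interactive mode'),
    ]

Collections without a trailing comma that still fit within the line limit are left on one line, which is why some short calls elsewhere in the patch are untouched.
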
--- a/mercurial/commandserver.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/commandserver.py	Fri Nov 27 17:03:29 2020 -0500
@@ -316,8 +316,8 @@
         return -1
 
     def runcommand(self):
-        """ reads a list of \0 terminated arguments, executes
-        and writes the return code to the result channel """
+        """reads a list of \0 terminated arguments, executes
+        and writes the return code to the result channel"""
         from . import dispatch  # avoid cycle
 
         args = self._readlist()
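
The commandserver.py hunk shows the other docstring change in 20.8b1: whitespace just inside the opening quotes is stripped and the closing quotes are pulled up against the last line of text; a docstring whose text fits on one line, such as loadcmdtable above, is collapsed onto a single line. A small sketch with a made-up method name:

    # before
    def runjob(self):
        """ reads a job description
        and executes it """

    # after black 20.8b1
    def runjob(self):
        """reads a job description
        and executes it"""
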
--- a/mercurial/commit.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/commit.py	Fri Nov 27 17:03:29 2020 -0500
@@ -98,7 +98,11 @@
         )
         xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
         repo.hook(
-            b'pretxncommit', throw=True, node=hex(n), parent1=xp1, parent2=xp2,
+            b'pretxncommit',
+            throw=True,
+            node=hex(n),
+            parent1=xp1,
+            parent2=xp2,
         )
         # set the new commit to its proper phase
         targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
@@ -154,10 +158,10 @@
 
 
 def _get_salvaged(repo, ms, ctx):
-    """ returns a list of salvaged files
+    """returns a list of salvaged files
 
     returns empty list if config option which process salvaged files are
-    not enabled """
+    not enabled"""
     salvaged = []
     copy_sd = repo.filecopiesmode == b'changeset-sidedata'
     if copy_sd and len(ctx.parents()) > 1:
@@ -238,7 +242,14 @@
 
 
 def _filecommit(
-    repo, fctx, manifest1, manifest2, linkrev, tr, includecopymeta, ms,
+    repo,
+    fctx,
+    manifest1,
+    manifest2,
+    linkrev,
+    tr,
+    includecopymeta,
+    ms,
 ):
     """
     commit an individual file as part of a larger transaction
--- a/mercurial/config.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/config.py	Fri Nov 27 17:03:29 2020 -0500
@@ -208,9 +208,11 @@
     def read(self, path, fp=None, sections=None, remap=None):
         if not fp:
             fp = util.posixfile(path, b'rb')
-        assert getattr(fp, 'mode', 'rb') == 'rb', (
-            b'config files must be opened in binary mode, got fp=%r mode=%r'
-            % (fp, fp.mode,)
+        assert (
+            getattr(fp, 'mode', 'rb') == 'rb'
+        ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
+            fp,
+            fp.mode,
         )
 
         dir = os.path.dirname(path)
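
The config.py hunk above is easier to read as a consolidated before/after: black 20.8b1 wraps the asserted expression in parentheses so the over-long assert can be split without changing its meaning, and the % operand tuple is then kept expanded because it was written with a trailing comma. This restates the interleaved hunk above; the exact layout depends on how the pieces measure against the configured line length:

    # before
    assert getattr(fp, 'mode', 'rb') == 'rb', (
        b'config files must be opened in binary mode, got fp=%r mode=%r'
        % (fp, fp.mode,)
    )

    # after black 20.8b1: parenthesized condition, message hung off the
    # closing parenthesis, operand tuple kept expanded by its trailing comma
    assert (
        getattr(fp, 'mode', 'rb') == 'rb'
    ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
        fp,
        fp.mode,
    )
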
--- a/mercurial/configitems.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/configitems.py	Fri Nov 27 17:03:29 2020 -0500
@@ -133,78 +133,127 @@
 
 def _registerdiffopts(section, configprefix=b''):
     coreconfigitem(
-        section, configprefix + b'nodates', default=False,
+        section,
+        configprefix + b'nodates',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'showfunc', default=False,
+        section,
+        configprefix + b'showfunc',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'unified', default=None,
+        section,
+        configprefix + b'unified',
+        default=None,
     )
     coreconfigitem(
-        section, configprefix + b'git', default=False,
+        section,
+        configprefix + b'git',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorews', default=False,
+        section,
+        configprefix + b'ignorews',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorewsamount', default=False,
+        section,
+        configprefix + b'ignorewsamount',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignoreblanklines', default=False,
+        section,
+        configprefix + b'ignoreblanklines',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorewseol', default=False,
+        section,
+        configprefix + b'ignorewseol',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'nobinary', default=False,
+        section,
+        configprefix + b'nobinary',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'noprefix', default=False,
+        section,
+        configprefix + b'noprefix',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'word-diff', default=False,
+        section,
+        configprefix + b'word-diff',
+        default=False,
     )
 
 
 coreconfigitem(
-    b'alias', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'auth', b'cookiefile', default=None,
+    b'alias',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'auth',
+    b'cookiefile',
+    default=None,
 )
 _registerdiffopts(section=b'annotate')
 # bookmarks.pushing: internal hack for discovery
 coreconfigitem(
-    b'bookmarks', b'pushing', default=list,
+    b'bookmarks',
+    b'pushing',
+    default=list,
 )
 # bundle.mainreporoot: internal hack for bundlerepo
 coreconfigitem(
-    b'bundle', b'mainreporoot', default=b'',
-)
-coreconfigitem(
-    b'censor', b'policy', default=b'abort', experimental=True,
-)
-coreconfigitem(
-    b'chgserver', b'idletimeout', default=3600,
-)
-coreconfigitem(
-    b'chgserver', b'skiphash', default=False,
-)
-coreconfigitem(
-    b'cmdserver', b'log', default=None,
-)
-coreconfigitem(
-    b'cmdserver', b'max-log-files', default=7,
-)
-coreconfigitem(
-    b'cmdserver', b'max-log-size', default=b'1 MB',
-)
-coreconfigitem(
-    b'cmdserver', b'max-repo-cache', default=0, experimental=True,
-)
-coreconfigitem(
-    b'cmdserver', b'message-encodings', default=list,
+    b'bundle',
+    b'mainreporoot',
+    default=b'',
+)
+coreconfigitem(
+    b'censor',
+    b'policy',
+    default=b'abort',
+    experimental=True,
+)
+coreconfigitem(
+    b'chgserver',
+    b'idletimeout',
+    default=3600,
+)
+coreconfigitem(
+    b'chgserver',
+    b'skiphash',
+    default=False,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'log',
+    default=None,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-log-files',
+    default=7,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-log-size',
+    default=b'1 MB',
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-repo-cache',
+    default=0,
+    experimental=True,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'message-encodings',
+    default=list,
 )
 coreconfigitem(
     b'cmdserver',
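
Everything from here to the end of configitems.py is the same mechanical change repeated for each registration: every coreconfigitem(...) call written with a trailing comma is expanded to one argument per line, which is why the hunk headers show the touched regions roughly doubling in size. A representative before/after, matching the alias registration earlier in this hunk:

    # before
    coreconfigitem(
        b'alias', b'.*', default=dynamicdefault, generic=True,
    )

    # after black 20.8b1
    coreconfigitem(
        b'alias',
        b'.*',
        default=dynamicdefault,
        generic=True,
    )
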
@@ -212,16 +261,25 @@
     default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
 )
 coreconfigitem(
-    b'cmdserver', b'shutdown-on-interrupt', default=True,
-)
-coreconfigitem(
-    b'color', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'color', b'mode', default=b'auto',
-)
-coreconfigitem(
-    b'color', b'pagermode', default=dynamicdefault,
+    b'cmdserver',
+    b'shutdown-on-interrupt',
+    default=True,
+)
+coreconfigitem(
+    b'color',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'color',
+    b'mode',
+    default=b'auto',
+)
+coreconfigitem(
+    b'color',
+    b'pagermode',
+    default=dynamicdefault,
 )
 coreconfigitem(
     b'command-templates',
@@ -230,7 +288,10 @@
     alias=[(b'ui', b'graphnodetemplate')],
 )
 coreconfigitem(
-    b'command-templates', b'log', default=None, alias=[(b'ui', b'logtemplate')],
+    b'command-templates',
+    b'log',
+    default=None,
+    alias=[(b'ui', b'logtemplate')],
 )
 coreconfigitem(
     b'command-templates',
@@ -252,7 +313,9 @@
     alias=[(b'ui', b'pre-merge-tool-output-template')],
 )
 coreconfigitem(
-    b'command-templates', b'oneline-summary', default=None,
+    b'command-templates',
+    b'oneline-summary',
+    default=None,
 )
 coreconfigitem(
     b'command-templates',
@@ -262,327 +325,546 @@
 )
 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
 coreconfigitem(
-    b'commands', b'commit.post-status', default=False,
-)
-coreconfigitem(
-    b'commands', b'grep.all-files', default=False, experimental=True,
-)
-coreconfigitem(
-    b'commands', b'merge.require-rev', default=False,
-)
-coreconfigitem(
-    b'commands', b'push.require-revs', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.confirm', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.explicit-re-merge', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.mark-check', default=b'none',
+    b'commands',
+    b'commit.post-status',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'grep.all-files',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'commands',
+    b'merge.require-rev',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'push.require-revs',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.confirm',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.explicit-re-merge',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.mark-check',
+    default=b'none',
 )
 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
 coreconfigitem(
-    b'commands', b'show.aliasprefix', default=list,
-)
-coreconfigitem(
-    b'commands', b'status.relative', default=False,
-)
-coreconfigitem(
-    b'commands', b'status.skipstates', default=[], experimental=True,
-)
-coreconfigitem(
-    b'commands', b'status.terse', default=b'',
-)
-coreconfigitem(
-    b'commands', b'status.verbose', default=False,
-)
-coreconfigitem(
-    b'commands', b'update.check', default=None,
-)
-coreconfigitem(
-    b'commands', b'update.requiredest', default=False,
-)
-coreconfigitem(
-    b'committemplate', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'convert', b'bzr.saverev', default=True,
-)
-coreconfigitem(
-    b'convert', b'cvsps.cache', default=True,
-)
-coreconfigitem(
-    b'convert', b'cvsps.fuzz', default=60,
-)
-coreconfigitem(
-    b'convert', b'cvsps.logencoding', default=None,
-)
-coreconfigitem(
-    b'convert', b'cvsps.mergefrom', default=None,
-)
-coreconfigitem(
-    b'convert', b'cvsps.mergeto', default=None,
-)
-coreconfigitem(
-    b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
-)
-coreconfigitem(
-    b'convert', b'git.extrakeys', default=list,
-)
-coreconfigitem(
-    b'convert', b'git.findcopiesharder', default=False,
-)
-coreconfigitem(
-    b'convert', b'git.remoteprefix', default=b'remote',
-)
-coreconfigitem(
-    b'convert', b'git.renamelimit', default=400,
-)
-coreconfigitem(
-    b'convert', b'git.saverev', default=True,
-)
-coreconfigitem(
-    b'convert', b'git.similarity', default=50,
-)
-coreconfigitem(
-    b'convert', b'git.skipsubmodules', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.clonebranches', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.ignoreerrors', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.preserve-hash', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.revs', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.saverev', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.sourcename', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.startrev', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.tagsbranch', default=b'default',
-)
-coreconfigitem(
-    b'convert', b'hg.usebranchnames', default=True,
-)
-coreconfigitem(
-    b'convert', b'ignoreancestorcheck', default=False, experimental=True,
-)
-coreconfigitem(
-    b'convert', b'localtimezone', default=False,
-)
-coreconfigitem(
-    b'convert', b'p4.encoding', default=dynamicdefault,
-)
-coreconfigitem(
-    b'convert', b'p4.startrev', default=0,
-)
-coreconfigitem(
-    b'convert', b'skiptags', default=False,
-)
-coreconfigitem(
-    b'convert', b'svn.debugsvnlog', default=True,
-)
-coreconfigitem(
-    b'convert', b'svn.trunk', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.tags', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.branches', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.startrev', default=0,
-)
-coreconfigitem(
-    b'debug', b'dirstate.delaywrite', default=0,
-)
-coreconfigitem(
-    b'defaults', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'devel', b'all-warnings', default=False,
-)
-coreconfigitem(
-    b'devel', b'bundle2.debug', default=False,
-)
-coreconfigitem(
-    b'devel', b'bundle.delta', default=b'',
-)
-coreconfigitem(
-    b'devel', b'cache-vfs', default=None,
-)
-coreconfigitem(
-    b'devel', b'check-locks', default=False,
-)
-coreconfigitem(
-    b'devel', b'check-relroot', default=False,
-)
-coreconfigitem(
-    b'devel', b'default-date', default=None,
-)
-coreconfigitem(
-    b'devel', b'deprec-warn', default=False,
-)
-coreconfigitem(
-    b'devel', b'disableloaddefaultcerts', default=False,
-)
-coreconfigitem(
-    b'devel', b'warn-empty-changegroup', default=False,
-)
-coreconfigitem(
-    b'devel', b'legacy.exchange', default=list,
-)
-coreconfigitem(
-    b'devel', b'persistent-nodemap', default=False,
-)
-coreconfigitem(
-    b'devel', b'servercafile', default=b'',
-)
-coreconfigitem(
-    b'devel', b'serverexactprotocol', default=b'',
-)
-coreconfigitem(
-    b'devel', b'serverrequirecert', default=False,
-)
-coreconfigitem(
-    b'devel', b'strip-obsmarkers', default=True,
-)
-coreconfigitem(
-    b'devel', b'warn-config', default=None,
-)
-coreconfigitem(
-    b'devel', b'warn-config-default', default=None,
-)
-coreconfigitem(
-    b'devel', b'user.obsmarker', default=None,
-)
-coreconfigitem(
-    b'devel', b'warn-config-unknown', default=None,
-)
-coreconfigitem(
-    b'devel', b'debug.copies', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.extensions', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.repo-filters', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.peer-request', default=False,
-)
-coreconfigitem(
-    b'devel', b'discovery.randomize', default=True,
+    b'commands',
+    b'show.aliasprefix',
+    default=list,
+)
+coreconfigitem(
+    b'commands',
+    b'status.relative',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'status.skipstates',
+    default=[],
+    experimental=True,
+)
+coreconfigitem(
+    b'commands',
+    b'status.terse',
+    default=b'',
+)
+coreconfigitem(
+    b'commands',
+    b'status.verbose',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'update.check',
+    default=None,
+)
+coreconfigitem(
+    b'commands',
+    b'update.requiredest',
+    default=False,
+)
+coreconfigitem(
+    b'committemplate',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'convert',
+    b'bzr.saverev',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.cache',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.fuzz',
+    default=60,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.logencoding',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.mergefrom',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.mergeto',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'git.committeractions',
+    default=lambda: [b'messagedifferent'],
+)
+coreconfigitem(
+    b'convert',
+    b'git.extrakeys',
+    default=list,
+)
+coreconfigitem(
+    b'convert',
+    b'git.findcopiesharder',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'git.remoteprefix',
+    default=b'remote',
+)
+coreconfigitem(
+    b'convert',
+    b'git.renamelimit',
+    default=400,
+)
+coreconfigitem(
+    b'convert',
+    b'git.saverev',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'git.similarity',
+    default=50,
+)
+coreconfigitem(
+    b'convert',
+    b'git.skipsubmodules',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.clonebranches',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.ignoreerrors',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.preserve-hash',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.revs',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.saverev',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.sourcename',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.startrev',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.tagsbranch',
+    default=b'default',
+)
+coreconfigitem(
+    b'convert',
+    b'hg.usebranchnames',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'ignoreancestorcheck',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'convert',
+    b'localtimezone',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'p4.encoding',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'convert',
+    b'p4.startrev',
+    default=0,
+)
+coreconfigitem(
+    b'convert',
+    b'skiptags',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.debugsvnlog',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.trunk',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.tags',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.branches',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.startrev',
+    default=0,
+)
+coreconfigitem(
+    b'debug',
+    b'dirstate.delaywrite',
+    default=0,
+)
+coreconfigitem(
+    b'defaults',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'devel',
+    b'all-warnings',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'bundle2.debug',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'bundle.delta',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'cache-vfs',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'check-locks',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'check-relroot',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'default-date',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'deprec-warn',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'disableloaddefaultcerts',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-empty-changegroup',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'legacy.exchange',
+    default=list,
+)
+coreconfigitem(
+    b'devel',
+    b'persistent-nodemap',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'servercafile',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'serverexactprotocol',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'serverrequirecert',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'strip-obsmarkers',
+    default=True,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config-default',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'user.obsmarker',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config-unknown',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.copies',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.extensions',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.repo-filters',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.peer-request',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'discovery.randomize',
+    default=True,
 )
 _registerdiffopts(section=b'diff')
 coreconfigitem(
-    b'email', b'bcc', default=None,
-)
-coreconfigitem(
-    b'email', b'cc', default=None,
-)
-coreconfigitem(
-    b'email', b'charsets', default=list,
-)
-coreconfigitem(
-    b'email', b'from', default=None,
-)
-coreconfigitem(
-    b'email', b'method', default=b'smtp',
-)
-coreconfigitem(
-    b'email', b'reply-to', default=None,
-)
-coreconfigitem(
-    b'email', b'to', default=None,
-)
-coreconfigitem(
-    b'experimental', b'archivemetatemplate', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'auto-publish', default=b'publish',
-)
-coreconfigitem(
-    b'experimental', b'bundle-phases', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2-advertise', default=True,
-)
-coreconfigitem(
-    b'experimental', b'bundle2-output-capture', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2.pushback', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2lazylocking', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.bzip2', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.gzip', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.none', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.zstd', default=None,
-)
-coreconfigitem(
-    b'experimental', b'changegroup3', default=False,
-)
-coreconfigitem(
-    b'experimental', b'cleanup-as-archived', default=False,
-)
-coreconfigitem(
-    b'experimental', b'clientcompressionengines', default=list,
-)
-coreconfigitem(
-    b'experimental', b'copytrace', default=b'on',
-)
-coreconfigitem(
-    b'experimental', b'copytrace.movecandidateslimit', default=100,
-)
-coreconfigitem(
-    b'experimental', b'copytrace.sourcecommitlimit', default=100,
-)
-coreconfigitem(
-    b'experimental', b'copies.read-from', default=b"filelog-only",
-)
-coreconfigitem(
-    b'experimental', b'copies.write-to', default=b'filelog-only',
-)
-coreconfigitem(
-    b'experimental', b'crecordtest', default=None,
-)
-coreconfigitem(
-    b'experimental', b'directaccess', default=False,
-)
-coreconfigitem(
-    b'experimental', b'directaccess.revnums', default=False,
-)
-coreconfigitem(
-    b'experimental', b'editortmpinhg', default=False,
-)
-coreconfigitem(
-    b'experimental', b'evolution', default=list,
+    b'email',
+    b'bcc',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'cc',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'charsets',
+    default=list,
+)
+coreconfigitem(
+    b'email',
+    b'from',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'method',
+    default=b'smtp',
+)
+coreconfigitem(
+    b'email',
+    b'reply-to',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'to',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'archivemetatemplate',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'auto-publish',
+    default=b'publish',
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle-phases',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2-advertise',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2-output-capture',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2.pushback',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2lazylocking',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.bzip2',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.gzip',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.none',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.zstd',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'changegroup3',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'cleanup-as-archived',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'clientcompressionengines',
+    default=list,
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace',
+    default=b'on',
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace.movecandidateslimit',
+    default=100,
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace.sourcecommitlimit',
+    default=100,
+)
+coreconfigitem(
+    b'experimental',
+    b'copies.read-from',
+    default=b"filelog-only",
+)
+coreconfigitem(
+    b'experimental',
+    b'copies.write-to',
+    default=b'filelog-only',
+)
+coreconfigitem(
+    b'experimental',
+    b'crecordtest',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'directaccess',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'directaccess.revnums',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'editortmpinhg',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution',
+    default=list,
 )
 coreconfigitem(
     b'experimental',
@@ -591,10 +873,14 @@
     alias=[(b'experimental', b'allowdivergence')],
 )
 coreconfigitem(
-    b'experimental', b'evolution.allowunstable', default=None,
-)
-coreconfigitem(
-    b'experimental', b'evolution.createmarkers', default=None,
+    b'experimental',
+    b'evolution.allowunstable',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.createmarkers',
+    default=None,
 )
 coreconfigitem(
     b'experimental',
@@ -603,109 +889,173 @@
     alias=[(b'experimental', b'effect-flags')],
 )
 coreconfigitem(
-    b'experimental', b'evolution.exchange', default=None,
-)
-coreconfigitem(
-    b'experimental', b'evolution.bundle-obsmarker', default=False,
-)
-coreconfigitem(
-    b'experimental', b'log.topo', default=False,
-)
-coreconfigitem(
-    b'experimental', b'evolution.report-instabilities', default=True,
-)
-coreconfigitem(
-    b'experimental', b'evolution.track-operation', default=True,
+    b'experimental',
+    b'evolution.exchange',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.bundle-obsmarker',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'log.topo',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.report-instabilities',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.track-operation',
+    default=True,
 )
 # repo-level config to exclude a revset visibility
 #
 # The target use case is to use `share` to expose different subset of the same
 # repository, especially server side. See also `server.view`.
 coreconfigitem(
-    b'experimental', b'extra-filter-revs', default=None,
-)
-coreconfigitem(
-    b'experimental', b'maxdeltachainspan', default=-1,
+    b'experimental',
+    b'extra-filter-revs',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'maxdeltachainspan',
+    default=-1,
 )
 # tracks files which were undeleted (merge might delete them but we explicitly
 # kept/undeleted them) and creates new filenodes for them
 coreconfigitem(
-    b'experimental', b'merge-track-salvaged', default=False,
-)
-coreconfigitem(
-    b'experimental', b'mergetempdirprefix', default=None,
-)
-coreconfigitem(
-    b'experimental', b'mmapindexthreshold', default=None,
-)
-coreconfigitem(
-    b'experimental', b'narrow', default=False,
-)
-coreconfigitem(
-    b'experimental', b'nonnormalparanoidcheck', default=False,
-)
-coreconfigitem(
-    b'experimental', b'exportableenviron', default=list,
-)
-coreconfigitem(
-    b'experimental', b'extendedheader.index', default=None,
-)
-coreconfigitem(
-    b'experimental', b'extendedheader.similarity', default=False,
-)
-coreconfigitem(
-    b'experimental', b'graphshorten', default=False,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.parent', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.missing', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.grandparent', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'hook-track-tags', default=False,
-)
-coreconfigitem(
-    b'experimental', b'httppeer.advertise-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'httppeer.v2-encoder-order', default=None,
-)
-coreconfigitem(
-    b'experimental', b'httppostargs', default=False,
+    b'experimental',
+    b'merge-track-salvaged',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'mergetempdirprefix',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'mmapindexthreshold',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'narrow',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'nonnormalparanoidcheck',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'exportableenviron',
+    default=list,
+)
+coreconfigitem(
+    b'experimental',
+    b'extendedheader.index',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'extendedheader.similarity',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphshorten',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.parent',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.missing',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.grandparent',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'hook-track-tags',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppeer.advertise-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppeer.v2-encoder-order',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppostargs',
+    default=False,
 )
 coreconfigitem(b'experimental', b'nointerrupt', default=False)
 coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
 
 coreconfigitem(
-    b'experimental', b'obsmarkers-exchange-debug', default=False,
-)
-coreconfigitem(
-    b'experimental', b'remotenames', default=False,
-)
-coreconfigitem(
-    b'experimental', b'removeemptydirs', default=True,
-)
-coreconfigitem(
-    b'experimental', b'revert.interactive.select-to-keep', default=False,
-)
-coreconfigitem(
-    b'experimental', b'revisions.prefixhexnode', default=False,
-)
-coreconfigitem(
-    b'experimental', b'revlogv2', default=None,
-)
-coreconfigitem(
-    b'experimental', b'revisions.disambiguatewithin', default=None,
-)
-coreconfigitem(
-    b'experimental', b'rust.index', default=False,
-)
-coreconfigitem(
-    b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
+    b'experimental',
+    b'obsmarkers-exchange-debug',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'remotenames',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'removeemptydirs',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'revert.interactive.select-to-keep',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'revisions.prefixhexnode',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'revlogv2',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'revisions.disambiguatewithin',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'rust.index',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'server.filesdata.recommended-batch-size',
+    default=50000,
 )
 coreconfigitem(
     b'experimental',
@@ -713,10 +1063,14 @@
     default=100000,
 )
 coreconfigitem(
-    b'experimental', b'server.stream-narrow-clones', default=False,
-)
-coreconfigitem(
-    b'experimental', b'single-head-per-branch', default=False,
+    b'experimental',
+    b'server.stream-narrow-clones',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'single-head-per-branch',
+    default=False,
 )
 coreconfigitem(
     b'experimental',
@@ -724,73 +1078,125 @@
     default=False,
 )
 coreconfigitem(
-    b'experimental', b'sshserver.support-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read.density-threshold', default=0.50,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read.min-gap-size', default=b'65K',
-)
-coreconfigitem(
-    b'experimental', b'treemanifest', default=False,
-)
-coreconfigitem(
-    b'experimental', b'update.atomic-file', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sshpeer.advertise-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.apiserver', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.api.http-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.api.debugreflect', default=False,
-)
-coreconfigitem(
-    b'experimental', b'worker.wdir-get-thread-safe', default=False,
-)
-coreconfigitem(
-    b'experimental', b'worker.repository-upgrade', default=False,
-)
-coreconfigitem(
-    b'experimental', b'xdiff', default=False,
-)
-coreconfigitem(
-    b'extensions', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'extdata', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'format', b'bookmarks-in-store', default=False,
-)
-coreconfigitem(
-    b'format', b'chunkcachesize', default=None, experimental=True,
-)
-coreconfigitem(
-    b'format', b'dotencode', default=True,
-)
-coreconfigitem(
-    b'format', b'generaldelta', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'manifestcachesize', default=None, experimental=True,
-)
-coreconfigitem(
-    b'format', b'maxchainlen', default=dynamicdefault, experimental=True,
-)
-coreconfigitem(
-    b'format', b'obsstore-version', default=None,
-)
-coreconfigitem(
-    b'format', b'sparse-revlog', default=True,
+    b'experimental',
+    b'sshserver.support-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read.density-threshold',
+    default=0.50,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read.min-gap-size',
+    default=b'65K',
+)
+coreconfigitem(
+    b'experimental',
+    b'treemanifest',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'update.atomic-file',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sshpeer.advertise-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.apiserver',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.api.http-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.api.debugreflect',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'worker.wdir-get-thread-safe',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'worker.repository-upgrade',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'xdiff',
+    default=False,
+)
+coreconfigitem(
+    b'extensions',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'extdata',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'format',
+    b'bookmarks-in-store',
+    default=False,
+)
+coreconfigitem(
+    b'format',
+    b'chunkcachesize',
+    default=None,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'dotencode',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'generaldelta',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'manifestcachesize',
+    default=None,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'maxchainlen',
+    default=dynamicdefault,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'obsstore-version',
+    default=None,
+)
+coreconfigitem(
+    b'format',
+    b'sparse-revlog',
+    default=True,
 )
 coreconfigitem(
     b'format',
@@ -799,13 +1205,19 @@
     alias=[(b'experimental', b'format.compression')],
 )
 coreconfigitem(
-    b'format', b'usefncache', default=True,
-)
-coreconfigitem(
-    b'format', b'usegeneraldelta', default=True,
-)
-coreconfigitem(
-    b'format', b'usestore', default=True,
+    b'format',
+    b'usefncache',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'usegeneraldelta',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'usestore',
+    default=True,
 )
 # Right now, the only efficient implement of the nodemap logic is in Rust, so
 # the persistent nodemap feature needs to stay experimental as long as the Rust
@@ -820,43 +1232,77 @@
     experimental=True,
 )
 coreconfigitem(
-    b'format', b'exp-use-side-data', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'exp-share-safe', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'internal-phase', default=False, experimental=True,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_when_unused', default=True,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_update_file_count', default=50000,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_update_file_count_rust', default=400000,
-)
-coreconfigitem(
-    b'help', br'hidden-command\..*', default=False, generic=True,
-)
-coreconfigitem(
-    b'help', br'hidden-topic\..*', default=False, generic=True,
-)
-coreconfigitem(
-    b'hooks', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'hgweb-paths', b'.*', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostfingerprints', b'.*', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'ciphers', default=None,
-)
-coreconfigitem(
-    b'hostsecurity', b'minimumprotocol', default=dynamicdefault,
+    b'format',
+    b'exp-use-side-data',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'exp-share-safe',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'internal-phase',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_when_unused',
+    default=True,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_update_file_count',
+    default=50000,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_update_file_count_rust',
+    default=400000,
+)
+coreconfigitem(
+    b'help',
+    br'hidden-command\..*',
+    default=False,
+    generic=True,
+)
+coreconfigitem(
+    b'help',
+    br'hidden-topic\..*',
+    default=False,
+    generic=True,
+)
+coreconfigitem(
+    b'hooks',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'hgweb-paths',
+    b'.*',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostfingerprints',
+    b'.*',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'ciphers',
+    default=None,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'minimumprotocol',
+    default=dynamicdefault,
 )
 coreconfigitem(
     b'hostsecurity',
@@ -865,73 +1311,122 @@
     generic=True,
 )
 coreconfigitem(
-    b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'.*:fingerprints$', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True,
+    b'hostsecurity',
+    b'.*:ciphers$',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'.*:fingerprints$',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'.*:verifycertsfile$',
+    default=None,
+    generic=True,
 )
 
 coreconfigitem(
-    b'http_proxy', b'always', default=False,
-)
-coreconfigitem(
-    b'http_proxy', b'host', default=None,
-)
-coreconfigitem(
-    b'http_proxy', b'no', default=list,
-)
-coreconfigitem(
-    b'http_proxy', b'passwd', default=None,
-)
-coreconfigitem(
-    b'http_proxy', b'user', default=None,
+    b'http_proxy',
+    b'always',
+    default=False,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'host',
+    default=None,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'no',
+    default=list,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'passwd',
+    default=None,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'user',
+    default=None,
 )
 
 coreconfigitem(
-    b'http', b'timeout', default=None,
+    b'http',
+    b'timeout',
+    default=None,
 )
 
 coreconfigitem(
-    b'logtoprocess', b'commandexception', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'commandfinish', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'command', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'develwarn', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'uiblocked', default=None,
-)
-coreconfigitem(
-    b'merge', b'checkunknown', default=b'abort',
-)
-coreconfigitem(
-    b'merge', b'checkignored', default=b'abort',
-)
-coreconfigitem(
-    b'experimental', b'merge.checkpathconflicts', default=False,
-)
-coreconfigitem(
-    b'merge', b'followcopies', default=True,
-)
-coreconfigitem(
-    b'merge', b'on-failure', default=b'continue',
-)
-coreconfigitem(
-    b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True,
-)
-coreconfigitem(
-    b'merge', b'strict-capability-check', default=False,
-)
-coreconfigitem(
-    b'merge-tools', b'.*', default=None, generic=True,
+    b'logtoprocess',
+    b'commandexception',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'commandfinish',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'command',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'develwarn',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'uiblocked',
+    default=None,
+)
+coreconfigitem(
+    b'merge',
+    b'checkunknown',
+    default=b'abort',
+)
+coreconfigitem(
+    b'merge',
+    b'checkignored',
+    default=b'abort',
+)
+coreconfigitem(
+    b'experimental',
+    b'merge.checkpathconflicts',
+    default=False,
+)
+coreconfigitem(
+    b'merge',
+    b'followcopies',
+    default=True,
+)
+coreconfigitem(
+    b'merge',
+    b'on-failure',
+    default=b'continue',
+)
+coreconfigitem(
+    b'merge',
+    b'preferancestor',
+    default=lambda: [b'*'],
+    experimental=True,
+)
+coreconfigitem(
+    b'merge',
+    b'strict-capability-check',
+    default=False,
+)
+coreconfigitem(
+    b'merge-tools',
+    b'.*',
+    default=None,
+    generic=True,
 )
 coreconfigitem(
     b'merge-tools',
@@ -941,10 +1436,18 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.binary$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'merge-tools',
+    br'.*\.check$',
+    default=list,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -961,10 +1464,18 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.fixeol$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'merge-tools',
+    br'.*\.gui$',
+    default=False,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -981,7 +1492,11 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.priority$',
+    default=0,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -991,100 +1506,168 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'pager', b'attend-.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'pager', b'ignore', default=list,
-)
-coreconfigitem(
-    b'pager', b'pager', default=dynamicdefault,
-)
-coreconfigitem(
-    b'patch', b'eol', default=b'strict',
-)
-coreconfigitem(
-    b'patch', b'fuzz', default=2,
-)
-coreconfigitem(
-    b'paths', b'default', default=None,
-)
-coreconfigitem(
-    b'paths', b'default-push', default=None,
-)
-coreconfigitem(
-    b'paths', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'phases', b'checksubrepos', default=b'follow',
-)
-coreconfigitem(
-    b'phases', b'new-commit', default=b'draft',
-)
-coreconfigitem(
-    b'phases', b'publish', default=True,
-)
-coreconfigitem(
-    b'profiling', b'enabled', default=False,
-)
-coreconfigitem(
-    b'profiling', b'format', default=b'text',
-)
-coreconfigitem(
-    b'profiling', b'freq', default=1000,
-)
-coreconfigitem(
-    b'profiling', b'limit', default=30,
-)
-coreconfigitem(
-    b'profiling', b'nested', default=0,
-)
-coreconfigitem(
-    b'profiling', b'output', default=None,
-)
-coreconfigitem(
-    b'profiling', b'showmax', default=0.999,
-)
-coreconfigitem(
-    b'profiling', b'showmin', default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling', b'showtime', default=True,
-)
-coreconfigitem(
-    b'profiling', b'sort', default=b'inlinetime',
-)
-coreconfigitem(
-    b'profiling', b'statformat', default=b'hotpath',
-)
-coreconfigitem(
-    b'profiling', b'time-track', default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling', b'type', default=b'stat',
-)
-coreconfigitem(
-    b'progress', b'assume-tty', default=False,
-)
-coreconfigitem(
-    b'progress', b'changedelay', default=1,
-)
-coreconfigitem(
-    b'progress', b'clear-complete', default=True,
-)
-coreconfigitem(
-    b'progress', b'debug', default=False,
-)
-coreconfigitem(
-    b'progress', b'delay', default=3,
-)
-coreconfigitem(
-    b'progress', b'disable', default=False,
-)
-coreconfigitem(
-    b'progress', b'estimateinterval', default=60.0,
+    b'merge-tools',
+    br'.*\.symlink$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'pager',
+    b'attend-.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'pager',
+    b'ignore',
+    default=list,
+)
+coreconfigitem(
+    b'pager',
+    b'pager',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'patch',
+    b'eol',
+    default=b'strict',
+)
+coreconfigitem(
+    b'patch',
+    b'fuzz',
+    default=2,
+)
+coreconfigitem(
+    b'paths',
+    b'default',
+    default=None,
+)
+coreconfigitem(
+    b'paths',
+    b'default-push',
+    default=None,
+)
+coreconfigitem(
+    b'paths',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'phases',
+    b'checksubrepos',
+    default=b'follow',
+)
+coreconfigitem(
+    b'phases',
+    b'new-commit',
+    default=b'draft',
+)
+coreconfigitem(
+    b'phases',
+    b'publish',
+    default=True,
+)
+coreconfigitem(
+    b'profiling',
+    b'enabled',
+    default=False,
+)
+coreconfigitem(
+    b'profiling',
+    b'format',
+    default=b'text',
+)
+coreconfigitem(
+    b'profiling',
+    b'freq',
+    default=1000,
+)
+coreconfigitem(
+    b'profiling',
+    b'limit',
+    default=30,
+)
+coreconfigitem(
+    b'profiling',
+    b'nested',
+    default=0,
+)
+coreconfigitem(
+    b'profiling',
+    b'output',
+    default=None,
+)
+coreconfigitem(
+    b'profiling',
+    b'showmax',
+    default=0.999,
+)
+coreconfigitem(
+    b'profiling',
+    b'showmin',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'profiling',
+    b'showtime',
+    default=True,
+)
+coreconfigitem(
+    b'profiling',
+    b'sort',
+    default=b'inlinetime',
+)
+coreconfigitem(
+    b'profiling',
+    b'statformat',
+    default=b'hotpath',
+)
+coreconfigitem(
+    b'profiling',
+    b'time-track',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'profiling',
+    b'type',
+    default=b'stat',
+)
+coreconfigitem(
+    b'progress',
+    b'assume-tty',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'changedelay',
+    default=1,
+)
+coreconfigitem(
+    b'progress',
+    b'clear-complete',
+    default=True,
+)
+coreconfigitem(
+    b'progress',
+    b'debug',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'delay',
+    default=3,
+)
+coreconfigitem(
+    b'progress',
+    b'disable',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'estimateinterval',
+    default=60.0,
 )
 coreconfigitem(
     b'progress',
@@ -1092,16 +1675,24 @@
     default=lambda: [b'topic', b'bar', b'number', b'estimate'],
 )
 coreconfigitem(
-    b'progress', b'refresh', default=0.1,
-)
-coreconfigitem(
-    b'progress', b'width', default=dynamicdefault,
-)
-coreconfigitem(
-    b'pull', b'confirm', default=False,
-)
-coreconfigitem(
-    b'push', b'pushvars.server', default=False,
+    b'progress',
+    b'refresh',
+    default=0.1,
+)
+coreconfigitem(
+    b'progress',
+    b'width',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'pull',
+    b'confirm',
+    default=False,
+)
+coreconfigitem(
+    b'push',
+    b'pushvars.server',
+    default=False,
 )
 coreconfigitem(
     b'rewrite',
@@ -1110,13 +1701,21 @@
     alias=[(b'ui', b'history-editing-backup')],
 )
 coreconfigitem(
-    b'rewrite', b'update-timestamp', default=False,
-)
-coreconfigitem(
-    b'rewrite', b'empty-successor', default=b'skip', experimental=True,
-)
-coreconfigitem(
-    b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True,
+    b'rewrite',
+    b'update-timestamp',
+    default=False,
+)
+coreconfigitem(
+    b'rewrite',
+    b'empty-successor',
+    default=b'skip',
+    experimental=True,
+)
+coreconfigitem(
+    b'storage',
+    b'new-repo-backend',
+    default=b'revlogv1',
+    experimental=True,
 )
 coreconfigitem(
     b'storage',
@@ -1133,37 +1732,59 @@
     b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True
 )
 coreconfigitem(
-    b'storage', b'revlog.reuse-external-delta', default=True,
-)
-coreconfigitem(
-    b'storage', b'revlog.reuse-external-delta-parent', default=None,
-)
-coreconfigitem(
-    b'storage', b'revlog.zlib.level', default=None,
-)
-coreconfigitem(
-    b'storage', b'revlog.zstd.level', default=None,
-)
-coreconfigitem(
-    b'server', b'bookmarks-pushkey-compat', default=True,
-)
-coreconfigitem(
-    b'server', b'bundle1', default=True,
-)
-coreconfigitem(
-    b'server', b'bundle1gd', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1.pull', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1gd.pull', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1.push', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1gd.push', default=None,
+    b'storage',
+    b'revlog.reuse-external-delta',
+    default=True,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.reuse-external-delta-parent',
+    default=None,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.zlib.level',
+    default=None,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.zstd.level',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bookmarks-pushkey-compat',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1.pull',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd.pull',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1.push',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd.push',
+    default=None,
 )
 coreconfigitem(
     b'server',
@@ -1172,73 +1793,120 @@
     alias=[(b'experimental', b'bundle2.stream')],
 )
 coreconfigitem(
-    b'server', b'compressionengines', default=list,
-)
-coreconfigitem(
-    b'server', b'concurrent-push-mode', default=b'check-related',
-)
-coreconfigitem(
-    b'server', b'disablefullbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'maxhttpheaderlen', default=1024,
-)
-coreconfigitem(
-    b'server', b'pullbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'preferuncompressed', default=False,
-)
-coreconfigitem(
-    b'server', b'streamunbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'uncompressed', default=True,
-)
-coreconfigitem(
-    b'server', b'uncompressedallowsecret', default=False,
-)
-coreconfigitem(
-    b'server', b'view', default=b'served',
-)
-coreconfigitem(
-    b'server', b'validate', default=False,
-)
-coreconfigitem(
-    b'server', b'zliblevel', default=-1,
-)
-coreconfigitem(
-    b'server', b'zstdlevel', default=3,
-)
-coreconfigitem(
-    b'share', b'pool', default=None,
-)
-coreconfigitem(
-    b'share', b'poolnaming', default=b'identity',
-)
-coreconfigitem(
-    b'shelve', b'maxbackups', default=10,
-)
-coreconfigitem(
-    b'smtp', b'host', default=None,
-)
-coreconfigitem(
-    b'smtp', b'local_hostname', default=None,
-)
-coreconfigitem(
-    b'smtp', b'password', default=None,
-)
-coreconfigitem(
-    b'smtp', b'port', default=dynamicdefault,
-)
-coreconfigitem(
-    b'smtp', b'tls', default=b'none',
-)
-coreconfigitem(
-    b'smtp', b'username', default=None,
-)
-coreconfigitem(
-    b'sparse', b'missingwarning', default=True, experimental=True,
+    b'server',
+    b'compressionengines',
+    default=list,
+)
+coreconfigitem(
+    b'server',
+    b'concurrent-push-mode',
+    default=b'check-related',
+)
+coreconfigitem(
+    b'server',
+    b'disablefullbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'maxhttpheaderlen',
+    default=1024,
+)
+coreconfigitem(
+    b'server',
+    b'pullbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'preferuncompressed',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'streamunbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'uncompressed',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'uncompressedallowsecret',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'view',
+    default=b'served',
+)
+coreconfigitem(
+    b'server',
+    b'validate',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'zliblevel',
+    default=-1,
+)
+coreconfigitem(
+    b'server',
+    b'zstdlevel',
+    default=3,
+)
+coreconfigitem(
+    b'share',
+    b'pool',
+    default=None,
+)
+coreconfigitem(
+    b'share',
+    b'poolnaming',
+    default=b'identity',
+)
+coreconfigitem(
+    b'shelve',
+    b'maxbackups',
+    default=10,
+)
+coreconfigitem(
+    b'smtp',
+    b'host',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'local_hostname',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'password',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'port',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'smtp',
+    b'tls',
+    default=b'none',
+)
+coreconfigitem(
+    b'smtp',
+    b'username',
+    default=None,
+)
+coreconfigitem(
+    b'sparse',
+    b'missingwarning',
+    default=True,
+    experimental=True,
 )
 coreconfigitem(
     b'subrepos',
@@ -1246,367 +1914,612 @@
     default=dynamicdefault,  # to make backporting simpler
 )
 coreconfigitem(
-    b'subrepos', b'hg:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos', b'git:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos', b'svn:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'templates', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'templateconfig', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'trusted', b'groups', default=list,
-)
-coreconfigitem(
-    b'trusted', b'users', default=list,
-)
-coreconfigitem(
-    b'ui', b'_usedassubrepo', default=False,
-)
-coreconfigitem(
-    b'ui', b'allowemptycommit', default=False,
-)
-coreconfigitem(
-    b'ui', b'archivemeta', default=True,
-)
-coreconfigitem(
-    b'ui', b'askusername', default=False,
-)
-coreconfigitem(
-    b'ui', b'available-memory', default=None,
+    b'subrepos',
+    b'hg:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'subrepos',
+    b'git:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'subrepos',
+    b'svn:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'templates',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'templateconfig',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'trusted',
+    b'groups',
+    default=list,
+)
+coreconfigitem(
+    b'trusted',
+    b'users',
+    default=list,
+)
+coreconfigitem(
+    b'ui',
+    b'_usedassubrepo',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'allowemptycommit',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'archivemeta',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'askusername',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'available-memory',
+    default=None,
 )
 
 coreconfigitem(
-    b'ui', b'clonebundlefallback', default=False,
-)
-coreconfigitem(
-    b'ui', b'clonebundleprefers', default=list,
-)
-coreconfigitem(
-    b'ui', b'clonebundles', default=True,
-)
-coreconfigitem(
-    b'ui', b'color', default=b'auto',
-)
-coreconfigitem(
-    b'ui', b'commitsubrepos', default=False,
-)
-coreconfigitem(
-    b'ui', b'debug', default=False,
-)
-coreconfigitem(
-    b'ui', b'debugger', default=None,
-)
-coreconfigitem(
-    b'ui', b'editor', default=dynamicdefault,
-)
-coreconfigitem(
-    b'ui', b'detailed-exit-code', default=False, experimental=True,
-)
-coreconfigitem(
-    b'ui', b'fallbackencoding', default=None,
-)
-coreconfigitem(
-    b'ui', b'forcecwd', default=None,
-)
-coreconfigitem(
-    b'ui', b'forcemerge', default=None,
-)
-coreconfigitem(
-    b'ui', b'formatdebug', default=False,
-)
-coreconfigitem(
-    b'ui', b'formatjson', default=False,
-)
-coreconfigitem(
-    b'ui', b'formatted', default=None,
-)
-coreconfigitem(
-    b'ui', b'interactive', default=None,
-)
-coreconfigitem(
-    b'ui', b'interface', default=None,
-)
-coreconfigitem(
-    b'ui', b'interface.chunkselector', default=None,
-)
-coreconfigitem(
-    b'ui', b'large-file-limit', default=10000000,
-)
-coreconfigitem(
-    b'ui', b'logblockedtimes', default=False,
-)
-coreconfigitem(
-    b'ui', b'merge', default=None,
-)
-coreconfigitem(
-    b'ui', b'mergemarkers', default=b'basic',
-)
-coreconfigitem(
-    b'ui', b'message-output', default=b'stdio',
-)
-coreconfigitem(
-    b'ui', b'nontty', default=False,
-)
-coreconfigitem(
-    b'ui', b'origbackuppath', default=None,
-)
-coreconfigitem(
-    b'ui', b'paginate', default=True,
-)
-coreconfigitem(
-    b'ui', b'patch', default=None,
-)
-coreconfigitem(
-    b'ui', b'portablefilenames', default=b'warn',
-)
-coreconfigitem(
-    b'ui', b'promptecho', default=False,
-)
-coreconfigitem(
-    b'ui', b'quiet', default=False,
-)
-coreconfigitem(
-    b'ui', b'quietbookmarkmove', default=False,
-)
-coreconfigitem(
-    b'ui', b'relative-paths', default=b'legacy',
-)
-coreconfigitem(
-    b'ui', b'remotecmd', default=b'hg',
-)
-coreconfigitem(
-    b'ui', b'report_untrusted', default=True,
-)
-coreconfigitem(
-    b'ui', b'rollback', default=True,
-)
-coreconfigitem(
-    b'ui', b'signal-safe-lock', default=True,
-)
-coreconfigitem(
-    b'ui', b'slash', default=False,
-)
-coreconfigitem(
-    b'ui', b'ssh', default=b'ssh',
-)
-coreconfigitem(
-    b'ui', b'ssherrorhint', default=None,
-)
-coreconfigitem(
-    b'ui', b'statuscopies', default=False,
-)
-coreconfigitem(
-    b'ui', b'strict', default=False,
-)
-coreconfigitem(
-    b'ui', b'style', default=b'',
-)
-coreconfigitem(
-    b'ui', b'supportcontact', default=None,
-)
-coreconfigitem(
-    b'ui', b'textwidth', default=78,
-)
-coreconfigitem(
-    b'ui', b'timeout', default=b'600',
-)
-coreconfigitem(
-    b'ui', b'timeout.warn', default=0,
-)
-coreconfigitem(
-    b'ui', b'timestamp-output', default=False,
-)
-coreconfigitem(
-    b'ui', b'traceback', default=False,
-)
-coreconfigitem(
-    b'ui', b'tweakdefaults', default=False,
+    b'ui',
+    b'clonebundlefallback',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'clonebundleprefers',
+    default=list,
+)
+coreconfigitem(
+    b'ui',
+    b'clonebundles',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'color',
+    default=b'auto',
+)
+coreconfigitem(
+    b'ui',
+    b'commitsubrepos',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'debug',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'debugger',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'editor',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'ui',
+    b'detailed-exit-code',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'ui',
+    b'fallbackencoding',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'forcecwd',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'forcemerge',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'formatdebug',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'formatjson',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'formatted',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interactive',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interface',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interface.chunkselector',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'large-file-limit',
+    default=10000000,
+)
+coreconfigitem(
+    b'ui',
+    b'logblockedtimes',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'merge',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'mergemarkers',
+    default=b'basic',
+)
+coreconfigitem(
+    b'ui',
+    b'message-output',
+    default=b'stdio',
+)
+coreconfigitem(
+    b'ui',
+    b'nontty',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'origbackuppath',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'paginate',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'patch',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'portablefilenames',
+    default=b'warn',
+)
+coreconfigitem(
+    b'ui',
+    b'promptecho',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'quiet',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'quietbookmarkmove',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'relative-paths',
+    default=b'legacy',
+)
+coreconfigitem(
+    b'ui',
+    b'remotecmd',
+    default=b'hg',
+)
+coreconfigitem(
+    b'ui',
+    b'report_untrusted',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'rollback',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'signal-safe-lock',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'slash',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'ssh',
+    default=b'ssh',
+)
+coreconfigitem(
+    b'ui',
+    b'ssherrorhint',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'statuscopies',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'strict',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'style',
+    default=b'',
+)
+coreconfigitem(
+    b'ui',
+    b'supportcontact',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'textwidth',
+    default=78,
+)
+coreconfigitem(
+    b'ui',
+    b'timeout',
+    default=b'600',
+)
+coreconfigitem(
+    b'ui',
+    b'timeout.warn',
+    default=0,
+)
+coreconfigitem(
+    b'ui',
+    b'timestamp-output',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'traceback',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'tweakdefaults',
+    default=False,
 )
 coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
 coreconfigitem(
-    b'ui', b'verbose', default=False,
-)
-coreconfigitem(
-    b'verify', b'skipflags', default=None,
-)
-coreconfigitem(
-    b'web', b'allowbz2', default=False,
-)
-coreconfigitem(
-    b'web', b'allowgz', default=False,
-)
-coreconfigitem(
-    b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True,
-)
-coreconfigitem(
-    b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list,
-)
-coreconfigitem(
-    b'web', b'allowzip', default=False,
-)
-coreconfigitem(
-    b'web', b'archivesubrepos', default=False,
-)
-coreconfigitem(
-    b'web', b'cache', default=True,
-)
-coreconfigitem(
-    b'web', b'comparisoncontext', default=5,
-)
-coreconfigitem(
-    b'web', b'contact', default=None,
-)
-coreconfigitem(
-    b'web', b'deny_push', default=list,
-)
-coreconfigitem(
-    b'web', b'guessmime', default=False,
-)
-coreconfigitem(
-    b'web', b'hidden', default=False,
-)
-coreconfigitem(
-    b'web', b'labels', default=list,
-)
-coreconfigitem(
-    b'web', b'logoimg', default=b'hglogo.png',
-)
-coreconfigitem(
-    b'web', b'logourl', default=b'https://mercurial-scm.org/',
-)
-coreconfigitem(
-    b'web', b'accesslog', default=b'-',
-)
-coreconfigitem(
-    b'web', b'address', default=b'',
-)
-coreconfigitem(
-    b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
-)
-coreconfigitem(
-    b'web', b'allow_read', default=list,
-)
-coreconfigitem(
-    b'web', b'baseurl', default=None,
-)
-coreconfigitem(
-    b'web', b'cacerts', default=None,
-)
-coreconfigitem(
-    b'web', b'certificate', default=None,
-)
-coreconfigitem(
-    b'web', b'collapse', default=False,
-)
-coreconfigitem(
-    b'web', b'csp', default=None,
-)
-coreconfigitem(
-    b'web', b'deny_read', default=list,
-)
-coreconfigitem(
-    b'web', b'descend', default=True,
-)
-coreconfigitem(
-    b'web', b'description', default=b"",
-)
-coreconfigitem(
-    b'web', b'encoding', default=lambda: encoding.encoding,
-)
-coreconfigitem(
-    b'web', b'errorlog', default=b'-',
-)
-coreconfigitem(
-    b'web', b'ipv6', default=False,
-)
-coreconfigitem(
-    b'web', b'maxchanges', default=10,
-)
-coreconfigitem(
-    b'web', b'maxfiles', default=10,
-)
-coreconfigitem(
-    b'web', b'maxshortchanges', default=60,
-)
-coreconfigitem(
-    b'web', b'motd', default=b'',
-)
-coreconfigitem(
-    b'web', b'name', default=dynamicdefault,
-)
-coreconfigitem(
-    b'web', b'port', default=8000,
-)
-coreconfigitem(
-    b'web', b'prefix', default=b'',
-)
-coreconfigitem(
-    b'web', b'push_ssl', default=True,
-)
-coreconfigitem(
-    b'web', b'refreshinterval', default=20,
-)
-coreconfigitem(
-    b'web', b'server-header', default=None,
-)
-coreconfigitem(
-    b'web', b'static', default=None,
-)
-coreconfigitem(
-    b'web', b'staticurl', default=None,
-)
-coreconfigitem(
-    b'web', b'stripes', default=1,
-)
-coreconfigitem(
-    b'web', b'style', default=b'paper',
-)
-coreconfigitem(
-    b'web', b'templates', default=None,
-)
-coreconfigitem(
-    b'web', b'view', default=b'served', experimental=True,
-)
-coreconfigitem(
-    b'worker', b'backgroundclose', default=dynamicdefault,
+    b'ui',
+    b'verbose',
+    default=False,
+)
+coreconfigitem(
+    b'verify',
+    b'skipflags',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'allowbz2',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'allowgz',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'allow-pull',
+    alias=[(b'web', b'allowpull')],
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'allow-push',
+    alias=[(b'web', b'allow_push')],
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'allowzip',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'archivesubrepos',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'cache',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'comparisoncontext',
+    default=5,
+)
+coreconfigitem(
+    b'web',
+    b'contact',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'deny_push',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'guessmime',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'hidden',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'labels',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'logoimg',
+    default=b'hglogo.png',
+)
+coreconfigitem(
+    b'web',
+    b'logourl',
+    default=b'https://mercurial-scm.org/',
+)
+coreconfigitem(
+    b'web',
+    b'accesslog',
+    default=b'-',
+)
+coreconfigitem(
+    b'web',
+    b'address',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'allow-archive',
+    alias=[(b'web', b'allow_archive')],
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'allow_read',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'baseurl',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'cacerts',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'certificate',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'collapse',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'csp',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'deny_read',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'descend',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'description',
+    default=b"",
+)
+coreconfigitem(
+    b'web',
+    b'encoding',
+    default=lambda: encoding.encoding,
+)
+coreconfigitem(
+    b'web',
+    b'errorlog',
+    default=b'-',
+)
+coreconfigitem(
+    b'web',
+    b'ipv6',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'maxchanges',
+    default=10,
+)
+coreconfigitem(
+    b'web',
+    b'maxfiles',
+    default=10,
+)
+coreconfigitem(
+    b'web',
+    b'maxshortchanges',
+    default=60,
+)
+coreconfigitem(
+    b'web',
+    b'motd',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'name',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'web',
+    b'port',
+    default=8000,
+)
+coreconfigitem(
+    b'web',
+    b'prefix',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'push_ssl',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'refreshinterval',
+    default=20,
+)
+coreconfigitem(
+    b'web',
+    b'server-header',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'static',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'staticurl',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'stripes',
+    default=1,
+)
+coreconfigitem(
+    b'web',
+    b'style',
+    default=b'paper',
+)
+coreconfigitem(
+    b'web',
+    b'templates',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'view',
+    default=b'served',
+    experimental=True,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundclose',
+    default=dynamicdefault,
 )
 # Windows defaults to a limit of 512 open files. A buffer of 128
 # should give us enough headway.
 coreconfigitem(
-    b'worker', b'backgroundclosemaxqueue', default=384,
-)
-coreconfigitem(
-    b'worker', b'backgroundcloseminfilecount', default=2048,
-)
-coreconfigitem(
-    b'worker', b'backgroundclosethreadcount', default=4,
-)
-coreconfigitem(
-    b'worker', b'enabled', default=True,
-)
-coreconfigitem(
-    b'worker', b'numcpus', default=None,
+    b'worker',
+    b'backgroundclosemaxqueue',
+    default=384,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundcloseminfilecount',
+    default=2048,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundclosethreadcount',
+    default=4,
+)
+coreconfigitem(
+    b'worker',
+    b'enabled',
+    default=True,
+)
+coreconfigitem(
+    b'worker',
+    b'numcpus',
+    default=None,
 )
 
 # Rebase related configuration moved to core because other extensions are doing
 # strange things. For example, shelve imports the extension to reuse some bits
 # without formally loading it.
 coreconfigitem(
-    b'commands', b'rebase.requiredest', default=False,
-)
-coreconfigitem(
-    b'experimental', b'rebaseskipobsolete', default=True,
-)
-coreconfigitem(
-    b'rebase', b'singletransaction', default=False,
-)
-coreconfigitem(
-    b'rebase', b'experimental.inmemory', default=False,
-)
+    b'commands',
+    b'rebase.requiredest',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'rebaseskipobsolete',
+    default=True,
+)
+coreconfigitem(
+    b'rebase',
+    b'singletransaction',
+    default=False,
+)
+coreconfigitem(
+    b'rebase',
+    b'experimental.inmemory',
+    default=False,
+)
--- a/mercurial/context.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/context.py	Fri Nov 27 17:03:29 2020 -0500
@@ -316,9 +316,9 @@
         return subrepo.nullsubrepo(self, path, pctx)
 
     def workingsub(self, path):
-        '''return a subrepo for the stored revision, or wdir if this is a wdir
+        """return a subrepo for the stored revision, or wdir if this is a wdir
         context.
-        '''
+        """
         return subrepo.subrepo(self, path, allowwdir=True)
 
     def match(
@@ -1054,8 +1054,7 @@
         return lkr
 
     def isintroducedafter(self, changelogrev):
-        """True if a filectx has been introduced after a given floor revision
-        """
+        """True if a filectx has been introduced after a given floor revision"""
         if self.linkrev() >= changelogrev:
             return True
         introrev = self._introrev(stoprev=changelogrev)
@@ -1232,7 +1231,7 @@
 
 class filectx(basefilectx):
     """A filecontext object makes access to data related to a particular
-       filerevision convenient."""
+    filerevision convenient."""
 
     def __init__(
         self,
@@ -1244,15 +1243,16 @@
         changectx=None,
     ):
         """changeid must be a revision number, if specified.
-           fileid can be a file revision or node."""
+        fileid can be a file revision or node."""
         self._repo = repo
         self._path = path
 
         assert (
             changeid is not None or fileid is not None or changectx is not None
-        ), (
-            b"bad args: changeid=%r, fileid=%r, changectx=%r"
-            % (changeid, fileid, changectx,)
+        ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
+            changeid,
+            fileid,
+            changectx,
         )
 
         if filelog is not None:
@@ -1289,8 +1289,8 @@
             return self._repo.unfiltered()[self._changeid]
 
     def filectx(self, fileid, changeid=None):
-        '''opens an arbitrary revision of the file without
-        opening a new filelog'''
+        """opens an arbitrary revision of the file without
+        opening a new filelog"""
         return filectx(
             self._repo,
             self._path,
@@ -2101,7 +2101,7 @@
 
 class workingfilectx(committablefilectx):
     """A workingfilectx object makes access to data related to a particular
-       file in the working directory convenient."""
+    file in the working directory convenient."""
 
     def __init__(self, repo, path, filelog=None, workingctx=None):
         super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
@@ -2702,8 +2702,7 @@
 
     @propertycache
     def _changedset(self):
-        """Return the set of files changed in this context
-        """
+        """Return the set of files changed in this context"""
         changed = set(self._status.modified)
         changed.update(self._status.added)
         changed.update(self._status.removed)
@@ -2877,8 +2876,7 @@
 
     @propertycache
     def _status(self):
-        """Calculate exact status from ``files`` specified at construction
-        """
+        """Calculate exact status from ``files`` specified at construction"""
         man1 = self.p1().manifest()
         p2 = self._parents[1]
         # "1 < len(self._parents)" can't be used for checking
--- a/mercurial/copies.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/copies.py	Fri Nov 27 17:03:29 2020 -0500
@@ -702,7 +702,7 @@
 
 
 def _isfullcopytraceable(repo, c1, base):
-    """ Checks that if base, source and destination are all no-public branches,
+    """Checks that if base, source and destination are all no-public branches,
     if yes let's use the full copytrace algorithm for increased capabilities
     since it will be fast enough.
 
@@ -770,14 +770,16 @@
         self.movewithdir = {} if movewithdir is None else movewithdir
 
     def __repr__(self):
-        return (
-            '<branch_copies\n  copy=%r\n  renamedelete=%r\n  dirmove=%r\n  movewithdir=%r\n>'
-            % (self.copy, self.renamedelete, self.dirmove, self.movewithdir,)
+        return '<branch_copies\n  copy=%r\n  renamedelete=%r\n  dirmove=%r\n  movewithdir=%r\n>' % (
+            self.copy,
+            self.renamedelete,
+            self.dirmove,
+            self.movewithdir,
         )
 
 
 def _fullcopytracing(repo, c1, c2, base):
-    """ The full copytracing algorithm which finds all the new files that were
+    """The full copytracing algorithm which finds all the new files that were
     added from merge base up to the top commit and for each file it checks if
     this file was copied from another file.
 
@@ -967,7 +969,7 @@
 
 
 def _heuristicscopytracing(repo, c1, c2, base):
-    """ Fast copytracing using filename heuristics
+    """Fast copytracing using filename heuristics
 
     Assumes that moves or renames are of following two types:
 
--- a/mercurial/crecord.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/crecord.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1000,7 +1000,7 @@
 
     def toggleallbetween(self):
         """toggle applied on or off for all items in range [lastapplied,
-        current]. """
+        current]."""
         if (
             not self.lastapplieditem
             or self.currentselecteditem == self.lastapplieditem
--- a/mercurial/dagop.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/dagop.py	Fri Nov 27 17:03:29 2020 -0500
@@ -682,7 +682,7 @@
 
 
 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
-    r'''
+    r"""
     Given parent and child fctxes and annotate data for parents, for all lines
     in either parent that match the child, annotate the child with the parent's
     data.
@@ -691,7 +691,7 @@
     annotate data as well such that child is never blamed for any lines.
 
     See test-annotate.py for unit tests.
-    '''
+    """
     pblocks = [
         (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
         for parent in parents
--- a/mercurial/dagparser.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/dagparser.py	Fri Nov 27 17:03:29 2020 -0500
@@ -425,7 +425,7 @@
     usedots=False,
     maxlinewidth=70,
 ):
-    '''generates lines of a textual representation for a dag event stream
+    """generates lines of a textual representation for a dag event stream
 
     events should generate what parsedag() does, so:
 
@@ -501,7 +501,7 @@
         >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
         '+1 :f +1 :p2 *f */p2'
 
-    '''
+    """
     return b"\n".join(
         dagtextlines(
             dag,
--- a/mercurial/debugcommands.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/debugcommands.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1062,11 +1062,14 @@
 
 
 @command(
-    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
+    b'debugdownload',
+    [
+        (b'o', b'output', b'', _(b'path')),
+    ],
+    optionalrepo=True,
 )
 def debugdownload(ui, repo, url, output=None, **opts):
-    """download a resource using Mercurial logic and config
-    """
+    """download a resource using Mercurial logic and config"""
     fh = urlmod.open(ui, url, output)
 
     dest = ui
@@ -1510,10 +1513,10 @@
 
 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
 def debuginstall(ui, **opts):
-    '''test Mercurial installation
+    """test Mercurial installation
 
     Returns 0 on success.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     problems = 0
@@ -2173,8 +2176,7 @@
     ],
 )
 def debugnodemap(ui, repo, **opts):
-    """write and inspect on disk nodemap
-    """
+    """write and inspect on disk nodemap"""
     if opts['dump_new']:
         unfi = repo.unfiltered()
         cl = unfi.changelog
@@ -2402,13 +2404,13 @@
     _(b'FILESPEC...'),
 )
 def debugpathcomplete(ui, repo, *specs, **opts):
-    '''complete part or all of a tracked path
+    """complete part or all of a tracked path
 
     This command supports shells that offer path name completion. It
     currently completes only files already known to the dirstate.
 
     Completion extends only to the next path segment unless
-    --full is specified, in which case entire paths are used.'''
+    --full is specified, in which case entire paths are used."""
 
     def complete(path, acceptable):
         dirstate = repo.dirstate
@@ -2587,13 +2589,13 @@
 
 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
-    '''access the pushkey key/value protocol
+    """access the pushkey key/value protocol
 
     With two args, list the keys in the given namespace.
 
     With five args, set a key to new if it currently is set to old.
     Reports success or failure.
-    '''
+    """
 
     target = hg.peer(ui, {}, repopath)
     if keyinfo:
@@ -3432,7 +3434,7 @@
 
 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
 def debugssl(ui, repo, source=None, **opts):
-    '''test a secure connection to a server
+    """test a secure connection to a server
 
     This builds the certificate chain for the server on Windows, installing the
     missing intermediates and trusted root via Windows Update if necessary.  It
@@ -3443,7 +3445,7 @@
 
     If the update succeeds, retry the original operation.  Otherwise, the cause
     of the SSL error is likely another issue.
-    '''
+    """
     if not pycompat.iswindows:
         raise error.Abort(
             _(b'certificate chain building is only possible on Windows')
@@ -3785,7 +3787,9 @@
 
 @command(
     b'debuguigetpass',
-    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    [
+        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
+    ],
     _(b'[-p TEXT]'),
     norepo=True,
 )
@@ -3801,7 +3805,9 @@
 
 @command(
     b'debuguiprompt',
-    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    [
+        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
+    ],
     _(b'[-p TEXT]'),
     norepo=True,
 )
@@ -4314,7 +4320,10 @@
                 {
                     'loggingfh': ui,
                     'loggingname': b's',
-                    'loggingopts': {'logdata': True, 'logdataapis': False,},
+                    'loggingopts': {
+                        'logdata': True,
+                        'logdataapis': False,
+                    },
                 }
             )
 
--- a/mercurial/diffutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/diffutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -43,14 +43,14 @@
     formatchanging=False,
     configprefix=b'',
 ):
-    '''return diffopts with only opted-in features parsed
+    """return diffopts with only opted-in features parsed
 
     Features:
     - git: git-style diffs
     - whitespace: whitespace options like ignoreblanklines and ignorews
     - formatchanging: options that will likely break or cause correctness issues
       with most diff parsers
-    '''
+    """
 
     def get(key, name=None, getter=ui.configbool, forceplain=None):
         if opts:
--- a/mercurial/dirstate.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/dirstate.py	Fri Nov 27 17:03:29 2020 -0500
@@ -74,12 +74,12 @@
 @interfaceutil.implementer(intdirstate.idirstate)
 class dirstate(object):
     def __init__(self, opener, ui, root, validate, sparsematchfn):
-        '''Create a new dirstate object.
+        """Create a new dirstate object.
 
         opener is an open()-like callable that can be used to open the
         dirstate file; root is the root of the directory tracked by
         the dirstate.
-        '''
+        """
         self._opener = opener
         self._validate = validate
         self._root = root
@@ -112,12 +112,12 @@
 
     @contextlib.contextmanager
     def parentchange(self):
-        '''Context manager for handling dirstate parents.
+        """Context manager for handling dirstate parents.
 
         If an exception occurs in the scope of the context manager,
         the incoherent dirstate won't be written when wlock is
         released.
-        '''
+        """
         self._parentwriters += 1
         yield
         # Typically we want the "undo" step of a context manager in a
@@ -128,9 +128,9 @@
         self._parentwriters -= 1
 
     def pendingparentchange(self):
-        '''Returns true if the dirstate is in the middle of a set of changes
+        """Returns true if the dirstate is in the middle of a set of changes
         that modify the dirstate parent.
-        '''
+        """
         return self._parentwriters > 0
 
     @propertycache
@@ -247,12 +247,12 @@
         return encoding.getcwd()
 
     def getcwd(self):
-        '''Return the path from which a canonical path is calculated.
+        """Return the path from which a canonical path is calculated.
 
         This path should be used to resolve file patterns or to convert
         canonical paths back to file paths for display. It shouldn't be
         used to get real file paths. Use vfs functions instead.
-        '''
+        """
         cwd = self._cwd
         if cwd == self._root:
             return b''
@@ -275,7 +275,7 @@
         return path
 
     def __getitem__(self, key):
-        '''Return the current state of key (a filename) in the dirstate.
+        """Return the current state of key (a filename) in the dirstate.
 
         States are:
           n  normal
@@ -283,7 +283,7 @@
           r  marked for removal
           a  marked for addition
           ?  not tracked
-        '''
+        """
         return self._map.get(key, (b"?",))[0]
 
     def __contains__(self, key):
@@ -370,11 +370,11 @@
             raise
 
     def invalidate(self):
-        '''Causes the next access to reread the dirstate.
+        """Causes the next access to reread the dirstate.
 
         This is different from localrepo.invalidatedirstate() because it always
         rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
-        check whether the dirstate has changed before rereading it.'''
+        check whether the dirstate has changed before rereading it."""
 
         for a in ("_map", "_branch", "_ignore"):
             if a in self.__dict__:
@@ -426,7 +426,7 @@
         self._map.addfile(f, oldstate, state, mode, size, mtime)
 
     def normal(self, f, parentfiledata=None):
-        '''Mark a file normal and clean.
+        """Mark a file normal and clean.
 
         parentfiledata: (mode, size, mtime) of the clean file
 
@@ -434,7 +434,7 @@
         size), as or close as possible from the point where we
         determined the file was clean, to limit the risk of the
         file having been changed by an external process between the
-        moment where the file was determined to be clean and now.'''
+        moment where the file was determined to be clean and now."""
         if parentfiledata:
             (mode, size, mtime) = parentfiledata
         else:
@@ -581,7 +581,7 @@
         return folded
 
     def normalize(self, path, isknown=False, ignoremissing=False):
-        '''
+        """
         normalize the case of a pathname when on a casefolding filesystem
 
         isknown specifies whether the filename came from walking the
@@ -596,7 +596,7 @@
         - version of name already stored in the dirstate
         - version of name stored on disk
         - version provided via command arguments
-        '''
+        """
 
         if self._checkcase:
             return self._normalize(path, isknown, ignoremissing)
@@ -643,11 +643,11 @@
         self._dirty = True
 
     def identity(self):
-        '''Return identity of dirstate itself to detect changing in storage
+        """Return identity of dirstate itself to detect changing in storage
 
         If identity of previous dirstate is equal to this, writing
         changes based on the former dirstate out can keep consistency.
-        '''
+        """
         return self._map.identity
 
     def write(self, tr):
@@ -769,14 +769,14 @@
         return (None, -1, b"")
 
     def _walkexplicit(self, match, subrepos):
-        '''Get stat data about the files explicitly specified by match.
+        """Get stat data about the files explicitly specified by match.
 
         Return a triple (results, dirsfound, dirsnotfound).
         - results is a mapping from filename to stat result. It also contains
           listings mapping subrepos and .hg to None.
         - dirsfound is a list of files found to be directories.
         - dirsnotfound is a list of files that the dirstate thinks are
-          directories and that were not found.'''
+          directories and that were not found."""
 
         def badtype(mode):
             kind = _(b'unknown')
@@ -904,7 +904,7 @@
         return results, dirsfound, dirsnotfound
 
     def walk(self, match, subrepos, unknown, ignored, full=True):
-        '''
+        """
         Walk recursively through the directory tree, finding all files
         matched by match.
 
@@ -913,7 +913,7 @@
         Return a dict mapping filename to stat-like object (either
         mercurial.osutil.stat instance or return value of os.stat()).
 
-        '''
+        """
         # full is a flag that extensions that hook into walk can use -- this
         # implementation doesn't use it at all. This satisfies the contract
         # because we only guarantee a "maybe".
@@ -1168,7 +1168,7 @@
         return (lookup, status)
 
     def status(self, match, subrepos, ignored, clean, unknown):
-        '''Determine the status of the working copy relative to the
+        """Determine the status of the working copy relative to the
         dirstate and return a pair of (unsure, status), where status is of type
         scmutil.status and:
 
@@ -1182,7 +1182,7 @@
           status.clean:
             files that have definitely not been modified since the
             dirstate was written
-        '''
+        """
         listignored, listclean, listunknown = ignored, clean, unknown
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []
@@ -1305,9 +1305,9 @@
         return (lookup, status)
 
     def matches(self, match):
-        '''
+        """
         return files in the dirstate (in whatever state) filtered by match
-        '''
+        """
         dmap = self._map
         if rustmod is not None:
             dmap = self._map._rustmap
--- a/mercurial/dirstateguard.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/dirstateguard.py	Fri Nov 27 17:03:29 2020 -0500
@@ -17,7 +17,7 @@
 
 
 class dirstateguard(util.transactional):
-    '''Restore dirstate at unexpected failure.
+    """Restore dirstate at unexpected failure.
 
     At the construction, this class does:
 
@@ -28,7 +28,7 @@
     is invoked before ``close()``.
 
     This just removes the backup file at ``close()`` before ``release()``.
-    '''
+    """
 
     def __init__(self, repo, name):
         self._repo = repo
--- a/mercurial/discovery.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/discovery.py	Fri Nov 27 17:03:29 2020 -0500
@@ -75,7 +75,7 @@
 
 
 class outgoing(object):
-    '''Represents the result of a findcommonoutgoing() call.
+    """Represents the result of a findcommonoutgoing() call.
 
     Members:
 
@@ -94,7 +94,7 @@
       remotely.
 
     Some members are computed on demand from the heads, unless provided upfront
-    by discovery.'''
+    by discovery."""
 
     def __init__(
         self, repo, commonheads=None, ancestorsof=None, missingroots=None
@@ -157,7 +157,7 @@
 def findcommonoutgoing(
     repo, other, onlyheads=None, force=False, commoninc=None, portable=False
 ):
-    '''Return an outgoing instance to identify the nodes present in repo but
+    """Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
     If onlyheads is given, only nodes ancestral to nodes in onlyheads
@@ -168,7 +168,7 @@
     findcommonincoming(repo, other, force) to avoid recomputing it here.
 
     If portable is given, compute more conservative common and ancestorsof,
-    to make bundles created from the instance more portable.'''
+    to make bundles created from the instance more portable."""
     # declare an empty outgoing object to be filled later
     og = outgoing(repo, None, None)
 
@@ -332,7 +332,10 @@
 
     with remote.commandexecutor() as e:
         remotebookmarks = e.callcommand(
-            b'listkeys', {b'namespace': b'bookmarks',}
+            b'listkeys',
+            {
+                b'namespace': b'bookmarks',
+            },
         ).result()
 
     bookmarkedheads = set()
@@ -470,7 +473,10 @@
                 if branch not in (b'default', None):
                     errormsg = _(
                         b"push creates new remote head %s on branch '%s'"
-                    ) % (short(dhs[0]), branch,)
+                    ) % (
+                        short(dhs[0]),
+                        branch,
+                    )
                 elif repo[dhs[0]].bookmarks():
                     errormsg = _(
                         b"push creates new remote head %s "
--- a/mercurial/dispatch.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/dispatch.py	Fri Nov 27 17:03:29 2020 -0500
@@ -519,10 +519,10 @@
 
 
 def aliasinterpolate(name, args, cmd):
-    '''interpolate args into cmd for shell aliases
+    """interpolate args into cmd for shell aliases
 
     This also handles $0, $@ and "$@".
-    '''
+    """
     # util.interpolate can't deal with "$@" (with quotes) because it's only
     # built to match prefix + patterns.
     replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
@@ -630,12 +630,18 @@
         except error.UnknownCommand:
             self.badalias = _(
                 b"alias '%s' resolves to unknown command '%s'"
-            ) % (self.name, cmd,)
+            ) % (
+                self.name,
+                cmd,
+            )
             self.unknowncmd = True
         except error.AmbiguousCommand:
             self.badalias = _(
                 b"alias '%s' resolves to ambiguous command '%s'"
-            ) % (self.name, cmd,)
+            ) % (
+                self.name,
+                cmd,
+            )
 
     def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
         # confine strings to be passed to i18n.gettext()
--- a/mercurial/encoding.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/encoding.py	Fri Nov 27 17:03:29 2020 -0500
@@ -113,8 +113,8 @@
 
 
 class localstr(bytes):
-    '''This class allows strings that are unmodified to be
-    round-tripped to the local encoding and back'''
+    """This class allows strings that are unmodified to be
+    round-tripped to the local encoding and back"""
 
     def __new__(cls, u, l):
         s = bytes.__new__(cls, l)
@@ -329,8 +329,8 @@
 
 def getcols(s, start, c):
     # type: (bytes, int, int) -> bytes
-    '''Use colwidth to find a c-column substring of s starting at byte
-    index start'''
+    """Use colwidth to find a c-column substring of s starting at byte
+    index start"""
     for x in pycompat.xrange(start + c, len(s)):
         t = s[start:x]
         if colwidth(t) == c:
@@ -487,7 +487,7 @@
 
 
 class normcasespecs(object):
-    '''what a platform's normcase does to ASCII strings
+    """what a platform's normcase does to ASCII strings
 
     This is specified per platform, and should be consistent with what normcase
     on that platform actually does.
@@ -496,7 +496,7 @@
     upper: normcase uppercases ASCII strings
     other: the fallback function should always be called
 
-    This should be kept in sync with normcase_spec in util.h.'''
+    This should be kept in sync with normcase_spec in util.h."""
 
     lower = -1
     upper = 1
@@ -505,7 +505,7 @@
 
 def jsonescape(s, paranoid=False):
     # type: (Any, Any) -> Any
-    '''returns a string suitable for JSON
+    """returns a string suitable for JSON
 
     JSON is problematic for us because it doesn't support non-Unicode
     bytes. To deal with this, we take the following approach:
@@ -547,7 +547,7 @@
     'non-BMP: \\\\ud834\\\\udd1e'
     >>> jsonescape(b'<foo@example.org>', paranoid=True)
     '\\\\u003cfoo@example.org\\\\u003e'
-    '''
+    """
 
     u8chars = toutf8b(s)
     try:
@@ -569,11 +569,11 @@
 
 def getutf8char(s, pos):
     # type: (bytes, int) -> bytes
-    '''get the next full utf-8 character in the given string, starting at pos
+    """get the next full utf-8 character in the given string, starting at pos
 
     Raises a UnicodeError if the given location does not start a valid
     utf-8 character.
-    '''
+    """
 
     # find how many bytes to attempt decoding from first nibble
     l = _utf8len[ord(s[pos : pos + 1]) >> 4]
@@ -588,7 +588,7 @@
 
 def toutf8b(s):
     # type: (bytes) -> bytes
-    '''convert a local, possibly-binary string into UTF-8b
+    """convert a local, possibly-binary string into UTF-8b
 
     This is intended as a generic method to preserve data when working
     with schemes like JSON and XML that have no provision for
@@ -616,7 +616,7 @@
     arbitrary bytes into an internal Unicode format that can be
     re-encoded back into the original. Here we are exposing the
     internal surrogate encoding as a UTF-8 string.)
-    '''
+    """
 
     if isinstance(s, localstr):
         # assume that the original UTF-8 sequence would never contain
@@ -657,7 +657,7 @@
 
 def fromutf8b(s):
     # type: (bytes) -> bytes
-    '''Given a UTF-8b string, return a local, possibly-binary string.
+    """Given a UTF-8b string, return a local, possibly-binary string.
 
     return the original binary string. This
     is a round-trip process for strings like filenames, but metadata
@@ -677,7 +677,7 @@
     True
     >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
     True
-    '''
+    """
 
     if isasciistr(s):
         return s
--- a/mercurial/error.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/error.py	Fri Nov 27 17:03:29 2020 -0500
@@ -394,8 +394,7 @@
 
 
 class UnknownVersion(Abort):
-    """generic exception for aborting from an encounter with an unknown version
-    """
+    """generic exception for aborting from an encounter with an unknown version"""
 
     def __init__(self, msg, hint=None, version=None):
         self.version = version
--- a/mercurial/exchange.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/exchange.py	Fri Nov 27 17:03:29 2020 -0500
@@ -378,14 +378,14 @@
     publish=False,
     opargs=None,
 ):
-    '''Push outgoing changesets (limited by revs) from a local
+    """Push outgoing changesets (limited by revs) from a local
     repository to remote. Return an integer:
       - None means nothing to push
       - 0 means HTTP error
       - 1 means we pushed and remote head count is unchanged *or*
         we have outgoing changesets but refused to push
       - other values as described by addchangegroup()
-    '''
+    """
     if opargs is None:
         opargs = {}
     pushop = pushoperation(
@@ -1510,8 +1510,8 @@
 
 
 def add_confirm_callback(repo, pullop):
-    """ adds a finalize callback to transaction which can be used to show stats
-    to user and confirm the pull before committing transaction """
+    """adds a finalize callback to transaction which can be used to show stats
+    to user and confirm the pull before committing transaction"""
 
     tr = pullop.trmanager.transaction()
     scmutil.registersummarycallback(
@@ -1892,7 +1892,11 @@
     elif pullop.heads is None:
         with pullop.remote.commandexecutor() as e:
             cg = e.callcommand(
-                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
+                b'changegroup',
+                {
+                    b'nodes': pullop.fetch,
+                    b'source': b'pull',
+                },
             ).result()
 
     elif not pullop.remote.capable(b'changegroupsubset'):
--- a/mercurial/exchangev2.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/exchangev2.py	Fri Nov 27 17:03:29 2020 -0500
@@ -89,7 +89,10 @@
             continue
 
         phases.advanceboundary(
-            repo, tr, phasenumber, csetres[b'nodesbyphase'][phase],
+            repo,
+            tr,
+            phasenumber,
+            csetres[b'nodesbyphase'][phase],
         )
 
     # Write bookmark updates.
@@ -189,7 +192,10 @@
 def _fetchrawstorefiles(repo, remote):
     with remote.commandexecutor() as e:
         objs = e.callcommand(
-            b'rawstorefiledata', {b'files': [b'changelog', b'manifestlog'],}
+            b'rawstorefiledata',
+            {
+                b'files': [b'changelog', b'manifestlog'],
+            },
         ).result()
 
         # First object is a summary of files data that follows.
@@ -746,7 +752,10 @@
         with remote.commandexecutor() as e:
             args = {
                 b'revisions': [
-                    {b'type': b'changesetexplicit', b'nodes': batch,}
+                    {
+                        b'type': b'changesetexplicit',
+                        b'nodes': batch,
+                    }
                 ],
                 b'fields': fields,
                 b'haveparents': haveparents,
--- a/mercurial/extensions.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/extensions.py	Fri Nov 27 17:03:29 2020 -0500
@@ -457,7 +457,7 @@
 
 
 def afterloaded(extension, callback):
-    '''Run the specified function after a named extension is loaded.
+    """Run the specified function after a named extension is loaded.
 
     If the named extension is already loaded, the callback will be called
     immediately.
@@ -467,7 +467,7 @@
 
     The callback receives the named argument ``loaded``, which is a boolean
     indicating whether the dependent extension actually loaded.
-    '''
+    """
 
     if extension in _extensions:
         # Report loaded as False if the extension is disabled
@@ -500,12 +500,12 @@
 
 
 def bind(func, *args):
-    '''Partial function application
+    """Partial function application
 
-      Returns a new function that is the partial application of args and kwargs
-      to func.  For example,
+    Returns a new function that is the partial application of args and kwargs
+    to func.  For example,
 
-          f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
+        f(1, 2, bar=3) === bind(f, 1)(2, bar=3)"""
     assert callable(func)
 
     def closure(*a, **kw):
@@ -618,7 +618,7 @@
 
 
 def wrapfunction(container, funcname, wrapper):
-    '''Wrap the function named funcname in container
+    """Wrap the function named funcname in container
 
     Replace the funcname member in the given container with the specified
     wrapper. The container is typically a module, class, or instance.
@@ -649,7 +649,7 @@
     work. Since you cannot control what other extensions are loaded by
     your end users, you should play nicely with others by using the
     subclass trick.
-    '''
+    """
     assert callable(wrapper)
 
     origfn = getattr(container, funcname)
@@ -668,7 +668,7 @@
 
 
 def unwrapfunction(container, funcname, wrapper=None):
-    '''undo wrapfunction
+    """undo wrapfunction
 
     If wrappers is None, undo the last wrap. Otherwise removes the wrapper
     from the chain of wrappers.
@@ -676,7 +676,7 @@
     Return the removed wrapper.
     Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
     wrapper is not None but is not found in the wrapper chain.
-    '''
+    """
     chain = getwrapperchain(container, funcname)
     origfn = chain.pop()
     if wrapper is None:
@@ -689,13 +689,13 @@
 
 
 def getwrapperchain(container, funcname):
-    '''get a chain of wrappers of a function
+    """get a chain of wrappers of a function
 
     Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
 
     The wrapper functions are the ones passed to wrapfunction, whose first
     argument is origfunc.
-    '''
+    """
     result = []
     fn = getattr(container, funcname)
     while fn:
@@ -744,11 +744,11 @@
 
 
 def _moduledoc(file):
-    '''return the top-level python documentation for the given file
+    """return the top-level python documentation for the given file
 
     Loosely inspired by pydoc.source_synopsis(), but rewritten to
     handle triple quotes and to return the whole text instead of just
-    the synopsis'''
+    the synopsis"""
     result = []
 
     line = file.readline()
@@ -883,8 +883,8 @@
 
 
 def disabledcmd(ui, cmd, strict=False):
-    '''find cmd from disabled extensions without importing.
-    returns (cmdname, extname, doc)'''
+    """find cmd from disabled extensions without importing.
+    returns (cmdname, extname, doc)"""
 
     paths = _disabledpaths()
     if not paths:
--- a/mercurial/filemerge.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/filemerge.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1232,8 +1232,7 @@
 
 
 def loadinternalmerge(ui, extname, registrarobj):
-    """Load internal merge tool from specified registrarobj
-    """
+    """Load internal merge tool from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         fullname = b':' + name
         internals[fullname] = func
--- a/mercurial/fileset.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/fileset.py	Fri Nov 27 17:03:29 2020 -0500
@@ -122,8 +122,7 @@
 
 @predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
 def modified(mctx, x):
-    """File that is modified according to :hg:`status`.
-    """
+    """File that is modified according to :hg:`status`."""
     # i18n: "modified" is a keyword
     getargs(x, 0, 0, _(b"modified takes no arguments"))
     s = set(mctx.status().modified)
@@ -132,8 +131,7 @@
 
 @predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
 def added(mctx, x):
-    """File that is added according to :hg:`status`.
-    """
+    """File that is added according to :hg:`status`."""
     # i18n: "added" is a keyword
     getargs(x, 0, 0, _(b"added takes no arguments"))
     s = set(mctx.status().added)
@@ -142,8 +140,7 @@
 
 @predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
 def removed(mctx, x):
-    """File that is removed according to :hg:`status`.
-    """
+    """File that is removed according to :hg:`status`."""
     # i18n: "removed" is a keyword
     getargs(x, 0, 0, _(b"removed takes no arguments"))
     s = set(mctx.status().removed)
@@ -152,8 +149,7 @@
 
 @predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
 def deleted(mctx, x):
-    """Alias for ``missing()``.
-    """
+    """Alias for ``missing()``."""
     # i18n: "deleted" is a keyword
     getargs(x, 0, 0, _(b"deleted takes no arguments"))
     s = set(mctx.status().deleted)
@@ -162,8 +158,7 @@
 
 @predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
 def missing(mctx, x):
-    """File that is missing according to :hg:`status`.
-    """
+    """File that is missing according to :hg:`status`."""
     # i18n: "missing" is a keyword
     getargs(x, 0, 0, _(b"missing takes no arguments"))
     s = set(mctx.status().deleted)
@@ -190,8 +185,7 @@
 
 @predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
 def clean(mctx, x):
-    """File that is clean according to :hg:`status`.
-    """
+    """File that is clean according to :hg:`status`."""
     # i18n: "clean" is a keyword
     getargs(x, 0, 0, _(b"clean takes no arguments"))
     s = set(mctx.status().clean)
@@ -208,8 +202,7 @@
 
 @predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
 def binary(mctx, x):
-    """File that appears to be binary (contains NUL bytes).
-    """
+    """File that appears to be binary (contains NUL bytes)."""
     # i18n: "binary" is a keyword
     getargs(x, 0, 0, _(b"binary takes no arguments"))
     return mctx.fpredicate(
@@ -219,8 +212,7 @@
 
 @predicate(b'exec()')
 def exec_(mctx, x):
-    """File that is marked as executable.
-    """
+    """File that is marked as executable."""
     # i18n: "exec" is a keyword
     getargs(x, 0, 0, _(b"exec takes no arguments"))
     ctx = mctx.ctx
@@ -229,8 +221,7 @@
 
 @predicate(b'symlink()')
 def symlink(mctx, x):
-    """File that is marked as a symlink.
-    """
+    """File that is marked as a symlink."""
     # i18n: "symlink" is a keyword
     getargs(x, 0, 0, _(b"symlink takes no arguments"))
     ctx = mctx.ctx
@@ -239,8 +230,7 @@
 
 @predicate(b'resolved()', weight=_WEIGHT_STATUS)
 def resolved(mctx, x):
-    """File that is marked resolved according to :hg:`resolve -l`.
-    """
+    """File that is marked resolved according to :hg:`resolve -l`."""
     # i18n: "resolved" is a keyword
     getargs(x, 0, 0, _(b"resolved takes no arguments"))
     if mctx.ctx.rev() is not None:
@@ -253,8 +243,7 @@
 
 @predicate(b'unresolved()', weight=_WEIGHT_STATUS)
 def unresolved(mctx, x):
-    """File that is marked unresolved according to :hg:`resolve -l`.
-    """
+    """File that is marked unresolved according to :hg:`resolve -l`."""
     # i18n: "unresolved" is a keyword
     getargs(x, 0, 0, _(b"unresolved takes no arguments"))
     if mctx.ctx.rev() is not None:
@@ -267,8 +256,7 @@
 
 @predicate(b'hgignore()', weight=_WEIGHT_STATUS)
 def hgignore(mctx, x):
-    """File that matches the active .hgignore pattern.
-    """
+    """File that matches the active .hgignore pattern."""
     # i18n: "hgignore" is a keyword
     getargs(x, 0, 0, _(b"hgignore takes no arguments"))
     return mctx.ctx.repo().dirstate._ignore
@@ -288,8 +276,7 @@
 
 @predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
 def grep(mctx, x):
-    """File contains the given regular expression.
-    """
+    """File contains the given regular expression."""
     try:
         # i18n: "grep" is a keyword
         r = re.compile(getstring(x, _(b"grep requires a pattern")))
@@ -414,8 +401,7 @@
 
 @predicate(b'copied()')
 def copied(mctx, x):
-    """File that is recorded as being copied.
-    """
+    """File that is recorded as being copied."""
     # i18n: "copied" is a keyword
     getargs(x, 0, 0, _(b"copied takes no arguments"))
 
@@ -476,8 +462,7 @@
 
 @predicate(b'subrepo([pattern])')
 def subrepo(mctx, x):
-    """Subrepositories whose paths match the given pattern.
-    """
+    """Subrepositories whose paths match the given pattern."""
     # i18n: "subrepo" is a keyword
     getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
     ctx = mctx.ctx
@@ -628,8 +613,7 @@
 
 
 def loadpredicate(ui, extname, registrarobj):
-    """Load fileset predicates from specified registrarobj
-    """
+    """Load fileset predicates from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         symbols[name] = func
 
--- a/mercurial/help.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/help.py	Fri Nov 27 17:03:29 2020 -0500
@@ -698,10 +698,10 @@
     fullname=None,
     **opts
 ):
-    '''
+    """
     Generate the help for 'name' as unformatted restructured text. If
     'name' is None, describe the commands available.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
 
--- a/mercurial/hg.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hg.py	Fri Nov 27 17:03:29 2020 -0500
@@ -243,7 +243,7 @@
 
 
 def defaultdest(source):
-    '''return default destination of clone if none is given
+    """return default destination of clone if none is given
 
     >>> defaultdest(b'foo')
     'foo'
@@ -257,7 +257,7 @@
     ''
     >>> defaultdest(b'http://example.org/foo/')
     'foo'
-    '''
+    """
     path = util.url(source).path
     if not path:
         return b''
@@ -333,7 +333,7 @@
 
 
 def _prependsourcehgrc(repo):
-    """ copies the source repo config and prepend it in current repo .hg/hgrc
+    """copies the source repo config and prepend it in current repo .hg/hgrc
     on unshare. This is only done if the share was perfomed using share safe
     method where we share config of source in shares"""
     srcvfs = vfsmod.vfs(repo.sharedpath)
@@ -443,10 +443,10 @@
 
 
 def copystore(ui, srcrepo, destpath):
-    '''copy files from store of srcrepo in destpath
+    """copy files from store of srcrepo in destpath
 
     returns destlock
-    '''
+    """
     destlock = None
     try:
         hardlink = None
@@ -517,7 +517,12 @@
         for r in rev:
             with srcpeer.commandexecutor() as e:
                 remoterevs.append(
-                    e.callcommand(b'lookup', {b'key': r,}).result()
+                    e.callcommand(
+                        b'lookup',
+                        {
+                            b'key': r,
+                        },
+                    ).result()
                 )
         revs = remoterevs
 
@@ -751,7 +756,10 @@
             try:
                 with srcpeer.commandexecutor() as e:
                     rootnode = e.callcommand(
-                        b'lookup', {b'key': b'0',}
+                        b'lookup',
+                        {
+                            b'key': b'0',
+                        },
                     ).result()
 
                 if rootnode != node.nullid:
@@ -900,7 +908,12 @@
                 for rev in revs:
                     with srcpeer.commandexecutor() as e:
                         remoterevs.append(
-                            e.callcommand(b'lookup', {b'key': rev,}).result()
+                            e.callcommand(
+                                b'lookup',
+                                {
+                                    b'key': rev,
+                                },
+                            ).result()
                         )
                 revs = remoterevs
 
@@ -974,7 +987,10 @@
                 if update is not True:
                     with srcpeer.commandexecutor() as e:
                         checkout = e.callcommand(
-                            b'lookup', {b'key': update,}
+                            b'lookup',
+                            {
+                                b'key': update,
+                            },
                         ).result()
 
                 uprev = None
@@ -1176,7 +1192,10 @@
 
 
 def merge(
-    ctx, force=False, remind=True, labels=None,
+    ctx,
+    force=False,
+    remind=True,
+    labels=None,
 ):
     """Branch merge with node, resolving changes. Return true if any
     unresolved conflicts."""
--- a/mercurial/hgweb/__init__.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hgweb/__init__.py	Fri Nov 27 17:03:29 2020 -0500
@@ -27,7 +27,7 @@
 
 
 def hgweb(config, name=None, baseui=None):
-    '''create an hgweb wsgi object
+    """create an hgweb wsgi object
 
     config can be one of:
     - repo object (single repo view)
@@ -35,7 +35,7 @@
     - path to config file (multi-repo view)
     - dict of virtual:real pairs (multi-repo view)
     - list of virtual:real tuples (multi-repo view)
-    '''
+    """
 
     if isinstance(config, pycompat.unicode):
         raise error.ProgrammingError(
--- a/mercurial/hgweb/common.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hgweb/common.py	Fri Nov 27 17:03:29 2020 -0500
@@ -51,9 +51,9 @@
 
 
 def checkauthz(hgweb, req, op):
-    '''Check permission for operation based on request data (including
+    """Check permission for operation based on request data (including
     authentication info). Return if op allowed, else raise an ErrorResponse
-    exception.'''
+    exception."""
 
     user = req.remoteuser
 
--- a/mercurial/hgweb/hgweb_mod.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hgweb/hgweb_mod.py	Fri Nov 27 17:03:29 2020 -0500
@@ -86,12 +86,12 @@
 
 
 def makebreadcrumb(url, prefix=b''):
-    '''Return a 'URL breadcrumb' list
+    """Return a 'URL breadcrumb' list
 
     A 'URL breadcrumb' is a list of URL-name pairs,
     corresponding to each of the path items on a URL.
     This can be used to create path navigation entries.
-    '''
+    """
     if url.endswith(b'/'):
         url = url[:-1]
     if prefix:
--- a/mercurial/hgweb/request.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hgweb/request.py	Fri Nov 27 17:03:29 2020 -0500
@@ -622,8 +622,8 @@
 
 
 def wsgiapplication(app_maker):
-    '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
-    can and should now be used as a WSGI application.'''
+    """For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
+    can and should now be used as a WSGI application."""
     application = app_maker()
 
     def run_wsgi(env, respond):
--- a/mercurial/hgweb/webutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hgweb/webutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -491,11 +491,11 @@
 
 
 def changelistentry(web, ctx):
-    '''Obtain a dictionary to be used for entries in a changelist.
+    """Obtain a dictionary to be used for entries in a changelist.
 
     This function is called when producing items for the "entries" list passed
     to the "shortlog" and "changelog" templates.
-    '''
+    """
     repo = web.repo
     rev = ctx.rev()
     n = scmutil.binnode(ctx)
--- a/mercurial/hook.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/hook.py	Fri Nov 27 17:03:29 2020 -0500
@@ -30,14 +30,14 @@
 
 
 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
-    '''call python hook. hook is callable object, looked up as
+    """call python hook. hook is callable object, looked up as
     name in python module. if callable returns "true", hook
     fails, else passes. if hook raises exception, treated as
     hook failure. exception propagates if throw is "true".
 
     reason for "true" meaning "hook failed" is so that
     unmodified commands (e.g. mercurial.commands.update) can
-    be run as hooks without wrappers to convert return values.'''
+    be run as hooks without wrappers to convert return values."""
 
     if callable(funcname):
         obj = funcname
--- a/mercurial/httppeer.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/httppeer.py	Fri Nov 27 17:03:29 2020 -0500
@@ -766,7 +766,10 @@
                 % _(b', ').join(sorted(permissions))
             )
 
-        permission = {b'push': b'rw', b'pull': b'ro',}[permissions.pop()]
+        permission = {
+            b'push': b'rw',
+            b'pull': b'ro',
+        }[permissions.pop()]
 
         handler, resp = sendv2request(
             self._ui,
@@ -942,7 +945,10 @@
 #    Integer priority for the service. If we could choose from multiple
 #    services, we choose the one with the highest priority.
 API_PEERS = {
-    wireprototypes.HTTP_WIREPROTO_V2: {b'init': httpv2peer, b'priority': 50,},
+    wireprototypes.HTTP_WIREPROTO_V2: {
+        b'init': httpv2peer,
+        b'priority': 50,
+    },
 }
 
 
--- a/mercurial/interfaces/dirstate.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/interfaces/dirstate.py	Fri Nov 27 17:03:29 2020 -0500
@@ -9,12 +9,12 @@
 
 class idirstate(interfaceutil.Interface):
     def __init__(opener, ui, root, validate, sparsematchfn):
-        '''Create a new dirstate object.
+        """Create a new dirstate object.
 
         opener is an open()-like callable that can be used to open the
         dirstate file; root is the root of the directory tracked by
         the dirstate.
-        '''
+        """
 
     # TODO: all these private methods and attributes should be made
     # public or removed from the interface.
@@ -31,17 +31,17 @@
 
     @contextlib.contextmanager
     def parentchange():
-        '''Context manager for handling dirstate parents.
+        """Context manager for handling dirstate parents.
 
         If an exception occurs in the scope of the context manager,
         the incoherent dirstate won't be written when wlock is
         released.
-        '''
+        """
 
     def pendingparentchange():
-        '''Returns true if the dirstate is in the middle of a set of changes
+        """Returns true if the dirstate is in the middle of a set of changes
         that modify the dirstate parent.
-        '''
+        """
 
     def hasdir(d):
         pass
@@ -50,18 +50,18 @@
         pass
 
     def getcwd():
-        '''Return the path from which a canonical path is calculated.
+        """Return the path from which a canonical path is calculated.
 
         This path should be used to resolve file patterns or to convert
         canonical paths back to file paths for display. It shouldn't be
         used to get real file paths. Use vfs functions instead.
-        '''
+        """
 
     def pathto(f, cwd=None):
         pass
 
     def __getitem__(key):
-        '''Return the current state of key (a filename) in the dirstate.
+        """Return the current state of key (a filename) in the dirstate.
 
         States are:
           n  normal
@@ -69,7 +69,7 @@
           r  marked for removal
           a  marked for addition
           ?  not tracked
-        '''
+        """
 
     def __contains__(key):
         """Check if bytestring `key` is known to the dirstate."""
@@ -111,11 +111,11 @@
         pass
 
     def invalidate():
-        '''Causes the next access to reread the dirstate.
+        """Causes the next access to reread the dirstate.
 
         This is different from localrepo.invalidatedirstate() because it always
         rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
-        check whether the dirstate has changed before rereading it.'''
+        check whether the dirstate has changed before rereading it."""
 
     def copy(source, dest):
         """Mark dest as a copy of source. Unmark dest if source is None."""
@@ -127,7 +127,7 @@
         pass
 
     def normal(f, parentfiledata=None):
-        '''Mark a file normal and clean.
+        """Mark a file normal and clean.
 
         parentfiledata: (mode, size, mtime) of the clean file
 
@@ -135,7 +135,7 @@
         size), as close as possible to the point where we
         determined the file was clean, to limit the risk of the
         file having been changed by an external process between the
-        moment where the file was determined to be clean and now.'''
+        moment where the file was determined to be clean and now."""
         pass
 
     def normallookup(f):
@@ -157,7 +157,7 @@
         '''Drop a file from the dirstate'''
 
     def normalize(path, isknown=False, ignoremissing=False):
-        '''
+        """
         normalize the case of a pathname when on a casefolding filesystem
 
         isknown specifies whether the filename came from walking the
@@ -172,7 +172,7 @@
         - version of name already stored in the dirstate
         - version of name stored on disk
         - version provided via command arguments
-        '''
+        """
 
     def clear():
         pass
@@ -181,11 +181,11 @@
         pass
 
     def identity():
-        '''Return identity of dirstate itself to detect changes in storage
+        """Return identity of dirstate itself to detect changes in storage
 
         If identity of previous dirstate is equal to this, writing
         changes based on the former dirstate out can keep consistency.
-        '''
+        """
 
     def write(tr):
         pass
@@ -201,7 +201,7 @@
         """
 
     def walk(match, subrepos, unknown, ignored, full=True):
-        '''
+        """
         Walk recursively through the directory tree, finding all files
         matched by match.
 
@@ -210,10 +210,10 @@
         Return a dict mapping filename to stat-like object (either
         mercurial.osutil.stat instance or return value of os.stat()).
 
-        '''
+        """
 
     def status(match, subrepos, ignored, clean, unknown):
-        '''Determine the status of the working copy relative to the
+        """Determine the status of the working copy relative to the
         dirstate and return a pair of (unsure, status), where status is of type
         scmutil.status and:
 
@@ -227,12 +227,12 @@
           status.clean:
             files that have definitely not been modified since the
             dirstate was written
-        '''
+        """
 
     def matches(match):
-        '''
+        """
         return files in the dirstate (in whatever state) filtered by match
-        '''
+        """
 
     def savebackup(tr, backupname):
         '''Save current dirstate into backup file'''
--- a/mercurial/interfaces/repository.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/interfaces/repository.py	Fri Nov 27 17:03:29 2020 -0500
@@ -617,7 +617,7 @@
         """
 
     def revision(node, raw=False):
-        """"Obtain fulltext data for a node.
+        """ "Obtain fulltext data for a node.
 
         By default, any storage transformations are applied before the data
         is returned. If ``raw`` is True, non-raw storage transformations
@@ -628,8 +628,7 @@
         """
 
     def rawdata(node):
-        """Obtain raw data for a node.
-        """
+        """Obtain raw data for a node."""
 
     def read(node):
         """Resolve file fulltext data.
--- a/mercurial/keepalive.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/keepalive.py	Fri Nov 27 17:03:29 2020 -0500
@@ -112,7 +112,7 @@
     """
     The connection manager must be able to:
       * keep track of all existing
-      """
+    """
 
     def __init__(self):
         self._lock = threading.Lock()
@@ -675,8 +675,7 @@
 
 
 def wrapgetresponse(cls):
-    """Wraps getresponse in cls with a broken-pipe sane version.
-    """
+    """Wraps getresponse in cls with a broken-pipe sane version."""
 
     def safegetresponse(self):
         # In safesend() we might set the _broken_pipe_resp
--- a/mercurial/localrepo.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/localrepo.py	Fri Nov 27 17:03:29 2020 -0500
@@ -96,8 +96,7 @@
 
 
 class _basefilecache(scmutil.filecache):
-    """All filecache usage on repo are done for logic that should be unfiltered
-    """
+    """All filecache usage on repo are done for logic that should be unfiltered"""
 
     def __get__(self, repo, type=None):
         if repo is None:
@@ -400,8 +399,8 @@
 
 @interfaceutil.implementer(repository.ipeerlegacycommands)
 class locallegacypeer(localpeer):
-    '''peer extension which implements legacy methods too; used for tests with
-    restricted capabilities'''
+    """peer extension which implements legacy methods too; used for tests with
+    restricted capabilities"""
 
     def __init__(self, repo):
         super(locallegacypeer, self).__init__(repo, caps=legacycaps)
@@ -440,7 +439,7 @@
 
 
 def _getsharedvfs(hgvfs, requirements):
-    """ returns the vfs object pointing to root of shared source
+    """returns the vfs object pointing to root of shared source
     repo for a shared repository
 
     hgvfs is vfs pointing at .hg/ of current repo (shared one)
@@ -465,7 +464,7 @@
 
 
 def _readrequires(vfs, allowmissing):
-    """ reads the require file present at root of this vfs
+    """reads the require file present at root of this vfs
     and returns a set of requirements
 
     If allowmissing is True, we suppress ENOENT if raised"""
@@ -1756,7 +1755,7 @@
         return iter(self.changelog)
 
     def revs(self, expr, *args):
-        '''Find revisions matching a revset.
+        """Find revisions matching a revset.
 
         The revset is specified as a string ``expr`` that may contain
         %-formatting to escape certain types. See ``revsetlang.formatspec``.
@@ -1767,30 +1766,30 @@
 
         Returns a smartset.abstractsmartset, which is a list-like interface
         that contains integer revisions.
-        '''
+        """
         tree = revsetlang.spectree(expr, *args)
         return revset.makematcher(tree)(self)
 
     def set(self, expr, *args):
-        '''Find revisions matching a revset and emit changectx instances.
+        """Find revisions matching a revset and emit changectx instances.
 
         This is a convenience wrapper around ``revs()`` that iterates the
         result and is a generator of changectx instances.
 
         Revset aliases from the configuration are not expanded. To expand
         user aliases, consider calling ``scmutil.revrange()``.
-        '''
+        """
         for r in self.revs(expr, *args):
             yield self[r]
 
     def anyrevs(self, specs, user=False, localalias=None):
-        '''Find revisions matching one of the given revsets.
+        """Find revisions matching one of the given revsets.
 
         Revset aliases from the configuration are not expanded by default. To
         expand user aliases, specify ``user=True``. To provide some local
         definitions overriding user aliases, set ``localalias`` to
         ``{name: definitionstring}``.
-        '''
+        """
         if specs == [b'null']:
             return revset.baseset([nullrev])
         if specs == [b'.']:
@@ -1822,8 +1821,8 @@
 
     @filteredpropertycache
     def _tagscache(self):
-        '''Returns a tagscache object that contains various tags related
-        caches.'''
+        """Returns a tagscache object that contains various tags related
+        caches."""
 
         # This simplifies its cache management by having one decorated
         # function (this one) and the rest simply fetch things from it.
@@ -1861,12 +1860,12 @@
         return t
 
     def _findtags(self):
-        '''Do the hard work of finding tags.  Return a pair of dicts
+        """Do the hard work of finding tags.  Return a pair of dicts
         (tags, tagtypes) where tags maps tag name to node, and tagtypes
         maps tag name to a string like \'global\' or \'local\'.
         Subclasses or extensions are free to add their own tags, but
         should be aware that the returned dicts will be retained for the
-        duration of the localrepo object.'''
+        duration of the localrepo object."""
 
         # XXX what tagtype should subclasses/extensions use?  Currently
         # mq and bookmarks add tags, but do not set the tagtype at all.
@@ -1897,13 +1896,13 @@
         return (tags, tagtypes)
 
     def tagtype(self, tagname):
-        '''
+        """
         return the type of the given tag. result can be:
 
         'local'  : a local tag
         'global' : a global tag
         None     : tag does not exist
-        '''
+        """
 
         return self._tagscache.tagtypes.get(tagname)
 
@@ -1933,8 +1932,8 @@
         return self._bookmarks.names(node)
 
     def branchmap(self):
-        '''returns a dictionary {branch: [branchheads]} with branchheads
-        ordered by increasing revision number'''
+        """returns a dictionary {branch: [branchheads]} with branchheads
+        ordered by increasing revision number"""
         return self._branchcaches[self]
 
     @unfilteredmethod
@@ -1944,13 +1943,13 @@
         return self._revbranchcache
 
     def branchtip(self, branch, ignoremissing=False):
-        '''return the tip node for a given branch
+        """return the tip node for a given branch
 
         If ignoremissing is True, then this method will not raise an error.
         This is helpful for callers that only expect None for a missing branch
         (e.g. namespace).
 
-        '''
+        """
         try:
             return self.branchmap().branchtip(branch)
         except KeyError:
@@ -2014,7 +2013,7 @@
 
     def filectx(self, path, changeid=None, fileid=None, changectx=None):
         """changeid must be a changeset revision, if specified.
-           fileid can be a file revision or node."""
+        fileid can be a file revision or node."""
         return context.filectx(
             self, path, changeid, fileid, changectx=changectx
         )
@@ -2311,8 +2310,7 @@
         tr.addfinalize(b'flush-fncache', self.store.write)
 
         def txnclosehook(tr2):
-            """To be run if transaction is successful, will schedule a hook run
-            """
+            """To be run if transaction is successful, will schedule a hook run"""
             # Don't reference tr2 in hook() so we don't hold a reference.
             # This reduces memory consumption when there are multiple
             # transactions per lock. This can likely go away if issue5045
@@ -2362,8 +2360,7 @@
         tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
 
         def txnaborthook(tr2):
-            """To be run if transaction is aborted
-            """
+            """To be run if transaction is aborted"""
             reporef().hook(
                 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
             )
@@ -2620,14 +2617,14 @@
         self._quick_access_changeid_invalidate()
 
     def invalidatedirstate(self):
-        '''Invalidates the dirstate, causing the next call to dirstate
+        """Invalidates the dirstate, causing the next call to dirstate
         to check if it was modified since the last time it was read,
         rereading it if it has.
 
         This is different from dirstate.invalidate() in that it doesn't always
         reread the dirstate. Use dirstate.invalidate() if you want to
         explicitly read the dirstate again (i.e. restoring it to a previous
-        known good state).'''
+        known good state)."""
         if hasunfilteredcache(self, 'dirstate'):
             for k in self.dirstate._filecache:
                 try:
@@ -2637,13 +2634,13 @@
             delattr(self.unfiltered(), 'dirstate')
 
     def invalidate(self, clearfilecache=False):
-        '''Invalidates both store and non-store parts other than dirstate
+        """Invalidates both store and non-store parts other than dirstate
 
         If a transaction is running, invalidation of store is omitted,
         because discarding in-memory changes might cause inconsistency
         (e.g. incomplete fncache causes unintentional failure, but
         redundant one doesn't).
-        '''
+        """
         unfiltered = self.unfiltered()  # all file caches are stored unfiltered
         for k in list(self._filecache.keys()):
             # dirstate is invalidated separately in invalidatedirstate()
@@ -2673,8 +2670,8 @@
             self.store.invalidatecaches()
 
     def invalidateall(self):
-        '''Fully invalidates both store and non-store parts, causing the
-        subsequent operation to reread any outside changes.'''
+        """Fully invalidates both store and non-store parts, causing the
+        subsequent operation to reread any outside changes."""
         # extension should hook this to invalidate its caches
         self.invalidate()
         self.invalidatedirstate()
@@ -2689,7 +2686,13 @@
             ce.refresh()
 
     def _lock(
-        self, vfs, lockname, wait, releasefn, acquirefn, desc,
+        self,
+        vfs,
+        lockname,
+        wait,
+        releasefn,
+        acquirefn,
+        desc,
     ):
         timeout = 0
         warntimeout = 0
@@ -2726,12 +2729,12 @@
             callback(True)
 
     def lock(self, wait=True):
-        '''Lock the repository store (.hg/store) and return a weak reference
+        """Lock the repository store (.hg/store) and return a weak reference
         to the lock. Use this before modifying the store (e.g. committing or
         stripping). If you are opening a transaction, get a lock as well.
 
         If both 'lock' and 'wlock' must be acquired, ensure you always acquire
-        'wlock' first to avoid a dead-lock hazard.'''
+        'wlock' first to avoid a dead-lock hazard."""
         l = self._currentlock(self._lockref)
         if l is not None:
             l.lock()
@@ -2749,13 +2752,13 @@
         return l
 
     def wlock(self, wait=True):
-        '''Lock the non-store parts of the repository (everything under
+        """Lock the non-store parts of the repository (everything under
         .hg except .hg/store) and return a weak reference to the lock.
 
         Use this before modifying files in .hg.
 
         If both 'lock' and 'wlock' must be acquired, ensure you always acquire
-        'wlock' first to avoid a dead-lock hazard.'''
+        'wlock' first to avoid a dead-lock hazard."""
         l = self._wlockref and self._wlockref()
         if l is not None and l.held:
             l.lock()
@@ -2963,7 +2966,7 @@
 
     @unfilteredmethod
     def destroying(self):
-        '''Inform the repository that nodes are about to be destroyed.
+        """Inform the repository that nodes are about to be destroyed.
         Intended for use by strip and rollback, so there's a common
         place for anything that has to be done before destroying history.
 
@@ -2972,7 +2975,7 @@
         destroyed is imminent, the repo will be invalidated causing those
         changes to stay in memory (waiting for the next unlock), or vanish
         completely.
-        '''
+        """
         # When using the same lock to commit and strip, the phasecache is left
         # dirty after committing. Then when we strip, the repo is invalidated,
         # causing those changes to disappear.
@@ -2981,10 +2984,10 @@
 
     @unfilteredmethod
     def destroyed(self):
-        '''Inform the repository that nodes have been destroyed.
+        """Inform the repository that nodes have been destroyed.
         Intended for use by strip and rollback, so there's a common
         place for anything that has to be done after destroying history.
-        '''
+        """
         # When one tries to:
         # 1) destroy nodes thus calling this method (e.g. strip)
         # 2) use phasecache somewhere (e.g. commit)
@@ -3067,13 +3070,13 @@
         return sorted(heads, key=self.changelog.rev, reverse=True)
 
     def branchheads(self, branch=None, start=None, closed=False):
-        '''return a (possibly filtered) list of heads for the given branch
+        """return a (possibly filtered) list of heads for the given branch
 
         Heads are returned in topological order, from newest to oldest.
         If branch is None, use the dirstate branch.
         If start is not None, return only heads reachable from start.
         If closed is True, return heads that are marked as closed as well.
-        '''
+        """
         if branch is None:
             branch = self[None].branch()
         branches = self.branchmap()
@@ -3352,10 +3355,10 @@
 
 
 def checkrequirementscompat(ui, requirements):
-    """ Checks compatibility of repository requirements enabled and disabled.
+    """Checks compatibility of repository requirements enabled and disabled.
 
     Returns a set of requirements which need to be dropped because dependent
-    requirements are not enabled. Also warns users about it """
+    requirements are not enabled. Also warns users about it"""
 
     dropped = set()
 
--- a/mercurial/lock.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/lock.py	Fri Nov 27 17:03:29 2020 -0500
@@ -175,14 +175,14 @@
 
 
 class lock(object):
-    '''An advisory lock held by one process to control access to a set
+    """An advisory lock held by one process to control access to a set
     of files.  Non-cooperating processes or incorrectly written scripts
     can ignore Mercurial's locking scheme and stomp all over the
     repository, so don't do that.
 
     Typically used via localrepository.lock() to lock the repository
     store (.hg/store/) or localrepository.wlock() to lock everything
-    else under .hg/.'''
+    else under .hg/."""
 
     # lock is symlink on platforms that support it, file on others.
 
--- a/mercurial/logcmdutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/logcmdutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -417,8 +417,7 @@
                 )
 
     def _exthook(self, ctx):
-        '''empty method used by extension as a hook point
-        '''
+        """empty method used by extension as a hook point"""
 
     def _showpatch(self, ctx, graphwidth=0):
         if self._includestat:
@@ -509,13 +508,13 @@
 
 
 class changesettemplater(changesetprinter):
-    '''format changeset information.
+    """format changeset information.
 
     Note: there are a variety of convenience functions to build a
     changesettemplater for common cases. See functions such as:
     maketemplater, changesetdisplayer, buildcommittemplate, or other
     functions that use changesettemplater.
-    '''
+    """
 
     # Arguments before "buffered" used to be positional. Consider not
     # adding/removing arguments before "buffered" to not break callers.
--- a/mercurial/logexchange.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/logexchange.py	Fri Nov 27 17:03:29 2020 -0500
@@ -141,7 +141,10 @@
 
     with remoterepo.commandexecutor() as e:
         bookmarks = e.callcommand(
-            b'listkeys', {b'namespace': b'bookmarks',}
+            b'listkeys',
+            {
+                b'namespace': b'bookmarks',
+            },
         ).result()
 
     # on a push, we don't want to keep obsolete heads since
--- a/mercurial/mail.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/mail.py	Fri Nov 27 17:03:29 2020 -0500
@@ -44,10 +44,10 @@
 
 
 class STARTTLS(smtplib.SMTP):
-    '''Derived class to verify the peer certificate for STARTTLS.
+    """Derived class to verify the peer certificate for STARTTLS.
 
     This class allows passing any keyword arguments to SSL socket creation.
-    '''
+    """
 
     def __init__(self, ui, host=None, **kwargs):
         smtplib.SMTP.__init__(self, **kwargs)
@@ -76,10 +76,10 @@
 
 
 class SMTPS(smtplib.SMTP):
-    '''Derived class to verify the peer certificate for SMTPS.
+    """Derived class to verify the peer certificate for SMTPS.
 
     This class allows passing any keyword arguments to SSL socket creation.
-    '''
+    """
 
     def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
         self.keyfile = keyfile
@@ -221,8 +221,8 @@
 
 
 def connect(ui, mbox=None):
-    '''make a mail connection. return a function to send mail.
-    call as sendmail(sender, list-of-recipients, msg).'''
+    """make a mail connection. return a function to send mail.
+    call as sendmail(sender, list-of-recipients, msg)."""
     if mbox:
         open(mbox, b'wb').close()
         return lambda s, r, m: _mbox(mbox, s, r, m)
@@ -267,11 +267,11 @@
 
 def mimetextpatch(s, subtype='plain', display=False):
     # type: (bytes, str, bool) -> email.message.Message
-    '''Return MIME message suitable for a patch.
+    """Return MIME message suitable for a patch.
     Charset will be detected by first trying to decode as us-ascii, then utf-8,
     and finally the global encodings. If all those fail, fall back to
     ISO-8859-1, an encoding that allows all byte sequences.
-    Transfer encodings will be used if necessary.'''
+    Transfer encodings will be used if necessary."""
 
     cs = [
         'us-ascii',
@@ -293,9 +293,9 @@
 
 def mimetextqp(body, subtype, charset):
     # type: (bytes, str, str) -> email.message.Message
-    '''Return MIME message.
+    """Return MIME message.
     Quoted-printable transfer encoding will be used if necessary.
-    '''
+    """
     cs = email.charset.Charset(charset)
     msg = email.message.Message()
     msg.set_type('text/' + subtype)
@@ -337,11 +337,11 @@
 
 def _encode(ui, s, charsets):
     # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
-    '''Returns (converted) string, charset tuple.
+    """Returns (converted) string, charset tuple.
     Finds out best charset by cycling through sendcharsets in descending
     order. Tries both encoding and fallbackencoding for input. Only as
     last resort send as is in fake ascii.
-    Caveat: Do not use for mail parts containing patches!'''
+    Caveat: Do not use for mail parts containing patches!"""
     sendcharsets = charsets or _charsets(ui)
     if not isinstance(s, bytes):
         # We have unicode data, which we need to try and encode to
@@ -427,9 +427,9 @@
 
 def addrlistencode(ui, addrs, charsets=None, display=False):
     # type: (Any, List[bytes], List[str], bool) -> List[str]
-    '''Turns a list of addresses into a list of RFC-2047 compliant headers.
+    """Turns a list of addresses into a list of RFC-2047 compliant headers.
     A single element of input list may contain multiple addresses, but output
-    always has one address per item'''
+    always has one address per item"""
     straddrs = []
     for a in addrs:
         assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
@@ -447,8 +447,8 @@
 
 def mimeencode(ui, s, charsets=None, display=False):
     # type: (Any, bytes, List[str], bool) -> email.message.Message
-    '''creates mime text object, encodes it if needed, and sets
-    charset and transfer-encoding accordingly.'''
+    """creates mime text object, encodes it if needed, and sets
+    charset and transfer-encoding accordingly."""
     cs = 'us-ascii'
     if not display:
         s, cs = _encode(ui, s, charsets)
--- a/mercurial/manifest.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/manifest.py	Fri Nov 27 17:03:29 2020 -0500
@@ -528,8 +528,8 @@
         return dir in self._dirs
 
     def _filesfastpath(self, match):
-        '''Checks whether we can correctly and quickly iterate over matcher
-        files instead of over manifest files.'''
+        """Checks whether we can correctly and quickly iterate over matcher
+        files instead of over manifest files."""
         files = match.files()
         return len(files) < 100 and (
             match.isexact()
@@ -537,13 +537,13 @@
         )
 
     def walk(self, match):
-        '''Generates matching file names.
+        """Generates matching file names.
 
         Equivalent to manifest.matches(match).iterkeys(), but without creating
         an entirely new manifest.
 
         It also reports nonexistent files by marking them bad with match.bad().
-        '''
+        """
         if match.always():
             for f in iter(self):
                 yield f
@@ -591,7 +591,7 @@
         return m
 
     def diff(self, m2, match=None, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         Args:
           m2: the manifest to which this manifest should be compared.
@@ -604,7 +604,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         if match:
             m1 = self._matches(match)
             m2 = m2._matches(match)
@@ -703,14 +703,14 @@
 
 
 def _msearch(m, s, lo=0, hi=None):
-    '''return a tuple (start, end) that says where to find s within m.
+    """return a tuple (start, end) that says where to find s within m.
 
     If the string is found, m[start:end] is the line containing
     that string.  If start == end the string was not found and
     they indicate the proper sorted insertion point.
 
     m should be a buffer, a memoryview or a byte string.
-    s is a byte string'''
+    s is a byte string"""
 
     def advance(i, c):
         while i < lenm and m[i : i + 1] != c:
@@ -909,14 +909,14 @@
         )
 
     def dir(self):
-        '''The directory that this tree manifest represents, including a
-        trailing '/'. Empty string for the repo root directory.'''
+        """The directory that this tree manifest represents, including a
+        trailing '/'. Empty string for the repo root directory."""
         return self._dir
 
     def node(self):
-        '''The node of this instance. nullid for unsaved instances. Should
+        """The node of this instance. nullid for unsaved instances. Should
         be updated when the instance is read or written from a revlog.
-        '''
+        """
         assert not self._dirty
         return self._node
 
@@ -1157,10 +1157,10 @@
         return dirslash in self._dirs or dirslash in self._lazydirs
 
     def walk(self, match):
-        '''Generates matching file names.
+        """Generates matching file names.
 
         It also reports nonexistent files by marking them bad with match.bad().
-        '''
+        """
         if match.always():
             for f in iter(self):
                 yield f
@@ -1202,8 +1202,7 @@
                         yield f
 
     def _matches(self, match):
-        '''recursively generate a new manifest filtered by the match argument.
-        '''
+        """recursively generate a new manifest filtered by the match argument."""
         if match.always():
             return self.copy()
         return self._matches_inner(match)
@@ -1253,7 +1252,7 @@
         raise FastdeltaUnavailable()
 
     def diff(self, m2, match=None, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         Args:
           m2: the manifest to which this manifest should be compared.
@@ -1266,7 +1265,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         if match and not match.always():
             m1 = self._matches(match)
             m2 = m2._matches(match)
@@ -1546,9 +1545,9 @@
 
 @interfaceutil.implementer(repository.imanifeststorage)
 class manifestrevlog(object):
-    '''A revlog that stores manifest texts. This is responsible for caching the
+    """A revlog that stores manifest texts. This is responsible for caching the
     full-text manifest contents.
-    '''
+    """
 
     def __init__(
         self,
@@ -2077,12 +2076,12 @@
         return self._data
 
     def readfast(self, shallow=False):
-        '''Calls either readdelta or read, based on which would be less work.
+        """Calls either readdelta or read, based on which would be less work.
         readdelta is called if the delta is against the p1, and therefore can be
         read quickly.
 
         If `shallow` is True, nothing changes since this is a flat manifest.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         deltaparent = store.deltaparent(r)
@@ -2091,12 +2090,12 @@
         return self.read()
 
     def readdelta(self, shallow=False):
-        '''Returns a manifest containing just the entries that are present
+        """Returns a manifest containing just the entries that are present
         in this manifest, but not in its p1 manifest. This is efficient to read
         if the revlog delta is already p1.
 
         Changing the value of `shallow` has no effect on flat manifests.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
@@ -2208,7 +2207,7 @@
         return self._storage().parents(self._node)
 
     def readdelta(self, shallow=False):
-        '''Returns a manifest containing just the entries that are present
+        """Returns a manifest containing just the entries that are present
         in this manifest, but not in its p1 manifest. This is efficient to read
         if the revlog delta is already p1.
 
@@ -2217,7 +2216,7 @@
         subdirectory entry will be reported as it appears in the manifest, i.e.
         the subdirectory will be reported among files and distinguished only by
         its 't' flag.
-        '''
+        """
         store = self._storage()
         if shallow:
             r = store.rev(self._node)
@@ -2237,13 +2236,13 @@
             return md
 
     def readfast(self, shallow=False):
-        '''Calls either readdelta or read, based on which would be less work.
+        """Calls either readdelta or read, based on which would be less work.
         readdelta is called if the delta is against the p1, and therefore can be
         read quickly.
 
         If `shallow` is True, it only returns the entries from this manifest,
         and not any submanifests.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         deltaparent = store.deltaparent(r)
--- a/mercurial/match.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/match.py	Fri Nov 27 17:03:29 2020 -0500
@@ -47,8 +47,8 @@
 
 
 def _rematcher(regex):
-    '''compile the regexp with the best available regexp engine and return a
-    matcher function'''
+    """compile the regexp with the best available regexp engine and return a
+    matcher function"""
     m = util.re.compile(regex)
     try:
         # slightly faster, provided by facebook's re2 bindings
@@ -82,8 +82,8 @@
 
 
 def _expandsubinclude(kindpats, root):
-    '''Returns the list of subinclude matcher args and the kindpats without the
-    subincludes in it.'''
+    """Returns the list of subinclude matcher args and the kindpats without the
+    subincludes in it."""
     relmatchers = []
     other = []
 
@@ -107,7 +107,7 @@
 
 
 def _kindpatsalwaysmatch(kindpats):
-    """"Checks whether the kindspats match everything, as e.g.
+    """ "Checks whether the kindspats match everything, as e.g.
     'relpath:.' does.
     """
     for kind, pat, source in kindpats:
@@ -117,11 +117,21 @@
 
 
 def _buildkindpatsmatcher(
-    matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
+    matchercls,
+    root,
+    cwd,
+    kindpats,
+    ctx=None,
+    listsubrepos=False,
+    badfn=None,
 ):
     matchers = []
     fms, kindpats = _expandsets(
-        cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
+        cwd,
+        kindpats,
+        ctx=ctx,
+        listsubrepos=listsubrepos,
+        badfn=badfn,
     )
     if kindpats:
         m = matchercls(root, kindpats, badfn=badfn)
@@ -321,8 +331,8 @@
 
 
 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
-    '''Convert 'kind:pat' from the patterns list to tuples with kind and
-    normalized and rooted patterns and with listfiles expanded.'''
+    """Convert 'kind:pat' from the patterns list to tuples with kind and
+    normalized and rooted patterns and with listfiles expanded."""
     kindpats = []
     for kind, pat in [_patsplit(p, default) for p in patterns]:
         if kind in cwdrelativepatternkinds:
@@ -383,8 +393,8 @@
     # Callbacks related to how the matcher is used by dirstate.walk.
     # Subscribers to these events must monkeypatch the matcher object.
     def bad(self, f, msg):
-        '''Callback from dirstate.walk for each explicit file that can't be
-        found/accessed, with an error message.'''
+        """Callback from dirstate.walk for each explicit file that can't be
+        found/accessed, with an error message."""
 
     # If an traversedir is set, it will be called when a directory discovered
     # by recursive traversal is visited.
@@ -395,11 +405,11 @@
         return []
 
     def files(self):
-        '''Explicitly listed files or patterns or roots:
+        """Explicitly listed files or patterns or roots:
         if no patterns or .always(): empty list,
         if exact: list exact files,
         if not .anypats(): list all files and dirs,
-        else: optimal roots'''
+        else: optimal roots"""
         return self._files
 
     @propertycache
@@ -414,18 +424,18 @@
         return False
 
     def visitdir(self, dir):
-        '''Decides whether a directory should be visited based on whether it
+        """Decides whether a directory should be visited based on whether it
         has potential matches in it or one of its subdirectories. This is
         based on the match's primary, included, and excluded patterns.
 
         Returns the string 'all' if the given directory and all subdirectories
         should be visited. Otherwise returns True or False indicating whether
         the given directory should be visited.
-        '''
+        """
         return True
 
     def visitchildrenset(self, dir):
-        '''Decides whether a directory should be visited based on whether it
+        """Decides whether a directory should be visited based on whether it
         has potential matches in it or one of its subdirectories, and
         potentially lists which subdirectories of that directory should be
         visited. This is based on the match's primary, included, and excluded
@@ -464,27 +474,27 @@
           indicating that there are no files in this dir to investigate (or
           equivalently that if there are files to investigate in 'dir' that it
           will always return 'this').
-        '''
+        """
         return b'this'
 
     def always(self):
-        '''Matcher will match everything and .files() will be empty --
-        optimization might be possible.'''
+        """Matcher will match everything and .files() will be empty --
+        optimization might be possible."""
         return False
 
     def isexact(self):
-        '''Matcher will match exactly the list of files in .files() --
-        optimization might be possible.'''
+        """Matcher will match exactly the list of files in .files() --
+        optimization might be possible."""
         return False
 
     def prefix(self):
-        '''Matcher will match the paths in .files() recursively --
-        optimization might be possible.'''
+        """Matcher will match the paths in .files() recursively --
+        optimization might be possible."""
         return False
 
     def anypats(self):
-        '''None of .always(), .isexact(), and .prefix() is true --
-        optimizations will be difficult.'''
+        """None of .always(), .isexact(), and .prefix() is true --
+        optimizations will be difficult."""
         return not self.always() and not self.isexact() and not self.prefix()
 
 
@@ -734,7 +744,7 @@
 
 
 class exactmatcher(basematcher):
-    r'''Matches the input files exactly. They are interpreted as paths, not
+    r"""Matches the input files exactly. They are interpreted as paths, not
     patterns (so no kind-prefixes).
 
     >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
@@ -752,7 +762,7 @@
     False
     >>> m(br're:.*\.c$')
     True
-    '''
+    """
 
     def __init__(self, files, badfn=None):
         super(exactmatcher, self).__init__(badfn)
@@ -799,11 +809,11 @@
 
 
 class differencematcher(basematcher):
-    '''Composes two matchers by matching if the first matches and the second
+    """Composes two matchers by matching if the first matches and the second
     does not.
 
     The second matcher's non-matching-attributes (bad, traversedir) are ignored.
-    '''
+    """
 
     def __init__(self, m1, m2):
         super(differencematcher, self).__init__()
@@ -868,10 +878,10 @@
 
 
 def intersectmatchers(m1, m2):
-    '''Composes two matchers by matching if both of them match.
+    """Composes two matchers by matching if both of them match.
 
     The second matcher's non-matching-attributes (bad, traversedir) are ignored.
-    '''
+    """
     if m1 is None or m2 is None:
         return m1 or m2
     if m1.always():
@@ -1166,7 +1176,7 @@
 
 
 def patkind(pattern, default=None):
-    r'''If pattern is 'kind:pat' with a known kind, return kind.
+    r"""If pattern is 'kind:pat' with a known kind, return kind.
 
     >>> patkind(br're:.*\.c$')
     're'
@@ -1177,7 +1187,7 @@
     >>> patkind(b'main.py')
     >>> patkind(b'main.py', default=b're')
     're'
-    '''
+    """
     return _patsplit(pattern, default)[0]
 
 
@@ -1192,7 +1202,7 @@
 
 
 def _globre(pat):
-    r'''Convert an extended glob string to a regexp string.
+    r"""Convert an extended glob string to a regexp string.
 
     >>> from . import pycompat
     >>> def bprint(s):
@@ -1213,7 +1223,7 @@
     (?:a|b)
     >>> bprint(_globre(br'.\*\?'))
     \.\*\?
-    '''
+    """
     i, n = 0, len(pat)
     res = b''
     group = 0
@@ -1276,9 +1286,9 @@
 
 
 def _regex(kind, pat, globsuffix):
-    '''Convert a (normalized) pattern of any kind into a
+    """Convert a (normalized) pattern of any kind into a
     regular expression.
-    globsuffix is appended to the regexp of globs.'''
+    globsuffix is appended to the regexp of globs."""
     if not pat and kind in (b'glob', b'relpath'):
         return b''
     if kind == b're':
@@ -1312,8 +1322,8 @@
 
 
 def _buildmatch(kindpats, globsuffix, root):
-    '''Return regexp string and a matcher function for kindpats.
-    globsuffix is appended to the regexp of globs.'''
+    """Return regexp string and a matcher function for kindpats.
+    globsuffix is appended to the regexp of globs."""
     matchfuncs = []
 
     subincludes, kindpats = _expandsubinclude(kindpats, root)
@@ -1422,13 +1432,13 @@
 
 
 def _patternrootsanddirs(kindpats):
-    '''Returns roots and directories corresponding to each pattern.
+    """Returns roots and directories corresponding to each pattern.
 
     This calculates the roots and directories exactly matching the patterns and
     returns a tuple of (roots, dirs) for each. It does not return other
     directories which may also need to be considered, like the parent
     directories.
-    '''
+    """
     r = []
     d = []
     for kind, pat, source in kindpats:
@@ -1459,7 +1469,7 @@
 
 
 def _rootsdirsandparents(kindpats):
-    '''Returns roots and exact directories from patterns.
+    """Returns roots and exact directories from patterns.
 
     `roots` are directories to match recursively, `dirs` should
     be matched non-recursively, and `parents` are the implicitly required
@@ -1486,7 +1496,7 @@
     ...      (b'relre', b'rr', b'')])
     >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
     (['', '', ''], []) ['']
-    '''
+    """
     r, d = _patternrootsanddirs(kindpats)
 
     p = set()
@@ -1503,13 +1513,13 @@
 
 
 def _explicitfiles(kindpats):
-    '''Returns the potential explicit filenames from the patterns.
+    """Returns the potential explicit filenames from the patterns.
 
     >>> _explicitfiles([(b'path', b'foo/bar', b'')])
     ['foo/bar']
     >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
     []
-    '''
+    """
     # Keep only the pattern kinds where one can specify filenames (vs only
     # directory names).
     filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
@@ -1528,7 +1538,7 @@
 
 
 def readpatternfile(filepath, warn, sourceinfo=False):
-    '''parse a pattern file, returning a list of
+    """parse a pattern file, returning a list of
     patterns. These patterns should be given to compile()
     to be validated and converted into a match function.
 
@@ -1549,7 +1559,7 @@
     if sourceinfo is set, returns a list of tuples:
     (pattern, lineno, originalline).
     This is useful to debug ignore patterns.
-    '''
+    """
 
     syntaxes = {
         b're': b'relre:',
--- a/mercurial/mdiff.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/mdiff.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,7 +39,7 @@
 
 # TODO: this looks like it could be an attrs, which might help pytype
 class diffopts(object):
-    '''context is the number of context lines
+    """context is the number of context lines
     text treats all files as text
     showfunc enables diff -p output
     git enables the git extended patch format
@@ -50,7 +50,7 @@
     ignorewsamount ignores changes in the amount of whitespace
     ignoreblanklines ignores changes whose lines are all blank
     upgrade generates git diffs to avoid data loss
-    '''
+    """
 
     _HAS_DYNAMIC_ATTRIBUTES = True
 
--- a/mercurial/merge.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/merge.py	Fri Nov 27 17:03:29 2020 -0500
@@ -217,7 +217,10 @@
                 if config == b'warn':
                     warnconflicts.add(f)
                 mresult.addfile(
-                    f, mergestatemod.ACTION_GET, (fl2, True), b'remote created',
+                    f,
+                    mergestatemod.ACTION_GET,
+                    (fl2, True),
+                    b'remote created',
                 )
 
     for f in sorted(abortconflicts):
@@ -281,7 +284,10 @@
         for f in wctx.removed():
             if f not in mctx:
                 mresult.addfile(
-                    f, mergestatemod.ACTION_FORGET, None, b"forget removed",
+                    f,
+                    mergestatemod.ACTION_FORGET,
+                    None,
+                    b"forget removed",
                 )
 
 
@@ -544,10 +550,10 @@
 
 
 class mergeresult(object):
-    '''An object representing result of merging manifests.
+    """An object representing result of merging manifests.
 
     It has information about what actions need to be performed on dirstate,
-    mapping of divergent renames and other such cases.'''
+    mapping of divergent renames and other such cases."""
 
     def __init__(self):
         """
@@ -572,7 +578,7 @@
         self._renamedelete = renamedelete
 
     def addfile(self, filename, action, data, message):
-        """ adds a new file to the mergeresult object
+        """adds a new file to the mergeresult object
 
         filename: file which we are adding
         action: one of mergestatemod.ACTION_*
@@ -589,15 +595,15 @@
         self._actionmapping[action][filename] = (data, message)
 
     def getfile(self, filename, default_return=None):
-        """ returns (action, args, msg) about this file
+        """returns (action, args, msg) about this file
 
-        returns default_return if the file is not present """
+        returns default_return if the file is not present"""
         if filename in self._filemapping:
             return self._filemapping[filename]
         return default_return
 
     def files(self, actions=None):
-        """ returns files on which provided action needs to perfromed
+        """returns files on which provided action needs to perfromed
 
         If actions is None, all files are returned
         """
@@ -613,14 +619,14 @@
                     yield f
 
     def removefile(self, filename):
-        """ removes a file from the mergeresult object as the file might
-        not merging anymore """
+        """removes a file from the mergeresult object as the file might
+        not merging anymore"""
         action, data, message = self._filemapping[filename]
         del self._filemapping[filename]
         del self._actionmapping[action][filename]
 
     def getactions(self, actions, sort=False):
-        """ get list of files which are marked with these actions
+        """get list of files which are marked with these actions
         if sort is true, files for each action is sorted and then added
 
         Returns a list of tuple of form (filename, data, message)
@@ -637,10 +643,10 @@
                     yield f, args, msg
 
     def len(self, actions=None):
-        """ returns number of files which needs actions
+        """returns number of files which needs actions
 
         if actions is passed, the total number of files in that action
-        only is returned """
+        only is returned"""
 
         if actions is None:
             return len(self._filemapping)
@@ -656,8 +662,8 @@
                 yield key, val
 
     def addcommitinfo(self, filename, key, value):
-        """ adds key-value information about filename which will be required
-        while committing this merge """
+        """adds key-value information about filename which will be required
+        while committing this merge"""
         self._commitinfo[filename][key] = value
 
     @property
@@ -674,8 +680,8 @@
 
     @property
     def actionsdict(self):
-        """ returns a dictionary of actions to be perfomed with action as key
-        and a list of files and related arguments as values """
+        """returns a dictionary of actions to be perfomed with action as key
+        and a list of files and related arguments as values"""
         res = collections.defaultdict(list)
         for a, d in pycompat.iteritems(self._actionmapping):
             for f, (args, msg) in pycompat.iteritems(d):
@@ -689,8 +695,8 @@
             self._actionmapping[act][f] = data, msg
 
     def hasconflicts(self):
-        """ tells whether this merge resulted in some actions which can
-        result in conflicts or not """
+        """tells whether this merge resulted in some actions which can
+        result in conflicts or not"""
         for a in self._actionmapping.keys():
             if (
                 a
@@ -839,7 +845,10 @@
                 nol = b'l' not in fl1 + fl2 + fla
                 if n2 == a and fl2 == fla:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_KEEP, (), b'remote unchanged',
+                        f,
+                        mergestatemod.ACTION_KEEP,
+                        (),
+                        b'remote unchanged',
                     )
                 elif n1 == a and fl1 == fla:  # local unchanged - use remote
                     if n1 == n2:  # optimization: keep local content
@@ -936,11 +945,17 @@
                     # This file was locally added. We should forget it instead of
                     # deleting it.
                     mresult.addfile(
-                        f, mergestatemod.ACTION_FORGET, None, b'remote deleted',
+                        f,
+                        mergestatemod.ACTION_FORGET,
+                        None,
+                        b'remote deleted',
                     )
                 else:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_REMOVE, None, b'other deleted',
+                        f,
+                        mergestatemod.ACTION_REMOVE,
+                        None,
+                        b'other deleted',
                     )
                     if branchmerge:
                         # the file must be absent after merging,
@@ -1086,7 +1101,7 @@
 
 def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
     """Resolves false conflicts where the nodeid changed but the content
-       remained the same."""
+    remained the same."""
     # We force a copy of actions.items() because we're going to mutate
     # actions as we resolve trivial conflicts.
     for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
@@ -1423,7 +1438,13 @@
     prefetch = scmutil.prefetchfiles
     matchfiles = scmutil.matchfiles
     prefetch(
-        repo, [(ctx.rev(), matchfiles(repo, files),)],
+        repo,
+        [
+            (
+                ctx.rev(),
+                matchfiles(repo, files),
+            )
+        ],
     )
 
 
@@ -1444,7 +1465,13 @@
 
 
 def applyupdates(
-    repo, mresult, wctx, mctx, overwrite, wantfiledata, labels=None,
+    repo,
+    mresult,
+    wctx,
+    mctx,
+    overwrite,
+    wantfiledata,
+    labels=None,
 ):
     """apply the merge action list to the working directory
 
@@ -1734,7 +1761,8 @@
     if dirstate.rustmod is not None:
         # When using rust status, fsmonitor becomes necessary at higher sizes
         fsmonitorthreshold = repo.ui.configint(
-            b'fsmonitor', b'warn_update_file_count_rust',
+            b'fsmonitor',
+            b'warn_update_file_count_rust',
         )
 
     try:
@@ -2001,7 +2029,10 @@
                     0,
                 ):
                     mresult.addfile(
-                        f, mergestatemod.ACTION_REMOVE, None, b'prompt delete',
+                        f,
+                        mergestatemod.ACTION_REMOVE,
+                        None,
+                        b'prompt delete',
                     )
                 elif f in p1:
                     mresult.addfile(
@@ -2012,7 +2043,10 @@
                     )
                 else:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_ADD, None, b'prompt keep',
+                        f,
+                        mergestatemod.ACTION_ADD,
+                        None,
+                        b'prompt keep',
                     )
             elif m == mergestatemod.ACTION_DELETED_CHANGED:
                 f1, f2, fa, move, anc = args
@@ -2089,7 +2123,13 @@
 
         wantfiledata = updatedirstate and not branchmerge
         stats, getfiledata = applyupdates(
-            repo, mresult, wc, p2, overwrite, wantfiledata, labels=labels,
+            repo,
+            mresult,
+            wc,
+            p2,
+            overwrite,
+            wantfiledata,
+            labels=labels,
         )
 
         if updatedirstate:
--- a/mercurial/mergestate.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/mergestate.py	Fri Nov 27 17:03:29 2020 -0500
@@ -132,7 +132,7 @@
 
 
 class _mergestate_base(object):
-    '''track 3-way merge state of individual files
+    """track 3-way merge state of individual files
 
     The merge state is stored on disk when needed. Two files are used: one with
     an old format (version 1), and one with a new format (version 2). Version 2
@@ -164,7 +164,7 @@
 
     The resolve command transitions between 'u' and 'r' for conflicts and
     'pu' and 'pr' for path conflicts.
-    '''
+    """
 
     def __init__(self, repo):
         """Initialize the merge state.
@@ -275,8 +275,8 @@
         self._dirty = True
 
     def addcommitinfo(self, path, data):
-        """ stores information which is required at commit
-        into _stateextras """
+        """stores information which is required at commit
+        into _stateextras"""
         self._stateextras[path].update(data)
         self._dirty = True
 
--- a/mercurial/metadata.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/metadata.py	Fri Nov 27 17:03:29 2020 -0500
@@ -254,8 +254,7 @@
 
 
 def _process_root(ctx):
-    """compute the appropriate changed files for a changeset with no parents
-    """
+    """compute the appropriate changed files for a changeset with no parents"""
     # Simple, there was nothing before it, so everything is added.
     md = ChangingFiles()
     manifest = ctx.manifest()
@@ -265,8 +264,7 @@
 
 
 def _process_linear(parent_ctx, children_ctx, parent=1):
-    """compute the appropriate changed files for a changeset with a single parent
-    """
+    """compute the appropriate changed files for a changeset with a single parent"""
     md = ChangingFiles()
     parent_manifest = parent_ctx.manifest()
     children_manifest = children_ctx.manifest()
@@ -515,8 +513,7 @@
 
 
 def computechangesetfilesadded(ctx):
-    """return the list of files added in a changeset
-    """
+    """return the list of files added in a changeset"""
     added = []
     for f in ctx.files():
         if not any(f in p for p in ctx.parents()):
@@ -580,8 +577,7 @@
 
 
 def computechangesetfilesremoved(ctx):
-    """return the list of files removed in a changeset
-    """
+    """return the list of files removed in a changeset"""
     removed = []
     for f in ctx.files():
         if f not in ctx:
@@ -593,8 +589,7 @@
 
 
 def computechangesetfilesmerged(ctx):
-    """return the list of files merged in a changeset
-    """
+    """return the list of files merged in a changeset"""
     merged = []
     if len(ctx.parents()) < 2:
         return merged
--- a/mercurial/minirst.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/minirst.py	Fri Nov 27 17:03:29 2020 -0500
@@ -52,7 +52,7 @@
 
 
 def replace(text, substs):
-    '''
+    """
     Apply a list of (find, replace) pairs to a text.
 
     >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
@@ -63,7 +63,7 @@
     >>> encoding.encoding = b'shiftjis'
     >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
     '\\x81\\\\'
-    '''
+    """
 
     # some character encodings (cp932 for Japanese, at least) use
     # ASCII characters other than control/alphabet/digit as a part of
@@ -322,10 +322,10 @@
 
 
 def findtables(blocks):
-    '''Find simple tables
+    """Find simple tables
 
-       Only simple one-line table elements are supported
-    '''
+    Only simple one-line table elements are supported
+    """
 
     for block in blocks:
         # Searching for a block that looks like this:
@@ -432,7 +432,11 @@
     while i < len(blocks):
         if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
             b'type'
-        ] in (b'bullet', b'option', b'field',):
+        ] in (
+            b'bullet',
+            b'option',
+            b'field',
+        ):
             i += 1
         elif not blocks[i - 1][b'lines']:
             # no lines in previous block, do not separate
--- a/mercurial/narrowspec.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/narrowspec.py	Fri Nov 27 17:03:29 2020 -0500
@@ -226,7 +226,7 @@
 
 
 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
-    r""" Restricts the patterns according to repo settings,
+    r"""Restricts the patterns according to repo settings,
     results in a logical AND operation
 
     :param req_includes: requested includes
--- a/mercurial/obsolete.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/obsolete.py	Fri Nov 27 17:03:29 2020 -0500
@@ -998,8 +998,7 @@
 
 @cachefor(b'contentdivergent')
 def _computecontentdivergentset(repo):
-    """the set of rev that compete to be the final successors of some revision.
-    """
+    """the set of rev that compete to be the final successors of some revision."""
     divergent = set()
     obsstore = repo.obsstore
     newermap = {}
--- a/mercurial/obsutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/obsutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -381,7 +381,7 @@
 
 
 def metanotblacklisted(metaitem):
-    """ Check that the key of a meta item (extrakey, extravalue) does not
+    """Check that the key of a meta item (extrakey, extravalue) does not
     match at least one of the blacklist pattern
     """
     metakey = metaitem[0]
@@ -439,7 +439,7 @@
 
 
 def geteffectflag(source, successors):
-    """ From an obs-marker relation, compute what changed between the
+    """From an obs-marker relation, compute what changed between the
     predecessor and the successor.
     """
     effects = 0
@@ -816,7 +816,7 @@
 
 
 def _getobsfate(successorssets):
-    """ Compute a changeset obsolescence fate based on its successorssets.
+    """Compute a changeset obsolescence fate based on its successorssets.
     Successors can be the tipmost ones or the immediate ones. This function
     return values are not meant to be shown directly to users, it is meant to
     be used by internal functions only.
@@ -843,7 +843,7 @@
 
 
 def obsfateverb(successorset, markers):
-    """ Return the verb summarizing the successorset and potentially using
+    """Return the verb summarizing the successorset and potentially using
     information from the markers
     """
     if not successorset:
@@ -856,14 +856,12 @@
 
 
 def markersdates(markers):
-    """returns the list of dates for a list of markers
-    """
+    """returns the list of dates for a list of markers"""
     return [m[4] for m in markers]
 
 
 def markersusers(markers):
-    """ Returns a sorted list of markers users without duplicates
-    """
+    """Returns a sorted list of markers users without duplicates"""
     markersmeta = [dict(m[3]) for m in markers]
     users = {
         encoding.tolocal(meta[b'user'])
@@ -875,8 +873,7 @@
 
 
 def markersoperations(markers):
-    """ Returns a sorted list of markers operations without duplicates
-    """
+    """Returns a sorted list of markers operations without duplicates"""
     markersmeta = [dict(m[3]) for m in markers]
     operations = {
         meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
@@ -886,7 +883,7 @@
 
 
 def obsfateprinter(ui, repo, successors, markers, formatctx):
-    """ Build a obsfate string for a single successorset using all obsfate
+    """Build a obsfate string for a single successorset using all obsfate
     related function defined in obsutil
     """
     quiet = ui.quiet
@@ -950,8 +947,7 @@
 
 
 def _getfilteredreason(repo, changeid, ctx):
-    """return a human-friendly string on why a obsolete changeset is hidden
-    """
+    """return a human-friendly string on why a obsolete changeset is hidden"""
     successors = successorssets(repo, ctx.node())
     fate = _getobsfate(successors)
 
--- a/mercurial/parser.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/parser.py	Fri Nov 27 17:03:29 2020 -0500
@@ -406,8 +406,7 @@
 
 
 def parseerrordetail(inst):
-    """Compose error message from specified ParseError object
-    """
+    """Compose error message from specified ParseError object"""
     if inst.location is not None:
         return _(b'at %d: %s') % (inst.location, inst.message)
     else:
--- a/mercurial/patch.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/patch.py	Fri Nov 27 17:03:29 2020 -0500
@@ -200,7 +200,7 @@
 
 @contextlib.contextmanager
 def extract(ui, fileobj):
-    '''extract patch from data read from fileobj.
+    """extract patch from data read from fileobj.
 
     patch can be a normal patch or contained in an email message.
 
@@ -214,7 +214,7 @@
       - p1,
       - p2.
     Any item can be missing from the dictionary. If filename is missing,
-    fileobj did not contain a patch. Caller must unlink filename when done.'''
+    fileobj did not contain a patch. Caller must unlink filename when done."""
 
     fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
     tmpfp = os.fdopen(fd, 'wb')
@@ -905,8 +905,7 @@
 
 
 class header(object):
-    """patch header
-    """
+    """patch header"""
 
     diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
     diff_re = re.compile(b'diff -r .* (.*)$')
@@ -1854,7 +1853,7 @@
 
 
 def pathtransform(path, strip, prefix):
-    '''turn a path from a patch into a path suitable for the repository
+    """turn a path from a patch into a path suitable for the repository
 
     prefix, if not empty, is expected to be normalized with a / at the end.
 
@@ -1873,7 +1872,7 @@
     >>> pathtransform(b'a/b/c', 3, b'')
     Traceback (most recent call last):
     PatchError: unable to strip away 1 of 3 dirs from a/b/c
-    '''
+    """
     pathlen = len(path)
     i = 0
     if strip == 0:
@@ -2503,7 +2502,7 @@
     copysourcematch=None,
     hunksfilterfn=None,
 ):
-    '''yields diff of changes to files between two nodes, or node and
+    """yields diff of changes to files between two nodes, or node and
     working directory.
 
     if node1 is None, use first dirstate parent instead.
@@ -2531,7 +2530,7 @@
 
     hunksfilterfn, if not None, should be a function taking a filectx and
     hunks generator that may yield filtered hunks.
-    '''
+    """
     if not node1 and not node2:
         node1 = repo.dirstate.p1()
 
@@ -2886,10 +2885,10 @@
 
 
 def _filepairs(modified, added, removed, copy, opts):
-    '''generates tuples (f1, f2, copyop), where f1 is the name of the file
+    """generates tuples (f1, f2, copyop), where f1 is the name of the file
     before and f2 is the the name after. For added files, f1 will be None,
     and for removed files, f2 will be None. copyop may be set to None, 'copy'
-    or 'rename' (the latter two only if opts.git is set).'''
+    or 'rename' (the latter two only if opts.git is set)."""
     gone = set()
 
     copyto = {v: k for k, v in copy.items()}
@@ -2948,13 +2947,13 @@
     losedatafn,
     pathfn,
 ):
-    '''given input data, generate a diff and yield it in blocks
+    """given input data, generate a diff and yield it in blocks
 
     If generating a diff would lose data like flags or binary data and
     losedatafn is not None, it will be called.
 
     pathfn is applied to every path in the diff output.
-    '''
+    """
 
     if opts.noprefix:
         aprefix = bprefix = b''
@@ -3079,7 +3078,7 @@
 
 
 def diffcontent(data1, data2, header, binary, opts):
-    """ diffs two versions of a file.
+    """diffs two versions of a file.
 
     data1 and data2 are tuples containg:
 
@@ -3241,9 +3240,9 @@
 
 
 def diffstatui(*args, **kw):
-    '''like diffstat(), but yields 2-tuples of (output, label) for
+    """like diffstat(), but yields 2-tuples of (output, label) for
     ui.write()
-    '''
+    """
 
     for line in diffstat(*args, **kw).splitlines():
         if line and line[-1] in b'+-':
--- a/mercurial/pathutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/pathutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -24,7 +24,7 @@
 
 
 class pathauditor(object):
-    '''ensure that a filesystem path contains no banned components.
+    """ensure that a filesystem path contains no banned components.
     the following properties of a path are checked:
 
     - ends with a directory separator
@@ -44,7 +44,7 @@
     If 'cached' is set to True, audited paths and sub-directories are cached.
     Be careful to not keep the cache of unmanaged directories for long because
     audited paths may be replaced with symlinks.
-    '''
+    """
 
     def __init__(self, root, callback=None, realfs=True, cached=False):
         self.audited = set()
@@ -59,8 +59,8 @@
             self.normcase = lambda x: x
 
     def __call__(self, path, mode=None):
-        '''Check the relative path.
-        path may contain a pattern (e.g. foodir/**.txt)'''
+        """Check the relative path.
+        path may contain a pattern (e.g. foodir/**.txt)"""
 
         path = util.localpath(path)
         normpath = self.normcase(path)
@@ -164,7 +164,7 @@
 
 
 def canonpath(root, cwd, myname, auditor=None):
-    '''return the canonical path of myname, given cwd and root
+    """return the canonical path of myname, given cwd and root
 
     >>> def check(root, cwd, myname):
     ...     a = pathauditor(root, realfs=False)
@@ -204,7 +204,7 @@
     'filename'
     >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
     'subdir/filename'
-    '''
+    """
     if util.endswithsep(root):
         rootsep = root
     else:
@@ -266,7 +266,7 @@
 
 
 def normasprefix(path):
-    '''normalize the specified path as path prefix
+    """normalize the specified path as path prefix
 
     Returned value can be used safely for "p.startswith(prefix)",
     "p[len(prefix):]", and so on.
@@ -280,7 +280,7 @@
     '/foo/bar/'
     >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
     '/'
-    '''
+    """
     d, p = os.path.splitdrive(path)
     if len(p) != len(pycompat.ossep):
         return path + pycompat.ossep
@@ -300,9 +300,9 @@
     '''a multiset of directory names from a set of file paths'''
 
     def __init__(self, map, skip=None):
-        '''
+        """
         a dict map indicates a dirstate while a list indicates a manifest
-        '''
+        """
         self._dirs = {}
         addpath = self.addpath
         if isinstance(map, dict) and skip is not None:
--- a/mercurial/posix.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/posix.py	Fri Nov 27 17:03:29 2020 -0500
@@ -76,7 +76,7 @@
 
 
 def split(p):
-    '''Same as posixpath.split, but faster
+    """Same as posixpath.split, but faster
 
     >>> import posixpath
     >>> for f in [b'/absolute/path/to/file',
@@ -88,7 +88,7 @@
     ...           b'///multiple_leading_separators_at_root',
     ...           b'']:
     ...     assert split(f) == posixpath.split(f), f
-    '''
+    """
     ht = p.rsplit(b'/', 1)
     if len(ht) == 1:
         return b'', p
@@ -183,9 +183,9 @@
 
 
 def copymode(src, dst, mode=None, enforcewritable=False):
-    '''Copy the file mode from the file at path src to dst.
+    """Copy the file mode from the file at path src to dst.
     If src doesn't exist, we're using mode instead. If mode is None, we're
-    using umask.'''
+    using umask."""
     try:
         st_mode = os.lstat(src).st_mode & 0o777
     except OSError as inst:
@@ -359,24 +359,24 @@
 
 
 def checkosfilename(path):
-    '''Check that the base-relative path is a valid filename on this platform.
-    Returns None if the path is ok, or a UI string describing the problem.'''
+    """Check that the base-relative path is a valid filename on this platform.
+    Returns None if the path is ok, or a UI string describing the problem."""
     return None  # on posix platforms, every path is ok
 
 
 def getfsmountpoint(dirpath):
-    '''Get the filesystem mount point from a directory (best-effort)
+    """Get the filesystem mount point from a directory (best-effort)
 
     Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
-    '''
+    """
     return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
 
 
 def getfstype(dirpath):
-    '''Get the filesystem type name from a directory (best-effort)
+    """Get the filesystem type name from a directory (best-effort)
 
     Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
-    '''
+    """
     return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
 
 
@@ -419,7 +419,7 @@
 if pycompat.isdarwin:
 
     def normcase(path):
-        '''
+        """
         Normalize a filename for OS X-compatible comparison:
         - escape-encode invalid characters
         - decompose to NFD
@@ -434,7 +434,7 @@
         'e\\xcc\\x81'
         >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
         '%b8%ca%c3\\xca\\xbe%c8.jpg'
-        '''
+        """
 
         try:
             return encoding.asciilower(path)  # exception for non-ASCII
@@ -475,7 +475,12 @@
 
     # default mount points
     cygwinmountpoints = sorted(
-        [b"/usr/bin", b"/usr/lib", b"/cygdrive",], reverse=True
+        [
+            b"/usr/bin",
+            b"/usr/lib",
+            b"/cygdrive",
+        ],
+        reverse=True,
     )
 
     # use upper-ing as normcase as same as NTFS workaround
@@ -553,10 +558,10 @@
 
 
 def findexe(command):
-    '''Find executable for command searching like which does.
+    """Find executable for command searching like which does.
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
-    If command isn't found None is returned.'''
+    If command isn't found None is returned."""
     if pycompat.sysplatform == b'OpenVMS':
         return command
 
@@ -587,8 +592,8 @@
 
 
 def statfiles(files):
-    '''Stat each file in files. Yield each stat, or None if a file does not
-    exist or has a type we don't care about.'''
+    """Stat each file in files. Yield each stat, or None if a file does not
+    exist or has a type we don't care about."""
     lstat = os.lstat
     getkind = stat.S_IFMT
     for nf in files:
--- a/mercurial/progress.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/progress.py	Fri Nov 27 17:03:29 2020 -0500
@@ -251,7 +251,7 @@
             return False
 
     def _calibrateestimate(self, topic, now, pos):
-        '''Adjust starttimes and startvals for topic so ETA works better
+        """Adjust starttimes and startvals for topic so ETA works better
 
         If progress is non-linear (ex. get much slower in the last minute),
         it's more friendly to only use a recent time span for ETA and speed
@@ -260,7 +260,7 @@
             [======================================>       ]
                                              ^^^^^^^
                            estimateinterval, only use this for estimation
-        '''
+        """
         interval = self.estimateinterval
         if interval <= 0:
             return
--- a/mercurial/pure/charencode.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/pure/charencode.py	Fri Nov 27 17:03:29 2020 -0500
@@ -21,17 +21,17 @@
 
 
 def asciilower(s):
-    '''convert a string to lowercase if ASCII
+    """convert a string to lowercase if ASCII
 
-    Raises UnicodeDecodeError if non-ASCII characters are found.'''
+    Raises UnicodeDecodeError if non-ASCII characters are found."""
     s.decode('ascii')
     return s.lower()
 
 
 def asciiupper(s):
-    '''convert a string to uppercase if ASCII
+    """convert a string to uppercase if ASCII
 
-    Raises UnicodeDecodeError if non-ASCII characters are found.'''
+    Raises UnicodeDecodeError if non-ASCII characters are found."""
     s.decode('ascii')
     return s.upper()
 
--- a/mercurial/pure/mpatch.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/pure/mpatch.py	Fri Nov 27 17:03:29 2020 -0500
@@ -15,8 +15,7 @@
 
 
 class mpatchError(Exception):
-    """error raised when a delta cannot be decoded
-    """
+    """error raised when a delta cannot be decoded"""
 
 
 # This attempts to apply a series of patches in time proportional to
--- a/mercurial/pure/osutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/pure/osutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,7 +39,7 @@
 
 
 def listdir(path, stat=False, skip=None):
-    '''listdir(path, stat=False) -> list_of_tuples
+    """listdir(path, stat=False) -> list_of_tuples
 
     Return a sorted list containing information about the entries
     in the directory.
@@ -51,7 +51,7 @@
     Otherwise, each element is a 2-tuple:
 
       (name, type)
-    '''
+    """
     result = []
     prefix = path
     if not prefix.endswith(pycompat.ossep):
@@ -222,7 +222,7 @@
         )
 
     class posixfile(object):
-        '''a file object aiming for POSIX-like semantics
+        """a file object aiming for POSIX-like semantics
 
         CPython's open() returns a file that was opened *without* setting the
         _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
@@ -231,7 +231,7 @@
         renamed and deleted while they are held open.
         Note that if a file opened with posixfile is unlinked, the file
         remains but cannot be opened again or be recreated under the same name,
-        until all reading processes have closed the file.'''
+        until all reading processes have closed the file."""
 
         def __init__(self, name, mode=b'r', bufsize=-1):
             if b'b' in mode:
@@ -290,11 +290,11 @@
             return getattr(self._file, name)
 
         def __setattr__(self, name, value):
-            '''mimics the read-only attributes of Python file objects
+            """mimics the read-only attributes of Python file objects
             by raising 'TypeError: readonly attribute' if someone tries:
               f = posixfile('foo.txt')
               f.name = 'bla'
-            '''
+            """
             return self._file.__setattr__(name, value)
 
         def __enter__(self):
--- a/mercurial/pure/parsers.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/pure/parsers.py	Fri Nov 27 17:03:29 2020 -0500
@@ -234,8 +234,7 @@
 
 
 def parse_index_devel_nodemap(data, inline):
-    """like parse_index2, but alway return a PersistentNodeMapIndexObject
-    """
+    """like parse_index2, but alway return a PersistentNodeMapIndexObject"""
     return PersistentNodeMapIndexObject(data), None
 
 
--- a/mercurial/rcutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/rcutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -39,13 +39,13 @@
 
 
 def envrcitems(env=None):
-    '''Return [(section, name, value, source)] config items.
+    """Return [(section, name, value, source)] config items.
 
     The config items are extracted from environment variables specified by env,
     used to override systemrc, but not userrc.
 
     If env is not provided, encoding.environ will be used.
-    '''
+    """
     if env is None:
         env = encoding.environ
     checklist = [
@@ -73,7 +73,7 @@
 
 
 def rccomponents():
-    '''return an ordered [(type, obj)] about where to load configs.
+    """return an ordered [(type, obj)] about where to load configs.
 
     respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
     used. if $HGRCPATH is not set, the platform default will be used.
@@ -84,7 +84,7 @@
     obj is a string, and is the config file path. if type is 'items', obj is a
     list of (section, name, value, source) that should fill the config directly.
     If type is 'resource', obj is a tuple of (package name, resource name).
-    '''
+    """
     envrc = (b'items', envrcitems())
 
     if b'HGRCPATH' in encoding.environ:
@@ -108,9 +108,9 @@
 
 
 def defaultpagerenv():
-    '''return a dict of default environment variables and their values,
+    """return a dict of default environment variables and their values,
     intended to be set before starting a pager.
-    '''
+    """
     return {b'LESS': b'FRX', b'LV': b'-c'}
 
 
--- a/mercurial/registrar.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/registrar.py	Fri Nov 27 17:03:29 2020 -0500
@@ -95,8 +95,7 @@
         self._table.update(registrarbase._table)
 
     def _parsefuncdecl(self, decl):
-        """Parse function declaration and return the name of function in it
-        """
+        """Parse function declaration and return the name of function in it"""
         i = decl.find(b'(')
         if i >= 0:
             return decl[:i]
@@ -121,8 +120,7 @@
         return self._docformat % (decl, doc)
 
     def _extrasetup(self, name, func):
-        """Execute extra setup for registered function, if needed
-        """
+        """Execute extra setup for registered function, if needed"""
 
 
 class command(_funcregistrarbase):
@@ -345,8 +343,7 @@
 
 
 class _templateregistrarbase(_funcregistrarbase):
-    """Base of decorator to register functions as template specific one
-    """
+    """Base of decorator to register functions as template specific one"""
 
     _docformat = b":%s: %s"
 
--- a/mercurial/repoview.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/repoview.py	Fri Nov 27 17:03:29 2020 -0500
@@ -48,8 +48,7 @@
 
 
 def pinnedrevs(repo):
-    """revisions blocking hidden changesets from being filtered
-    """
+    """revisions blocking hidden changesets from being filtered"""
 
     cl = repo.changelog
     pinned = set()
--- a/mercurial/revlog.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/revlog.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1491,8 +1491,8 @@
 
     def lookup(self, id):
         """locate a node based on:
-            - revision number or str(revision number)
-            - nodeid or subset of hex nodeid
+        - revision number or str(revision number)
+        - nodeid or subset of hex nodeid
         """
         n = self._match(id)
         if n is not None:
@@ -1771,8 +1771,7 @@
             return rev - 1
 
     def issnapshot(self, rev):
-        """tells whether rev is a snapshot
-        """
+        """tells whether rev is a snapshot"""
         if not self._sparserevlog:
             return self.deltaparent(rev) == nullrev
         elif util.safehasattr(self.index, b'issnapshot'):
@@ -2037,8 +2036,7 @@
         self._chunkclear()
 
     def _nodeduplicatecallback(self, transaction, node):
-        """called when trying to add a node already stored.
-        """
+        """called when trying to add a node already stored."""
 
     def addrevision(
         self,
--- a/mercurial/revlogutils/nodemap.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/revlogutils/nodemap.py	Fri Nov 27 17:03:29 2020 -0500
@@ -86,8 +86,7 @@
 
 
 class _NoTransaction(object):
-    """transaction like object to update the nodemap outside a transaction
-    """
+    """transaction like object to update the nodemap outside a transaction"""
 
     def __init__(self):
         self._postclose = {}
@@ -129,8 +128,7 @@
 
 
 def _persist_nodemap(tr, revlog, pending=False):
-    """Write nodemap data on disk for a given revlog
-    """
+    """Write nodemap data on disk for a given revlog"""
     if getattr(revlog, 'filteredrevs', ()):
         raise error.ProgrammingError(
             "cannot persist nodemap of a filtered changelog"
@@ -400,15 +398,13 @@
 
 
 def persistent_data(index):
-    """return the persistent binary form for a nodemap for a given index
-    """
+    """return the persistent binary form for a nodemap for a given index"""
     trie = _build_trie(index)
     return _persist_trie(trie)
 
 
 def update_persistent_data(index, root, max_idx, last_rev):
-    """return the incremental update for persistent nodemap from a given index
-    """
+    """return the incremental update for persistent nodemap from a given index"""
     changed_block, trie = _update_trie(index, root, last_rev)
     return (
         changed_block * S_BLOCK.size,
--- a/mercurial/revset.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/revset.py	Fri Nov 27 17:03:29 2020 -0500
@@ -529,8 +529,7 @@
 
 @predicate(b'author(string)', safe=True, weight=10)
 def author(repo, subset, x):
-    """Alias for ``user(string)``.
-    """
+    """Alias for ``user(string)``."""
     # i18n: "author" is a keyword
     n = getstring(x, _(b"author requires a string"))
     kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
@@ -737,8 +736,7 @@
 
 @predicate(b'children(set)', safe=True)
 def children(repo, subset, x):
-    """Child changesets of changesets in set.
-    """
+    """Child changesets of changesets in set."""
     s = getset(repo, fullreposet(repo), x)
     cs = _children(repo, subset, s)
     return subset & cs
@@ -746,8 +744,7 @@
 
 @predicate(b'closed()', safe=True, weight=10)
 def closed(repo, subset, x):
-    """Changeset is closed.
-    """
+    """Changeset is closed."""
     # i18n: "closed" is a keyword
     getargs(x, 0, 0, _(b"closed takes no arguments"))
     return subset.filter(
@@ -771,8 +768,7 @@
 
 @predicate(b'commonancestors(set)', safe=True)
 def commonancestors(repo, subset, x):
-    """Changesets that are ancestors of every changeset in set.
-    """
+    """Changesets that are ancestors of every changeset in set."""
     startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
     if not startrevs:
         return baseset()
@@ -868,8 +864,7 @@
 
 @predicate(b'date(interval)', safe=True, weight=10)
 def date(repo, subset, x):
-    """Changesets within the interval, see :hg:`help dates`.
-    """
+    """Changesets within the interval, see :hg:`help dates`."""
     # i18n: "date" is a keyword
     ds = getstring(x, _(b"date requires a string"))
     dm = dateutil.matchdate(ds)
@@ -1108,8 +1103,7 @@
 
 @predicate(b'extinct()', safe=True)
 def extinct(repo, subset, x):
-    """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)
-    """
+    """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)"""
     # i18n: "extinct" is a keyword
     getargs(x, 0, 0, _(b"extinct takes no arguments"))
     extincts = obsmod.getrevs(repo, b'extinct')
@@ -1216,8 +1210,7 @@
 
 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
 def first(repo, subset, x, order):
-    """An alias for limit().
-    """
+    """An alias for limit()."""
     return limit(repo, subset, x, order)
 
 
@@ -1341,8 +1334,7 @@
 
 @predicate(b'all()', safe=True)
 def getall(repo, subset, x):
-    """All changesets, the same as ``0:tip``.
-    """
+    """All changesets, the same as ``0:tip``."""
     # i18n: "all" is a keyword
     getargs(x, 0, 0, _(b"all takes no arguments"))
     return subset & spanset(repo)  # drop "null" if any
@@ -1480,8 +1472,7 @@
 
 @predicate(b'head()', safe=True)
 def head(repo, subset, x):
-    """Changeset is a named branch head.
-    """
+    """Changeset is a named branch head."""
     # i18n: "head" is a keyword
     getargs(x, 0, 0, _(b"head takes no arguments"))
     hs = set()
@@ -1493,8 +1484,7 @@
 
 @predicate(b'heads(set)', safe=True, takeorder=True)
 def heads(repo, subset, x, order):
-    """Members of set with no children in set.
-    """
+    """Members of set with no children in set."""
     # argument set should never define order
     if order == defineorder:
         order = followorder
@@ -1515,8 +1505,7 @@
 
 @predicate(b'hidden()', safe=True)
 def hidden(repo, subset, x):
-    """Hidden changesets.
-    """
+    """Hidden changesets."""
     # i18n: "hidden" is a keyword
     getargs(x, 0, 0, _(b"hidden takes no arguments"))
     hiddenrevs = repoview.filterrevs(repo, b'visible')
@@ -1546,8 +1535,7 @@
 
 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
 def limit(repo, subset, x, order):
-    """First n members of set, defaulting to 1, starting from offset.
-    """
+    """First n members of set, defaulting to 1, starting from offset."""
     args = getargsdict(x, b'limit', b'set n offset')
     if b'set' not in args:
         # i18n: "limit" is a keyword
@@ -1571,8 +1559,7 @@
 
 @predicate(b'last(set, [n])', safe=True, takeorder=True)
 def last(repo, subset, x, order):
-    """Last n members of set, defaulting to 1.
-    """
+    """Last n members of set, defaulting to 1."""
     # i18n: "last" is a keyword
     l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
     lim = 1
@@ -1592,8 +1579,7 @@
 
 @predicate(b'max(set)', safe=True)
 def maxrev(repo, subset, x):
-    """Changeset with highest revision number in set.
-    """
+    """Changeset with highest revision number in set."""
     os = getset(repo, fullreposet(repo), x)
     try:
         m = os.max()
@@ -1608,8 +1594,7 @@
 
 @predicate(b'merge()', safe=True)
 def merge(repo, subset, x):
-    """Changeset is a merge changeset.
-    """
+    """Changeset is a merge changeset."""
     # i18n: "merge" is a keyword
     getargs(x, 0, 0, _(b"merge takes no arguments"))
     cl = repo.changelog
@@ -1626,8 +1611,7 @@
 
 @predicate(b'branchpoint()', safe=True)
 def branchpoint(repo, subset, x):
-    """Changesets with more than one child.
-    """
+    """Changesets with more than one child."""
     # i18n: "branchpoint" is a keyword
     getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
     cl = repo.changelog
@@ -1648,8 +1632,7 @@
 
 @predicate(b'min(set)', safe=True)
 def minrev(repo, subset, x):
-    """Changeset with lowest revision number in set.
-    """
+    """Changeset with lowest revision number in set."""
     os = getset(repo, fullreposet(repo), x)
     try:
         m = os.min()
@@ -1715,8 +1698,7 @@
 
 @predicate(b'id(string)', safe=True)
 def node_(repo, subset, x):
-    """Revision non-ambiguously specified by the given hex string prefix.
-    """
+    """Revision non-ambiguously specified by the given hex string prefix."""
     # i18n: "id" is a keyword
     l = getargs(x, 1, 1, _(b"id requires one argument"))
     # i18n: "id" is a keyword
@@ -1747,8 +1729,7 @@
 
 @predicate(b'none()', safe=True)
 def none(repo, subset, x):
-    """No changesets.
-    """
+    """No changesets."""
     # i18n: "none" is a keyword
     getargs(x, 0, 0, _(b"none takes no arguments"))
     return baseset()
@@ -1869,8 +1850,7 @@
 
 @predicate(b'p1([set])', safe=True)
 def p1(repo, subset, x):
-    """First parent of changesets in set, or the working directory.
-    """
+    """First parent of changesets in set, or the working directory."""
     if x is None:
         p = repo[x].p1().rev()
         if p >= 0:
@@ -1892,8 +1872,7 @@
 
 @predicate(b'p2([set])', safe=True)
 def p2(repo, subset, x):
-    """Second parent of changesets in set, or the working directory.
-    """
+    """Second parent of changesets in set, or the working directory."""
     if x is None:
         ps = repo[x].parents()
         try:
@@ -2305,8 +2284,7 @@
 
 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
 def reverse(repo, subset, x, order):
-    """Reverse order of set.
-    """
+    """Reverse order of set."""
     l = getset(repo, subset, x, order)
     if order == defineorder:
         l.reverse()
@@ -2315,8 +2293,7 @@
 
 @predicate(b'roots(set)', safe=True)
 def roots(repo, subset, x):
-    """Changesets in set with no parent changeset in set.
-    """
+    """Changesets in set with no parent changeset in set."""
     s = getset(repo, fullreposet(repo), x)
     parents = repo.changelog.parentrevs
 
@@ -2556,8 +2533,7 @@
 
 @predicate(b'orphan()', safe=True)
 def orphan(repo, subset, x):
-    """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
-    """
+    """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)"""
     # i18n: "orphan" is a keyword
     getargs(x, 0, 0, _(b"orphan takes no arguments"))
     orphan = obsmod.getrevs(repo, b'orphan')
@@ -2566,8 +2542,7 @@
 
 @predicate(b'unstable()', safe=True)
 def unstable(repo, subset, x):
-    """Changesets with instabilities. (EXPERIMENTAL)
-    """
+    """Changesets with instabilities. (EXPERIMENTAL)"""
     # i18n: "unstable" is a keyword
     getargs(x, 0, 0, b'unstable takes no arguments')
     _unstable = set()
@@ -2781,8 +2756,7 @@
 
 
 def loadpredicate(ui, extname, registrarobj):
-    """Load revset predicates from specified registrarobj
-    """
+    """Load revset predicates from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         symbols[name] = func
         if func._safe:
--- a/mercurial/revsetlang.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/revsetlang.py	Fri Nov 27 17:03:29 2020 -0500
@@ -83,7 +83,7 @@
 
 
 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
-    '''
+    """
     Parse a revset statement into a stream of tokens
 
     ``syminitletters`` is the set of valid characters for the initial
@@ -102,7 +102,7 @@
     >>> list(tokenize(b"@::"))
     [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
 
-    '''
+    """
     if not isinstance(program, bytes):
         raise error.ProgrammingError(
             b'revset statement must be bytes, got %r' % program
@@ -621,8 +621,7 @@
 
 
 def foldconcat(tree):
-    """Fold elements to be concatenated by `##`
-    """
+    """Fold elements to be concatenated by `##`"""
     if not isinstance(tree, tuple) or tree[0] in (
         b'string',
         b'symbol',
@@ -742,7 +741,7 @@
 
 
 def formatspec(expr, *args):
-    '''
+    """
     This is a convenience function for using revsets internally, and
     escapes arguments appropriately. Aliases are intentionally ignored
     so that intended expression behavior isn't accidentally subverted.
@@ -777,7 +776,7 @@
     "sort((:), 'desc', 'user')"
     >>> formatspec(b'%ls', [b'a', b"'"])
     "_list('a\\\\x00\\\\'')"
-    '''
+    """
     parsed = _parseargs(expr, args)
     ret = []
     for t, arg in parsed:
--- a/mercurial/scmutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/scmutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -66,11 +66,11 @@
 
 @attr.s(slots=True, repr=False)
 class status(object):
-    '''Struct with a list of files per status.
+    """Struct with a list of files per status.
 
     The 'deleted', 'unknown' and 'ignored' properties are only
     relevant to the working copy.
-    '''
+    """
 
     modified = attr.ib(default=attr.Factory(list))
     added = attr.ib(default=attr.Factory(list))
@@ -123,9 +123,9 @@
 
 
 def nochangesfound(ui, repo, excluded=None):
-    '''Report no changes for push/pull, excluded is None or a list of
+    """Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
-    '''
+    """
     secretlist = []
     if excluded:
         for n in excluded:
@@ -335,8 +335,8 @@
 
 
 def checkportabilityalert(ui):
-    '''check if the user's config requests nothing, a warning, or abort for
-    non-portable filenames'''
+    """check if the user's config requests nothing, a warning, or abort for
+    non-portable filenames"""
     val = ui.config(b'ui', b'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
@@ -402,8 +402,8 @@
 
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
-    '''yield every hg repository under path, always recursively.
-    The recurse flag will only control recursion into repo working dirs'''
+    """yield every hg repository under path, always recursively.
+    The recurse flag will only control recursion into repo working dirs"""
 
     def errhandler(err):
         if err.filename == path:
@@ -793,7 +793,7 @@
 
 
 def walkchangerevs(repo, revs, makefilematcher, prepare):
-    '''Iterate over files and the revs in a "windowed" way.
+    """Iterate over files and the revs in a "windowed" way.
 
     Callers most commonly need to iterate backwards over the history
     in which they are interested. Doing so has awful (quadratic-looking)
@@ -805,7 +805,7 @@
 
     This function returns an iterator yielding contexts. Before
     yielding each context, the iterator will first call the prepare
-    function on each context in the window in forward order.'''
+    function on each context in the window in forward order."""
 
     if not revs:
         return []
@@ -897,17 +897,17 @@
 
 
 def anypats(pats, opts):
-    '''Checks if any patterns, including --include and --exclude were given.
+    """Checks if any patterns, including --include and --exclude were given.
 
     Some commands (e.g. addremove) use this condition for deciding whether to
     print absolute or relative paths.
-    '''
+    """
     return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
 
 
 def expandpats(pats):
-    '''Expand bare globs when running on windows.
-    On posix we assume it already has already been done by sh.'''
+    """Expand bare globs when running on windows.
+    On posix we assume it has already been done by sh."""
     if not util.expandglobs:
         return list(pats)
     ret = []
@@ -928,9 +928,9 @@
 def matchandpats(
     ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
 ):
-    '''Return a matcher and the patterns that were used.
+    """Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
-    is provided.'''
+    is provided."""
     if opts is None:
         opts = {}
     if not globbed and default == b'relpath':
@@ -1001,7 +1001,7 @@
 
 
 def backuppath(ui, repo, filepath):
-    '''customize where working copy backup files (.orig files) are created
+    """customize where working copy backup files (.orig files) are created
 
     Fetch user defined path from config file: [ui] origbackuppath = <path>
     Fall back to default (filepath with .orig suffix) if not specified
@@ -1009,7 +1009,7 @@
     filepath is repo-relative
 
     Returns an absolute path
-    '''
+    """
     origvfs = getorigvfs(ui, repo)
     if origvfs is None:
         return repo.wjoin(filepath + b".orig")
@@ -1300,8 +1300,8 @@
 
 
 def marktouched(repo, files, similarity=0.0):
-    '''Assert that files have somehow been operated upon. files are relative to
-    the repo root.'''
+    """Assert that files have somehow been operated upon. files are relative to
+    the repo root."""
     m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
     rejected = []
 
@@ -1335,11 +1335,11 @@
 
 
 def _interestingfiles(repo, matcher):
-    '''Walk dirstate with matcher, looking for files that addremove would care
+    """Walk dirstate with matcher, looking for files that addremove would care
     about.
 
     This is different from dirstate.status because it doesn't care about
-    whether files are modified or clean.'''
+    whether files are modified or clean."""
     added, unknown, deleted, removed, forgotten = [], [], [], [], []
     audit_path = pathutil.pathauditor(repo.root, cached=True)
 
@@ -1394,8 +1394,8 @@
 
 
 def _markchanges(repo, unknown, deleted, renames):
-    '''Marks the files in unknown as added, the files in deleted as removed,
-    and the files in renames as copied.'''
+    """Marks the files in unknown as added, the files in deleted as removed,
+    and the files in renames as copied."""
     wctx = repo[None]
     with repo.wlock():
         wctx.forget(deleted)
@@ -1424,10 +1424,10 @@
         endrev = len(repo)
 
     def getrenamed(fn, rev):
-        '''looks up all renames for a file (up to endrev) the first
+        """looks up all renames for a file (up to endrev) the first
         time the file is given. It indexes on the changerev and only
         parses the manifest if linkrev != changerev.
-        Returns rename info for fn at changerev rev.'''
+        Returns rename info for fn at changerev rev."""
         if fn not in rcache:
             rcache[fn] = {}
             fl = repo.file(fn)
@@ -1548,7 +1548,7 @@
 
 
 def filterrequirements(requirements):
-    """ filters the requirements into two sets:
+    """filters the requirements into two sets:
 
     wcreq: requirements which should be written in .hg/requires
     storereq: which should be written in .hg/store/requires
@@ -1871,8 +1871,7 @@
 
 
 def gdinitconfig(ui):
-    """helper function to know if a repo should be created as general delta
-    """
+    """helper function to know if a repo should be created as general delta"""
     # experimental config: format.generaldelta
     return ui.configbool(b'format', b'generaldelta') or ui.configbool(
         b'format', b'usegeneraldelta'
@@ -1880,8 +1879,7 @@
 
 
 def gddeltaconfig(ui):
-    """helper function to know if incoming delta should be optimised
-    """
+    """helper function to know if incoming delta should be optimised"""
     # experimental config: format.generaldelta
     return ui.configbool(b'format', b'generaldelta')
 
--- a/mercurial/setdiscovery.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/setdiscovery.py	Fri Nov 27 17:03:29 2020 -0500
@@ -292,9 +292,9 @@
     ancestorsof=None,
     samplegrowth=1.05,
 ):
-    '''Return a tuple (common, anyincoming, remoteheads) used to identify
+    """Return a tuple (common, anyincoming, remoteheads) used to identify
     missing nodes from or in remote.
-    '''
+    """
     start = util.timer()
 
     roundtrips = 0
@@ -371,7 +371,10 @@
     with remote.commandexecutor() as e:
         fheads = e.callcommand(b'heads', {})
         fknown = e.callcommand(
-            b'known', {b'nodes': [clnode(r) for r in sample],}
+            b'known',
+            {
+                b'nodes': [clnode(r) for r in sample],
+            },
         )
 
     srvheadhashes, yesno = fheads.result(), fknown.result()
@@ -449,7 +452,10 @@
 
         with remote.commandexecutor() as e:
             yesno = e.callcommand(
-                b'known', {b'nodes': [clnode(r) for r in sample],}
+                b'known',
+                {
+                    b'nodes': [clnode(r) for r in sample],
+                },
             ).result()
 
         full = True
--- a/mercurial/shelve.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/shelve.py	Fri Nov 27 17:03:29 2020 -0500
@@ -350,8 +350,7 @@
 
 
 def _aborttransaction(repo, tr):
-    '''Abort current transaction for shelve/unshelve, but keep dirstate
-    '''
+    """Abort current transaction for shelve/unshelve, but keep dirstate"""
     dirstatebackupname = b'dirstate.shelve'
     repo.dirstate.savebackup(tr, dirstatebackupname)
     tr.abort()
--- a/mercurial/similar.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/similar.py	Fri Nov 27 17:03:29 2020 -0500
@@ -15,11 +15,11 @@
 
 
 def _findexactmatches(repo, added, removed):
-    '''find renamed files that have no changes
+    """find renamed files that have no changes
 
     Takes a list of new filectxs and a list of removed filectxs, and yields
     (before, after) tuples of exact matches.
-    '''
+    """
     # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
     # We use hash() to discard fctx.data() from memory.
     hashes = {}
@@ -77,11 +77,11 @@
 
 
 def _findsimilarmatches(repo, added, removed, threshold):
-    '''find potentially renamed files based on similar file content
+    """find potentially renamed files based on similar file content
 
     Takes a list of new filectxs and a list of removed filectxs, and yields
     (before, after, score) tuples of partial matches.
-    '''
+    """
     copies = {}
     progress = repo.ui.makeprogress(
         _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
--- a/mercurial/simplemerge.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/simplemerge.py	Fri Nov 27 17:03:29 2020 -0500
@@ -57,8 +57,7 @@
 
 
 def compare_range(a, astart, aend, b, bstart, bend):
-    """Compare a[astart:aend] == b[bstart:bend], without slicing.
-    """
+    """Compare a[astart:aend] == b[bstart:bend], without slicing."""
     if (aend - astart) != (bend - bstart):
         return False
     for ia, ib in zip(
@@ -102,8 +101,7 @@
         localorother=None,
         minimize=False,
     ):
-        """Return merge in cvs-like form.
-        """
+        """Return merge in cvs-like form."""
         self.conflicts = False
         newline = b'\n'
         if len(self.a) > 0:
--- a/mercurial/sshpeer.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/sshpeer.py	Fri Nov 27 17:03:29 2020 -0500
@@ -121,8 +121,7 @@
         return self._call(b'readline')
 
     def _call(self, methname, data=None):
-        """call <methname> on "main", forward output of "side" while blocking
-        """
+        """call <methname> on "main", forward output of "side" while blocking"""
         # data can be '' or 0
         if (data is not None and not data) or self._main.closed:
             _forwardoutput(self._ui, self._side)
--- a/mercurial/sslutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/sslutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -227,8 +227,7 @@
 
 
 def commonssloptions(minimumprotocol):
-    """Return SSLContext options common to servers and clients.
-    """
+    """Return SSLContext options common to servers and clients."""
     if minimumprotocol not in configprotocols:
         raise ValueError(b'protocol value not supported: %s' % minimumprotocol)
 
@@ -617,11 +616,11 @@
 
 
 def _verifycert(cert, hostname):
-    '''Verify that cert (in socket.getpeercert() format) matches hostname.
+    """Verify that cert (in socket.getpeercert() format) matches hostname.
     CRLs is not handled.
 
     Returns error message if any problems are found and None on success.
-    '''
+    """
     if not cert:
         return _(b'no certificate received')
 
--- a/mercurial/state.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/state.py	Fri Nov 27 17:03:29 2020 -0500
@@ -55,7 +55,7 @@
     """
 
     def __init__(self, repo, fname):
-        """ repo is the repo object
+        """repo is the repo object
         fname is the file name in which data should be stored in .hg directory
         """
         self._repo = repo
@@ -105,11 +105,11 @@
 
 class _statecheck(object):
     """a utility class that deals with multistep operations like graft,
-       histedit, bisect, update etc and check whether such commands
-       are in an unfinished conditition or not and return appropriate message
-       and hint.
-       It also has the ability to register and determine the states of any new
-       multistep operation or multistep command extension.
+    histedit, bisect, update, etc. and checks whether such commands
+    are in an unfinished condition or not and returns an appropriate message
+    and hint.
+    It also has the ability to register and determine the states of any new
+    multistep operation or multistep command extension.
     """
 
     def __init__(
@@ -173,7 +173,11 @@
                 return _(
                     b"use 'hg %s --continue', 'hg %s --abort', "
                     b"or 'hg %s --stop'"
-                ) % (self._opname, self._opname, self._opname,)
+                ) % (
+                    self._opname,
+                    self._opname,
+                    self._opname,
+                )
 
         return self._cmdhint
 
--- a/mercurial/statprof.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/statprof.py	Fri Nov 27 17:03:29 2020 -0500
@@ -411,11 +411,11 @@
 
 
 def reset(frequency=None):
-    '''Clear out the state of the profiler.  Do not call while the
+    """Clear out the state of the profiler.  Do not call while the
     profiler is running.
 
     The optional frequency argument specifies the number of samples to
-    collect per second.'''
+    collect per second."""
     assert state.profile_level == 0, b"Can't reset() while statprof is running"
     CodeSite.cache.clear()
     state.reset(frequency)
@@ -525,8 +525,8 @@
 
 
 def display_by_line(data, fp):
-    '''Print the profiler data with each sample line represented
-    as one row in a table.  Sorted by self-time per line.'''
+    """Print the profiler data with each sample line represented
+    as one row in a table.  Sorted by self-time per line."""
     stats = SiteStats.buildstats(data.samples)
     stats.sort(reverse=True, key=lambda x: x.selfseconds())
 
@@ -554,9 +554,9 @@
 
 
 def display_by_method(data, fp):
-    '''Print the profiler data with each sample function represented
+    """Print the profiler data with each sample function represented
     as one row in a table.  Important lines within that function are
-    output as nested rows.  Sorted by self-time per line.'''
+    output as nested rows.  Sorted by self-time per line."""
     fp.write(
         b'%5.5s %10.10s   %7.7s  %-8.8s\n'
         % (b'%  ', b'cumulative', b'self', b'')
@@ -835,9 +835,9 @@
 
 
 def simplifypath(path):
-    '''Attempt to make the path to a Python module easier to read by
+    """Attempt to make the path to a Python module easier to read by
     removing whatever part of the Python search path it was found
-    on.'''
+    on."""
 
     if path in _pathcache:
         return _pathcache[path]
--- a/mercurial/store.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/store.py	Fri Nov 27 17:03:29 2020 -0500
@@ -52,7 +52,7 @@
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def _encodedir(path):
-    '''
+    """
     >>> _encodedir(b'data/foo.i')
     'data/foo.i'
     >>> _encodedir(b'data/foo.i/bla.i')
@@ -61,7 +61,7 @@
     'data/foo.i.hg.hg/bla.i'
     >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
     'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
-    '''
+    """
     return (
         path.replace(b".hg/", b".hg.hg/")
         .replace(b".i/", b".i.hg/")
@@ -73,14 +73,14 @@
 
 
 def decodedir(path):
-    '''
+    """
     >>> decodedir(b'data/foo.i')
     'data/foo.i'
     >>> decodedir(b'data/foo.i.hg/bla.i')
     'data/foo.i/bla.i'
     >>> decodedir(b'data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
-    '''
+    """
     if b".hg/" not in path:
         return path
     return (
@@ -91,14 +91,14 @@
 
 
 def _reserved():
-    ''' characters that are problematic for filesystems
+    """characters that are problematic for filesystems
 
     * ascii escapes (0..31)
     * ascii hi (126..255)
     * windows specials
 
     these characters will be escaped by encodefunctions
-    '''
+    """
     winreserved = [ord(x) for x in u'\\:*?"<>|']
     for x in range(32):
         yield x
@@ -109,7 +109,7 @@
 
 
 def _buildencodefun():
-    '''
+    """
     >>> enc, dec = _buildencodefun()
 
     >>> enc(b'nothing/special.txt')
@@ -131,7 +131,7 @@
     'the~07quick~adshot'
     >>> dec(b'the~07quick~adshot')
     'the\\x07quick\\xadshot'
-    '''
+    """
     e = b'_'
     xchr = pycompat.bytechr
     asciistr = list(map(xchr, range(127)))
@@ -172,23 +172,23 @@
 
 
 def encodefilename(s):
-    '''
+    """
     >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
     'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
-    '''
+    """
     return _encodefname(encodedir(s))
 
 
 def decodefilename(s):
-    '''
+    """
     >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
     'foo.i/bar.d/bla.hg/hi:world?/HELLO'
-    '''
+    """
     return decodedir(_decodefname(s))
 
 
 def _buildlowerencodefun():
-    '''
+    """
     >>> f = _buildlowerencodefun()
     >>> f(b'nothing/special.txt')
     'nothing/special.txt'
@@ -198,7 +198,7 @@
     'hello~3aworld~3f'
     >>> f(b'the\\x07quick\\xADshot')
     'the~07quick~adshot'
-    '''
+    """
     xchr = pycompat.bytechr
     cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
     for x in _reserved():
@@ -220,7 +220,7 @@
 
 
 def _auxencode(path, dotencode):
-    '''
+    """
     Encodes filenames containing names reserved by Windows or which end in
     period or space. Does not touch other single reserved characters c.
     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
@@ -240,7 +240,7 @@
     ['foo.~20']
     >>> _auxencode([b' .foo'], True)
     ['~20.foo']
-    '''
+    """
     for i, n in enumerate(path):
         if not n:
             continue
@@ -305,7 +305,7 @@
 
 
 def _hybridencode(path, dotencode):
-    '''encodes path with a length limit
+    """encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
 
@@ -334,7 +334,7 @@
 
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
-    '''
+    """
     path = encodedir(path)
     ef = _encodefname(path).split(b'/')
     res = b'/'.join(_auxencode(ef, dotencode))
@@ -444,11 +444,11 @@
         return reversed(self._walk(b'', False))
 
     def walk(self, matcher=None):
-        '''yields (unencoded, encoded, size)
+        """yields (unencoded, encoded, size)
 
         if a matcher is passed, storage files of only those tracked paths
         are passed with matches the matcher
-        '''
+        """
         # yield data files first
         for x in self.datafiles(matcher):
             yield x
@@ -517,10 +517,10 @@
         self.addls = set()
 
     def ensureloaded(self, warn=None):
-        '''read the fncache file if not already read.
+        """read the fncache file if not already read.
 
         If the file on disk is corrupted, raise. If warn is provided,
-        warn and keep going instead.'''
+        warn and keep going instead."""
         if self.entries is None:
             self._load(warn)
 
--- a/mercurial/strip.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/strip.py	Fri Nov 27 17:03:29 2020 -0500
@@ -114,7 +114,12 @@
             ),
         ),
         (b'', b'no-backup', None, _(b'do not save backup bundle')),
-        (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
+        (
+            b'',
+            b'nobackup',
+            None,
+            _(b'do not save backup bundle (DEPRECATED)'),
+        ),
         (b'n', b'', None, _(b'ignored  (DEPRECATED)')),
         (
             b'k',
--- a/mercurial/subrepo.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/subrepo.py	Fri Nov 27 17:03:29 2020 -0500
@@ -49,9 +49,9 @@
 
 
 def _expandedabspath(path):
-    '''
+    """
     get a path or url and if it is a path expand it and return an absolute path
-    '''
+    """
     expandedpath = util.urllocalpath(util.expandpath(path))
     u = util.url(expandedpath)
     if not u.scheme:
@@ -268,8 +268,7 @@
             )
 
     def bailifchanged(self, ignoreupdate=False, hint=None):
-        """raise Abort if subrepository is ``dirty()``
-        """
+        """raise Abort if subrepository is ``dirty()``"""
         dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
         if dirtyreason:
             raise error.Abort(dirtyreason, hint=hint)
@@ -291,8 +290,7 @@
         raise NotImplementedError
 
     def phase(self, state):
-        """returns phase of specified state in the subrepository.
-        """
+        """returns phase of specified state in the subrepository."""
         return phases.public
 
     def remove(self):
@@ -384,10 +382,10 @@
         return total
 
     def walk(self, match):
-        '''
+        """
         walk recursively through the directory tree, finding all files
         matched by the match function
-        '''
+        """
 
     def forget(self, match, prefix, uipathfn, dryrun, interactive):
         return ([], [])
@@ -423,9 +421,9 @@
         return revid
 
     def unshare(self):
-        '''
+        """
         convert this repository from shared to normal storage.
-        '''
+        """
 
     def verify(self, onpush=False):
         """verify the revision of this repository that is held in `_state` is
@@ -437,14 +435,12 @@
 
     @propertycache
     def wvfs(self):
-        """return vfs to access the working directory of this subrepository
-        """
+        """return vfs to access the working directory of this subrepository"""
         return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
 
     @propertycache
     def _relpath(self):
-        """return path to this subrepository as seen from outermost repository
-        """
+        """return path to this subrepository as seen from outermost repository"""
         return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
 
 
@@ -503,10 +499,10 @@
         return clean
 
     def _calcstorehash(self, remotepath):
-        '''calculate a unique "store hash"
+        """calculate a unique "store hash"
 
         This method is used to to detect when there are changes that may
-        require a push to a given remote path.'''
+        require a push to a given remote path."""
         # sort the files that will be hashed in increasing (likely) file size
         filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
         yield b'# %s\n' % _expandedabspath(remotepath)
@@ -525,11 +521,11 @@
         return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
 
     def _cachestorehash(self, remotepath):
-        '''cache the current store hash
+        """cache the current store hash
 
         Each remote repo requires its own store hash cache, because a subrepo
         store may be "clean" versus a given remote repo, but not versus another
-        '''
+        """
         cachefile = _getstorehashcachename(remotepath)
         with self._repo.lock():
             storehash = list(self._calcstorehash(remotepath))
@@ -537,8 +533,7 @@
             vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
 
     def _getctx(self):
-        '''fetch the context for this subrepo revision, possibly a workingctx
-        '''
+        """fetch the context for this subrepo revision, possibly a workingctx"""
         if self._ctx.rev() is None:
             return self._repo[None]  # workingctx if parent is workingctx
         else:
@@ -1048,14 +1043,12 @@
 
     @propertycache
     def wvfs(self):
-        """return own wvfs for efficiency and consistency
-        """
+        """return own wvfs for efficiency and consistency"""
         return self._repo.wvfs
 
     @propertycache
     def _relpath(self):
-        """return path to this subrepository as seen from outermost repository
-        """
+        """return path to this subrepository as seen from outermost repository"""
         # Keep consistent dir separators by avoiding vfs.join(self._path)
         return reporelpath(self._repo)
 
@@ -1170,12 +1163,16 @@
                 externals.append(path)
             elif item == 'missing':
                 missing.append(path)
-            if item not in (
-                '',
-                'normal',
-                'unversioned',
-                'external',
-            ) or props not in ('', 'none', 'normal'):
+            if (
+                item
+                not in (
+                    '',
+                    'normal',
+                    'unversioned',
+                    'external',
+                )
+                or props not in ('', 'none', 'normal')
+            ):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -1384,7 +1381,7 @@
 
     @staticmethod
     def _checkversion(out):
-        '''ensure git version is new enough
+        """ensure git version is new enough
 
         >>> _checkversion = gitsubrepo._checkversion
         >>> _checkversion(b'git version 1.6.0')
@@ -1405,7 +1402,7 @@
         'unknown'
         >>> _checkversion(b'no')
         'unknown'
-        '''
+        """
         version = gitsubrepo._gitversion(out)
         # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
         # despite the docstring comment.  For now, error on 1.4.0, warn on
@@ -1516,9 +1513,9 @@
         self._gitcommand([b'update-index', b'-q', b'--refresh'])
 
     def _gitbranchmap(self):
-        '''returns 2 things:
+        """returns 2 things:
         a map from git branch to revision
-        a map from revision to branches'''
+        a map from revision to branches"""
         branch2rev = {}
         rev2branch = {}
 
--- a/mercurial/tagmerge.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/tagmerge.py	Fri Nov 27 17:03:29 2020 -0500
@@ -87,12 +87,12 @@
 
 
 def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
-    '''read the .hgtags file into a structure that is suitable for merging
+    """read the .hgtags file into a structure that is suitable for merging
 
     Depending on the keeplinenums flag, clear the line numbers associated
     with each tag. This is done because only the line numbers of the first
     parent are useful for merging.
-    '''
+    """
     filetags = tagsmod._readtaghist(
         ui, repo, lines, fn=fn, recode=None, calcnodelines=True
     )[1]
@@ -104,7 +104,7 @@
 
 
 def grouptagnodesbyline(tagnodes):
-    '''
+    """
     Group nearby nodes (i.e. those that must be written next to each other)
 
     The input is a list of [node, position] pairs, corresponding to a given tag
@@ -118,7 +118,7 @@
     position is None).
 
     The result is a list of [position, [consecutive node list]]
-    '''
+    """
     firstlinenum = None
     for hexnode, linenum in tagnodes:
         firstlinenum = linenum
@@ -139,14 +139,14 @@
 
 
 def writemergedtags(fcd, mergedtags):
-    '''
+    """
     write the merged tags while trying to minimize the diff to the first parent
 
     This function uses the ordering info stored on the merged tags dict to
     generate an .hgtags file which is correct (in the sense that its contents
     correspond to the result of the tag merge) while also being as close as
     possible to the first parent's .hgtags file.
-    '''
+    """
     # group the node-tag pairs that must be written next to each other
     for tname, taglist in list(mergedtags.items()):
         mergedtags[tname] = grouptagnodesbyline(taglist)
@@ -175,12 +175,12 @@
 
 
 def singletagmerge(p1nodes, p2nodes):
-    '''
+    """
     merge the nodes corresponding to a single tag
 
     Note that the inputs are lists of node-linenum pairs (i.e. not just lists
     of nodes)
-    '''
+    """
     if not p2nodes:
         return p1nodes
     if not p1nodes:
@@ -221,10 +221,10 @@
 
 
 def merge(repo, fcd, fco, fca):
-    '''
+    """
     Merge the tags of two revisions, taking into account the base tags
     Try to minimize the diff between the merged tags and the first parent tags
-    '''
+    """
     ui = repo.ui
     # read the p1, p2 and base tags
     # only keep the line numbers for the p1 tags
--- a/mercurial/tags.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/tags.py	Fri Nov 27 17:03:29 2020 -0500
@@ -177,12 +177,12 @@
 
 
 def findglobaltags(ui, repo):
-    '''Find global tags in a repo: return a tagsmap
+    """Find global tags in a repo: return a tagsmap
 
     tagsmap: tag name to (node, hist) 2-tuples.
 
     The tags cache is read and updated as a side-effect of calling.
-    '''
+    """
     (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
     if cachetags is not None:
         assert not shouldwrite
@@ -267,7 +267,7 @@
 
 
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
-    '''Read tag definitions from a file (or any source of lines).
+    """Read tag definitions from a file (or any source of lines).
 
     This function returns two sortdicts with similar information:
 
@@ -283,7 +283,7 @@
     When calcnodelines is False the hextaglines dict is not calculated (an
     empty dict is returned). This is done to improve this function's
     performance in cases where the line numbers are not needed.
-    '''
+    """
 
     bintaghist = util.sortdict()
     hextaglines = util.sortdict()
@@ -325,14 +325,14 @@
 
 
 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
-    '''Read tag definitions from a file (or any source of lines).
+    """Read tag definitions from a file (or any source of lines).
 
     Returns a mapping from tag name to (node, hist).
 
     "node" is the node id from the last line read for that name. "hist"
     is the list of node ids previously associated with it (in file order).
     All node ids are binary, not hex.
-    '''
+    """
     filetags, nodelines = _readtaghist(
         ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
     )
@@ -390,7 +390,7 @@
 
 
 def _readtagcache(ui, repo):
-    '''Read the tag cache.
+    """Read the tag cache.
 
     Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
 
@@ -406,7 +406,7 @@
 
     If the cache is not up to date, the caller is responsible for reading tag
     info from each returned head. (See findglobaltags().)
-    '''
+    """
     try:
         cachefile = repo.cachevfs(_filename(repo), b'r')
         # force reading the file for static-http
@@ -549,7 +549,7 @@
 
 
 def tag(repo, names, node, message, local, user, date, editor=False):
-    '''tag a revision with one or more symbolic names.
+    """tag a revision with one or more symbolic names.
 
     names is a list of strings or, when adding a single tag, names may be a
     string.
@@ -567,7 +567,7 @@
 
     user: name of user to use if committing
 
-    date: date tuple to use if committing'''
+    date: date tuple to use if committing"""
 
     if not local:
         m = matchmod.exact([b'.hgtags'])
--- a/mercurial/templatefilters.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/templatefilters.py	Fri Nov 27 17:03:29 2020 -0500
@@ -548,8 +548,7 @@
 
 
 def loadfilter(ui, extname, registrarobj):
-    """Load template filter from specified registrarobj
-    """
+    """Load template filter from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         filters[name] = func
 
--- a/mercurial/templatefuncs.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/templatefuncs.py	Fri Nov 27 17:03:29 2020 -0500
@@ -912,8 +912,7 @@
 
 
 def loadfunction(ui, extname, registrarobj):
-    """Load template function from specified registrarobj
-    """
+    """Load template function from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         funcs[name] = func
 
--- a/mercurial/templatekw.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/templatekw.py	Fri Nov 27 17:03:29 2020 -0500
@@ -994,8 +994,7 @@
 
 
 def loadkeyword(ui, extname, registrarobj):
-    """Load template keyword from specified registrarobj
-    """
+    """Load template keyword from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         keywords[name] = func
 
--- a/mercurial/templater.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/templater.py	Fri Nov 27 17:03:29 2020 -0500
@@ -663,7 +663,7 @@
 
 
 class engine(object):
-    '''template expansion engine.
+    """template expansion engine.
 
     template expansion works like this. a map file contains key=value
     pairs. if value is quoted, it is treated as string. otherwise, it
@@ -680,7 +680,7 @@
     {key%format}.
 
     filter uses function to transform value. syntax is
-    {key|filter1|filter2|...}.'''
+    {key|filter1|filter2|...}."""
 
     def __init__(self, loader, filters=None, defaults=None, resources=None):
         self._loader = loader
@@ -781,9 +781,9 @@
             return False
 
     def process(self, t, mapping):
-        '''Perform expansion. t is name of map element to expand.
+        """Perform expansion. t is name of map element to expand.
         mapping contains added elements for use during expansion. Is a
-        generator.'''
+        generator."""
         func, data = self._load(t)
         return self._expand(func, data, mapping)
 
@@ -857,7 +857,11 @@
         if subresource:
             data = subresource.read()
             conf.parse(
-                abs, data, sections=sections, remap=remap, include=include,
+                abs,
+                data,
+                sections=sections,
+                remap=remap,
+                include=include,
             )
 
     data = fp.read()
@@ -1094,12 +1098,12 @@
 
 
 def open_template(name, templatepath=None):
-    '''returns a file-like object for the given template, and its full path
+    """returns a file-like object for the given template, and its full path
 
     If the name is a relative path and we're in a frozen binary, the template
     will be read from the mercurial.templates package instead. The returned path
     will then be the relative path.
-    '''
+    """
     # Does the name point directly to a map file?
     if os.path.isfile(name) or os.path.isabs(name):
         return name, open(name, mode='rb')
--- a/mercurial/testing/storage.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/testing/storage.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1021,7 +1021,12 @@
     def testcensored(self):
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1050,7 +1055,12 @@
 
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1232,7 +1242,12 @@
         # Attempt to apply a delta made against a censored revision.
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
--- a/mercurial/transaction.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/transaction.py	Fri Nov 27 17:03:29 2020 -0500
@@ -425,10 +425,10 @@
 
     @active
     def replace(self, file, offset):
-        '''
+        """
         replace can only replace already committed entries
         that are not pending in the queue
-        '''
+        """
         if file in self._newfiles:
             if not offset:
                 return
@@ -476,9 +476,9 @@
 
     @active
     def writepending(self):
-        '''write pending file to temporary version
+        """write pending file to temporary version
 
-        This is used to allow hooks to view a transaction before commit'''
+        This is used to allow hooks to view a transaction before commit"""
         categories = sorted(self._pendingcallback)
         for cat in categories:
             # remove callback since the data will have been flushed
@@ -489,8 +489,7 @@
 
     @active
     def hasfinalize(self, category):
-        """check is a callback already exist for a category
-        """
+        """check is a callback already exist for a category"""
         return category in self._finalizecallback
 
     @active
@@ -533,11 +532,11 @@
 
     @active
     def addvalidator(self, category, callback):
-        """ adds a callback to be called when validating the transaction.
+        """adds a callback to be called when validating the transaction.
 
         The transaction will be given as the first argument to the callback.
 
-        callback should raise exception if to abort transaction """
+        callback should raise exception if to abort transaction"""
         self._validatecallback[category] = callback
 
     @active
@@ -624,9 +623,9 @@
 
     @active
     def abort(self):
-        '''abort the transaction (generally called on error, or when the
+        """abort the transaction (generally called on error, or when the
         transaction is not explicitly committed before going out of
-        scope)'''
+        scope)"""
         self._abort()
 
     def _writeundo(self):
--- a/mercurial/treediscovery.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/treediscovery.py	Fri Nov 27 17:03:29 2020 -0500
@@ -117,7 +117,10 @@
             for p in pycompat.xrange(0, len(r), 10):
                 with remote.commandexecutor() as e:
                     branches = e.callcommand(
-                        b'branches', {b'nodes': r[p : p + 10],}
+                        b'branches',
+                        {
+                            b'nodes': r[p : p + 10],
+                        },
                     ).result()
 
                 for b in branches:
--- a/mercurial/txnutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/txnutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -13,20 +13,20 @@
 
 
 def mayhavepending(root):
-    '''return whether 'root' may have pending changes, which are
+    """return whether 'root' may have pending changes, which are
     visible to this process.
-    '''
+    """
     return root == encoding.environ.get(b'HG_PENDING')
 
 
 def trypending(root, vfs, filename, **kwargs):
-    '''Open  file to be read according to HG_PENDING environment variable
+    """Open  file to be read according to HG_PENDING environment variable
 
     This opens '.pending' of specified 'filename' only when HG_PENDING
     is equal to 'root'.
 
     This returns '(fp, is_pending_opened)' tuple.
-    '''
+    """
     if mayhavepending(root):
         try:
             return (vfs(b'%s.pending' % filename, **kwargs), True)
--- a/mercurial/ui.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/ui.py	Fri Nov 27 17:03:29 2020 -0500
@@ -925,7 +925,7 @@
                 yield section, name, value
 
     def plain(self, feature=None):
-        '''is plain mode active?
+        """is plain mode active?
 
         Plain mode means that all configuration variables which affect
         the behavior and output of Mercurial should be
@@ -939,7 +939,7 @@
         - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
         - False if feature is disabled by default and not included in HGPLAIN
         - True otherwise
-        '''
+        """
         if (
             b'HGPLAIN' not in encoding.environ
             and b'HGPLAINEXCEPT' not in encoding.environ
@@ -1112,7 +1112,7 @@
         return self._colormode != b'win32'
 
     def write(self, *args, **opts):
-        '''write args to output
+        """write args to output
 
         By default, this method simply writes to the buffer or stdout.
         Color mode can be set on the UI class to have the output decorated
@@ -1133,7 +1133,7 @@
         When labeling output for a specific command, a label of
         "cmdname.type" is recommended. For example, status issues
         a label of "status.modified" for modified files.
-        '''
+        """
         dest = self._fout
 
         # inlined _write() for speed
@@ -1453,9 +1453,9 @@
         return _reqexithandlers
 
     def atexit(self, func, *args, **kwargs):
-        '''register a function to run after dispatching a request
+        """register a function to run after dispatching a request
 
-        Handlers do not stay registered across request boundaries.'''
+        Handlers do not stay registered across request boundaries."""
         self._exithandlers.append((func, args, kwargs))
         return func
 
@@ -1484,8 +1484,14 @@
         alldefaults = frozenset([b"text", b"curses"])
 
         featureinterfaces = {
-            b"chunkselector": [b"text", b"curses",],
-            b"histedit": [b"text", b"curses",],
+            b"chunkselector": [
+                b"text",
+                b"curses",
+            ],
+            b"histedit": [
+                b"text",
+                b"curses",
+            ],
         }
 
         # Feature-specific interface
@@ -1532,7 +1538,7 @@
         return choseninterface
 
     def interactive(self):
-        '''is interactive input allowed?
+        """is interactive input allowed?
 
         An interactive session is a session where input can be reasonably read
         from `sys.stdin'. If this function returns false, any attempt to read
@@ -1544,7 +1550,7 @@
         to a terminal device.
 
         This function refers to input only; for output, see `ui.formatted()'.
-        '''
+        """
         i = self.configbool(b"ui", b"interactive")
         if i is None:
             # some environments replace stdin without implementing isatty
@@ -1554,8 +1560,7 @@
         return i
 
     def termwidth(self):
-        '''how wide is the terminal in columns?
-        '''
+        """how wide is the terminal in columns?"""
         if b'COLUMNS' in encoding.environ:
             try:
                 return int(encoding.environ[b'COLUMNS'])
@@ -1564,7 +1569,7 @@
         return scmutil.termsize(self)[0]
 
     def formatted(self):
-        '''should formatted output be used?
+        """should formatted output be used?
 
         It is often desirable to format the output to suite the output medium.
         Examples of this are truncating long lines or colorizing messages.
@@ -1579,7 +1584,7 @@
 
         This function refers to output only; for input, see `ui.interactive()'.
         This function always returns false when in plain mode, see `ui.plain()'.
-        '''
+        """
         if self.plain():
             return False
 
@@ -1746,40 +1751,40 @@
             raise error.ResponseExpected()
 
     def status(self, *msg, **opts):
-        '''write status message to output (if ui.quiet is False)
+        """write status message to output (if ui.quiet is False)
 
         This adds an output label of "ui.status".
-        '''
+        """
         if not self.quiet:
             self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
 
     def warn(self, *msg, **opts):
-        '''write warning message to output (stderr)
+        """write warning message to output (stderr)
 
         This adds an output label of "ui.warning".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
 
     def error(self, *msg, **opts):
-        '''write error message to output (stderr)
+        """write error message to output (stderr)
 
         This adds an output label of "ui.error".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
 
     def note(self, *msg, **opts):
-        '''write note to output (if ui.verbose is True)
+        """write note to output (if ui.verbose is True)
 
         This adds an output label of "ui.note".
-        '''
+        """
         if self.verbose:
             self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
 
     def debug(self, *msg, **opts):
-        '''write debug message to output (if ui.debugflag is True)
+        """write debug message to output (if ui.debugflag is True)
 
         This adds an output label of "ui.debug".
-        '''
+        """
         if self.debugflag:
             self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
             self.log(b'debug', b'%s', b''.join(msg))
@@ -1875,12 +1880,12 @@
         errprefix=None,
         blockedtag=None,
     ):
-        '''execute shell command with appropriate output stream. command
+        """execute shell command with appropriate output stream. command
         output will be redirected if fout is not stdout.
 
         if command fails and onerr is None, return status, else raise onerr
         object as exception.
-        '''
+        """
         if blockedtag is None:
             # Long cmds tend to be because of an absolute path on cmd. Keep
             # the tail end instead
@@ -1907,9 +1912,9 @@
         return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
 
     def traceback(self, exc=None, force=False):
-        '''print exception traceback if traceback printing enabled or forced.
+        """print exception traceback if traceback printing enabled or forced.
         only to call in exception handler. returns true if traceback
-        printed.'''
+        printed."""
         if self.tracebackflag or force:
             if exc is None:
                 exc = sys.exc_info()
@@ -2011,7 +2016,7 @@
         self._loggers[name] = logger
 
     def log(self, event, msgfmt, *msgargs, **opts):
-        '''hook for logging facility extensions
+        """hook for logging facility extensions
 
         event should be a readily-identifiable subsystem, which will
         allow filtering.
@@ -2020,7 +2025,7 @@
         *msgargs are %-formatted into it.
 
         **opts currently has no defined meanings.
-        '''
+        """
         if not self._loggers:
             return
         activeloggers = [
@@ -2040,7 +2045,7 @@
             self._loggers = registeredloggers
 
     def label(self, msg, label):
-        '''style msg based on supplied label
+        """style msg based on supplied label
 
         If some color mode is enabled, this will add the necessary control
         characters to apply such color. In addition, 'debug' color mode adds
@@ -2048,7 +2053,7 @@
 
         ui.write(s, 'label') is equivalent to
         ui.write(ui.label(s, 'label')).
-        '''
+        """
         if self._colormode is not None:
             return color.colorlabel(self, msg, label)
         return msg
--- a/mercurial/url.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/url.py	Fri Nov 27 17:03:29 2020 -0500
@@ -35,13 +35,13 @@
 
 
 def escape(s, quote=None):
-    '''Replace special characters "&", "<" and ">" to HTML-safe sequences.
+    """Replace special characters "&", "<" and ">" to HTML-safe sequences.
     If the optional flag quote is true, the quotation mark character (")
     is also translated.
 
     This is the same as cgi.escape in Python, but always operates on
     bytes, whereas cgi.escape in Python 3 only works on unicodes.
-    '''
+    """
     s = s.replace(b"&", b"&amp;")
     s = s.replace(b"<", b"&lt;")
     s = s.replace(b">", b"&gt;")
@@ -586,7 +586,7 @@
     loggingopts=None,
     sendaccept=True,
 ):
-    '''
+    """
     construct an opener suitable for urllib2
     authinfo will be added to the password manager
 
@@ -600,7 +600,7 @@
 
     ``sendaccept`` allows controlling whether the ``Accept`` request header
     is sent. The header is sent by default.
-    '''
+    """
     timeout = ui.configwith(float, b'http', b'timeout')
     handlers = []
 
--- a/mercurial/urllibcompat.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/urllibcompat.py	Fri Nov 27 17:03:29 2020 -0500
@@ -83,10 +83,22 @@
     )
     import urllib.response
 
-    urlreq._registeraliases(urllib.response, (b"addclosehook", b"addinfourl",))
+    urlreq._registeraliases(
+        urllib.response,
+        (
+            b"addclosehook",
+            b"addinfourl",
+        ),
+    )
     import urllib.error
 
-    urlerr._registeraliases(urllib.error, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib.error,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     import http.server
 
     httpserver._registeraliases(
@@ -179,12 +191,28 @@
             b"urlopen",
         ),
     )
-    urlreq._registeraliases(urlparse, (b"urlparse", b"urlunparse",))
+    urlreq._registeraliases(
+        urlparse,
+        (
+            b"urlparse",
+            b"urlunparse",
+        ),
+    )
     urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
     urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
-    urlerr._registeraliases(urllib2, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib2,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     httpserver._registeraliases(
-        BaseHTTPServer, (b"HTTPServer", b"BaseHTTPRequestHandler",)
+        BaseHTTPServer,
+        (
+            b"HTTPServer",
+            b"BaseHTTPRequestHandler",
+        ),
     )
     httpserver._registeraliases(
         SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
--- a/mercurial/util.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/util.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1264,7 +1264,7 @@
 
 
 class sortdict(collections.OrderedDict):
-    '''a simple sorted dictionary
+    """a simple sorted dictionary
 
     >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
     >>> d2 = d1.copy()
@@ -1276,7 +1276,7 @@
     >>> d1.insert(1, b'a.5', 0.5)
     >>> d1
     sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
-    '''
+    """
 
     def __setitem__(self, key, value):
         if key in self:
@@ -1761,8 +1761,8 @@
 
 
 def increasingchunks(source, min=1024, max=65536):
-    '''return no less than min bytes per chunk while data remains,
-    doubling min after each chunk until it reaches max'''
+    """return no less than min bytes per chunk while data remains,
+    doubling min after each chunk until it reaches max"""
 
     def log2(x):
         if not x:
@@ -1833,7 +1833,7 @@
 
 
 def pathto(root, n1, n2):
-    '''return the relative path from one place to another.
+    """return the relative path from one place to another.
     root should use os.sep to separate directories
     n1 should use os.sep to separate directories
     n2 should use "/" to separate directories
@@ -1842,7 +1842,7 @@
     If n1 is a relative path, it's assumed it's
     relative to root.
     n2 should always be relative to root.
-    '''
+    """
     if not n1:
         return localpath(n2)
     if os.path.isabs(n1):
@@ -1892,7 +1892,7 @@
 
 
 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
-    '''copy a file, preserving mode and optionally other stat info like
+    """copy a file, preserving mode and optionally other stat info like
     atime/mtime
 
     checkambig argument is used with filestat, and is useful only if
@@ -1900,7 +1900,7 @@
     repo.wlock).
 
     copystat and checkambig should be exclusive.
-    '''
+    """
     assert not (copystat and checkambig)
     oldstat = None
     if os.path.lexists(dest):
@@ -2017,7 +2017,7 @@
 
 
 def checkwinfilename(path):
-    r'''Check that the base-relative path is a valid filename on Windows.
+    r"""Check that the base-relative path is a valid filename on Windows.
     Returns None if the path is ok, or a UI string describing the problem.
 
     >>> checkwinfilename(b"just/a/normal/path")
@@ -2039,7 +2039,7 @@
     "filename ends with '\\', which is invalid on Windows"
     >>> checkwinfilename(b"foo\\/bar")
     "directory name ends with '\\', which is invalid on Windows"
-    '''
+    """
     if path.endswith(b'\\'):
         return _(b"filename ends with '\\', which is invalid on Windows")
     if b'\\/' in path:
@@ -2175,11 +2175,11 @@
             _re2 = False
 
     def compile(self, pat, flags=0):
-        '''Compile a regular expression, using re2 if possible
+        """Compile a regular expression, using re2 if possible
 
         For best performance, use only re2-compatible regexp features. The
         only flags from the re module that are re2-compatible are
-        IGNORECASE and MULTILINE.'''
+        IGNORECASE and MULTILINE."""
         if _re2 is None:
             self._checkre2()
         if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
@@ -2195,11 +2195,11 @@
 
     @propertycache
     def escape(self):
-        '''Return the version of escape corresponding to self.compile.
+        """Return the version of escape corresponding to self.compile.
 
         This is imperfect because whether re2 or re is used for a particular
         function depends on the flags, etc, but it's the best we can do.
-        '''
+        """
         global _re2
         if _re2 is None:
             self._checkre2()
@@ -2215,7 +2215,7 @@
 
 
 def fspath(name, root):
-    '''Get name in the case stored in the filesystem
+    """Get name in the case stored in the filesystem
 
     The name should be relative to root, and be normcase-ed for efficiency.
 
@@ -2223,7 +2223,7 @@
     called, for case-sensitive filesystems (simply because it's expensive).
 
     The root should be normcase-ed, too.
-    '''
+    """
 
     def _makefspathcacheentry(dir):
         return {normcase(n): n for n in os.listdir(dir)}
@@ -2301,11 +2301,11 @@
 
 
 def splitpath(path):
-    '''Split path by os.sep.
+    """Split path by os.sep.
     Note that this function does not use os.altsep because this is
     an alternative of simple "xxx.split(os.sep)".
     It is recommended to use os.path.normpath() before using this
-    function if need.'''
+    function if need."""
     return path.split(pycompat.ossep)
 
 
@@ -2459,7 +2459,7 @@
 
 
 class atomictempfile(object):
-    '''writable file object that atomically updates a file
+    """writable file object that atomically updates a file
 
     All writes will go to a temporary copy of the original file. Call
     close() when you are done writing, and atomictempfile will rename
@@ -2470,7 +2470,7 @@
     checkambig argument of constructor is used with filestat, and is
     useful only if target file is guarded by any lock (e.g. repo.lock
     or repo.wlock).
-    '''
+    """
 
     def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
         self.__name = name  # permanent name
@@ -3365,7 +3365,7 @@
 
 
 def timed(func):
-    '''Report the execution time of a function call to stderr.
+    """Report the execution time of a function call to stderr.
 
     During development, use as a decorator when you need to measure
     the cost of a function, e.g. as follows:
@@ -3373,7 +3373,7 @@
     @util.timed
     def foo(a, b, c):
         pass
-    '''
+    """
 
     def wrapper(*args, **kwargs):
         with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
@@ -3404,7 +3404,7 @@
 
 
 def sizetoint(s):
-    '''Convert a space specifier to a byte count.
+    """Convert a space specifier to a byte count.
 
     >>> sizetoint(b'30')
     30
@@ -3412,7 +3412,7 @@
     2252
     >>> sizetoint(b'6M')
     6291456
-    '''
+    """
     t = s.strip().lower()
     try:
         for k, u in _sizeunits:
@@ -3424,9 +3424,9 @@
 
 
 class hooks(object):
-    '''A collection of hook functions that can be used to extend a
+    """A collection of hook functions that can be used to extend a
     function's behavior. Hooks are called in lexicographic order,
-    based on the names of their sources.'''
+    based on the names of their sources."""
 
     def __init__(self):
         self._hooks = []
@@ -3443,7 +3443,7 @@
 
 
 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
-    '''Yields lines for a nicely formatted stacktrace.
+    """Yields lines for a nicely formatted stacktrace.
     Skips the 'skip' last entries, then return the last 'depth' entries.
     Each file+linenumber is formatted according to fileline.
     Each line is formatted according to line.
@@ -3453,7 +3453,7 @@
       function
 
     Not be used in production code but very convenient while developing.
-    '''
+    """
     entries = [
         (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
         for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
@@ -3475,12 +3475,12 @@
     depth=0,
     prefix=b'',
 ):
-    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
+    """Writes a message to f (stderr) with a nicely formatted stacktrace.
     Skips the 'skip' entries closest to the call, then show 'depth' entries.
     By default it will flush stdout first.
     It can be used everywhere and intentionally does not require an ui object.
     Not be used in production code but very convenient while developing.
-    '''
+    """
     if otherf:
         otherf.flush()
     f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
--- a/mercurial/utils/cborutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/utils/cborutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -641,19 +641,28 @@
 
                 elif special == SPECIAL_START_ARRAY:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': [],}
+                        {
+                            b'remaining': value,
+                            b'v': [],
+                        }
                     )
                     self._state = self._STATE_WANT_ARRAY_VALUE
 
                 elif special == SPECIAL_START_MAP:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': {},}
+                        {
+                            b'remaining': value,
+                            b'v': {},
+                        }
                     )
                     self._state = self._STATE_WANT_MAP_KEY
 
                 elif special == SPECIAL_START_SET:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': set(),}
+                        {
+                            b'remaining': value,
+                            b'v': set(),
+                        }
                     )
                     self._state = self._STATE_WANT_SET_VALUE
 
@@ -684,7 +693,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     # self._state doesn't need changed.
@@ -711,7 +723,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
@@ -775,7 +790,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_ARRAY_VALUE
@@ -789,7 +807,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_MAP_KEY
@@ -803,7 +824,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
--- a/mercurial/utils/compression.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/utils/compression.py	Fri Nov 27 17:03:29 2020 -0500
@@ -29,7 +29,8 @@
 CLIENTROLE = b'client'
 
 compewireprotosupport = collections.namedtuple(
-    'compenginewireprotosupport', ('name', 'serverpriority', 'clientpriority'),
+    'compenginewireprotosupport',
+    ('name', 'serverpriority', 'clientpriority'),
 )
 
 
--- a/mercurial/utils/dateutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/utils/dateutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -53,12 +53,17 @@
     b'%I:%M%p',
 )
 
-extendeddateformats = defaultdateformats + (b"%Y", b"%Y-%m", b"%b", b"%b %Y",)
+extendeddateformats = defaultdateformats + (
+    b"%Y",
+    b"%Y-%m",
+    b"%b",
+    b"%b %Y",
+)
 
 
 def makedate(timestamp=None):
-    '''Return a unix timestamp (or the current time) as a (unixtime,
-    offset) tuple based off the local timezone.'''
+    """Return a unix timestamp (or the current time) as a (unixtime,
+    offset) tuple based off the local timezone."""
     if timestamp is None:
         timestamp = time.time()
     if timestamp < 0:
@@ -115,7 +120,7 @@
 
 def parsetimezone(s):
     """find a trailing timezone, if any, in string, and return a
-       (offset, remainder) pair"""
+    (offset, remainder) pair"""
     s = pycompat.bytestr(s)
 
     if s.endswith(b"GMT") or s.endswith(b"UTC"):
--- a/mercurial/utils/procutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/utils/procutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -292,10 +292,10 @@
 
 
 def tempfilter(s, cmd):
-    '''filter string S through a pair of temporary files with CMD.
+    """filter string S through a pair of temporary files with CMD.
     CMD is used as a template to create the real command to be run,
     with the strings INFILE and OUTFILE replaced by the real names of
-    the temporary files generated.'''
+    the temporary files generated."""
     inname, outname = None, None
     try:
         infd, inname = pycompat.mkstemp(prefix=b'hg-filter-in-')
@@ -465,17 +465,16 @@
 
 
 def tonativeenv(env):
-    '''convert the environment from bytes to strings suitable for Popen(), etc.
-    '''
+    """convert the environment from bytes to strings suitable for Popen(), etc."""
     return pycompat.rapply(tonativestr, env)
 
 
 def system(cmd, environ=None, cwd=None, out=None):
-    '''enhanced shell command execution.
+    """enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
 
     if out is specified, it is assumed to be a file-like object that has a
-    write() method. stdout and stderr will be redirected to out.'''
+    write() method. stdout and stderr will be redirected to out."""
     try:
         stdout.flush()
     except Exception:
@@ -685,14 +684,14 @@
         record_wait=None,
         stdin_bytes=None,
     ):
-        '''Spawn a command without waiting for it to finish.
+        """Spawn a command without waiting for it to finish.
 
 
         When `record_wait` is not None, the spawned process will not be fully
         detached and the `record_wait` argument will be called with a the
         `Subprocess.wait` function for the spawned process.  This is mostly
         useful for developers that need to make sure the spawned process
-        finished before a certain point. (eg: writing test)'''
+        finished before a certain point. (eg: writing test)"""
         if pycompat.isdarwin:
             # avoid crash in CoreFoundation in case another thread
             # calls gui() while we're calling fork().
--- a/mercurial/utils/stringutil.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/utils/stringutil.py	Fri Nov 27 17:03:29 2020 -0500
@@ -494,15 +494,15 @@
 
 @attr.s(hash=True)
 class mailmapping(object):
-    '''Represents a username/email key or value in
-    a mailmap file'''
+    """Represents a username/email key or value in
+    a mailmap file"""
 
     email = attr.ib()
     name = attr.ib(default=None)
 
 
 def _ismailmaplineinvalid(names, emails):
-    '''Returns True if the parsed names and emails
+    """Returns True if the parsed names and emails
     in a mailmap entry are invalid.
 
     >>> # No names or emails fails
@@ -522,7 +522,7 @@
     >>> emails = [b'proper@email.com', b'commit@email.com']
     >>> _ismailmaplineinvalid(names, emails)
     False
-    '''
+    """
     return not emails or not names and len(emails) < 2
 
 
@@ -597,11 +597,13 @@
             continue
 
         mailmapkey = mailmapping(
-            email=emails[-1], name=names[-1] if len(names) == 2 else None,
+            email=emails[-1],
+            name=names[-1] if len(names) == 2 else None,
         )
 
         mailmap[mailmapkey] = mailmapping(
-            email=emails[0], name=names[0] if names else None,
+            email=emails[0],
+            name=names[0] if names else None,
         )
 
     return mailmap
@@ -659,7 +661,7 @@
 
 
 def isauthorwellformed(author):
-    '''Return True if the author field is well formed
+    """Return True if the author field is well formed
     (ie "Contributor Name <contrib@email.dom>")
 
     >>> isauthorwellformed(b'Good Author <good@author.com>')
@@ -676,7 +678,7 @@
     False
     >>> isauthorwellformed(b'Bad Author <author>')
     False
-    '''
+    """
     return _correctauthorformat.match(author) is not None
 
 
--- a/mercurial/vfs.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/vfs.py	Fri Nov 27 17:03:29 2020 -0500
@@ -83,12 +83,12 @@
 
     @util.propertycache
     def open(self):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         Newly created directories are marked as "not to be indexed by
         the content indexing service", if ``notindexed`` is specified
         for "write" mode access.
-        '''
+        """
         return self.__call__
 
     def read(self, path):
@@ -142,9 +142,9 @@
         return os.path.islink(self.join(path))
 
     def isfileorlink(self, path=None):
-        '''return whether path is a regular file or a symlink
+        """return whether path is a regular file or a symlink
 
-        Unlike isfile, this doesn't follow symlinks.'''
+        Unlike isfile, this doesn't follow symlinks."""
         try:
             st = self.lstat(path)
         except OSError:
@@ -228,8 +228,7 @@
         return util.readlink(self.join(path))
 
     def removedirs(self, path=None):
-        """Remove a leaf directory and all empty intermediate ones
-        """
+        """Remove a leaf directory and all empty intermediate ones"""
         return util.removedirs(self.join(path))
 
     def rmdir(self, path=None):
@@ -332,7 +331,7 @@
 
 
 class vfs(abstractvfs):
-    '''Operate files relative to a base directory
+    """Operate files relative to a base directory
 
     This class is used to hide the details of COW semantics and
     remote file access from higher level code.
@@ -340,7 +339,7 @@
     'cacheaudited' should be enabled only if (a) vfs object is short-lived, or
     (b) the base directory is managed by hg and considered sort-of append-only.
     See pathutil.pathauditor() for details.
-    '''
+    """
 
     def __init__(
         self,
@@ -397,7 +396,7 @@
         auditpath=True,
         makeparentdirs=True,
     ):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         By default, parent directories are created as needed. Newly created
         directories are marked as "not to be indexed by the content indexing
@@ -426,7 +425,7 @@
         truncation), if it is owned by another. Therefore, use
         combination of append mode and checkambig=True only in limited
         cases (see also issue5418 and issue5584 for detail).
-        '''
+        """
         if auditpath:
             self._auditpath(path, mode)
         f = self.join(path)
--- a/mercurial/win32.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/win32.py	Fri Nov 27 17:03:29 2020 -0500
@@ -385,13 +385,13 @@
 
 
 def checkcertificatechain(cert, build=True):
-    '''Tests the given certificate to see if there is a complete chain to a
-       trusted root certificate.  As a side effect, missing certificates are
-       downloaded and installed unless ``build=False``.  True is returned if a
-       chain to a trusted root exists (even if built on the fly), otherwise
-       False.  NB: A chain to a trusted root does NOT imply that the certificate
-       is valid.
-    '''
+    """Tests the given certificate to see if there is a complete chain to a
+    trusted root certificate.  As a side effect, missing certificates are
+    downloaded and installed unless ``build=False``.  True is returned if a
+    chain to a trusted root exists (even if built on the fly), otherwise
+    False.  NB: A chain to a trusted root does NOT imply that the certificate
+    is valid.
+    """
 
     chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
 
@@ -488,8 +488,8 @@
 
 
 def testpid(pid):
-    '''return True if pid is still running or unable to
-    determine, False otherwise'''
+    """return True if pid is still running or unable to
+    determine, False otherwise"""
     h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid)
     if h:
         try:
@@ -576,10 +576,10 @@
 
 
 def setsignalhandler():
-    '''Register a termination handler for console events including
+    """Register a termination handler for console events including
     CTRL+C. python signal handlers do not work well with socket
     operations.
-    '''
+    """
 
     def handler(event):
         _kernel32.ExitProcess(1)
@@ -627,8 +627,8 @@
 
 
 def enablevtmode():
-    '''Enable virtual terminal mode for the associated console.  Return True if
-    enabled, else False.'''
+    """Enable virtual terminal mode for the associated console.  Return True if
+    enabled, else False."""
 
     ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
 
--- a/mercurial/windows.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/windows.py	Fri Nov 27 17:03:29 2020 -0500
@@ -195,13 +195,13 @@
 
 
 class winstdout(object):
-    '''Some files on Windows misbehave.
+    """Some files on Windows misbehave.
 
     When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
 
     When writing too many bytes to a console at the same, a "Not enough space"
     error may happen. Python 3 already works around that.
-    '''
+    """
 
     def __init__(self, fp):
         self.fp = fp
@@ -497,11 +497,11 @@
 
 
 def findexe(command):
-    '''Find executable for command searching like cmd.exe does.
+    """Find executable for command searching like cmd.exe does.
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
     An extension from PATHEXT is found and added if not present.
-    If command isn't found None is returned.'''
+    If command isn't found None is returned."""
     pathext = encoding.environ.get(b'PATHEXT', b'.COM;.EXE;.BAT;.CMD')
     pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
     if os.path.splitext(command)[1].lower() in pathexts:
@@ -529,10 +529,10 @@
 
 
 def statfiles(files):
-    '''Stat each file in files. Yield each stat, or None if a file
+    """Stat each file in files. Yield each stat, or None if a file
     does not exist or has a type we don't care about.
 
-    Cluster and cache stat per directory to minimize number of OS stat calls.'''
+    Cluster and cache stat per directory to minimize number of OS stat calls."""
     dircache = {}  # dirname -> filename -> status | None if file does not exist
     getkind = stat.S_IFMT
     for nf in files:
@@ -630,14 +630,14 @@
 
 
 def lookupreg(key, valname=None, scope=None):
-    ''' Look up a key/value name in the Windows registry.
+    """Look up a key/value name in the Windows registry.
 
     valname: value name. If unspecified, the default value for the key
     is used.
     scope: optionally specify scope for registry lookup, this can be
     a sequence of scopes to look up in order. Default (CURRENT_USER,
     LOCAL_MACHINE).
-    '''
+    """
     if scope is None:
         scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
     elif not isinstance(scope, (list, tuple)):
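
findexe() above documents the cmd.exe search rules in prose; a minimal
standalone sketch of the same idea, using a hypothetical find_executable
helper rather than Mercurial's implementation:

    import os

    def find_executable(command, path=None, pathext=None):
        # Absolute/relative paths skip the PATH search; otherwise each PATH
        # entry is tried, appending a PATHEXT extension when none is present.
        path = (path or os.environ.get('PATH', '')).split(os.pathsep)
        pathext = pathext or os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
        exts = [e.lower() for e in pathext.split(';')]

        def check(name):
            if os.path.splitext(name)[1].lower() in exts:
                return name if os.path.isfile(name) else None
            for ext in exts:
                if os.path.isfile(name + ext):
                    return name + ext
            return None

        if os.sep in command or (os.altsep and os.altsep in command):
            return check(command)
        for directory in path:
            found = check(os.path.join(directory, command))
            if found:
                return found
        return None
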
--- a/mercurial/wireprotoframing.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/wireprotoframing.py	Fri Nov 27 17:03:29 2020 -0500
@@ -456,7 +456,10 @@
 def createalternatelocationresponseframe(stream, requestid, location):
     data = {
         b'status': b'redirect',
-        b'location': {b'url': location.url, b'mediatype': location.mediatype,},
+        b'location': {
+            b'url': location.url,
+            b'mediatype': location.mediatype,
+        },
     }
 
     for a in (
@@ -490,7 +493,12 @@
 def createcommanderrorresponse(stream, requestid, message, args=None):
     # TODO should this be using a list of {'msg': ..., 'args': {}} so atom
     # formatting works consistently?
-    m = {b'status': b'error', b'error': {b'message': message,}}
+    m = {
+        b'status': b'error',
+        b'error': {
+            b'message': message,
+        },
+    }
 
     if args:
         m[b'error'][b'args'] = args
@@ -510,7 +518,12 @@
     assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
 
     payload = b''.join(
-        cborutil.streamencode({b'type': errtype, b'message': [{b'msg': msg}],})
+        cborutil.streamencode(
+            {
+                b'type': errtype,
+                b'message': [{b'msg': msg}],
+            }
+        )
     )
 
     yield stream.makeframe(
@@ -1292,14 +1305,18 @@
                 for frame in gen:
                     yield frame
 
-        return b'sendframes', {b'framegen': makegen(),}
+        return b'sendframes', {
+            b'framegen': makegen(),
+        }
 
     def _handlesendframes(self, framegen):
         if self._deferoutput:
             self._bufferedframegens.append(framegen)
             return b'noop', {}
         else:
-            return b'sendframes', {b'framegen': framegen,}
+            return b'sendframes', {
+                b'framegen': framegen,
+            }
 
     def onservererror(self, stream, requestid, msg):
         ensureserverstream(stream)
@@ -1351,7 +1368,9 @@
         return s
 
     def _makeerrorresult(self, msg):
-        return b'error', {b'message': msg,}
+        return b'error', {
+            b'message': msg,
+        }
 
     def _makeruncommandresult(self, requestid):
         entry = self._receivingcommands[requestid]
@@ -1397,7 +1416,9 @@
         )
 
     def _makewantframeresult(self):
-        return b'wantframe', {b'state': self._state,}
+        return b'wantframe', {
+            b'state': self._state,
+        }
 
     def _validatecommandrequestframe(self, frame):
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1802,7 +1823,9 @@
             return (
                 request,
                 b'sendframes',
-                {b'framegen': self._makecommandframes(request),},
+                {
+                    b'framegen': self._makecommandframes(request),
+                },
             )
 
     def flushcommands(self):
@@ -1835,7 +1858,9 @@
                 for frame in self._makecommandframes(request):
                     yield frame
 
-        return b'sendframes', {b'framegen': makeframes(),}
+        return b'sendframes', {
+            b'framegen': makeframes(),
+        }
 
     def _makecommandframes(self, request):
         """Emit frames to issue a command request.
@@ -1851,7 +1876,9 @@
 
             payload = b''.join(
                 cborutil.streamencode(
-                    {b'contentencodings': self._clientcontentencoders,}
+                    {
+                        b'contentencodings': self._clientcontentencoders,
+                    }
                 )
             )
 
--- a/mercurial/wireprototypes.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/wireprototypes.py	Fri Nov 27 17:03:29 2020 -0500
@@ -33,14 +33,23 @@
 
 # All available wire protocol transports.
 TRANSPORTS = {
-    SSHV1: {b'transport': b'ssh', b'version': 1,},
+    SSHV1: {
+        b'transport': b'ssh',
+        b'version': 1,
+    },
     SSHV2: {
         b'transport': b'ssh',
         # TODO mark as version 2 once all commands are implemented.
         b'version': 1,
     },
-    b'http-v1': {b'transport': b'http', b'version': 1,},
-    HTTP_WIREPROTO_V2: {b'transport': b'http', b'version': 2,},
+    b'http-v1': {
+        b'transport': b'http',
+        b'version': 1,
+    },
+    HTTP_WIREPROTO_V2: {
+        b'transport': b'http',
+        b'version': 2,
+    },
 }
 
 
--- a/mercurial/wireprotov1peer.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/wireprotov1peer.py	Fri Nov 27 17:03:29 2020 -0500
@@ -36,7 +36,7 @@
 
 
 def batchable(f):
-    '''annotation for batchable methods
+    """annotation for batchable methods
 
     Such methods must implement a coroutine as follows:
 
@@ -56,7 +56,7 @@
     method, but adds the original method as an attribute called "batchable",
     which is used by remotebatch to split the call into separate encoding and
     decoding phases.
-    '''
+    """
 
     def plain(*args, **opts):
         batchable = f(*args, **opts)
@@ -474,7 +474,7 @@
             return changegroupmod.cg1unpacker(f, b'UN')
 
     def unbundle(self, bundle, heads, url):
-        '''Send cg (a readable file-like object representing the
+        """Send cg (a readable file-like object representing the
         changegroup to push, typically a chunkbuffer object) to the
         remote server as a bundle.
 
@@ -485,7 +485,7 @@
 
         `url` is the url the client thinks it's pushing to, which is
         visible to hooks.
-        '''
+        """
 
         if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
@@ -655,6 +655,5 @@
         raise NotImplementedError()
 
     def _abort(self, exception):
-        """clearly abort the wire protocol connection and raise the exception
-        """
+        """clearly abort the wire protocol connection and raise the exception"""
         raise NotImplementedError()
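
The coroutine contract that batchable() documents is easiest to read as code.
A sketch mirroring the pattern exercised by tests/test-batching.py later in
this change; examplepeer, mangle and unmangle are placeholders for a concrete
peer class and its argument encoding:

    class examplepeer(object):
        @wireprotov1peer.batchable
        def foo(self, one, two=None):
            # First yield: the encoded arguments plus a future that will hold
            # the encoded response once the batch round-trip completes.
            encoded_args = [(b'one', mangle(one)), (b'two', mangle(two))]
            encoded_res = wireprotov1peer.future()
            yield encoded_args, encoded_res
            # Second yield: decode the response for the caller.
            yield unmangle(encoded_res.value)
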
--- a/mercurial/wireprotov1server.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/wireprotov1server.py	Fri Nov 27 17:03:29 2020 -0500
@@ -602,10 +602,10 @@
 
 @wireprotocommand(b'stream_out', permission=b'pull')
 def stream(repo, proto):
-    '''If the server supports streaming clone, it advertises the "stream"
+    """If the server supports streaming clone, it advertises the "stream"
     capability with a value representing the version and flags of the repo
     it is serving. Client checks to see if it understands the format.
-    '''
+    """
     return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
 
 
--- a/mercurial/wireprotov2server.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/wireprotov2server.py	Fri Nov 27 17:03:29 2020 -0500
@@ -982,7 +982,10 @@
         b'revisions': {
             b'type': b'list',
             b'example': [
-                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
+                {
+                    b'type': b'changesetexplicit',
+                    b'nodes': [b'abcdef...'],
+                }
             ],
         },
         b'fields': {
@@ -1166,14 +1169,20 @@
             b'default': lambda: False,
             b'example': True,
         },
-        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'nodes': {
+            b'type': b'list',
+            b'example': [b'0123456...'],
+        },
         b'fields': {
             b'type': b'set',
             b'default': set,
             b'example': {b'parents', b'revision'},
             b'validvalues': {b'parents', b'revision', b'linknode'},
         },
-        b'path': {b'type': b'bytes', b'example': b'foo.txt',},
+        b'path': {
+            b'type': b'bytes',
+            b'example': b'foo.txt',
+        },
     },
     permission=b'pull',
     # TODO censoring a file revision won't invalidate the cache.
@@ -1262,7 +1271,10 @@
         b'revisions': {
             b'type': b'list',
             b'example': [
-                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
+                {
+                    b'type': b'changesetexplicit',
+                    b'nodes': [b'abcdef...'],
+                }
             ],
         },
     },
@@ -1375,7 +1387,12 @@
 
 @wireprotocommand(
     b'listkeys',
-    args={b'namespace': {b'type': b'bytes', b'example': b'ns',},},
+    args={
+        b'namespace': {
+            b'type': b'bytes',
+            b'example': b'ns',
+        },
+    },
     permission=b'pull',
 )
 def listkeysv2(repo, proto, namespace):
@@ -1390,7 +1407,12 @@
 
 @wireprotocommand(
     b'lookup',
-    args={b'key': {b'type': b'bytes', b'example': b'foo',},},
+    args={
+        b'key': {
+            b'type': b'bytes',
+            b'example': b'foo',
+        },
+    },
     permission=b'pull',
 )
 def lookupv2(repo, proto, key):
@@ -1415,7 +1437,10 @@
 @wireprotocommand(
     b'manifestdata',
     args={
-        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'nodes': {
+            b'type': b'list',
+            b'example': [b'0123456...'],
+        },
         b'haveparents': {
             b'type': b'bool',
             b'default': lambda: False,
@@ -1427,7 +1452,10 @@
             b'example': {b'parents', b'revision'},
             b'validvalues': {b'parents', b'revision'},
         },
-        b'tree': {b'type': b'bytes', b'example': b'',},
+        b'tree': {
+            b'type': b'bytes',
+            b'example': b'',
+        },
     },
     permission=b'pull',
     cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
@@ -1485,10 +1513,22 @@
 @wireprotocommand(
     b'pushkey',
     args={
-        b'namespace': {b'type': b'bytes', b'example': b'ns',},
-        b'key': {b'type': b'bytes', b'example': b'key',},
-        b'old': {b'type': b'bytes', b'example': b'old',},
-        b'new': {b'type': b'bytes', b'example': b'new',},
+        b'namespace': {
+            b'type': b'bytes',
+            b'example': b'ns',
+        },
+        b'key': {
+            b'type': b'bytes',
+            b'example': b'key',
+        },
+        b'old': {
+            b'type': b'bytes',
+            b'example': b'old',
+        },
+        b'new': {
+            b'type': b'bytes',
+            b'example': b'new',
+        },
     },
     permission=b'push',
 )
--- a/mercurial/worker.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/mercurial/worker.py	Fri Nov 27 17:03:29 2020 -0500
@@ -116,8 +116,8 @@
 
 
 def worthwhile(ui, costperop, nops, threadsafe=True):
-    '''try to determine whether the benefit of multiple processes can
-    outweigh the cost of starting them'''
+    """try to determine whether the benefit of multiple processes can
+    outweigh the cost of starting them"""
 
     if not threadsafe and _DISALLOW_THREAD_UNSAFE:
         return False
@@ -131,7 +131,7 @@
 def worker(
     ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
 ):
-    '''run a function, possibly in parallel in multiple worker
+    """run a function, possibly in parallel in multiple worker
     processes.
 
     returns a progress iterator
@@ -153,7 +153,7 @@
     threadsafe - whether work items are thread safe and can be executed using
     a thread-based worker. Should be disabled for CPU heavy tasks that don't
     release the GIL.
-    '''
+    """
     enabled = ui.configbool(b'worker', b'enabled')
     if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
         return _platformworker(ui, func, staticargs, args, hasretval)
@@ -306,10 +306,10 @@
 
 
 def _posixexitstatus(code):
-    '''convert a posix exit status into the same form returned by
+    """convert a posix exit status into the same form returned by
     os.spawnv
 
-    returns None if the process was stopped instead of exiting'''
+    returns None if the process was stopped instead of exiting"""
     if os.WIFEXITED(code):
         return os.WEXITSTATUS(code)
     elif os.WIFSIGNALED(code):
@@ -423,7 +423,7 @@
 
 
 def partition(lst, nslices):
-    '''partition a list into N slices of roughly equal size
+    """partition a list into N slices of roughly equal size
 
     The current strategy takes every Nth element from the input. If
     we ever write workers that need to preserve grouping in input
@@ -450,6 +450,6 @@
         What we should really be doing is have workers read filenames from an
         ordered queue. This preserves locality and also keeps any worker from
         getting more than one file out of balance.
-    '''
+    """
     for i in range(nslices):
         yield lst[i::nslices]
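
The every-Nth-element strategy partition() describes is small enough to show
with concrete numbers:

    def partition(lst, nslices):
        # same strategy as above: slice i gets elements i, i+N, i+2N, ...
        for i in range(nslices):
            yield lst[i::nslices]

    print(list(partition(list(range(10)), 3)))
    # [[0, 3, 6, 9], [1, 4, 7], [2, 5, 8]]
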
--- a/setup.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/setup.py	Fri Nov 27 17:03:29 2020 -0500
@@ -816,7 +816,8 @@
                 if sys.version_info[0] >= 3:
                     fsdecode = os.fsdecode
                 dest = os.path.join(
-                    os.path.dirname(self.hgtarget), fsdecode(dllbasename),
+                    os.path.dirname(self.hgtarget),
+                    fsdecode(dllbasename),
                 )
 
                 if not os.path.exists(dest):
@@ -1066,7 +1067,7 @@
 
 
 class hginstalllib(install_lib):
-    '''
+    """
     This is a specialization of install_lib that replaces the copy_file used
     there so that it supports setting the mode of files after copying them,
     instead of just preserving the mode that the files originally had.  If your
@@ -1075,7 +1076,7 @@
 
     Note that just passing keep_permissions=False to copy_file would be
     insufficient, as it might still be applying a umask.
-    '''
+    """
 
     def run(self):
         realcopyfile = file_util.copy_file
@@ -1103,11 +1104,11 @@
 
 
 class hginstallscripts(install_scripts):
-    '''
+    """
     This is a specialization of install_scripts that replaces the @LIBDIR@ with
     the configured directory for modules. If possible, the path is made relative
     to the directory for scripts.
-    '''
+    """
 
     def initialize_options(self):
         install_scripts.initialize_options(self)
@@ -1400,8 +1401,7 @@
 
 
 class RustExtension(Extension):
-    """Base classes for concrete Rust Extension classes.
-    """
+    """Base classes for concrete Rust Extension classes."""
 
     rusttargetdir = os.path.join('rust', 'target', 'release')
 
@@ -1547,7 +1547,10 @@
         include_dirs=common_include_dirs,
         extra_compile_args=common_cflags,
         depends=common_depends
-        + ['mercurial/cext/charencode.h', 'mercurial/cext/revlog.h',],
+        + [
+            'mercurial/cext/charencode.h',
+            'mercurial/cext/revlog.h',
+        ],
     ),
     Extension(
         'mercurial.cext.osutil',
@@ -1635,10 +1638,19 @@
     msvccompiler.MSVCCompiler = HackedMSVCCompiler
 
 packagedata = {
-    'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'dummycert.pem',],
-    'mercurial.defaultrc': ['*.rc',],
-    'mercurial.helptext': ['*.txt',],
-    'mercurial.helptext.internals': ['*.txt',],
+    'mercurial': [
+        'locale/*/LC_MESSAGES/hg.mo',
+        'dummycert.pem',
+    ],
+    'mercurial.defaultrc': [
+        '*.rc',
+    ],
+    'mercurial.helptext': [
+        '*.txt',
+    ],
+    'mercurial.helptext.internals': [
+        '*.txt',
+    ],
 }
 
 
--- a/tests/badserverext.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/badserverext.py	Fri Nov 27 17:03:29 2020 -0500
@@ -44,16 +44,24 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'badserver', b'closeafteraccept', default=False,
+    b'badserver',
+    b'closeafteraccept',
+    default=False,
 )
 configitem(
-    b'badserver', b'closeafterrecvbytes', default=b'0',
+    b'badserver',
+    b'closeafterrecvbytes',
+    default=b'0',
 )
 configitem(
-    b'badserver', b'closeaftersendbytes', default=b'0',
+    b'badserver',
+    b'closeaftersendbytes',
+    default=b'0',
 )
 configitem(
-    b'badserver', b'closebeforeaccept', default=False,
+    b'badserver',
+    b'closebeforeaccept',
+    default=False,
 )
 
 # We can't adjust __class__ on a socket instance. So we define a proxy type.
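
The configitem() calls above only register names and defaults; a hedged sketch
of how an extension reads such knobs back at runtime (illustrative, not quoted
from badserverext.py):

    def extsetup_example(ui):
        # Values fall back to the registered defaults unless a test overrides
        # them, e.g. --config badserver.closeafteraccept=true
        if ui.configbool(b'badserver', b'closebeforeaccept'):
            pass  # refuse the connection before accept()
        recvbytes = ui.config(b'badserver', b'closeafterrecvbytes')
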
--- a/tests/fakedirstatewritetime.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/fakedirstatewritetime.py	Fri Nov 27 17:03:29 2020 -0500
@@ -27,7 +27,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fakedirstatewritetime', b'fakenow', default=None,
+    b'fakedirstatewritetime',
+    b'fakenow',
+    default=None,
 )
 
 parsers = policy.importmod('parsers')
--- a/tests/fakepatchtime.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/fakepatchtime.py	Fri Nov 27 17:03:29 2020 -0500
@@ -14,7 +14,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fakepatchtime', b'fakenow', default=None,
+    b'fakepatchtime',
+    b'fakenow',
+    default=None,
 )
 
 
--- a/tests/flagprocessorext.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/flagprocessorext.py	Fri Nov 27 17:03:29 2020 -0500
@@ -139,10 +139,20 @@
 
     # Register flag processors for each extension
     flagutil.addflagprocessor(
-        REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
+        REVIDX_NOOP,
+        (
+            noopdonothingread,
+            noopdonothing,
+            validatehash,
+        ),
     )
     flagutil.addflagprocessor(
-        REVIDX_BASE64, (b64decode, b64encode, bypass,),
+        REVIDX_BASE64,
+        (
+            b64decode,
+            b64encode,
+            bypass,
+        ),
     )
     flagutil.addflagprocessor(
         REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
--- a/tests/hghave.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/hghave.py	Fri Nov 27 17:03:29 2020 -0500
@@ -1047,7 +1047,7 @@
     version_regex = b'black, version ([0-9a-b.]+)'
     version = matchoutput(blackcmd, version_regex)
     sv = distutils.version.StrictVersion
-    return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')
+    return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
 
 
 @check('pytype', 'the pytype type checker')
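
The bumped threshold leans on distutils.version.StrictVersion ordering black's
pre-release tags as expected, for example:

    from distutils.version import StrictVersion

    assert StrictVersion('20.8b1') >= StrictVersion('19.10b0')
    # pre-releases sort below the corresponding final release
    assert StrictVersion('20.8b1') < StrictVersion('20.8')
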
--- a/tests/hypothesishelpers.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/hypothesishelpers.py	Fri Nov 27 17:03:29 2020 -0500
@@ -44,8 +44,7 @@
 
 
 def roundtrips(data, decode, encode):
-    """helper to tests function that must do proper encode/decode roundtripping
-    """
+    """helper to tests function that must do proper encode/decode roundtripping"""
 
     @given(data)
     def testroundtrips(value):
@@ -71,6 +70,11 @@
     st.builds(
         lambda s, e: s.encode(e),
         st.text(),
-        st.sampled_from(['utf-8', 'utf-16',]),
+        st.sampled_from(
+            [
+                'utf-8',
+                'utf-16',
+            ]
+        ),
     )
 ) | st.binary()
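
A usage sketch for roundtrips() above, checking a toy hex codec over arbitrary
byte strings; st is hypothesis.strategies as imported by this module, and the
codec functions are hypothetical:

    def hexencode(value):
        return value.hex().encode('ascii')

    def hexdecode(encoded):
        return bytes.fromhex(encoded.decode('ascii'))

    roundtrips(st.binary(), hexdecode, hexencode)
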
--- a/tests/run-tests.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/run-tests.py	Fri Nov 27 17:03:29 2020 -0500
@@ -534,7 +534,9 @@
         help="install and use chg wrapper in place of hg",
     )
     hgconf.add_argument(
-        "--chg-debug", action="store_true", help="show chg debug logs",
+        "--chg-debug",
+        action="store_true",
+        help="show chg debug logs",
     )
     hgconf.add_argument("--compiler", help="compiler to build with")
     hgconf.add_argument(
@@ -1193,7 +1195,10 @@
         if self._keeptmpdir:
             log(
                 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
-                % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
+                % (
+                    _bytes2sys(self._testtmp),
+                    _bytes2sys(self._threadtmp),
+                )
             )
         else:
             try:
@@ -2091,11 +2096,11 @@
 
     @staticmethod
     def parsehghaveoutput(lines):
-        '''Parse hghave log lines.
+        """Parse hghave log lines.
 
         Return tuple of lists (missing, failed):
           * the missing/unknown features
-          * the features for which existence check failed'''
+          * the features for which existence check failed"""
         missing = []
         failed = []
         for line in lines:
@@ -2155,12 +2160,10 @@
             self.color = pygmentspresent
 
     def onStart(self, test):
-        """ Can be overriden by custom TestResult
-        """
+        """Can be overriden by custom TestResult"""
 
     def onEnd(self):
-        """ Can be overriden by custom TestResult
-        """
+        """Can be overriden by custom TestResult"""
 
     def addFailure(self, test, reason):
         self.failures.append((test, reason))
@@ -3168,7 +3171,9 @@
         vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
         vlog("# Using PATH", os.environ["PATH"])
         vlog(
-            "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
+            "# Using",
+            _bytes2sys(IMPL_PATH),
+            _bytes2sys(osenvironb[IMPL_PATH]),
         )
         vlog("# Writing to directory", _bytes2sys(self._outputdir))
 
--- a/tests/test-absorb-filefixupstate.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-absorb-filefixupstate.py	Fri Nov 27 17:03:29 2020 -0500
@@ -78,7 +78,13 @@
 testfilefixup(case0, b'222', [b'', b'222'])
 
 # input case 1: 3 lines, each commit adds one line
-case1 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3]),])
+case1 = buildcontents(
+    [
+        (b'1', [1, 2, 3]),
+        (b'2', [2, 3]),
+        (b'3', [3]),
+    ]
+)
 
 # 1:1 line mapping
 testfilefixup(case1, b'123', case1)
@@ -121,7 +127,13 @@
 testfilefixup(case1, b'12b3', case1)
 
 # input case 2: delete in the middle
-case2 = buildcontents([(b'11', [1, 2]), (b'22', [1]), (b'33', [1, 2]),])
+case2 = buildcontents(
+    [
+        (b'11', [1, 2]),
+        (b'22', [1]),
+        (b'33', [1, 2]),
+    ]
+)
 
 # deletion (optimize code should make it 2 chunks)
 testfilefixup(
@@ -136,7 +148,13 @@
 testfilefixup(case2, b'aaa', case2)
 
 # input case 3: rev 3 reverts rev 2
-case3 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2]), (b'3', [1, 2, 3]),])
+case3 = buildcontents(
+    [
+        (b'1', [1, 2, 3]),
+        (b'2', [2]),
+        (b'3', [1, 2, 3]),
+    ]
+)
 
 # 1:1 line mapping
 testfilefixup(case3, b'13', case3)
@@ -159,7 +177,13 @@
     [
         (b'1', [1, 2, 3]),
         (b'2', [2, 3]),
-        (b'3', [1, 2,]),
+        (
+            b'3',
+            [
+                1,
+                2,
+            ],
+        ),
         (b'4', [1, 3]),
         (b'5', [3]),
         (b'6', [2, 3]),
@@ -183,7 +207,13 @@
 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
 
 # input case 5: replace a small chunk which is near a deleted line
-case5 = buildcontents([(b'12', [1, 2]), (b'3', [1]), (b'4', [1, 2]),])
+case5 = buildcontents(
+    [
+        (b'12', [1, 2]),
+        (b'3', [1]),
+        (b'4', [1, 2]),
+    ]
+)
 
 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
 
--- a/tests/test-ancestor.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-ancestor.py	Fri Nov 27 17:03:29 2020 -0500
@@ -24,13 +24,13 @@
 
 
 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
-    '''nodes: total number of nodes in the graph
+    """nodes: total number of nodes in the graph
     rootprob: probability that a new node (not 0) will be a root
     mergeprob: probability that, excluding a root, a node will be a merge
     prevprob: probability that p1 will be the previous node
 
     return value is a graph represented as an adjacency list.
-    '''
+    """
     graph = [None] * nodes
     for i in xrange(nodes):
         if i == 0 or rng.random() < rootprob:
@@ -228,7 +228,11 @@
         print("remaining (sorted): %s" % sorted(list(revs)))
 
     for i, (bases, revs) in enumerate(
-        (({10}, {11}), ({11}, {10}), ({7}, {9, 11}),)
+        (
+            ({10}, {11}),
+            ({11}, {10}),
+            ({7}, {9, 11}),
+        )
     ):
         print("%% missingancestors(), example %d" % (i + 1))
         missanc = ancestor.incrementalmissingancestors(graph.get, bases)
--- a/tests/test-batching.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-batching.py	Fri Nov 27 17:03:29 2020 -0500
@@ -30,11 +30,17 @@
 class localthing(thing):
     def foo(self, one, two=None):
         if one:
-            return b"%s and %s" % (one, two,)
+            return b"%s and %s" % (
+                one,
+                two,
+            )
         return b"Nope"
 
     def bar(self, b, a):
-        return b"%s und %s" % (b, a,)
+        return b"%s und %s" % (
+            b,
+            a,
+        )
 
     def greet(self, name=None):
         return b"Hello, %s" % name
@@ -176,7 +182,15 @@
             args = b','.join(n + b'=' + escapearg(v) for n, v in args)
             req.append(name + b':' + args)
         req = b';'.join(req)
-        res = self._submitone(b'batch', [(b'cmds', req,)])
+        res = self._submitone(
+            b'batch',
+            [
+                (
+                    b'cmds',
+                    req,
+                )
+            ],
+        )
         for r in res.split(b';'):
             yield r
 
@@ -190,7 +204,16 @@
 
     @wireprotov1peer.batchable
     def foo(self, one, two=None):
-        encargs = [(b'one', mangle(one),), (b'two', mangle(two),)]
+        encargs = [
+            (
+                b'one',
+                mangle(one),
+            ),
+            (
+                b'two',
+                mangle(two),
+            ),
+        ]
         encresref = wireprotov1peer.future()
         yield encargs, encresref
         yield unmangle(encresref.value)
@@ -198,14 +221,33 @@
     @wireprotov1peer.batchable
     def bar(self, b, a):
         encresref = wireprotov1peer.future()
-        yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref
+        yield [
+            (
+                b'b',
+                mangle(b),
+            ),
+            (
+                b'a',
+                mangle(a),
+            ),
+        ], encresref
         yield unmangle(encresref.value)
 
     # greet is coded directly. It therefore does not support batching. If it
     # does appear in a batch, the batch is split around greet, and the call to
     # greet is done in its own roundtrip.
     def greet(self, name=None):
-        return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
+        return unmangle(
+            self._submitone(
+                b'greet',
+                [
+                    (
+                        b'name',
+                        mangle(name),
+                    )
+                ],
+            )
+        )
 
 
 # demo remote usage
--- a/tests/test-cbor.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-cbor.py	Fri Nov 27 17:03:29 2020 -0500
@@ -690,7 +690,12 @@
 
         self.assertEqual(
             list(cborutil.streamencodearrayfromiter(source)),
-            [b'\x9f', b'\x43', b'foo', b'\xff',],
+            [
+                b'\x9f',
+                b'\x43',
+                b'foo',
+                b'\xff',
+            ],
         )
 
         dest = b''.join(cborutil.streamencodearrayfromiter(source))
@@ -799,7 +804,11 @@
 class SetTests(TestCase):
     def testempty(self):
         self.assertEqual(
-            list(cborutil.streamencode(set())), [b'\xd9\x01\x02', b'\x80',]
+            list(cborutil.streamencode(set())),
+            [
+                b'\xd9\x01\x02',
+                b'\x80',
+            ],
         )
 
         self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
@@ -914,14 +923,26 @@
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\x80',])  # empty array
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',
+                b'\x81',
+                b'\x80',
+            ]
+        )  # empty array
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not allowed as set values'
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\xa0',])  # empty map
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',
+                b'\x81',
+                b'\xa0',
+            ]
+        )  # empty map
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not allowed as set values'
@@ -1059,7 +1080,13 @@
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xa1', b'\x80', b'\x43foo',])  # empty array
+        encoded = b''.join(
+            [
+                b'\xa1',
+                b'\x80',
+                b'\x43foo',
+            ]
+        )  # empty array
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not supported as map keys'
@@ -1260,7 +1287,10 @@
 
     def testpartialinput(self):
         encoded = b''.join(
-            [b'\x82', b'\x01',]  # array of 2 elements  # integer 1
+            [
+                b'\x82',
+                b'\x01',
+            ]  # array of 2 elements  # integer 1
         )
 
         with self.assertRaisesRegex(
--- a/tests/test-doctest.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-doctest.py	Fri Nov 27 17:03:29 2020 -0500
@@ -76,7 +76,9 @@
     sys.exit(0)
 
 files = subprocess.check_output(
-    "hg files --print0 \"%s\"" % fileset, shell=True, cwd=cwd,
+    "hg files --print0 \"%s\"" % fileset,
+    shell=True,
+    cwd=cwd,
 ).split(b'\0')
 
 if sys.version_info[0] >= 3:
--- a/tests/test-linelog.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-linelog.py	Fri Nov 27 17:03:29 2020 -0500
@@ -69,29 +69,60 @@
         ll.replacelines(1, 0, 0, 0, 3)
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         # Replace line 1 with a new line
         ll.replacelines(2, 1, 2, 1, 2)
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(2)],
-            [(1, 0), (2, 1), (1, 2),],
+            [
+                (1, 0),
+                (2, 1),
+                (1, 2),
+            ],
         )
         # delete a line out of 2
         ll.replacelines(3, 1, 2, 0, 0)
         self.assertEqual(
-            [(l.rev, l.linenum) for l in ll.annotate(3)], [(1, 0), (1, 2),]
+            [(l.rev, l.linenum) for l in ll.annotate(3)],
+            [
+                (1, 0),
+                (1, 2),
+            ],
         )
         # annotation of 1 is unchanged
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         ll.annotate(3)  # set internal state to revision 3
         start = ll.getoffset(0)
         end = ll.getoffset(1)
-        self.assertEqual(ll.getalllines(start, end), [(1, 0), (2, 1), (1, 1),])
-        self.assertEqual(ll.getalllines(), [(1, 0), (2, 1), (1, 1), (1, 2),])
+        self.assertEqual(
+            ll.getalllines(start, end),
+            [
+                (1, 0),
+                (2, 1),
+                (1, 1),
+            ],
+        )
+        self.assertEqual(
+            ll.getalllines(),
+            [
+                (1, 0),
+                (2, 1),
+                (1, 1),
+                (1, 2),
+            ],
+        )
 
     def testparseclinelogfile(self):
         # This data is what the replacements in testsimpleedits
@@ -116,14 +147,26 @@
         llc = linelog.linelog.fromdata(data)
         self.assertEqual(
             [(l.rev, l.linenum) for l in llc.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         self.assertEqual(
             [(l.rev, l.linenum) for l in llc.annotate(2)],
-            [(1, 0), (2, 1), (1, 2),],
+            [
+                (1, 0),
+                (2, 1),
+                (1, 2),
+            ],
         )
         self.assertEqual(
-            [(l.rev, l.linenum) for l in llc.annotate(3)], [(1, 0), (1, 2),]
+            [(l.rev, l.linenum) for l in llc.annotate(3)],
+            [
+                (1, 0),
+                (1, 2),
+            ],
         )
         # Check we emit the same bytecode.
         ll = linelog.linelog()
--- a/tests/test-lock.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-lock.py	Fri Nov 27 17:03:29 2020 -0500
@@ -73,7 +73,10 @@
             self._acquirecalled,
             called,
             'expected acquire to be %s but was actually %s'
-            % (self._tocalled(called), self._tocalled(self._acquirecalled),),
+            % (
+                self._tocalled(called),
+                self._tocalled(self._acquirecalled),
+            ),
         )
 
     def resetacquirefn(self):
@@ -84,7 +87,10 @@
             self._releasecalled,
             called,
             'expected release to be %s but was actually %s'
-            % (self._tocalled(called), self._tocalled(self._releasecalled),),
+            % (
+                self._tocalled(called),
+                self._tocalled(self._releasecalled),
+            ),
         )
 
     def assertpostreleasecalled(self, called):
@@ -104,7 +110,10 @@
             actual,
             exists,
             'expected lock to %s but actually did %s'
-            % (self._toexists(exists), self._toexists(actual),),
+            % (
+                self._toexists(exists),
+                self._toexists(actual),
+            ),
         )
 
     def _tocalled(self, called):
--- a/tests/test-manifest.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-manifest.py	Fri Nov 27 17:03:29 2020 -0500
@@ -22,7 +22,12 @@
 BIN_HASH_3 = binascii.unhexlify(HASH_3)
 A_SHORT_MANIFEST = (
     b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n' b'foo\0%(hash1)s%(flag1)s\n'
-) % {b'hash1': HASH_1, b'flag1': b'', b'hash2': HASH_2, b'flag2': b'l',}
+) % {
+    b'hash1': HASH_1,
+    b'flag1': b'',
+    b'hash2': HASH_2,
+    b'flag2': b'l',
+}
 
 A_DEEPER_MANIFEST = (
     b'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
@@ -265,9 +270,9 @@
         self.assertEqual(len(m), len(list(m)))
 
     def testMatchesMetadata(self):
-        '''Tests matches() for a few specific files to make sure that both
+        """Tests matches() for a few specific files to make sure that both
         the set of files as well as their flags and nodeids are correct in
-        the resulting manifest.'''
+        the resulting manifest."""
         m = self.parsemanifest(A_HUGE_MANIFEST)
 
         match = matchmod.exact([b'file1', b'file200', b'file300'])
@@ -281,9 +286,9 @@
         self.assertEqual(w, m2.text())
 
     def testMatchesNonexistentFile(self):
-        '''Tests matches() for a small set of specific files, including one
+        """Tests matches() for a small set of specific files, including one
         nonexistent file to make sure it only matches against existing files.
-        '''
+        """
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.exact(
@@ -296,8 +301,8 @@
         )
 
     def testMatchesNonexistentDirectory(self):
-        '''Tests matches() for a relpath match on a directory that doesn't
-        actually exist.'''
+        """Tests matches() for a relpath match on a directory that doesn't
+        actually exist."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -308,8 +313,7 @@
         self.assertEqual([], m2.keys())
 
     def testMatchesExactLarge(self):
-        '''Tests matches() for files matching a large list of exact files.
-        '''
+        """Tests matches() for files matching a large list of exact files."""
         m = self.parsemanifest(A_HUGE_MANIFEST)
 
         flist = m.keys()[80:300]
@@ -328,8 +332,8 @@
         self.assertEqual(m.keys(), m2.keys())
 
     def testMatchesDirectory(self):
-        '''Tests matches() on a relpath match on a directory, which should
-        match against all files within said directory.'''
+        """Tests matches() on a relpath match on a directory, which should
+        match against all files within said directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -353,9 +357,9 @@
         )
 
     def testMatchesExactPath(self):
-        '''Tests matches() on an exact match on a directory, which should
+        """Tests matches() on an exact match on a directory, which should
         result in an empty manifest because you can't perform an exact match
-        against a directory.'''
+        against a directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.exact([b'a/b'])
@@ -364,8 +368,8 @@
         self.assertEqual([], m2.keys())
 
     def testMatchesCwd(self):
-        '''Tests matches() on a relpath match with the current directory ('.')
-        when not in the root directory.'''
+        """Tests matches() on a relpath match with the current directory ('.')
+        when not in the root directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -389,8 +393,8 @@
         )
 
     def testMatchesWithPattern(self):
-        '''Tests matches() for files matching a pattern that reside
-        deeper than the specified directory.'''
+        """Tests matches() for files matching a pattern that reside
+        deeper than the specified directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(util.localpath(b'/repo'), b'', [b'a/b/*/*.txt'])
--- a/tests/test-match.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-match.py	Fri Nov 27 17:03:29 2020 -0500
@@ -91,7 +91,9 @@
 
     def testVisitdirRootfilesin(self):
         m = matchmod.match(
-            util.localpath(b'/repo'), b'', patterns=[b'rootfilesin:dir/subdir'],
+            util.localpath(b'/repo'),
+            b'',
+            patterns=[b'rootfilesin:dir/subdir'],
         )
         assert isinstance(m, matchmod.patternmatcher)
         self.assertFalse(m.visitdir(b'dir/subdir/x'))
@@ -103,7 +105,9 @@
 
     def testVisitchildrensetRootfilesin(self):
         m = matchmod.match(
-            util.localpath(b'/repo'), b'', patterns=[b'rootfilesin:dir/subdir'],
+            util.localpath(b'/repo'),
+            b'',
+            patterns=[b'rootfilesin:dir/subdir'],
         )
         assert isinstance(m, matchmod.patternmatcher)
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
--- a/tests/test-pathencode.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-pathencode.py	Fri Nov 27 17:03:29 2020 -0500
@@ -54,8 +54,8 @@
 
 
 def buildprobtable(fp, cmd='hg manifest tip'):
-    '''Construct and print a table of probabilities for path name
-    components.  The numbers are percentages.'''
+    """Construct and print a table of probabilities for path name
+    components.  The numbers are percentages."""
 
     counts = collections.defaultdict(lambda: 0)
     for line in os.popen(cmd).read().splitlines():
--- a/tests/test-remotefilelog-datapack.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-remotefilelog-datapack.py	Fri Nov 27 17:03:29 2020 -0500
@@ -82,8 +82,7 @@
         return self.datapackreader(path)
 
     def _testAddSingle(self, content):
-        """Test putting a simple blob into a pack and reading it out.
-        """
+        """Test putting a simple blob into a pack and reading it out."""
         filename = b"foo"
         node = self.getHash(content)
 
@@ -124,8 +123,7 @@
             self.assertEqual(content, chain[0][4])
 
     def testAddDeltas(self):
-        """Test putting multiple delta blobs into a pack and read the chain.
-        """
+        """Test putting multiple delta blobs into a pack and read the chain."""
         revisions = []
         filename = b"foo"
         lastnode = nullid
@@ -148,8 +146,7 @@
             self.assertEqual(content, chain[-i - 1][4])
 
     def testPackMany(self):
-        """Pack many related and unrelated objects.
-        """
+        """Pack many related and unrelated objects."""
         # Build a random pack file
         revisions = []
         blobs = {}
@@ -198,8 +195,7 @@
             self.assertEqual(parsedmeta, origmeta)
 
     def testGetMissing(self):
-        """Test the getmissing() api.
-        """
+        """Test the getmissing() api."""
         revisions = []
         filename = b"foo"
         lastnode = nullid
--- a/tests/test-remotefilelog-histpack.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-remotefilelog-histpack.py	Fri Nov 27 17:03:29 2020 -0500
@@ -76,8 +76,7 @@
         return historypack.historypack(path)
 
     def testAddSingle(self):
-        """Test putting a single entry into a pack and reading it out.
-        """
+        """Test putting a single entry into a pack and reading it out."""
         filename = b"foo"
         node = self.getFakeHash()
         p1 = self.getFakeHash()
@@ -140,8 +139,7 @@
             self.assertEqual(acopyfrom, copyfrom)
 
     def testPackMany(self):
-        """Pack many related and unrelated ancestors.
-        """
+        """Pack many related and unrelated ancestors."""
         # Build a random pack file
         allentries = {}
         ancestorcounts = {}
@@ -201,8 +199,7 @@
             self.assertEqual(acopyfrom, copyfrom)
 
     def testGetMissing(self):
-        """Test the getmissing() api.
-        """
+        """Test the getmissing() api."""
         revisions = []
         filename = b"foo"
         for i in range(10):
--- a/tests/test-revlog-raw.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-revlog-raw.py	Fri Nov 27 17:03:29 2020 -0500
@@ -87,10 +87,10 @@
 
 
 def appendrev(rlog, text, tr, isext=False, isdelta=True):
-    '''Append a revision. If isext is True, set the EXTSTORED flag so flag
+    """Append a revision. If isext is True, set the EXTSTORED flag so flag
     processor will be used (and rawtext is different from text). If isdelta is
     True, force the revision to be a delta, otherwise it's full text.
-    '''
+    """
     nextrev = len(rlog)
     p1 = rlog.node(nextrev - 1)
     p2 = node.nullid
@@ -111,7 +111,7 @@
 
 
 def addgroupcopy(rlog, tr, destname=b'_destrevlog.i', optimaldelta=True):
-    '''Copy revlog to destname using revlog.addgroup. Return the copied revlog.
+    """Copy revlog to destname using revlog.addgroup. Return the copied revlog.
 
     This emulates push or pull. They use changegroup. Changegroup requires
     repo to work. We don't have a repo, so a dummy changegroup is used.
@@ -122,7 +122,7 @@
 
     This exercises some revlog.addgroup (and revlog._addrevision(text=None))
     code path, which is not covered by "appendrev" alone.
-    '''
+    """
 
     class dummychangegroup(object):
         @staticmethod
@@ -174,10 +174,10 @@
 
 
 def lowlevelcopy(rlog, tr, destname=b'_destrevlog.i'):
-    '''Like addgroupcopy, but use the low level revlog._addrevision directly.
+    """Like addgroupcopy, but use the low level revlog._addrevision directly.
 
     It exercises some code paths that are hard to reach easily otherwise.
-    '''
+    """
     dlog = newrevlog(destname, recreate=True)
     for r in rlog:
         p1 = rlog.node(r - 1)
@@ -218,13 +218,13 @@
 
 
 def genbits(n):
-    '''Given a number n, generate (2 ** (n * 2) + 1) numbers in range(2 ** n).
+    """Given a number n, generate (2 ** (n * 2) + 1) numbers in range(2 ** n).
     i.e. the generated numbers have a width of n bits.
 
     The combination of two adjacent numbers will cover all possible cases.
     That is to say, given any x, y where both x, and y are in range(2 ** n),
     there is an x followed immediately by y in the generated sequence.
-    '''
+    """
     m = 2 ** n
 
     # Gray Code. See https://en.wikipedia.org/wiki/Gray_code
@@ -255,7 +255,7 @@
 
 
 def writecases(rlog, tr):
-    '''Write some revisions interested to the test.
+    """Write some revisions interested to the test.
 
     The test is interested in 3 properties of a revision:
 
@@ -281,7 +281,7 @@
     mentioned above.
 
     Return expected [(text, rawtext)].
-    '''
+    """
     result = []
     for i, x in enumerate(genbits(3)):
         isdelta, isext, isempty = bool(x & 1), bool(x & 2), bool(x & 4)
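
The adjacency-coverage property genbits() promises can be checked mechanically;
a sketch assuming genbits from this module:

    def covers_all_pairs(seq, n):
        pairs = {(a, b) for a, b in zip(seq, seq[1:])}
        return all(
            (x, y) in pairs for x in range(2 ** n) for y in range(2 ** n)
        )

    seq = list(genbits(3))
    assert len(seq) == 2 ** (3 * 2) + 1  # 65 numbers, as documented
    assert covers_all_pairs(seq, 3)
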
--- a/tests/test-rust-revlog.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-rust-revlog.py	Fri Nov 27 17:03:29 2020 -0500
@@ -17,7 +17,8 @@
 
 
 @unittest.skipIf(
-    rustext is None, "rustext module revlog relies on is not available",
+    rustext is None,
+    "rustext module revlog relies on is not available",
 )
 class RustRevlogIndexTest(revlogtesting.RevlogBasedTestBase):
     def test_heads(self):
--- a/tests/test-verify-repo-operations.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-verify-repo-operations.py	Fri Nov 27 17:03:29 2020 -0500
@@ -112,7 +112,13 @@
     min_size=1,
 ).map(lambda s: s.encode('utf-8'))
 
-extensions = st.sampled_from(('shelve', 'mq', 'blackbox',))
+extensions = st.sampled_from(
+    (
+        'shelve',
+        'mq',
+        'blackbox',
+    )
+)
 
 
 @contextmanager
@@ -233,7 +239,12 @@
                         t = r.read()
                         assert ext not in t, t
                     output = subprocess.check_output(
-                        [runtests, tf, "--local",], stderr=subprocess.STDOUT
+                        [
+                            runtests,
+                            tf,
+                            "--local",
+                        ],
+                        stderr=subprocess.STDOUT,
                     )
                     assert "Ran 1 test" in output, output
             except subprocess.CalledProcessError as e:
@@ -307,7 +318,8 @@
         return content
 
     @rule(
-        target=branches, name=safetext,
+        target=branches,
+        name=safetext,
     )
     def genbranch(self, name):
         return name
@@ -343,7 +355,10 @@
                 "$ python -c 'import binascii; "
                 "print(binascii.unhexlify(\"%s\"))' > %s"
             )
-            % (binascii.hexlify(content), pipes.quote(path),)
+            % (
+                binascii.hexlify(content),
+                pipes.quote(path),
+            )
         )
 
     @rule(path=paths)
@@ -354,7 +369,9 @@
     @rule(path=paths)
     def forgetpath(self, path):
         if os.path.exists(path):
-            with acceptableerrors("file is already untracked",):
+            with acceptableerrors(
+                "file is already untracked",
+            ):
                 self.hg("forget", "--", path)
 
     @rule(s=st.none() | st.integers(0, 100))
@@ -420,7 +437,9 @@
         return self.configperrepo.setdefault(self.currentrepo, {})
 
     @rule(
-        target=repos, source=repos, name=reponames,
+        target=repos,
+        source=repos,
+        name=reponames,
     )
     def clone(self, source, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -430,7 +449,8 @@
         return name
 
     @rule(
-        target=repos, name=reponames,
+        target=repos,
+        name=reponames,
     )
     def fresh(self, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -453,14 +473,16 @@
     @rule()
     def pull(self, repo=repos):
         with acceptableerrors(
-            "repository default not found", "repository is unrelated",
+            "repository default not found",
+            "repository is unrelated",
         ):
             self.hg("pull")
 
     @rule(newbranch=st.booleans())
     def push(self, newbranch):
         with acceptableerrors(
-            "default repository not configured", "no changes found",
+            "default repository not configured",
+            "no changes found",
         ):
             if newbranch:
                 self.hg("push", "--new-branch")
@@ -507,7 +529,8 @@
     @rule(branch=branches, clean=st.booleans())
     def update(self, branch, clean):
         with acceptableerrors(
-            'unknown revision', 'parse error',
+            'unknown revision',
+            'parse error',
         ):
             if clean:
                 self.hg("update", "-C", "--", branch)
@@ -570,7 +593,12 @@
 
 
 settings.register_profile(
-    'default', settings(timeout=300, stateful_step_count=50, max_examples=10,)
+    'default',
+    settings(
+        timeout=300,
+        stateful_step_count=50,
+        max_examples=10,
+    ),
 )
 
 settings.register_profile(
--- a/tests/test-wireproto-clientreactor.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-wireproto-clientreactor.py	Fri Nov 27 17:03:29 2020 -0500
@@ -486,13 +486,19 @@
 
         response1 = b''.join(
             cborutil.streamencode(
-                {b'status': b'ok', b'extra': b'response1' * 10,}
+                {
+                    b'status': b'ok',
+                    b'extra': b'response1' * 10,
+                }
             )
         )
 
         response2 = b''.join(
             cborutil.streamencode(
-                {b'status': b'error', b'extra': b'response2' * 10,}
+                {
+                    b'status': b'error',
+                    b'extra': b'response2' * 10,
+                }
             )
         )
 
@@ -678,13 +684,19 @@
 
         response1 = b''.join(
             cborutil.streamencode(
-                {b'status': b'ok', b'extra': b'response1' * 10,}
+                {
+                    b'status': b'ok',
+                    b'extra': b'response1' * 10,
+                }
             )
         )
 
         response2 = b''.join(
             cborutil.streamencode(
-                {b'status': b'error', b'extra': b'response2' * 10,}
+                {
+                    b'status': b'error',
+                    b'extra': b'response2' * 10,
+                }
             )
         )
 
--- a/tests/test-wireproto-framing.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-wireproto-framing.py	Fri Nov 27 17:03:29 2020 -0500
@@ -192,7 +192,12 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo', [], []), (b'bar', [], []),]
+                stream,
+                1,
+                [
+                    (b'foo', [], []),
+                    (b'bar', [], []),
+                ],
             )
         )
 
@@ -210,7 +215,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s', [b'val1'], []),]
+                stream,
+                1,
+                [
+                    (b'foo %s', [b'val1'], []),
+                ],
             )
         )
 
@@ -228,7 +237,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s %s', [b'val', b'value'], []),]
+                stream,
+                1,
+                [
+                    (b'foo %s %s', [b'val', b'value'], []),
+                ],
             )
         )
 
@@ -246,7 +259,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo', [], [b'label']),]
+                stream,
+                1,
+                [
+                    (b'foo', [], [b'label']),
+                ],
             )
         )
 
@@ -264,7 +281,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s', [b'arg'], [b'label']),]
+                stream,
+                1,
+                [
+                    (b'foo %s', [b'arg'], [b'label']),
+                ],
             )
         )
 
--- a/tests/test-wireproto-serverreactor.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-wireproto-serverreactor.py	Fri Nov 27 17:03:29 2020 -0500
@@ -202,7 +202,10 @@
             {
                 b'requestid': 1,
                 b'command': b'command',
-                b'args': {b'key': b'val', b'foo': b'bar',},
+                b'args': {
+                    b'key': b'val',
+                    b'foo': b'bar',
+                },
                 b'redirect': None,
                 b'data': b'value1value2',
             },
@@ -356,7 +359,10 @@
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
         self.assertEqual(
-            results[1][1], {b'message': b'request with ID 1 already received',}
+            results[1][1],
+            {
+                b'message': b'request with ID 1 already received',
+            },
         )
 
     def testinterleavedcommands(self):
@@ -364,7 +370,10 @@
             cborutil.streamencode(
                 {
                     b'name': b'command1',
-                    b'args': {b'foo': b'bar', b'key1': b'val',},
+                    b'args': {
+                        b'foo': b'bar',
+                        b'key1': b'val',
+                    },
                 }
             )
         )
@@ -372,7 +381,10 @@
             cborutil.streamencode(
                 {
                     b'name': b'command3',
-                    b'args': {b'biz': b'baz', b'key': b'val',},
+                    b'args': {
+                        b'biz': b'baz',
+                        b'key': b'val',
+                    },
                 }
             )
         )
@@ -461,7 +473,10 @@
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
         self.assertEqual(
-            results[1][1], {b'message': b'command data frame without flags',}
+            results[1][1],
+            {
+                b'message': b'command data frame without flags',
+            },
         )
 
     def testframefornonreceivingrequest(self):
@@ -651,7 +666,10 @@
 
         self.assertaction(results[0], b'error')
         self.assertEqual(
-            results[0][1], {b'message': b'request with ID 1 is already active',}
+            results[0][1],
+            {
+                b'message': b'request with ID 1 is already active',
+            },
         )
 
     def testduplicaterequestonactivecommandnosend(self):
@@ -668,7 +686,10 @@
         results = list(sendcommandframes(reactor, instream, 1, b'command1', {}))
         self.assertaction(results[0], b'error')
         self.assertEqual(
-            results[0][1], {b'message': b'request with ID 1 is already active',}
+            results[0][1],
+            {
+                b'message': b'request with ID 1 is already active',
+            },
         )
 
     def testduplicaterequestaftersend(self):
@@ -763,7 +784,9 @@
 
         data = b''.join(
             cborutil.streamencode(
-                {b'contentencodings': [b'value1', b'value2'],}
+                {
+                    b'contentencodings': [b'value1', b'value2'],
+                }
             )
         )
 
@@ -811,7 +834,10 @@
         )
         self.assertaction(result, b'error')
         self.assertEqual(
-            result[1], {b'message': b'expected command request frame; got 8',}
+            result[1],
+            {
+                b'message': b'expected command request frame; got 8',
+            },
         )
 
 
--- a/tests/test-wsgirequest.py	Fri Nov 27 17:00:00 2020 -0500
+++ b/tests/test-wsgirequest.py	Fri Nov 27 17:03:29 2020 -0500
@@ -49,7 +49,12 @@
         self.assertEqual(len(r.headers), 0)
 
     def testcustomport(self):
-        r = parse(DEFAULT_ENV, extra={'SERVER_PORT': '8000',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SERVER_PORT': '8000',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver:8000')
         self.assertEqual(r.baseurl, b'http://testserver:8000')
@@ -58,7 +63,10 @@
 
         r = parse(
             DEFAULT_ENV,
-            extra={'SERVER_PORT': '4000', 'wsgi.url_scheme': 'https',},
+            extra={
+                'SERVER_PORT': '4000',
+                'wsgi.url_scheme': 'https',
+            },
         )
 
         self.assertEqual(r.url, b'https://testserver:4000')
@@ -67,7 +75,12 @@
         self.assertEqual(r.advertisedbaseurl, b'https://testserver:4000')
 
     def testhttphost(self):
-        r = parse(DEFAULT_ENV, extra={'HTTP_HOST': 'altserver',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'HTTP_HOST': 'altserver',
+            },
+        )
 
         self.assertEqual(r.url, b'http://altserver')
         self.assertEqual(r.baseurl, b'http://altserver')
@@ -75,7 +88,12 @@
         self.assertEqual(r.advertisedbaseurl, b'http://testserver')
 
     def testscriptname(self):
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -85,7 +103,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '/script',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '/script',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/script')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -95,7 +118,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '/multiple words',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '/multiple words',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/multiple%20words')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -106,7 +134,12 @@
         self.assertIsNone(r.dispatchpath)
 
     def testpathinfo(self):
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -116,7 +149,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertEqual(r.dispatchpath, b'')
 
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '/pathinfo',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '/pathinfo',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/pathinfo')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -126,7 +164,12 @@
         self.assertEqual(r.dispatchparts, [b'pathinfo'])
         self.assertEqual(r.dispatchpath, b'pathinfo')
 
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '/one/two/',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '/one/two/',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/one/two/')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -139,7 +182,10 @@
     def testscriptandpathinfo(self):
         r = parse(
             DEFAULT_ENV,
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/pathinfo',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/pathinfo',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/script/pathinfo')
@@ -208,7 +254,9 @@
             parse(
                 DEFAULT_ENV,
                 reponame=b'repo',
-                extra={'PATH_INFO': '/pathinfo',},
+                extra={
+                    'PATH_INFO': '/pathinfo',
+                },
             )
 
         with self.assertRaisesRegex(
@@ -217,13 +265,17 @@
             parse(
                 DEFAULT_ENV,
                 reponame=b'repo',
-                extra={'PATH_INFO': '/repoextra/path',},
+                extra={
+                    'PATH_INFO': '/repoextra/path',
+                },
             )
 
         r = parse(
             DEFAULT_ENV,
             reponame=b'repo',
-            extra={'PATH_INFO': '/repo/path1/path2',},
+            extra={
+                'PATH_INFO': '/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/repo/path1/path2')
@@ -238,7 +290,9 @@
         r = parse(
             DEFAULT_ENV,
             reponame=b'prefix/repo',
-            extra={'PATH_INFO': '/prefix/repo/path1/path2',},
+            extra={
+                'PATH_INFO': '/prefix/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/prefix/repo/path1/path2')
@@ -307,7 +361,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -347,7 +403,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altpath',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -365,7 +423,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altpath/',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -383,7 +443,10 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -399,7 +462,10 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altroot',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -418,7 +484,10 @@
             DEFAULT_ENV,
             reponame=b'repo',
             altbaseurl=b'http://altserver/altroot',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/repo/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/script/repo/path1/path2')