merge with stable

author      Pulkit Goyal <7895pulkit@gmail.com>
date        Tue, 19 Jan 2021 21:48:43 +0530
branch      stable
tag         5.7rc0
changeset   46338:9da65e3cf370
parent      46337:6e81446bf1d9 (diff)
parent      46147:b308d750de82 (current diff)
child       46339:8814c0d20245
--- a/.hgignore	Thu Dec 24 15:58:08 2020 +0900
+++ b/.hgignore	Tue Jan 19 21:48:43 2021 +0530
@@ -26,6 +26,7 @@
 tests/hypothesis-generated
 tests/annotated
 tests/exceptions
+tests/python3
 tests/*.err
 tests/htmlcov
 build
@@ -49,6 +50,7 @@
 .DS_Store
 tags
 cscope.*
+.vscode/*
 .idea/*
 .asv/*
 .pytype/*
@@ -63,6 +65,10 @@
 # Generated wheels
 wheelhouse/
 
+syntax: rootglob
+# See Profiling in rust/README.rst
+.cargo/config
+
 syntax: regexp
 ^\.pc/
 ^\.(pydev)?project
--- a/Makefile	Thu Dec 24 15:58:08 2020 +0900
+++ b/Makefile	Tue Jan 19 21:48:43 2021 +0530
@@ -5,7 +5,18 @@
 # % make PREFIX=/opt/ install
 
 export PREFIX=/usr/local
-PYTHON?=python
+
+# Default to Python 3.
+#
+# Windows ships Python 3 as `python.exe`, which may not be on PATH.  py.exe is.
+ifeq ($(OS),Windows_NT)
+PYTHON?=py -3
+else
+PYTHON?=python3
+endif
+
+PYOXIDIZER?=pyoxidizer
+
 $(eval HGROOT := $(shell pwd))
 HGPYTHONS ?= $(HGROOT)/build/pythons
 PURE=
@@ -181,27 +192,21 @@
 # Packaging targets
 
 packaging_targets := \
-  centos5 \
-  centos6 \
   centos7 \
   centos8 \
   deb \
-  docker-centos5 \
-  docker-centos6 \
   docker-centos7 \
   docker-centos8 \
   docker-debian-bullseye \
   docker-debian-buster \
   docker-debian-stretch \
   docker-fedora \
-  docker-ubuntu-trusty \
-  docker-ubuntu-trusty-ppa \
   docker-ubuntu-xenial \
   docker-ubuntu-xenial-ppa \
-  docker-ubuntu-artful \
-  docker-ubuntu-artful-ppa \
   docker-ubuntu-bionic \
   docker-ubuntu-bionic-ppa \
+  docker-ubuntu-focal \
+  docker-ubuntu-focal-ppa \
   fedora \
   linux-wheels \
   linux-wheels-x86_64 \
@@ -250,9 +255,12 @@
 	  --resources contrib/packaging/macosx/ \
 	  "$${OUTPUTDIR:-dist/}"/Mercurial-"$${HGVER}"-macosx"$${OSXVER}".pkg
 
+pyoxidizer:
+	$(PYOXIDIZER) build --path ./rust/hgcli --release
+
 .PHONY: help all local build doc cleanbutpackages clean install install-bin \
 	install-doc install-home install-home-bin install-home-doc \
 	dist dist-notests check tests rust-tests check-code format-c \
-	update-pot \
+	update-pot pyoxidizer \
 	$(packaging_targets) \
 	osx
--- a/contrib/automation/hgautomation/aws.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/automation/hgautomation/aws.py	Tue Jan 19 21:48:43 2021 +0530
@@ -152,7 +152,11 @@
 
 
 IAM_INSTANCE_PROFILES = {
-    'ephemeral-ec2-1': {'roles': ['ephemeral-ec2-role-1',],}
+    'ephemeral-ec2-1': {
+        'roles': [
+            'ephemeral-ec2-role-1',
+        ],
+    }
 }
 
 
@@ -469,10 +473,22 @@
 
     images = ec2resource.images.filter(
         Filters=[
-            {'Name': 'owner-id', 'Values': [owner_id],},
-            {'Name': 'state', 'Values': ['available'],},
-            {'Name': 'image-type', 'Values': ['machine'],},
-            {'Name': 'name', 'Values': [name],},
+            {
+                'Name': 'owner-id',
+                'Values': [owner_id],
+            },
+            {
+                'Name': 'state',
+                'Values': ['available'],
+            },
+            {
+                'Name': 'image-type',
+                'Values': ['machine'],
+            },
+            {
+                'Name': 'name',
+                'Values': [name],
+            },
         ]
     )
 
@@ -519,10 +535,13 @@
         print('adding security group %s' % actual)
 
         group_res = ec2resource.create_security_group(
-            Description=group['description'], GroupName=actual,
+            Description=group['description'],
+            GroupName=actual,
         )
 
-        group_res.authorize_ingress(IpPermissions=group['ingress'],)
+        group_res.authorize_ingress(
+            IpPermissions=group['ingress'],
+        )
 
         security_groups[name] = group_res
 
@@ -614,7 +633,10 @@
     while True:
         res = ssmclient.describe_instance_information(
             Filters=[
-                {'Key': 'InstanceIds', 'Values': [i.id for i in instances],},
+                {
+                    'Key': 'InstanceIds',
+                    'Values': [i.id for i in instances],
+                },
             ],
         )
 
@@ -636,7 +658,9 @@
         InstanceIds=[i.id for i in instances],
         DocumentName=document_name,
         Parameters=parameters,
-        CloudWatchOutputConfig={'CloudWatchOutputEnabled': True,},
+        CloudWatchOutputConfig={
+            'CloudWatchOutputEnabled': True,
+        },
     )
 
     command_id = res['Command']['CommandId']
@@ -645,7 +669,8 @@
         while True:
             try:
                 res = ssmclient.get_command_invocation(
-                    CommandId=command_id, InstanceId=instance.id,
+                    CommandId=command_id,
+                    InstanceId=instance.id,
                 )
             except botocore.exceptions.ClientError as e:
                 if e.response['Error']['Code'] == 'InvocationDoesNotExist':
@@ -799,19 +824,32 @@
     instance.stop()
 
     ec2client.get_waiter('instance_stopped').wait(
-        InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+        InstanceIds=[instance.id],
+        WaiterConfig={
+            'Delay': 5,
+        },
     )
     print('%s is stopped' % instance.id)
 
-    image = instance.create_image(Name=name, Description=description,)
+    image = instance.create_image(
+        Name=name,
+        Description=description,
+    )
 
     image.create_tags(
-        Tags=[{'Key': 'HGIMAGEFINGERPRINT', 'Value': fingerprint,},]
+        Tags=[
+            {
+                'Key': 'HGIMAGEFINGERPRINT',
+                'Value': fingerprint,
+            },
+        ]
     )
 
     print('waiting for image %s' % image.id)
 
-    ec2client.get_waiter('image_available').wait(ImageIds=[image.id],)
+    ec2client.get_waiter('image_available').wait(
+        ImageIds=[image.id],
+    )
 
     print('image %s available as %s' % (image.id, image.name))
 
@@ -837,7 +875,9 @@
         ssh_username = 'admin'
     elif distro == 'debian10':
         image = find_image(
-            ec2resource, DEBIAN_ACCOUNT_ID_2, 'debian-10-amd64-20190909-10',
+            ec2resource,
+            DEBIAN_ACCOUNT_ID_2,
+            'debian-10-amd64-20190909-10',
         )
         ssh_username = 'admin'
     elif distro == 'ubuntu18.04':
@@ -1066,7 +1106,9 @@
 
 
 def ensure_windows_dev_ami(
-    c: AWSConnection, prefix='hg-', base_image_name=WINDOWS_BASE_IMAGE_NAME,
+    c: AWSConnection,
+    prefix='hg-',
+    base_image_name=WINDOWS_BASE_IMAGE_NAME,
 ):
     """Ensure Windows Development AMI is available and up-to-date.
 
@@ -1190,7 +1232,9 @@
             ssmclient,
             [instance],
             'AWS-RunPowerShellScript',
-            {'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),},
+            {
+                'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'),
+            },
         )
 
         # Reboot so all updates are fully applied.
@@ -1202,7 +1246,10 @@
         print('rebooting instance %s' % instance.id)
         instance.stop()
         ec2client.get_waiter('instance_stopped').wait(
-            InstanceIds=[instance.id], WaiterConfig={'Delay': 5,}
+            InstanceIds=[instance.id],
+            WaiterConfig={
+                'Delay': 5,
+            },
         )
 
         instance.start()
--- a/contrib/automation/hgautomation/cli.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/automation/hgautomation/cli.py	Tue Jan 19 21:48:43 2021 +0530
@@ -282,16 +282,20 @@
         help='Path for local state files',
     )
     parser.add_argument(
-        '--aws-region', help='AWS region to use', default='us-west-2',
+        '--aws-region',
+        help='AWS region to use',
+        default='us-west-2',
     )
 
     subparsers = parser.add_subparsers()
 
     sp = subparsers.add_parser(
-        'bootstrap-linux-dev', help='Bootstrap Linux development environments',
+        'bootstrap-linux-dev',
+        help='Bootstrap Linux development environments',
     )
     sp.add_argument(
-        '--distros', help='Comma delimited list of distros to bootstrap',
+        '--distros',
+        help='Comma delimited list of distros to bootstrap',
     )
     sp.add_argument(
         '--parallel',
@@ -312,13 +316,17 @@
     sp.set_defaults(func=bootstrap_windows_dev)
 
     sp = subparsers.add_parser(
-        'build-all-windows-packages', help='Build all Windows packages',
+        'build-all-windows-packages',
+        help='Build all Windows packages',
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use',
+        '--version',
+        help='Mercurial version string to use',
     )
     sp.add_argument(
         '--base-image-name',
@@ -328,7 +336,8 @@
     sp.set_defaults(func=build_all_windows_packages)
 
     sp = subparsers.add_parser(
-        'build-inno', help='Build Inno Setup installer(s)',
+        'build-inno',
+        help='Build Inno Setup installer(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -346,10 +355,13 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -359,7 +371,8 @@
     sp.set_defaults(func=build_inno)
 
     sp = subparsers.add_parser(
-        'build-windows-wheel', help='Build Windows wheel(s)',
+        'build-windows-wheel',
+        help='Build Windows wheel(s)',
     )
     sp.add_argument(
         '--python-version',
@@ -376,7 +389,9 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
         '--base-image-name',
@@ -402,10 +417,13 @@
         default=['x64'],
     )
     sp.add_argument(
-        '--revision', help='Mercurial revision to build', default='.',
+        '--revision',
+        help='Mercurial revision to build',
+        default='.',
     )
     sp.add_argument(
-        '--version', help='Mercurial version string to use in installer',
+        '--version',
+        help='Mercurial version string to use in installer',
     )
     sp.add_argument(
         '--base-image-name',
@@ -421,11 +439,15 @@
     sp.set_defaults(func=terminate_ec2_instances)
 
     sp = subparsers.add_parser(
-        'purge-ec2-resources', help='Purge all EC2 resources managed by us',
+        'purge-ec2-resources',
+        help='Purge all EC2 resources managed by us',
     )
     sp.set_defaults(func=purge_ec2_resources)
 
-    sp = subparsers.add_parser('run-tests-linux', help='Run tests on Linux',)
+    sp = subparsers.add_parser(
+        'run-tests-linux',
+        help='Run tests on Linux',
+    )
     sp.add_argument(
         '--distro',
         help='Linux distribution to run tests on',
@@ -468,10 +490,13 @@
     sp.set_defaults(func=run_tests_linux)
 
     sp = subparsers.add_parser(
-        'run-tests-windows', help='Run tests on Windows',
+        'run-tests-windows',
+        help='Run tests on Windows',
     )
     sp.add_argument(
-        '--instance-type', help='EC2 instance type to use', default='t3.medium',
+        '--instance-type',
+        help='EC2 instance type to use',
+        default='t3.medium',
     )
     sp.add_argument(
         '--python-version',
@@ -486,7 +511,8 @@
         default='x64',
     )
     sp.add_argument(
-        '--test-flags', help='Extra command line flags to pass to run-tests.py',
+        '--test-flags',
+        help='Extra command line flags to pass to run-tests.py',
     )
     sp.add_argument(
         '--base-image-name',
@@ -514,10 +540,12 @@
         help='Skip uploading to www.mercurial-scm.org',
     )
     sp.add_argument(
-        '--ssh-username', help='SSH username for mercurial-scm.org',
+        '--ssh-username',
+        help='SSH username for mercurial-scm.org',
     )
     sp.add_argument(
-        'version', help='Mercurial version string to locate local packages',
+        'version',
+        help='Mercurial version string to locate local packages',
     )
     sp.set_defaults(func=publish_windows_artifacts)
 
--- a/contrib/automation/hgautomation/windows.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/automation/hgautomation/windows.py	Tue Jan 19 21:48:43 2021 +0530
@@ -362,7 +362,8 @@
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_INNO_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -427,7 +428,8 @@
             raise Exception("unhandled arch: %s" % arch)
 
         ps = BUILD_WIX_PYTHON3.format(
-            pyoxidizer_target=target_triple, version=version,
+            pyoxidizer_target=target_triple,
+            version=version,
         )
     else:
         extra_args = []
@@ -460,7 +462,10 @@
 
     python_path = 'python%s-%s' % (python_version.replace('.', ''), arch)
 
-    ps = RUN_TESTS.format(python_path=python_path, test_flags=test_flags or '',)
+    ps = RUN_TESTS.format(
+        python_path=python_path,
+        test_flags=test_flags or '',
+    )
 
     run_powershell(winrm_client, ps)
 
--- a/contrib/byteify-strings.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/byteify-strings.py	Tue Jan 19 21:48:43 2021 +0530
@@ -213,15 +213,19 @@
             fn = t.string
 
             # *attr() builtins don't accept byte strings to 2nd argument.
-            if fn in (
-                'getattr',
-                'setattr',
-                'hasattr',
-                'safehasattr',
-                'wrapfunction',
-                'wrapclass',
-                'addattr',
-            ) and (opts['allow-attr-methods'] or not _isop(i - 1, '.')):
+            if (
+                fn
+                in (
+                    'getattr',
+                    'setattr',
+                    'hasattr',
+                    'safehasattr',
+                    'wrapfunction',
+                    'wrapclass',
+                    'addattr',
+                )
+                and (opts['allow-attr-methods'] or not _isop(i - 1, '.'))
+            ):
                 arg1idx = _findargnofcall(1)
                 if arg1idx is not None:
                     _ensuresysstr(arg1idx)
--- a/contrib/check-code.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/check-code.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # check-code - a style and portability checker for Mercurial
 #
@@ -620,13 +620,17 @@
 ]
 
 inutilpats = [
-    [(r'\bui\.', "don't use ui in util"),],
+    [
+        (r'\bui\.', "don't use ui in util"),
+    ],
     # warnings
     [],
 ]
 
 inrevlogpats = [
-    [(r'\brepo\.', "don't use repo in revlog"),],
+    [
+        (r'\brepo\.', "don't use repo in revlog"),
+    ],
     # warnings
     [],
 ]
--- a/contrib/check-commit	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/check-commit	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright 2014 Matt Mackall <mpm@selenic.com>
 #
--- a/contrib/check-config.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/check-config.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # check-config - a config flag documentation checker for Mercurial
 #
--- a/contrib/check-py3-compat.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/check-py3-compat.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # check-py3-compat - check Python 3 compatibility of Mercurial files
 #
--- a/contrib/chg/chg.c	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/chg/chg.c	Tue Jan 19 21:48:43 2021 +0530
@@ -451,9 +451,10 @@
  */
 static int isunsupported(int argc, const char *argv[])
 {
-	enum { SERVE = 1,
-	       DAEMON = 2,
-	       SERVEDAEMON = SERVE | DAEMON,
+	enum {
+		SERVE = 1,
+		DAEMON = 2,
+		SERVEDAEMON = SERVE | DAEMON,
 	};
 	unsigned int state = 0;
 	int i;
--- a/contrib/chg/hgclient.c	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/chg/hgclient.c	Tue Jan 19 21:48:43 2021 +0530
@@ -26,15 +26,16 @@
 #include "procutil.h"
 #include "util.h"
 
-enum { CAP_GETENCODING = 0x0001,
-       CAP_RUNCOMMAND = 0x0002,
-       /* cHg extension: */
-       CAP_ATTACHIO = 0x0100,
-       CAP_CHDIR = 0x0200,
-       CAP_SETENV = 0x0800,
-       CAP_SETUMASK2 = 0x1000,
-       CAP_VALIDATE = 0x2000,
-       CAP_SETPROCNAME = 0x4000,
+enum {
+	CAP_GETENCODING = 0x0001,
+	CAP_RUNCOMMAND = 0x0002,
+	/* cHg extension: */
+	CAP_ATTACHIO = 0x0100,
+	CAP_CHDIR = 0x0200,
+	CAP_SETENV = 0x0800,
+	CAP_SETUMASK2 = 0x1000,
+	CAP_VALIDATE = 0x2000,
+	CAP_SETPROCNAME = 0x4000,
 };
 
 typedef struct {
--- a/contrib/debugcmdserver.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/debugcmdserver.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Dumps output generated by Mercurial's command server in a formatted style to a
 # given file or stderr if '-' is specified. Output is also written in its raw
--- a/contrib/dumprevlog	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/dumprevlog	Tue Jan 19 21:48:43 2021 +0530
@@ -1,13 +1,13 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # Dump revlogs as raw data stream
 # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump
 
 from __future__ import absolute_import, print_function
 
 import sys
+from mercurial.node import hex
 from mercurial import (
     encoding,
-    node,
     pycompat,
     revlog,
 )
@@ -38,9 +38,9 @@
         n = r.node(i)
         p = r.parents(n)
         d = r.revision(n)
-        printb(b"node: %s" % node.hex(n))
+        printb(b"node: %s" % hex(n))
         printb(b"linkrev: %d" % r.linkrev(i))
-        printb(b"parents: %s %s" % (node.hex(p[0]), node.hex(p[1])))
+        printb(b"parents: %s %s" % (hex(p[0]), hex(p[1])))
         printb(b"length: %d" % len(d))
         printb(b"-start-")
         printb(d)
--- a/contrib/fuzz/Makefile	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/fuzz/Makefile	Tue Jan 19 21:48:43 2021 +0530
@@ -11,7 +11,7 @@
 LIB_FUZZING_ENGINE ?= standalone_fuzz_target_runner.o
 
 PYTHON_CONFIG ?= $$OUT/sanpy/bin/python-config
-PYTHON_CONFIG_FLAGS ?= --ldflags
+PYTHON_CONFIG_FLAGS ?= --ldflags --embed
 
 CXXFLAGS += -Wno-deprecated-register
 
--- a/contrib/heptapod-ci.yml	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/heptapod-ci.yml	Tue Jan 19 21:48:43 2021 +0530
@@ -1,20 +1,24 @@
-image: octobus/ci-mercurial-core
+stages:
+  - tests
+  - phabricator
 
-# The runner made a clone as root.
-# We make a new clone owned by user used to run the step.
-before_script:
-    - hg clone . /tmp/mercurial-ci/ --noupdate
-    - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
-    - cd /tmp/mercurial-ci/rust/rhg
-    - cargo build
-    - cd /tmp/mercurial-ci/
-    - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
+image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG
 
 variables:
     PYTHON: python
     TEST_HGMODULEPOLICY: "allow"
 
 .runtests_template: &runtests
+    stage: tests
+    # The runner made a clone as root.
+    # We make a new clone owned by user used to run the step.
+    before_script:
+      - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no
+      - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'`
+      - cd /tmp/mercurial-ci/rust/rhg
+      - cargo build
+      - cd /tmp/mercurial-ci/
+      - ls -1 tests/test-check-*.* > /tmp/check-tests.txt
     script:
         - echo "python used, $PYTHON"
         - echo "$RUNTEST_ARGS"
@@ -32,20 +36,21 @@
         PYTHON: python3
 
 rust-cargo-test-py2: &rust_cargo_test
+    stage: tests
     script:
         - echo "python used, $PYTHON"
         - make rust-tests
 
 rust-cargo-test-py3:
+    stage: tests
     <<: *rust_cargo_test
     variables:
         PYTHON: python3
 
-rust-cargo-test-py3-dirstate-tree:
-    <<: *rust_cargo_test
-    variables:
-        PYTHON: python3
-        HG_RUST_FEATURES: dirstate-tree
+phabricator-refresh:
+    stage: phabricator
+    script:
+      - "./contrib/phab-refresh-stack.sh --comment \":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\""
 
 test-py2:
     <<: *runtests
@@ -88,17 +93,15 @@
         PYTHON: python3
         TEST_HGMODULEPOLICY: "rust+c"
 
-test-py3-rust-dirstate-tree:
-    <<: *runtests
-    variables:
-        HGWITHRUSTEXT: cpython
-        RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt"
-        PYTHON: python3
-        TEST_HGMODULEPOLICY: "rust+c"
-        HG_RUST_FEATURES: "dirstate-tree"
-
 test-py2-chg:
     <<: *runtests
     variables:
         RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
         TEST_HGMODULEPOLICY: "c"
+
+test-py3-chg:
+    <<: *runtests
+    variables:
+        PYTHON: python3
+        RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg"
+        TEST_HGMODULEPOLICY: "c"
--- a/contrib/hg-ssh	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/hg-ssh	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de>
 #
--- a/contrib/hgperf	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/hgperf	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # hgperf - measure performance of Mercurial commands
 #
--- a/contrib/hgweb.fcgi	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/hgweb.fcgi	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # An example FastCGI script for use with flup, edit as necessary
 
--- a/contrib/import-checker.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/import-checker.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import, print_function
 
--- a/contrib/packaging/Makefile	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/Makefile	Tue Jan 19 21:48:43 2021 +0530
@@ -9,19 +9,18 @@
   xenial \
   bionic \
   cosmic \
-  disco
+  focal
 
 FEDORA_RELEASE := 31
 
 CENTOS_RELEASES := \
-  5 \
-  6 \
   7 \
   8
 
 # Build a Python for these CentOS releases.
-CENTOS_WITH_PYTHON_RELEASES := 5 6
-CENTOS_WITH_NONVERSIONED_PYTHON := 5 6 7
+CENTOS_WITH_PYTHON_RELEASES :=
+CENTOS_WITH_NONVERSIONED_PYTHON :=
+CENTOS_WITH_36_DOCUTILS := 7
 
 help:
 	@echo 'Packaging Make Targets'
@@ -112,13 +111,13 @@
 .PHONY: centos$(1)
 centos$(1):
 	mkdir -p $$(HGROOT)/packages/centos$(1)
-	./buildrpm $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython,$$(if $$(filter $(1),$$(CENTOS_WITH_NONVERSIONED_PYTHON)),--python python,))
+	./buildrpm $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython,$$(if $$(filter $(1),$$(CENTOS_WITH_NONVERSIONED_PYTHON)),--python python,))$$(if $$(filter $(1),$$(CENTOS_WITH_36_DOCUTILS)), --docutilspackage python36-docutils,)
 	cp $$(HGROOT)/contrib/packaging/rpmbuild/RPMS/*/* $$(HGROOT)/packages/centos$(1)
 	cp $$(HGROOT)/contrib/packaging/rpmbuild/SRPMS/* $$(HGROOT)/packages/centos$(1)
 
 .PHONY: docker-centos$(1)
 docker-centos$(1):
-	./dockerrpm centos$(1) $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython,$$(if $$(filter $(1),$$(CENTOS_WITH_NONVERSIONED_PYTHON)),--python python,))
+	./dockerrpm centos$(1) $$(if $$(filter $(1),$$(CENTOS_WITH_PYTHON_RELEASES)),--withpython,$$(if $$(filter $(1),$$(CENTOS_WITH_NONVERSIONED_PYTHON)),--python python,))$$(if $$(filter $(1),$$(CENTOS_WITH_36_DOCUTILS)), --docutilspackage python36-docutils,)
 
 endef
 
--- a/contrib/packaging/buildrpm	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/buildrpm	Tue Jan 19 21:48:43 2021 +0530
@@ -7,6 +7,7 @@
 BUILD=1
 RPMBUILDDIR="$PWD/rpmbuild"
 PYTHONEXE=python3
+DOCUTILSPACKAGE=python3-docutils
 
 while [ "$1" ]; do
     case "$1" in
@@ -25,6 +26,11 @@
         PYTHONMD5=f1a2ace631068444831d01485466ece0
         PYTHONEXE=python
         ;;
+    --docutilspackage)
+        shift
+        DOCUTILSPACKAGE="$1"
+        shift
+        ;;
     --rpmbuilddir )
         shift
         RPMBUILDDIR="$1"
@@ -50,7 +56,7 @@
     exit 1
 fi
 
-gethgversion
+gethgversion $PYTHONEXE
 
 if [ -z "$type" ] ; then
     release=1
@@ -70,7 +76,7 @@
 fi
 
 mkdir -p $RPMBUILDDIR/{SOURCES,BUILD,SRPMS,RPMS}
-$HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
+$PYTHONEXE $HG archive -t tgz $RPMBUILDDIR/SOURCES/mercurial-$version-$release.tar.gz
 if [ "$PYTHONVER" ]; then
 (
     mkdir -p build
@@ -106,7 +112,7 @@
 
 if echo $version | grep '+' > /dev/null 2>&1; then
     latesttag="`echo $version | sed -e 's/+.*//'`"
-    $HG log -r .:"$latesttag" -fM \
+    $PYTHONEXE $HG log -r .:"$latesttag" -fM \
         --template '{date|hgdate}\t{author}\t{desc|firstline}\n' | python -c '
 import sys, time
 
@@ -130,9 +136,9 @@
 
 else
 
-    $HG log \
+    $PYTHONEXE $HG log \
          --template '{date|hgdate}\t{author}\t{desc|firstline}\n' \
-         .hgtags | python -c '
+         .hgtags | $PYTHONEXE -c '
 import sys, time
 
 def datestr(date, format):
@@ -149,6 +155,9 @@
 sed -i \
     -e "s/^%define withpython.*$/%define withpython $RPMPYTHONVER/" \
     $rpmspec
+sed -i \
+    -e "s/^%global pythondocutils.*$/%global pythondocutils $DOCUTILSPACKAGE/" \
+    $rpmspec
 
 if [ "$BUILD" ]; then
     rpmbuild --define "_topdir $RPMBUILDDIR" -ba $rpmspec --clean
--- a/contrib/packaging/docker/centos5	Thu Dec 24 15:58:08 2020 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,27 +0,0 @@
-FROM centos:centos5
-
-RUN groupadd -g %GID% build && \
-    useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
-
-RUN \
-	sed -i 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo && \
-	sed -i 's/^#\(baseurl=\)http:\/\/mirror.centos.org\/centos/\1http:\/\/vault.centos.org/' /etc/yum.repos.d/*.repo && \
-	sed -i 's/\$releasever/5.11/' /etc/yum.repos.d/*.repo
-
-RUN yum install -y \
-	gcc \
-	gettext \
-	make \
-	python-devel \
-	python-docutils \
-	rpm-build \
-	tar
-
-# For creating repo meta data
-RUN yum install -y \
-	bzip2-devel \
-	createrepo \
-	ncurses-devel \
-	openssl-devel \
-	readline-devel \
-	zlib-devel
--- a/contrib/packaging/docker/centos6	Thu Dec 24 15:58:08 2020 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,24 +0,0 @@
-FROM centos:centos6
-
-RUN groupadd -g %GID% build && \
-    useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
-
-RUN yum install -y \
-	gcc \
-	gettext \
-	make \
-	python-devel \
-	python-docutils \
-	rpm-build \
-	tar
-
-# For creating repo meta data
-RUN yum install -y createrepo
-
-# For python
-RUN yum install -y \
-	bzip2-devel \
-	ncurses-devel \
-	openssl-devel \
-	readline-devel \
-	zlib-devel
--- a/contrib/packaging/docker/centos7	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/docker/centos7	Tue Jan 19 21:48:43 2021 +0530
@@ -3,14 +3,18 @@
 RUN groupadd -g %GID% build && \
     useradd -u %UID% -g %GID% -s /bin/bash -d /build -m build
 
+RUN yum install -y epel-release
 RUN yum install -y \
 	gcc \
 	gettext \
 	make \
-	python-devel \
-	python-docutils \
+	python3-devel \
+	python36-docutils \
 	rpm-build \
 	tar
 
 # For creating repo meta data
 RUN yum install -y createrepo
+
+# For rust extensions
+RUN yum install -y cargo
--- a/contrib/packaging/docker/centos8	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/docker/centos8	Tue Jan 19 21:48:43 2021 +0530
@@ -13,3 +13,6 @@
 
 # For creating repo meta data
 RUN yum install -y createrepo
+
+# For rust extensions
+RUN yum install -y cargo
--- a/contrib/packaging/docker/ubuntu.template	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/docker/ubuntu.template	Tue Jan 19 21:48:43 2021 +0530
@@ -3,6 +3,10 @@
 RUN groupadd -g 1000 build && \
     useradd -u 1000 -g 1000 -s /bin/bash -d /build -m build
 
+ARG DEBIAN_FRONTEND=noninteractive
+
+ENV TZ=%TZ%
+
 RUN apt-get update && apt-get install -y \
   build-essential \
   debhelper \
--- a/contrib/packaging/dockerdeb	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/dockerdeb	Tue Jan 19 21:48:43 2021 +0530
@@ -12,10 +12,12 @@
 
 OUTPUTDIR=${OUTPUTDIR:=$ROOTDIR/packages/$PLATFORM}
 CONTAINER=hg-docker-$PLATFORM
+TZ=`ls -la /etc/localtime | cut -d/ -f7-9`
 
 DOCKER=$($BUILDDIR/hg-docker docker-path)
 
 $BUILDDIR/hg-docker build \
+    --build-arg TZ=$TZ \
     --build-arg CODENAME=$CODENAME \
     $BUILDDIR/docker/$DISTID.template \
     $CONTAINER
@@ -25,7 +27,11 @@
 # container and hope it's writable. Whee.
 dn=$(basename $ROOTDIR)
 
-DBUILDUSER=build
+if [[ -z "${HG_DOCKER_OWN_USER:-}" ]]; then
+    DBUILDUSER=build
+else
+    DBUILDUSER="$(id -u):$(id -g)"
+fi
 
 if [ $(uname) = "Darwin" ] ; then
     $DOCKER run -u $DBUILDUSER --rm -v $PWD/..:/mnt $CONTAINER \
--- a/contrib/packaging/hgpackaging/cli.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/hgpackaging/cli.py	Tue Jan 19 21:48:43 2021 +0530
@@ -44,7 +44,11 @@
         )
     else:
         inno.build_with_py2exe(
-            SOURCE_DIR, build_dir, pathlib.Path(python), iscc, version=version,
+            SOURCE_DIR,
+            build_dir,
+            pathlib.Path(python),
+            iscc,
+            version=version,
         )
 
 
--- a/contrib/packaging/hgpackaging/inno.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/hgpackaging/inno.py	Tue Jan 19 21:48:43 2021 +0530
@@ -198,7 +198,11 @@
     except jinja2.TemplateSyntaxError as e:
         raise Exception(
             'template syntax error at %s:%d: %s'
-            % (e.name, e.lineno, e.message,)
+            % (
+                e.name,
+                e.lineno,
+                e.message,
+            )
         )
 
     content = template.render(package_files=package_files)
--- a/contrib/packaging/hgpackaging/pyoxidizer.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/hgpackaging/pyoxidizer.py	Tue Jan 19 21:48:43 2021 +0530
@@ -127,6 +127,7 @@
 
         # Write out a default editor.rc file to configure notepad as the
         # default editor.
+        os.makedirs(out_dir / "defaultrc", exist_ok=True)
         with (out_dir / "defaultrc" / "editor.rc").open(
             "w", encoding="utf-8"
         ) as fh:
--- a/contrib/packaging/hgpackaging/wix.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/hgpackaging/wix.py	Tue Jan 19 21:48:43 2021 +0530
@@ -517,7 +517,10 @@
         args.append(str(build_dir / ('%s.wixobj' % source[:-4])))
 
     args.extend(
-        [str(build_dir / 'stage.wixobj'), str(build_dir / 'mercurial.wixobj'),]
+        [
+            str(build_dir / 'stage.wixobj'),
+            str(build_dir / 'mercurial.wixobj'),
+        ]
     )
 
     subprocess.run(args, cwd=str(source_dir), check=True)
--- a/contrib/packaging/mercurial.spec	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/mercurial.spec	Tue Jan 19 21:48:43 2021 +0530
@@ -3,6 +3,7 @@
 %define withpython %{nil}
 
 %global pythonexe python3
+%global pythondocutils python3-docutils
 
 %if "%{?withpython}"
 
@@ -39,7 +40,7 @@
 %if "%{?withpython}"
 BuildRequires: readline-devel, openssl-devel, ncurses-devel, zlib-devel, bzip2-devel
 %else
-BuildRequires: %{pythonexe} >= %{pythonver}, %{pythonexe}-devel, %{pythonexe}-docutils >= 0.5
+BuildRequires: %{pythonexe} >= %{pythonver}, %{pythonexe}-devel, %{pythondocutils}
 Requires: %{pythonexe} >= %{pythonver}
 %endif
 # The hgk extension uses the wish tcl interpreter, but we don't enforce it
@@ -109,14 +110,14 @@
 LD_LIBRARY_PATH=$PYPATH $PYPATH/python setup.py install --root="$RPM_BUILD_ROOT"
 cd -
 
-PATH=$PYPATH:$PATH LD_LIBRARY_PATH=$PYPATH make install PYTHON=%{pythonexe} DESTDIR=$RPM_BUILD_ROOT PREFIX=%{hgpyprefix} MANDIR=%{_mandir}
+PATH=$PYPATH:$PATH LD_LIBRARY_PATH=$PYPATH make install PYTHON=%{pythonexe} DESTDIR=$RPM_BUILD_ROOT PREFIX=%{hgpyprefix} MANDIR=%{_mandir} PURE="--rust"
 mkdir -p $RPM_BUILD_ROOT%{_bindir}
 ( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/hg . )
 ( cd $RPM_BUILD_ROOT%{_bindir}/ && ln -s ../..%{hgpyprefix}/bin/python2.? %{pythonhg} )
 
 %else
 
-make install PYTHON=%{pythonexe} DESTDIR=$RPM_BUILD_ROOT PREFIX=%{_prefix} MANDIR=%{_mandir}
+make install PYTHON=%{pythonexe} DESTDIR=$RPM_BUILD_ROOT PREFIX=%{_prefix} MANDIR=%{_mandir} PURE="--rust"
 
 %endif
 
--- a/contrib/packaging/packagelib.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/packagelib.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -8,16 +8,21 @@
 #
 # node: the node|short hg was built from, or empty if built from a tag
 gethgversion() {
+    if [ -z "${1+x}" ]; then
+        python="python"
+    else
+        python="$1"
+    fi
     export HGRCPATH=
     export HGPLAIN=
 
-    make cleanbutpackages
-    make local PURE=--pure
+    make cleanbutpackages PYTHON=$python
+    make local PURE=--pure PYTHON=$python
     HG="$PWD/hg"
 
-    "$HG" version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
+    $python "$HG" version > /dev/null || { echo 'abort: hg version failed!'; exit 1 ; }
 
-    hgversion=`LANGUAGE=C "$HG" version | sed -ne 's/.*(version \(.*\))$/\1/p'`
+    hgversion=`LANGUAGE=C $python "$HG" version | sed -ne 's/.*(version \(.*\))$/\1/p'`
 
     if echo $hgversion | grep + > /dev/null 2>&1 ; then
         tmp=`echo $hgversion | cut -d+ -f 2`
--- a/contrib/packaging/requirements-windows-py2.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/requirements-windows-py2.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -45,3 +45,15 @@
     --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
     --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
     # via dulwich
+windows-curses==2.1.0 \
+    --hash=sha256:261fde5680d1ce4ce116908996b9a3cfb0ffb03ea68d42240f62b56a9fa6af2c \
+    --hash=sha256:66034dc9a705d87308cc9ea90836f4ee60008a1d5e2c1d34ace627f60268158b \
+    --hash=sha256:669caad3ae16faf2d201d7ab3b8af418a2fd074d8a39d60ca26f3acb34b6afe5 \
+    --hash=sha256:73bd3eebccfda55330783f165151de115bfa238d1332f0b2e224b550d6187840 \
+    --hash=sha256:89a6d973f88cfe49b41ea80164dcbec209d296e0cec34a02002578b0bf464a64 \
+    --hash=sha256:8ba7c000d7ffa5452bbd0966b96e69261e4f117ebe510aeb8771a9650197b7f0 \
+    --hash=sha256:97084c6b37b1534f6a28a514d521dfae402f77dcbad42b14ee32e8d5bdc13648 \
+    --hash=sha256:9e474a181f96d60429a4766145628264e60b72e7715876f9135aeb2e842f9433 \
+    --hash=sha256:cfe64c30807c146ef8d094412f90f2a2c81ad6aefff3ebfe8e37aabe2f801303 \
+    --hash=sha256:ff8c67f74b88944d99fa9d22971c05c335bc74f149120f0a69340c2c3a595497 \
+    # via -r contrib/packaging/requirements-windows.txt.in
--- a/contrib/packaging/requirements-windows-py3.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/requirements-windows-py3.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -4,10 +4,50 @@
 #
 #    pip-compile --generate-hashes --output-file=contrib/packaging/requirements-windows-py3.txt contrib/packaging/requirements-windows.txt.in
 #
+cached-property==1.5.2 \
+    --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \
+    --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 \
+    # via pygit2
 certifi==2020.6.20 \
     --hash=sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3 \
     --hash=sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41 \
     # via dulwich
+cffi==1.14.4 \
+    --hash=sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e \
+    --hash=sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d \
+    --hash=sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a \
+    --hash=sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec \
+    --hash=sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362 \
+    --hash=sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668 \
+    --hash=sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c \
+    --hash=sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b \
+    --hash=sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06 \
+    --hash=sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698 \
+    --hash=sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2 \
+    --hash=sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c \
+    --hash=sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7 \
+    --hash=sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009 \
+    --hash=sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03 \
+    --hash=sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b \
+    --hash=sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909 \
+    --hash=sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53 \
+    --hash=sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35 \
+    --hash=sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26 \
+    --hash=sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b \
+    --hash=sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb \
+    --hash=sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293 \
+    --hash=sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd \
+    --hash=sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d \
+    --hash=sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3 \
+    --hash=sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d \
+    --hash=sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca \
+    --hash=sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d \
+    --hash=sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775 \
+    --hash=sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375 \
+    --hash=sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b \
+    --hash=sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b \
+    --hash=sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f \
+    # via pygit2
 docutils==0.16 \
     --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
     --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \
@@ -35,6 +75,29 @@
     --hash=sha256:4e34ea2fdec90c1c43d6610b5a5fafa1b9097db1802948e90caf5763974b8f8d \
     --hash=sha256:9aeadd006a852b78f4b4ef7c7556c2774d2432bbef8ee538a3e9089ac8b11466 \
     # via -r contrib/packaging/requirements-windows.txt.in
+pycparser==2.20 \
+    --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
+    --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 \
+    # via cffi
+pygit2==1.4.0 ; python_version >= "3" \
+    --hash=sha256:0d298098e286eeda000e49ca7e1b41f87300e10dd8b9d06b32b008bd61f50b83 \
+    --hash=sha256:0ee135eb2cd8b07ce1374f3596cc5c3213472d6389bad6a4c5d87d8e267e93e9 \
+    --hash=sha256:32eb863d6651d4890ced318505ea8dc229bd9637deaf29c898de1ab574d727a0 \
+    --hash=sha256:37d6d7d6d7804c42a0fe23425c72e38093488525092fc5e51a05684e63503ce7 \
+    --hash=sha256:41204b6f3406d9f53147710f3cc485d77181ba67f57c34d36b7c86de1c14a18c \
+    --hash=sha256:818c91b582109d90580c5da74af783738838353f15eb12eeb734d80a974b05a3 \
+    --hash=sha256:8306a302487dac67df7af6a064bb37e8a8eb4138958f9560ff49ff162e185dab \
+    --hash=sha256:9c2f2d9ef59513007b66f6534b000792b614de3faf60313a0a68f6b8571aea85 \
+    --hash=sha256:9c8d5881eb709e2e2e13000b507a131bd5fb91a879581030088d0ddffbcd19af \
+    --hash=sha256:b422e417739def0a136a6355723dfe8a5ffc83db5098076f28a14f1d139779c1 \
+    --hash=sha256:cbeb38ab1df9b5d8896548a11e63aae8a064763ab5f1eabe4475e6b8a78ee1c8 \
+    --hash=sha256:cf00481ddf053e549a6edd0216bdc267b292d261eae02a67bb3737de920cbf88 \
+    --hash=sha256:d0d889144e9487d926fecea947c3f39ce5f477e521d7d467d2e66907e4cd657d \
+    --hash=sha256:ddb7a1f6d38063e8724abfa1cfdfb0f9b25014b8bca0546274b7a84b873a3888 \
+    --hash=sha256:e9037a7d810750fe23c9f5641ef14a0af2525ff03e14752cd4f73e1870ecfcb0 \
+    --hash=sha256:ec5c0365a9bdfcac1609d20868507b28685ec5ea7cc3a2c903c9b62ef2e0bbc0 \
+    --hash=sha256:fdd8ba30cda277290e000322f505132f590cf89bd7d31829b45a3cb57447ec32 \
+    # via -r contrib/packaging/requirements-windows.txt.in
 pygments==2.7.1 \
     --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \
     --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \
@@ -47,6 +110,16 @@
     --hash=sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2 \
     --hash=sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e \
     # via dulwich
+windows-curses==2.2.0 \
+    --hash=sha256:1452d771ec6f9b3fef037da2b169196a9a12be4e86a6c27dd579adac70c42028 \
+    --hash=sha256:267544e4f60c09af6505e50a69d7f01d7f8a281cf4bd4fc7efc3b32b9a4ef64e \
+    --hash=sha256:389228a3df556102e72450f599283094168aa82eee189f501ad9f131a0fc92e1 \
+    --hash=sha256:84336fe470fa07288daec5c684dec74c0766fec6b3511ccedb4c494804acfbb7 \
+    --hash=sha256:9aa6ff60be76f5de696dc6dbf7897e3b1e6abcf4c0f741e9a0ee22cd6ef382f8 \
+    --hash=sha256:c4a8ce00e82635f06648cc40d99f470be4e3ffeb84f9f7ae9d6a4f68ec6361e7 \
+    --hash=sha256:c5cd032bc7d0f03224ab55c925059d98e81795098d59bbd10f7d05c7ea9677ce \
+    --hash=sha256:fc0be372fe6da3c39d7093154ce029115a927bf287f34b4c615e2b3f8c23dfaa \
+    # via -r contrib/packaging/requirements-windows.txt.in
 zipp==3.4.0 \
     --hash=sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108 \
     --hash=sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb \
--- a/contrib/packaging/requirements-windows.txt.in	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/packaging/requirements-windows.txt.in	Tue Jan 19 21:48:43 2021 +0530
@@ -3,7 +3,9 @@
 dulwich < 0.20 ; python_version <= '2.7'
 dulwich ; python_version >= '3'
 keyring
+pygit2 ; python_version >= '3'
 pygments
 # Need to list explicitly so dependency gets pulled in when
 # not running on Windows.
 pywin32-ctypes
+windows-curses
--- a/contrib/perf-utils/perf-revlog-write-plot.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/perf-utils/perf-revlog-write-plot.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 #  Copyright 2018 Paul Morelle <Paul.Morelle@octobus.net>
 #
--- a/contrib/perf.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/perf.py	Tue Jan 19 21:48:43 2021 +0530
@@ -291,7 +291,9 @@
         experimental=True,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -310,19 +312,29 @@
     # compatibility fix for a11fd395e83f
     # hg version: 5.2
     configitem(
-        b'perf', b'presleep', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'presleep',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'stub', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'stub',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'parentscount', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'parentscount',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'all-timing', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'all-timing',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'pre-run', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'pre-run',
+        default=mercurial.configitems.dynamicdefault,
     )
     configitem(
         b'perf',
@@ -330,7 +342,9 @@
         default=mercurial.configitems.dynamicdefault,
     )
     configitem(
-        b'perf', b'run-limits', default=mercurial.configitems.dynamicdefault,
+        b'perf',
+        b'run-limits',
+        default=mercurial.configitems.dynamicdefault,
     )
 
 
@@ -385,8 +399,7 @@
         from mercurial import node
 
         class defaultformatter(object):
-            """Minimized composition of baseformatter and plainformatter
-            """
+            """Minimized composition of baseformatter and plainformatter"""
 
             def __init__(self, ui, topic, opts):
                 self._ui = ui
@@ -658,8 +671,7 @@
 
 
 def getsvfs(repo):
-    """Return appropriate object to access files under .hg/store
-    """
+    """Return appropriate object to access files under .hg/store"""
     # for "historical portability":
     # repo.svfs has been available since 2.3 (or 7034365089bf)
     svfs = getattr(repo, 'svfs', None)
@@ -670,8 +682,7 @@
 
 
 def getvfs(repo):
-    """Return appropriate object to access files under .hg
-    """
+    """Return appropriate object to access files under .hg"""
     # for "historical portability":
     # repo.vfs has been available since 2.3 (or 7034365089bf)
     vfs = getattr(repo, 'vfs', None)
@@ -682,8 +693,7 @@
 
 
 def repocleartagscachefunc(repo):
-    """Return the function to clear tags cache according to repo internal API
-    """
+    """Return the function to clear tags cache according to repo internal API"""
     if util.safehasattr(repo, b'_tagscache'):  # since 2.0 (or 9dca7653b525)
         # in this case, setattr(repo, '_tagscache', None) or so isn't
         # correct way to clear tags cache, because existing code paths
@@ -847,7 +857,9 @@
 @command(
     b'perftags',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perftags(ui, repo, **opts):
     opts = _byteskwargs(opts)
@@ -900,8 +912,7 @@
 
 @command(b'perfdiscovery', formatteropts, b'PATH')
 def perfdiscovery(ui, repo, path, **opts):
-    """benchmark discovery between local repo and the peer at given path
-    """
+    """benchmark discovery between local repo and the peer at given path"""
     repos = [repo, None]
     timer, fm = gettimer(ui, opts)
     path = ui.expandpath(path)
@@ -919,7 +930,9 @@
 @command(
     b'perfbookmarks',
     formatteropts
-    + [(b'', b'clear-revlogs', False, b'refresh changelog and manifest'),],
+    + [
+        (b'', b'clear-revlogs', False, b'refresh changelog and manifest'),
+    ],
 )
 def perfbookmarks(ui, repo, **opts):
     """benchmark parsing bookmarks from disk to memory"""
@@ -1184,8 +1197,7 @@
 
 @command(b'perfdirstatedirs', formatteropts)
 def perfdirstatedirs(ui, repo, **opts):
-    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache
-    """
+    """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     repo.dirstate.hasdir(b"a")
@@ -1245,8 +1257,7 @@
 
 @command(b'perfdirstatewrite', formatteropts)
 def perfdirstatewrite(ui, repo, **opts):
-    """benchmap the time it take to write a dirstate on disk
-    """
+    """benchmap the time it take to write a dirstate on disk"""
     opts = _byteskwargs(opts)
     timer, fm = gettimer(ui, opts)
     ds = repo.dirstate
@@ -1359,7 +1370,9 @@
 
 @command(
     b'perfphases',
-    [(b'', b'full', False, b'include file reading time too'),],
+    [
+        (b'', b'full', False, b'include file reading time too'),
+    ],
     b"",
 )
 def perfphases(ui, repo, **opts):
@@ -1614,12 +1627,12 @@
     mercurial.revlog._prereadsize = 2 ** 24  # disable lazy parser in old hg
 
     unfi = repo.unfiltered()
-    clearcaches = opts['clear_caches']
+    clearcaches = opts[b'clear_caches']
     # find the filecache func directly
     # This avoid polluting the benchmark with the filecache logic
     makecl = unfi.__class__.changelog.func
     if not opts[b'rev']:
-        raise error.Abort('use --rev to specify revisions to look up')
+        raise error.Abort(b'use --rev to specify revisions to look up')
     revs = scmutil.revrange(repo, opts[b'rev'])
     cl = repo.changelog
     nodes = [cl.node(r) for r in revs]
@@ -1839,7 +1852,10 @@
 
 @command(
     b'perftemplating',
-    [(b'r', b'rev', [], b'revisions to run the template on'),] + formatteropts,
+    [
+        (b'r', b'rev', [], b'revisions to run the template on'),
+    ]
+    + formatteropts,
 )
 def perftemplating(ui, repo, testedtemplate=None, **opts):
     """test the rendering time of a given template"""
@@ -2193,10 +2209,18 @@
                 }
                 if dostats:
                     alldata['nbrevs'].append(
-                        (data['nbrevs'], base.hex(), parent.hex(),)
+                        (
+                            data['nbrevs'],
+                            base.hex(),
+                            parent.hex(),
+                        )
                     )
                     alldata['nbmissingfiles'].append(
-                        (data['nbmissingfiles'], base.hex(), parent.hex(),)
+                        (
+                            data['nbmissingfiles'],
+                            base.hex(),
+                            parent.hex(),
+                        )
                     )
                 if dotiming:
                     begin = util.timer()
@@ -2207,10 +2231,18 @@
                     data['nbrenamedfiles'] = len(renames)
                     if dostats:
                         alldata['time'].append(
-                            (data['time'], base.hex(), parent.hex(),)
+                            (
+                                data['time'],
+                                base.hex(),
+                                parent.hex(),
+                            )
                         )
                         alldata['nbrenames'].append(
-                            (data['nbrenamedfiles'], base.hex(), parent.hex(),)
+                            (
+                                data['nbrenamedfiles'],
+                                base.hex(),
+                                parent.hex(),
+                            )
                         )
                 fm.startitem()
                 fm.data(**data)
@@ -3321,7 +3353,9 @@
 
 @command(
     b'perfvolatilesets',
-    [(b'', b'clear-obsstore', False, b'drop obsstore between each call.'),]
+    [
+        (b'', b'clear-obsstore', False, b'drop obsstore between each call.'),
+    ]
     + formatteropts,
 )
 def perfvolatilesets(ui, repo, *names, **opts):
@@ -3450,7 +3484,7 @@
             printname = name
             if name is None:
                 printname = b'unfiltered'
-            timer(getbranchmap(name), title=str(printname))
+            timer(getbranchmap(name), title=printname)
     finally:
         branchcacheread.restore()
         branchcachewrite.restore()
@@ -3807,8 +3841,7 @@
     ],
 )
 def perfwrite(ui, repo, **opts):
-    """microbenchmark ui.write (and others)
-    """
+    """microbenchmark ui.write (and others)"""
     opts = _byteskwargs(opts)
 
     write = getattr(ui, _sysstr(opts[b'write_method']))
--- a/contrib/phab-clean.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/phab-clean.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # A small script to automatically reject idle Diffs
 #
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/phab-refresh-stack.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,41 @@
+#!/bin/bash
+set -eu
+
+revision_in_stack=`hg log \
+    --rev '.#stack and ::. and topic()' \
+    -T '\nONE-REV\n' \
+    | grep 'ONE-REV' | wc -l`
+revision_on_phab=`hg log \
+    --rev '.#stack and ::. and topic() and desc("re:\nDifferential Revision: [^\n]+D\d+$")'\
+    -T '\nONE-REV\n' \
+    | grep 'ONE-REV' | wc -l`
+
+if [[ $revision_in_stack -eq 0 ]]; then
+    echo "stack is empty" >&2
+    exit 0
+fi
+
+if [[ $revision_on_phab -eq 0 ]]; then
+    echo "no tracked diff in this stack" >&2
+    exit 0
+fi
+
+if [[ $revision_on_phab -lt $revision_in_stack ]]; then
+    echo "not all stack changesets (${revision_in_stack}) have matching Phabricator Diff (${revision_on_phab})" >&2
+    exit 2
+fi
+
+if [[ "$PHABRICATOR_TOKEN" == "" ]]; then
+    echo 'missing $PHABRICATOR_TOKEN variable' >&2
+    exit 2
+fi
+
+hg \
+--config extensions.phabricator= \
+--config phabricator.url=https://phab.mercurial-scm.org/ \
+--config phabricator.callsign=HG \
+--config auth.phabricator.schemes=https \
+--config auth.phabricator.prefix=phab.mercurial-scm.org \
+--config auth.phabricator.phabtoken=$PHABRICATOR_TOKEN \
+phabsend --rev '.#stack and ::. and topic()' \
+"$@"
--- a/contrib/python-hook-examples.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/python-hook-examples.py	Tue Jan 19 21:48:43 2021 +0530
@@ -9,12 +9,12 @@
 
 
 def diffstat(ui, repo, **kwargs):
-    '''Example usage:
+    """Example usage:
 
     [hooks]
     commit.diffstat = python:/path/to/this/file.py:diffstat
     changegroup.diffstat = python:/path/to/this/file.py:diffstat
-    '''
+    """
     if kwargs.get('parent2'):
         return
     node = kwargs['node']
--- a/contrib/python-zstandard/make_cffi.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/python-zstandard/make_cffi.py	Tue Jan 19 21:48:43 2021 +0530
@@ -53,7 +53,10 @@
 # Headers whose preprocessed output will be fed into cdef().
 HEADERS = [
     os.path.join(HERE, "zstd", *p)
-    for p in (("zstd.h",), ("dictBuilder", "zdict.h"),)
+    for p in (
+        ("zstd.h",),
+        ("dictBuilder", "zdict.h"),
+    )
 ]
 
 INCLUDE_DIRS = [
@@ -80,12 +83,20 @@
 if compiler.compiler_type == "unix":
     args = list(compiler.executables["compiler"])
     args.extend(
-        ["-E", "-DZSTD_STATIC_LINKING_ONLY", "-DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "-E",
+            "-DZSTD_STATIC_LINKING_ONLY",
+            "-DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 elif compiler.compiler_type == "msvc":
     args = [compiler.cc]
     args.extend(
-        ["/EP", "/DZSTD_STATIC_LINKING_ONLY", "/DZDICT_STATIC_LINKING_ONLY",]
+        [
+            "/EP",
+            "/DZSTD_STATIC_LINKING_ONLY",
+            "/DZDICT_STATIC_LINKING_ONLY",
+        ]
     )
 else:
     raise Exception("unsupported compiler type: %s" % compiler.compiler_type)
--- a/contrib/python3-ratchet.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/python3-ratchet.py	Tue Jan 19 21:48:43 2021 +0530
@@ -26,7 +26,9 @@
 
 _hgenv = dict(os.environ)
 _hgenv.update(
-    {'HGPLAIN': '1',}
+    {
+        'HGPLAIN': '1',
+    }
 )
 
 _HG_FIRST_CHANGE = '9117c6561b0bd7792fa13b50d28239d51b78e51f'
--- a/contrib/revsetbenchmarks.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/revsetbenchmarks.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Measure the performance of a list of revsets against multiple revisions
 # defined by parameter. Checkout one by one and run perfrevset with every
--- a/contrib/simplemerge	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/simplemerge	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 from __future__ import absolute_import
 
 import getopt
--- a/contrib/synthrepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/synthrepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -122,7 +122,7 @@
     optionalrepo=True,
 )
 def analyze(ui, repo, *revs, **opts):
-    '''create a simple model of a repository to use for later synthesis
+    """create a simple model of a repository to use for later synthesis
 
     This command examines every changeset in the given range (or all
     of history if none are specified) and creates a simple statistical
@@ -133,7 +133,7 @@
     :hg:`synthesize` to create or augment a repository with synthetic
     commits that have a structure that is statistically similar to the
     analyzed repository.
-    '''
+    """
     root = repo.root
     if not root.endswith(os.path.sep):
         root += os.path.sep
@@ -281,7 +281,7 @@
     _('hg synthesize [OPTION].. DESCFILE'),
 )
 def synthesize(ui, repo, descpath, **opts):
-    '''synthesize commits based on a model of an existing repository
+    """synthesize commits based on a model of an existing repository
 
     The model must have been generated by :hg:`analyze`. Commits will
     be generated randomly according to the probabilities described in
@@ -293,7 +293,7 @@
     names, words will be chosen randomly from a dictionary that is
     presumed to contain one word per line. Use --dict to specify the
     path to an alternate dictionary to use.
-    '''
+    """
     try:
         fp = hg.openpath(ui, descpath)
     except Exception as err:
@@ -542,12 +542,12 @@
     replacements = {'': ''}
 
     def rename(dirpath):
-        '''Recursively rename the directory and all path prefixes.
+        """Recursively rename the directory and all path prefixes.
 
         The mapping from path to renamed path is stored for all path prefixes
         as in dynamic programming, ensuring linear runtime and consistent
         renaming regardless of iteration order through the model.
-        '''
+        """
         if dirpath in replacements:
             return replacements[dirpath]
         head, _ = os.path.split(dirpath)
--- a/contrib/testparseutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/testparseutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -81,8 +81,7 @@
 
 
 class embeddedmatcher(object):  # pytype: disable=ignored-metaclass
-    """Base class to detect embedded code fragments in *.t test script
-    """
+    """Base class to detect embedded code fragments in *.t test script"""
 
     __metaclass__ = abc.ABCMeta
 
@@ -103,8 +102,7 @@
 
     @abc.abstractmethod
     def isinside(self, ctx, line):
-        """Examine whether line is inside embedded code, if not yet endsat
-        """
+        """Examine whether line is inside embedded code, if not yet endsat"""
 
     @abc.abstractmethod
     def ignores(self, ctx):
--- a/contrib/undumprevlog	Thu Dec 24 15:58:08 2020 +0900
+++ b/contrib/undumprevlog	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # Undump a dump from dumprevlog
 # $ hg init
 # $ undumprevlog < repo.dump
@@ -6,9 +6,9 @@
 from __future__ import absolute_import, print_function
 
 import sys
+from mercurial.node import bin
 from mercurial import (
     encoding,
-    node,
     revlog,
     transaction,
     vfs as vfsmod,
@@ -31,13 +31,13 @@
         r = revlog.revlog(opener, f)
         procutil.stdout.write(b'%s\n' % f)
     elif l.startswith("node:"):
-        n = node.bin(l[6:-1])
+        n = bin(l[6:-1])
     elif l.startswith("linkrev:"):
         lr = int(l[9:-1])
     elif l.startswith("parents:"):
         p = l[9:-1].split()
-        p1 = node.bin(p[0])
-        p2 = node.bin(p[1])
+        p1 = bin(p[0])
+        p2 = bin(p[1])
     elif l.startswith("length:"):
         length = int(l[8:-1])
         sys.stdin.readline()  # start marker
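
The import change above pulls `bin` straight from `mercurial.node` instead of going through the `node` module alias. A minimal sketch of the conversion it performs, assuming a Mercurial installation is importable (the sample hash is the one quoted elsewhere in this patch):

    from mercurial.node import bin, hex

    hexnode = b'9117c6561b0bd7792fa13b50d28239d51b78e51f'
    binnode = bin(hexnode)          # 20 raw bytes
    assert len(binnode) == 20
    assert hex(binnode) == hexnode  # the conversion round-trips losslessly
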
--- a/doc/check-seclevel.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/doc/check-seclevel.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # checkseclevel - checking section title levels in each online help document
 
--- a/doc/docchecker	Thu Dec 24 15:58:08 2020 +0900
+++ b/doc/docchecker	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # docchecker - look for problematic markup
 #
--- a/doc/gendoc.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/doc/gendoc.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """usage: %s DOC ...
 
 where DOC is the name of a document
--- a/doc/hgmanpage.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/doc/hgmanpage.py	Tue Jan 19 21:48:43 2021 +0530
@@ -822,7 +822,10 @@
         # man 7 man argues to use ".IP" instead of ".TP"
         self.body.append(
             '.IP %s %d\n'
-            % (next(self._list_char[-1]), self._list_char[-1].get_width(),)
+            % (
+                next(self._list_char[-1]),
+                self._list_char[-1].get_width(),
+            )
         )
 
     def depart_list_item(self, node):
--- a/doc/runrst	Thu Dec 24 15:58:08 2020 +0900
+++ b/doc/runrst	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # runrst - register custom roles and run correct writer
 #
--- a/hg	Thu Dec 24 15:58:08 2020 +0900
+++ b/hg	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # mercurial - scalable distributed SCM
 #
@@ -21,6 +21,22 @@
         libdir = os.path.abspath(libdir)
     sys.path.insert(0, libdir)
 
+# Make `pip install --user ...` packages available to the official Windows
+# build.  Most py2 packaging installs directly into the system python
+# environment, so no changes are necessary for other platforms.  The Windows
+# py2 package uses py2exe, which lacks a `site` module.  Hardcode it according
+# to the documentation.
+if getattr(sys, 'frozen', None) == 'console_exe':
+    vi = sys.version_info
+    sys.path.append(
+        os.path.join(
+            os.environ['APPDATA'],
+            'Python',
+            'Python%d%d' % (vi[0], vi[1]),
+            'site-packages',
+        )
+    )
+
 from hgdemandimport import tracing
 
 with tracing.log('hg script'):
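
The frozen-executable branch added above hardcodes the per-user site-packages directory because py2exe provides no `site` module. A minimal sketch of the same path computation for the running interpreter; the helper name is hypothetical:

    import os
    import sys

    def windows_user_site_packages():
        # %APPDATA%\Python\PythonXY\site-packages, matching the path that
        # the frozen build appends to sys.path above.
        vi = sys.version_info
        return os.path.join(
            os.environ.get('APPDATA', ''),
            'Python',
            'Python%d%d' % (vi[0], vi[1]),
            'site-packages',
        )
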
--- a/hgext/absorb.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/absorb.py	Tue Jan 19 21:48:43 2021 +0530
@@ -36,6 +36,11 @@
 import collections
 
 from mercurial.i18n import _
+from mercurial.node import (
+    hex,
+    nullid,
+    short,
+)
 from mercurial import (
     cmdutil,
     commands,
@@ -44,7 +49,6 @@
     error,
     linelog,
     mdiff,
-    node,
     obsolete,
     patch,
     phases,
@@ -102,7 +106,7 @@
         return b''
 
     def node(self):
-        return node.nullid
+        return nullid
 
 
 def uniq(lst):
@@ -367,7 +371,7 @@
                 idx = (max(rev - 1, 0)) // 2
                 self.ui.write(
                     _(b'%s: chunk %d:%d -> %d lines\n')
-                    % (node.short(self.fctxs[idx].node()), a1, a2, len(blines))
+                    % (short(self.fctxs[idx].node()), a1, a2, len(blines))
                 )
             self.linelog.replacelines(rev, a1, a2, b1, b2)
         if self.opts.get(b'edit_lines', False):
@@ -486,7 +490,7 @@
             editortext += _(b'HG: %s/%s %s %s\n') % (
                 b'|' * i,
                 b'-' * (len(visiblefctxs) - i + 1),
-                node.short(f.node()),
+                short(f.node()),
                 f.description().split(b'\n', 1)[0],
             )
         editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs))
@@ -511,7 +515,7 @@
         # run editor
         editedtext = self.ui.edit(editortext, b'', action=b'absorb')
         if not editedtext:
-            raise error.Abort(_(b'empty editor text'))
+            raise error.InputError(_(b'empty editor text'))
         # parse edited result
         contents = [b''] * len(self.fctxs)
         leftpadpos = 4
@@ -520,7 +524,7 @@
             if l.startswith(b'HG:'):
                 continue
             if l[colonpos - 1 : colonpos + 2] != b' : ':
-                raise error.Abort(_(b'malformed line: %s') % l)
+                raise error.InputError(_(b'malformed line: %s') % l)
             linecontent = l[colonpos + 2 :]
             for i, ch in enumerate(
                 pycompat.bytestr(l[leftpadpos : colonpos - 1])
@@ -816,7 +820,7 @@
         if self.ui.debugflag:
             return b'%d:%s' % (ctx.rev(), ctx.hex())
         else:
-            return b'%d:%s' % (ctx.rev(), node.short(ctx.node()))
+            return b'%d:%s' % (ctx.rev(), short(ctx.node()))
 
     def _getnewfilecontents(self, ctx):
         """(ctx) -> {path: str}
@@ -849,7 +853,7 @@
                 changes.append((name, hsh))
                 if self.ui.verbose:
                     self.ui.write(
-                        _(b'moving bookmark %s to %s\n') % (name, node.hex(hsh))
+                        _(b'moving bookmark %s to %s\n') % (name, hex(hsh))
                     )
             else:
                 changes.append((name, None))
@@ -920,7 +924,7 @@
         the commit is a clone from ctx, with a (optionally) different p1, and
         different file contents replaced by memworkingcopy.
         """
-        parents = p1 and (p1, node.nullid)
+        parents = p1 and (p1, nullid)
         extra = ctx.extra()
         if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'):
             extra[b'absorb_source'] = ctx.hex()
@@ -1018,7 +1022,7 @@
         limit = ui.configint(b'absorb', b'max-stack-size')
         headctx = repo[b'.']
         if len(headctx.parents()) > 1:
-            raise error.Abort(_(b'cannot absorb into a merge'))
+            raise error.InputError(_(b'cannot absorb into a merge'))
         stack = getdraftstack(headctx, limit)
         if limit and len(stack) >= limit:
             ui.warn(
@@ -1029,7 +1033,7 @@
                 % limit
             )
     if not stack:
-        raise error.Abort(_(b'no mutable changeset to change'))
+        raise error.InputError(_(b'no mutable changeset to change'))
     if targetctx is None:  # default to working copy
         targetctx = repo[None]
     if pats is None:
@@ -1076,7 +1080,7 @@
                 b"apply changes (y/N)? $$ &Yes $$ &No", default=1
             )
         ):
-            raise error.Abort(_(b'absorb cancelled\n'))
+            raise error.CanceledError(_(b'absorb cancelled\n'))
 
         state.apply()
         if state.commit():
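
Several `error.Abort` calls above are narrowed to `error.InputError` (bad arguments or repository state supplied by the user) and `error.CanceledError` (the user declined a prompt), which participate in Mercurial's detailed exit codes. A minimal sketch of the same pattern, assuming these classes behave as used in the hunk; the helper functions are hypothetical:

    from mercurial import error
    from mercurial.i18n import _

    def require_single_parent(repo):
        # Bad input is reported with InputError rather than a generic Abort.
        if len(repo[b'.'].parents()) > 1:
            raise error.InputError(_(b'cannot absorb into a merge'))

    def confirm_or_cancel(ui):
        # A declined prompt maps to CanceledError.
        if ui.promptchoice(b'apply changes (y/N)? $$ &Yes $$ &No', default=1):
            raise error.CanceledError(_(b'absorb cancelled\n'))
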
--- a/hgext/acl.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/acl.py	Tue Jan 19 21:48:43 2021 +0530
@@ -239,25 +239,44 @@
 
 # deprecated config: acl.config
 configitem(
-    b'acl', b'config', default=None,
+    b'acl',
+    b'config',
+    default=None,
 )
 configitem(
-    b'acl.groups', b'.*', default=None, generic=True,
+    b'acl.groups',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny.branches', b'.*', default=None, generic=True,
+    b'acl.deny.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow.branches', b'.*', default=None, generic=True,
+    b'acl.allow.branches',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.deny', b'.*', default=None, generic=True,
+    b'acl.deny',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl.allow', b'.*', default=None, generic=True,
+    b'acl.allow',
+    b'.*',
+    default=None,
+    generic=True,
 )
 configitem(
-    b'acl', b'sources', default=lambda: [b'serve'],
+    b'acl',
+    b'sources',
+    default=lambda: [b'serve'],
 )
 
 
--- a/hgext/automv.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/automv.py	Tue Jan 19 21:48:43 2021 +0530
@@ -42,7 +42,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'automv', b'similarity', default=95,
+    b'automv',
+    b'similarity',
+    default=95,
 )
 
 
--- a/hgext/beautifygraph.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/beautifygraph.py	Tue Jan 19 21:48:43 2021 +0530
@@ -70,7 +70,7 @@
     if node == b'o':
         return b'\xE2\x97\x8B'  # U+25CB ○
     if node == b'@':
-        return b'\xE2\x97\x8D'  # U+25CD ◍
+        return b'\xE2\x97\x89'  # U+25C9 ◉
     if node == b'%':
         return b'\xE2\x97\x8D'  # U+25CE ◎
     if node == b'*':
--- a/hgext/blackbox.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/blackbox.py	Tue Jan 19 21:48:43 2021 +0530
@@ -72,19 +72,29 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'blackbox', b'dirty', default=False,
+    b'blackbox',
+    b'dirty',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxsize', default=b'1 MB',
+    b'blackbox',
+    b'maxsize',
+    default=b'1 MB',
 )
 configitem(
-    b'blackbox', b'logsource', default=False,
+    b'blackbox',
+    b'logsource',
+    default=False,
 )
 configitem(
-    b'blackbox', b'maxfiles', default=7,
+    b'blackbox',
+    b'maxfiles',
+    default=7,
 )
 configitem(
-    b'blackbox', b'track', default=lambda: [b'*'],
+    b'blackbox',
+    b'track',
+    default=lambda: [b'*'],
 )
 configitem(
     b'blackbox',
@@ -92,7 +102,9 @@
     default=lambda: [b'chgserver', b'cmdserver', b'extension'],
 )
 configitem(
-    b'blackbox', b'date-format', default=b'%Y/%m/%d %H:%M:%S',
+    b'blackbox',
+    b'date-format',
+    default=b'%Y/%m/%d %H:%M:%S',
 )
 
 _lastlogger = loggingutil.proxylogger()
@@ -189,14 +201,15 @@
 
 @command(
     b'blackbox',
-    [(b'l', b'limit', 10, _(b'the number of events to show')),],
+    [
+        (b'l', b'limit', 10, _(b'the number of events to show')),
+    ],
     _(b'hg blackbox [OPTION]...'),
     helpcategory=command.CATEGORY_MAINTENANCE,
     helpbasic=True,
 )
 def blackbox(ui, repo, *revs, **opts):
-    '''view the recent repository events
-    '''
+    """view the recent repository events"""
 
     if not repo.vfs.exists(b'blackbox.log'):
         return
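
The `@command` decorator block above is likewise only reformatted. For context, a minimal sketch of registering an extension command with an options list and a help category, using a hypothetical command name:

    from mercurial import registrar
    from mercurial.i18n import _

    cmdtable = {}
    command = registrar.command(cmdtable)

    @command(
        b'showevents',
        [
            (b'l', b'limit', 10, _(b'the number of events to show')),
        ],
        _(b'hg showevents [OPTION]...'),
        helpcategory=command.CATEGORY_MAINTENANCE,
    )
    def showevents(ui, repo, *revs, **opts):
        """view recent events (hypothetical example command)"""
        ui.write(b'%d events requested\n' % opts.get('limit', 10))
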
--- a/hgext/bugzilla.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/bugzilla.py	Tue Jan 19 21:48:43 2021 +0530
@@ -325,22 +325,34 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'bugzilla', b'apikey', default=b'',
+    b'bugzilla',
+    b'apikey',
+    default=b'',
 )
 configitem(
-    b'bugzilla', b'bzdir', default=b'/var/www/html/bugzilla',
+    b'bugzilla',
+    b'bzdir',
+    default=b'/var/www/html/bugzilla',
 )
 configitem(
-    b'bugzilla', b'bzemail', default=None,
+    b'bugzilla',
+    b'bzemail',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'bzurl', default=b'http://localhost/bugzilla/',
+    b'bugzilla',
+    b'bzurl',
+    default=b'http://localhost/bugzilla/',
 )
 configitem(
-    b'bugzilla', b'bzuser', default=None,
+    b'bugzilla',
+    b'bzuser',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'db', default=b'bugs',
+    b'bugzilla',
+    b'db',
+    default=b'bugs',
 )
 configitem(
     b'bugzilla',
@@ -353,19 +365,29 @@
     ),
 )
 configitem(
-    b'bugzilla', b'fixresolution', default=b'FIXED',
+    b'bugzilla',
+    b'fixresolution',
+    default=b'FIXED',
 )
 configitem(
-    b'bugzilla', b'fixstatus', default=b'RESOLVED',
+    b'bugzilla',
+    b'fixstatus',
+    default=b'RESOLVED',
 )
 configitem(
-    b'bugzilla', b'host', default=b'localhost',
+    b'bugzilla',
+    b'host',
+    default=b'localhost',
 )
 configitem(
-    b'bugzilla', b'notify', default=configitem.dynamicdefault,
+    b'bugzilla',
+    b'notify',
+    default=configitem.dynamicdefault,
 )
 configitem(
-    b'bugzilla', b'password', default=None,
+    b'bugzilla',
+    b'password',
+    default=None,
 )
 configitem(
     b'bugzilla',
@@ -377,25 +399,39 @@
     ),
 )
 configitem(
-    b'bugzilla', b'strip', default=0,
+    b'bugzilla',
+    b'strip',
+    default=0,
 )
 configitem(
-    b'bugzilla', b'style', default=None,
+    b'bugzilla',
+    b'style',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'template', default=None,
+    b'bugzilla',
+    b'template',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'timeout', default=5,
+    b'bugzilla',
+    b'timeout',
+    default=5,
 )
 configitem(
-    b'bugzilla', b'user', default=b'bugs',
+    b'bugzilla',
+    b'user',
+    default=b'bugs',
 )
 configitem(
-    b'bugzilla', b'usermap', default=None,
+    b'bugzilla',
+    b'usermap',
+    default=None,
 )
 configitem(
-    b'bugzilla', b'version', default=None,
+    b'bugzilla',
+    b'version',
+    default=None,
 )
 
 
@@ -430,29 +466,29 @@
         '''remove bug IDs where node occurs in comment text from bugs.'''
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
 
 
 # Bugzilla via direct access to MySQL database.
 class bzmysql(bzaccess):
-    '''Support for direct MySQL access to Bugzilla.
+    """Support for direct MySQL access to Bugzilla.
 
     The earliest Bugzilla version this is tested with is version 2.16.
 
     If your Bugzilla is version 3.4 or above, you are strongly
     recommended to use the XMLRPC access method instead.
-    '''
+    """
 
     @staticmethod
     def sql_buglist(ids):
@@ -581,9 +617,9 @@
             return userid
 
     def get_bugzilla_user(self, committer):
-        '''See if committer is a registered bugzilla user. Return
+        """See if committer is a registered bugzilla user. Return
         bugzilla username and userid if so. If not, return default
-        bugzilla username and userid.'''
+        bugzilla username and userid."""
         user = self.map_committer(committer)
         try:
             userid = self.get_user_id(user)
@@ -604,10 +640,10 @@
         return (user, userid)
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update bug state with comment text.
+        """update bug state with comment text.
 
         Try adding comment as committer of changeset, otherwise as
-        default bugzilla user.'''
+        default bugzilla user."""
         if len(newstate) > 0:
             self.ui.warn(_(b"Bugzilla/MySQL cannot update bug state\n"))
 
@@ -871,7 +907,7 @@
             return b"@%s = %s" % (fieldname, pycompat.bytestr(value))
 
     def send_bug_modify_email(self, bugid, commands, comment, committer):
-        '''send modification message to Bugzilla bug via email.
+        """send modification message to Bugzilla bug via email.
 
         The message format is documented in the Bugzilla email_in.pl
         specification. commands is a list of command lines, comment is the
@@ -880,7 +916,7 @@
         To stop users from crafting commit comments with
         Bugzilla commands, specify the bug ID via the message body, rather
         than the subject line, and leave a blank line after it.
-        '''
+        """
         user = self.map_committer(committer)
         matches = self.bzproxy.User.get(
             {b'match': [user], b'token': self.bztoken}
@@ -1018,11 +1054,11 @@
                 del bugs[bugid]
 
     def updatebug(self, bugid, newstate, text, committer):
-        '''update the specified bug. Add comment text and set new states.
+        """update the specified bug. Add comment text and set new states.
 
         If possible add the comment as being from the committer of
         the changeset. Otherwise use the default Bugzilla user.
-        '''
+        """
         bugmod = {}
         if b'hours' in newstate:
             bugmod[b'work_time'] = newstate[b'hours']
@@ -1052,11 +1088,11 @@
             self.ui.debug(b'added comment to bug %s\n' % bugid)
 
     def notify(self, bugs, committer):
-        '''Force sending of Bugzilla notification emails.
+        """Force sending of Bugzilla notification emails.
 
         Only required if the access method does not trigger notification
         emails automatically.
-        '''
+        """
         pass
 
 
@@ -1094,12 +1130,12 @@
         self.split_re = re.compile(br'\D+')
 
     def find_bugs(self, ctx):
-        '''return bugs dictionary created from commit comment.
+        """return bugs dictionary created from commit comment.
 
         Extract bug info from changeset comments. Filter out any that are
         not known to Bugzilla, and any that already have a reference to
         the given changeset in their comments.
-        '''
+        """
         start = 0
         bugs = {}
         bugmatch = self.bug_re.search(ctx.description(), start)
@@ -1154,8 +1190,8 @@
         '''update bugzilla bug with reference to changeset.'''
 
         def webroot(root):
-            '''strip leading prefix of repo root and turn into
-            url-safe path.'''
+            """strip leading prefix of repo root and turn into
+            url-safe path."""
             count = int(self.ui.config(b'bugzilla', b'strip'))
             root = util.pconvert(root)
             while count > 0:
@@ -1197,9 +1233,9 @@
 
 
 def hook(ui, repo, hooktype, node=None, **kwargs):
-    '''add comment to bugzilla for each changeset that refers to a
+    """add comment to bugzilla for each changeset that refers to a
     bugzilla bug id. only add a comment once per bug, so same change
-    seen multiple times does not fill bug with duplicate data.'''
+    seen multiple times does not fill bug with duplicate data."""
     if node is None:
         raise error.Abort(
             _(b'hook type %s does not pass a changeset id') % hooktype
--- a/hgext/churn.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/churn.py	Tue Jan 19 21:48:43 2021 +0530
@@ -156,7 +156,7 @@
     inferrepo=True,
 )
 def churn(ui, repo, *pats, **opts):
-    '''histogram of changes to the repository
+    """histogram of changes to the repository
 
     This command will display a histogram representing the number
     of changed lines or revisions, grouped according to the given
@@ -193,7 +193,7 @@
     Such a file may be specified with the --aliases option, otherwise
     a .hgchurn file will be looked for in the working directory root.
     Aliases will be split from the rightmost "=".
-    '''
+    """
 
     def pad(s, l):
         return s + b" " * (l - encoding.colwidth(s))
--- a/hgext/clonebundles.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/clonebundles.py	Tue Jan 19 21:48:43 2021 +0530
@@ -205,6 +205,7 @@
 from __future__ import absolute_import
 
 from mercurial import (
+    bundlecaches,
     extensions,
     wireprotov1server,
 )
@@ -218,7 +219,7 @@
     # Only advertise if a manifest exists. This does add some I/O to requests.
     # But this should be cheaper than a wasted network round trip due to
     # missing file.
-    if repo.vfs.exists(b'clonebundles.manifest'):
+    if repo.vfs.exists(bundlecaches.CB_MANIFEST_FILE):
         caps.append(b'clonebundles')
 
     return caps
--- a/hgext/convert/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -536,7 +536,7 @@
     norepo=True,
 )
 def debugcvsps(ui, *args, **opts):
-    '''create changeset information from CVS
+    """create changeset information from CVS
 
     This command is intended as a debugging tool for the CVS to
     Mercurial converter, and can be used as a direct replacement for
@@ -545,7 +545,7 @@
     Hg debugcvsps reads the CVS rlog for current directory (or any
     named directory) in the CVS repository, and converts the log to a
     series of changesets based on matching commit log entries and
-    dates.'''
+    dates."""
     return cvsps.debugcvsps(ui, *args, **opts)
 
 
--- a/hgext/convert/bzr.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/bzr.py	Tue Jan 19 21:48:43 2021 +0530
@@ -21,7 +21,11 @@
 
 # these do not work with demandimport, blacklist
 demandimport.IGNORES.update(
-    [b'bzrlib.transactions', b'bzrlib.urlutils', b'ElementPath',]
+    [
+        b'bzrlib.transactions',
+        b'bzrlib.urlutils',
+        b'ElementPath',
+    ]
 )
 
 try:
--- a/hgext/convert/common.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/common.py	Tue Jan 19 21:48:43 2021 +0530
@@ -172,8 +172,8 @@
         self.encoding = b'utf-8'
 
     def checkhexformat(self, revstr, mapname=b'splicemap'):
-        """ fails if revstr is not a 40 byte hex. mercurial and git both uses
-            such format for their revision numbering
+        """fails if revstr is not a 40 byte hex. mercurial and git both uses
+        such format for their revision numbering
         """
         if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
             raise error.Abort(
@@ -283,8 +283,7 @@
         return False
 
     def hasnativeclose(self):
-        """Return true if this source has ability to close branch.
-        """
+        """Return true if this source has ability to close branch."""
         return False
 
     def lookuprev(self, rev):
@@ -303,8 +302,8 @@
 
     def checkrevformat(self, revstr, mapname=b'splicemap'):
         """revstr is a string that describes a revision in the given
-           source control system.  Return true if revstr has correct
-           format.
+        source control system.  Return true if revstr has correct
+        format.
         """
         return True
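
checkhexformat above accepts exactly forty hexadecimal digits, the identifier format shared by Mercurial and Git. A standalone sketch of the same check:

    import re

    _FULL_HEX = re.compile(br'[0-9a-fA-F]{40,40}$')

    def looks_like_full_hash(revstr):
        # Same pattern used by checkhexformat in the hunk above.
        return _FULL_HEX.match(revstr) is not None

    assert looks_like_full_hash(b'9117c6561b0bd7792fa13b50d28239d51b78e51f')
    assert not looks_like_full_hash(b'tip')
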
 
--- a/hgext/convert/convcmd.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/convcmd.py	Tue Jan 19 21:48:43 2021 +0530
@@ -96,7 +96,7 @@
 
 
 def mapbranch(branch, branchmap):
-    '''
+    """
     >>> bmap = {b'default': b'branch1'}
     >>> for i in [b'', None]:
     ...     mapbranch(i, bmap)
@@ -115,7 +115,7 @@
     'branch4'
     'branch4'
     'branch5'
-    '''
+    """
     # If branch is None or empty, this commit is coming from the source
     # repository's default branch and destined for the default branch in the
     # destination repository. For such commits, using a literal "default"
@@ -228,14 +228,14 @@
         self.branchmap = mapfile(ui, opts.get(b'branchmap'))
 
     def parsesplicemap(self, path):
-        """ check and validate the splicemap format and
-            return a child/parents dictionary.
-            Format checking has two parts.
-            1. generic format which is same across all source types
-            2. specific format checking which may be different for
-               different source type.  This logic is implemented in
-               checkrevformat function in source files like
-               hg.py, subversion.py etc.
+        """check and validate the splicemap format and
+        return a child/parents dictionary.
+        Format checking has two parts.
+        1. generic format which is the same across all source types
+        2. specific format checking which may be different for
+           different source types.  This logic is implemented in
+           the checkrevformat function in source files like
+           hg.py, subversion.py etc.
         """
 
         if not path:
@@ -275,8 +275,8 @@
         return m
 
     def walktree(self, heads):
-        '''Return a mapping that identifies the uncommitted parents of every
-        uncommitted changeset.'''
+        """Return a mapping that identifies the uncommitted parents of every
+        uncommitted changeset."""
         visit = list(heads)
         known = set()
         parents = {}
@@ -332,8 +332,8 @@
             parents[c] = pc
 
     def toposort(self, parents, sortmode):
-        '''Return an ordering such that every uncommitted changeset is
-        preceded by all its uncommitted ancestors.'''
+        """Return an ordering such that every uncommitted changeset is
+        preceded by all its uncommitted ancestors."""
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
--- a/hgext/convert/cvsps.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/cvsps.py	Tue Jan 19 21:48:43 2021 +0530
@@ -29,25 +29,25 @@
 
 
 class logentry(object):
-    '''Class logentry has the following attributes:
-        .author    - author name as CVS knows it
-        .branch    - name of branch this revision is on
-        .branches  - revision tuple of branches starting at this revision
-        .comment   - commit message
-        .commitid  - CVS commitid or None
-        .date      - the commit date as a (time, tz) tuple
-        .dead      - true if file revision is dead
-        .file      - Name of file
-        .lines     - a tuple (+lines, -lines) or None
-        .parent    - Previous revision of this entry
-        .rcs       - name of file as returned from CVS
-        .revision  - revision number as tuple
-        .tags      - list of tags on the file
-        .synthetic - is this a synthetic "file ... added on ..." revision?
-        .mergepoint - the branch that has been merged from (if present in
-                      rlog output) or None
-        .branchpoints - the branches that start at the current entry or empty
-    '''
+    """Class logentry has the following attributes:
+    .author    - author name as CVS knows it
+    .branch    - name of branch this revision is on
+    .branches  - revision tuple of branches starting at this revision
+    .comment   - commit message
+    .commitid  - CVS commitid or None
+    .date      - the commit date as a (time, tz) tuple
+    .dead      - true if file revision is dead
+    .file      - Name of file
+    .lines     - a tuple (+lines, -lines) or None
+    .parent    - Previous revision of this entry
+    .rcs       - name of file as returned from CVS
+    .revision  - revision number as tuple
+    .tags      - list of tags on the file
+    .synthetic - is this a synthetic "file ... added on ..." revision?
+    .mergepoint - the branch that has been merged from (if present in
+                  rlog output) or None
+    .branchpoints - the branches that start at the current entry or empty
+    """
 
     def __init__(self, **entries):
         self.synthetic = False
@@ -580,20 +580,20 @@
 
 
 class changeset(object):
-    '''Class changeset has the following attributes:
-        .id        - integer identifying this changeset (list index)
-        .author    - author name as CVS knows it
-        .branch    - name of branch this changeset is on, or None
-        .comment   - commit message
-        .commitid  - CVS commitid or None
-        .date      - the commit date as a (time,tz) tuple
-        .entries   - list of logentry objects in this changeset
-        .parents   - list of one or two parent changesets
-        .tags      - list of tags on this changeset
-        .synthetic - from synthetic revision "file ... added on branch ..."
-        .mergepoint- the branch that has been merged from or None
-        .branchpoints- the branches that start at the current entry or empty
-    '''
+    """Class changeset has the following attributes:
+    .id        - integer identifying this changeset (list index)
+    .author    - author name as CVS knows it
+    .branch    - name of branch this changeset is on, or None
+    .comment   - commit message
+    .commitid  - CVS commitid or None
+    .date      - the commit date as a (time,tz) tuple
+    .entries   - list of logentry objects in this changeset
+    .parents   - list of one or two parent changesets
+    .tags      - list of tags on this changeset
+    .synthetic - from synthetic revision "file ... added on branch ..."
+    .mergepoint- the branch that has been merged from or None
+    .branchpoints- the branches that start at the current entry or empty
+    """
 
     def __init__(self, **entries):
         self.id = None
@@ -945,10 +945,10 @@
 
 
 def debugcvsps(ui, *args, **opts):
-    '''Read CVS rlog for current directory or named path in
+    """Read CVS rlog for current directory or named path in
     repository, and convert the log to changesets based on matching
     commit log entries and dates.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     if opts[b"new_cache"]:
         cache = b"write"
--- a/hgext/convert/filemap.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/filemap.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,14 +19,14 @@
 
 
 def rpairs(path):
-    '''Yield tuples with path split at '/', starting with the full path.
+    """Yield tuples with path split at '/', starting with the full path.
     No leading, trailing or double '/', please.
     >>> for x in rpairs(b'foo/bar/baz'): print(x)
     ('foo/bar/baz', '')
     ('foo/bar', 'baz')
     ('foo', 'bar/baz')
     ('.', 'foo/bar/baz')
-    '''
+    """
     i = len(path)
     while i != -1:
         yield path[:i], path[i + 1 :]
@@ -35,17 +35,17 @@
 
 
 def normalize(path):
-    ''' We use posixpath.normpath to support cross-platform path format.
-    However, it doesn't handle None input. So we wrap it up. '''
+    """We use posixpath.normpath to support cross-platform path format.
+    However, it doesn't handle None input. So we wrap it up."""
     if path is None:
         return None
     return posixpath.normpath(path)
 
 
 class filemapper(object):
-    '''Map and filter filenames when importing.
+    """Map and filter filenames when importing.
     A name can be mapped to itself, a new name, or None (omit from new
-    repository).'''
+    repository)."""
 
     def __init__(self, ui, path=None):
         self.ui = ui
--- a/hgext/convert/git.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/git.py	Tue Jan 19 21:48:43 2021 +0530
@@ -9,10 +9,10 @@
 import os
 
 from mercurial.i18n import _
+from mercurial.node import nullhex
 from mercurial import (
     config,
     error,
-    node as nodemod,
     pycompat,
 )
 
@@ -192,7 +192,7 @@
         return heads
 
     def catfile(self, rev, ftype):
-        if rev == nodemod.nullhex:
+        if rev == nullhex:
             raise IOError
         self.catfilepipe[0].write(rev + b'\n')
         self.catfilepipe[0].flush()
@@ -214,7 +214,7 @@
         return data
 
     def getfile(self, name, rev):
-        if rev == nodemod.nullhex:
+        if rev == nullhex:
             return None, None
         if name == b'.hgsub':
             data = b'\n'.join([m.hgsub() for m in self.submoditer()])
@@ -228,7 +228,7 @@
         return data, mode
 
     def submoditer(self):
-        null = nodemod.nullhex
+        null = nullhex
         for m in sorted(self.submodules, key=lambda p: p.path):
             if m.node != null:
                 yield m
@@ -316,7 +316,7 @@
                 subexists[0] = True
                 if entry[4] == b'D' or renamesource:
                     subdeleted[0] = True
-                    changes.append((b'.hgsub', nodemod.nullhex))
+                    changes.append((b'.hgsub', nullhex))
                 else:
                     changes.append((b'.hgsub', b''))
             elif entry[1] == b'160000' or entry[0] == b':160000':
@@ -324,7 +324,7 @@
                     subexists[0] = True
             else:
                 if renamesource:
-                    h = nodemod.nullhex
+                    h = nullhex
                 self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b""
                 changes.append((f, h))
 
@@ -361,7 +361,7 @@
 
         if subexists[0]:
             if subdeleted[0]:
-                changes.append((b'.hgsubstate', nodemod.nullhex))
+                changes.append((b'.hgsubstate', nullhex))
             else:
                 self.retrievegitmodules(version)
                 changes.append((b'.hgsubstate', b''))
--- a/hgext/convert/hg.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/convert/hg.py	Tue Jan 19 21:48:43 2021 +0530
@@ -24,6 +24,12 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
+from mercurial.node import (
+    bin,
+    hex,
+    nullhex,
+    nullid,
+)
 from mercurial import (
     bookmarks,
     context,
@@ -32,7 +38,6 @@
     hg,
     lock as lockmod,
     merge as mergemod,
-    node as nodemod,
     phases,
     pycompat,
     scmutil,
@@ -155,7 +160,7 @@
                 continue
             revid = revmap.get(source.lookuprev(s[0]))
             if not revid:
-                if s[0] == nodemod.nullhex:
+                if s[0] == nullhex:
                     revid = s[0]
                 else:
                     # missing, but keep for hash stability
@@ -174,7 +179,7 @@
 
             revid = s[0]
             subpath = s[1]
-            if revid != nodemod.nullhex:
+            if revid != nullhex:
                 revmap = self.subrevmaps.get(subpath)
                 if revmap is None:
                     revmap = mapfile(
@@ -295,13 +300,13 @@
         parents = pl
         nparents = len(parents)
         if self.filemapmode and nparents == 1:
-            m1node = self.repo.changelog.read(nodemod.bin(parents[0]))[0]
+            m1node = self.repo.changelog.read(bin(parents[0]))[0]
             parent = parents[0]
 
         if len(parents) < 2:
-            parents.append(nodemod.nullid)
+            parents.append(nullid)
         if len(parents) < 2:
-            parents.append(nodemod.nullid)
+            parents.append(nullid)
         p2 = parents.pop(0)
 
         text = commit.desc
@@ -332,12 +337,12 @@
 
             # Only transplant stores its reference in binary
             if label == b'transplant_source':
-                node = nodemod.hex(node)
+                node = hex(node)
 
             newrev = revmap.get(node)
             if newrev is not None:
                 if label == b'transplant_source':
-                    newrev = nodemod.bin(newrev)
+                    newrev = bin(newrev)
 
                 extra[label] = newrev
 
@@ -351,7 +356,7 @@
             p2 = parents.pop(0)
             p1ctx = self.repo[p1]
             p2ctx = None
-            if p2 != nodemod.nullid:
+            if p2 != nullid:
                 p2ctx = self.repo[p2]
             fileset = set(files)
             if full:
@@ -389,7 +394,7 @@
                     origctx = commit.ctx
                 else:
                     origctx = None
-                node = nodemod.hex(self.repo.commitctx(ctx, origctx=origctx))
+                node = hex(self.repo.commitctx(ctx, origctx=origctx))
 
                 # If the node value has changed, but the phase is lower than
                 # draft, set it back to draft since it hasn't been exposed
@@ -398,7 +403,7 @@
                     ctx = self.repo[node]
                     if ctx.phase() < phases.draft:
                         phases.registernew(
-                            self.repo, tr, phases.draft, [ctx.node()]
+                            self.repo, tr, phases.draft, [ctx.rev()]
                         )
 
             text = b"(octopus merge fixup)\n"
@@ -406,7 +411,7 @@
 
         if self.filemapmode and nparents == 1:
             man = self.repo.manifestlog.getstorage(b'')
-            mnode = self.repo.changelog.read(nodemod.bin(p2))[0]
+            mnode = self.repo.changelog.read(bin(p2))[0]
             closed = b'close' in commit.extra
             if not closed and not man.cmp(m1node, man.revision(mnode)):
                 self.ui.status(_(b"filtering out empty revision\n"))
@@ -416,7 +421,7 @@
 
     def puttags(self, tags):
         tagparent = self.repo.branchtip(self.tagsbranch, ignoremissing=True)
-        tagparent = tagparent or nodemod.nullid
+        tagparent = tagparent or nullid
 
         oldlines = set()
         for branch, heads in pycompat.iteritems(self.repo.branchmap()):
@@ -468,7 +473,7 @@
             extra,
         )
         node = self.repo.commitctx(ctx)
-        return nodemod.hex(node), nodemod.hex(tagparent)
+        return hex(node), hex(tagparent)
 
     def setfilemapmode(self, active):
         self.filemapmode = active
@@ -484,7 +489,7 @@
             self.ui.status(_(b"updating bookmarks\n"))
             destmarks = self.repo._bookmarks
             changes = [
-                (bookmark, nodemod.bin(updatedbookmark[bookmark]))
+                (bookmark, bin(updatedbookmark[bookmark]))
                 for bookmark in updatedbookmark
             ]
             destmarks.applychanges(self.repo, tr, changes)
@@ -577,7 +582,7 @@
         return [p for p in ctx.parents() if p and self.keep(p.node())]
 
     def getheads(self):
-        return [nodemod.hex(h) for h in self._heads if self.keep(h)]
+        return [hex(h) for h in self._heads if self.keep(h)]
 
     def getfile(self, name, rev):
         try:
@@ -678,9 +683,7 @@
             for t in self.repo.tagslist()
             if self.repo.tagtype(t[0]) == b'global'
         ]
-        return {
-            name: nodemod.hex(node) for name, node in tags if self.keep(node)
-        }
+        return {name: hex(node) for name, node in tags if self.keep(node)}
 
     def getchangedfiles(self, rev, i):
         ctx = self._changectx(rev)
@@ -718,7 +721,7 @@
 
     def lookuprev(self, rev):
         try:
-            return nodemod.hex(self.repo.lookup(rev))
+            return hex(self.repo.lookup(rev))
         except (error.RepoError, error.LookupError):
             return None
 
--- a/hgext/eol.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/eol.py	Tue Jan 19 21:48:43 2021 +0530
@@ -118,13 +118,19 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'eol', b'fix-trailing-newline', default=False,
+    b'eol',
+    b'fix-trailing-newline',
+    default=False,
 )
 configitem(
-    b'eol', b'native', default=pycompat.oslinesep,
+    b'eol',
+    b'native',
+    default=pycompat.oslinesep,
 )
 configitem(
-    b'eol', b'only-consistent', default=True,
+    b'eol',
+    b'only-consistent',
+    default=True,
 )
 
 # Matches a lone LF, i.e., one that is not part of CRLF.
@@ -274,13 +280,13 @@
                 return eolfile(ui, repo.root, data)
             except (IOError, LookupError):
                 pass
-    except errormod.ParseError as inst:
+    except errormod.ConfigError as inst:
         ui.warn(
             _(
                 b"warning: ignoring .hgeol file due to parse error "
                 b"at %s: %s\n"
             )
-            % (inst.args[1], inst.args[0])
+            % (inst.location, inst.message)
         )
     return None
 
--- a/hgext/extdiff.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/extdiff.py	Tue Jan 19 21:48:43 2021 +0530
@@ -118,19 +118,29 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'extdiff', br'opts\..*', default=b'', generic=True,
+    b'extdiff',
+    br'opts\..*',
+    default=b'',
+    generic=True,
 )
 
 configitem(
-    b'extdiff', br'gui\..*', generic=True,
+    b'extdiff',
+    br'gui\..*',
+    generic=True,
 )
 
 configitem(
-    b'diff-tools', br'.*\.diffargs$', default=None, generic=True,
+    b'diff-tools',
+    br'.*\.diffargs$',
+    default=None,
+    generic=True,
 )
 
 configitem(
-    b'diff-tools', br'.*\.gui$', generic=True,
+    b'diff-tools',
+    br'.*\.gui$',
+    generic=True,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -141,9 +151,9 @@
 
 
 def snapshot(ui, repo, files, node, tmproot, listsubrepos):
-    '''snapshot files as of some revision
+    """snapshot files as of some revision
     if not using snapshot, -I/-X does not work and recursive diff
-    in tools like kdiff3 and meld displays too many files.'''
+    in tools like kdiff3 and meld displays too many files."""
     dirname = os.path.basename(repo.root)
     if dirname == b"":
         dirname = b"root"
@@ -230,9 +240,9 @@
 
 
 def _systembackground(cmd, environ=None, cwd=None):
-    ''' like 'procutil.system', but returns the Popen object directly
-        so we don't have to wait on it.
-    '''
+    """like 'procutil.system', but returns the Popen object directly
+    so we don't have to wait on it.
+    """
     env = procutil.shellenviron(environ)
     proc = subprocess.Popen(
         procutil.tonativestr(cmd),
@@ -385,9 +395,9 @@
 def diffrevs(
     ui,
     repo,
-    node1a,
-    node1b,
-    node2,
+    ctx1a,
+    ctx1b,
+    ctx2,
     matcher,
     tmproot,
     cmdline,
@@ -399,10 +409,10 @@
     subrepos = opts.get(b'subrepos')
 
     # calculate list of files changed between both revs
-    st = repo.status(node1a, node2, matcher, listsubrepos=subrepos)
+    st = ctx1a.status(ctx2, matcher, listsubrepos=subrepos)
     mod_a, add_a, rem_a = set(st.modified), set(st.added), set(st.removed)
     if do3way:
-        stb = repo.status(node1b, node2, matcher, listsubrepos=subrepos)
+        stb = ctx1b.status(ctx2, matcher, listsubrepos=subrepos)
         mod_b, add_b, rem_b = (
             set(stb.modified),
             set(stb.added),
@@ -415,32 +425,34 @@
     if not common:
         return 0
 
-    # Always make a copy of node1a (and node1b, if applicable)
+    # Always make a copy of ctx1a (and ctx1b, if applicable)
     # dir1a should contain files which are:
-    #   * modified or removed from node1a to node2
-    #   * modified or added from node1b to node2
-    #     (except file added from node1a to node2 as they were not present in
-    #     node1a)
+    #   * modified or removed from ctx1a to ctx2
+    #   * modified or added from ctx1b to ctx2
+    #     (except file added from ctx1a to ctx2 as they were not present in
+    #     ctx1a)
     dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
-    dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot, subrepos)[0]
-    rev1a = b'@%d' % repo[node1a].rev()
+    dir1a = snapshot(ui, repo, dir1a_files, ctx1a.node(), tmproot, subrepos)[0]
+    rev1a = b'' if ctx1a.rev() is None else b'@%d' % ctx1a.rev()
     if do3way:
         # file calculation criteria same as dir1a
         dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
-        dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot, subrepos)[0]
-        rev1b = b'@%d' % repo[node1b].rev()
+        dir1b = snapshot(
+            ui, repo, dir1b_files, ctx1b.node(), tmproot, subrepos
+        )[0]
+        rev1b = b'@%d' % ctx1b.rev()
     else:
         dir1b = None
         rev1b = b''
 
     fnsandstat = []
 
-    # If node2 in not the wc or there is >1 change, copy it
+    # If ctx2 is not the wc or there is >1 change, copy it
     dir2root = b''
     rev2 = b''
-    if node2:
-        dir2 = snapshot(ui, repo, modadd, node2, tmproot, subrepos)[0]
-        rev2 = b'@%d' % repo[node2].rev()
+    if ctx2.node() is not None:
+        dir2 = snapshot(ui, repo, modadd, ctx2.node(), tmproot, subrepos)[0]
+        rev2 = b'@%d' % ctx2.rev()
     elif len(common) > 1:
         # we only actually need to get the files to copy back to
         # the working dir in this case (because the other cases
@@ -530,22 +542,31 @@
 
 
 def dodiff(ui, repo, cmdline, pats, opts, guitool=False):
-    '''Do the actual diff:
+    """Do the actual diff:
 
     - copy to a temp structure if diffing 2 internal revisions
     - copy to a temp structure if diffing working revision with
       another one and more than 1 file is changed
     - just invoke the diff for a single file in the working dir
-    '''
+    """
 
     cmdutil.check_at_most_one_arg(opts, b'rev', b'change')
     revs = opts.get(b'rev')
+    from_rev = opts.get(b'from')
+    to_rev = opts.get(b'to')
     change = opts.get(b'change')
     do3way = b'$parent2' in cmdline
 
     if change:
         ctx2 = scmutil.revsingle(repo, change, None)
         ctx1a, ctx1b = ctx2.p1(), ctx2.p2()
+    elif from_rev or to_rev:
+        repo = scmutil.unhidehashlikerevs(
+            repo, [from_rev] + [to_rev], b'nowarn'
+        )
+        ctx1a = scmutil.revsingle(repo, from_rev, None)
+        ctx1b = repo[nullid]
+        ctx2 = scmutil.revsingle(repo, to_rev, None)
     else:
         ctx1a, ctx2 = scmutil.revpair(repo, revs)
         if not revs:
@@ -553,36 +574,34 @@
         else:
             ctx1b = repo[nullid]
 
-    node1a = ctx1a.node()
-    node1b = ctx1b.node()
-    node2 = ctx2.node()
-
     # Disable 3-way merge if there is only one parent
     if do3way:
-        if node1b == nullid:
+        if ctx1b.node() == nullid:
             do3way = False
 
-    matcher = scmutil.match(repo[node2], pats, opts)
+    matcher = scmutil.match(ctx2, pats, opts)
 
     if opts.get(b'patch'):
         if opts.get(b'subrepos'):
             raise error.Abort(_(b'--patch cannot be used with --subrepos'))
         if opts.get(b'per_file'):
             raise error.Abort(_(b'--patch cannot be used with --per-file'))
-        if node2 is None:
+        if ctx2.node() is None:
             raise error.Abort(_(b'--patch requires two revisions'))
 
     tmproot = pycompat.mkdtemp(prefix=b'extdiff.')
     try:
         if opts.get(b'patch'):
-            return diffpatch(ui, repo, node1a, node2, tmproot, matcher, cmdline)
+            return diffpatch(
+                ui, repo, ctx1a.node(), ctx2.node(), tmproot, matcher, cmdline
+            )
 
         return diffrevs(
             ui,
             repo,
-            node1a,
-            node1b,
-            node2,
+            ctx1a,
+            ctx1b,
+            ctx2,
             matcher,
             tmproot,
             cmdline,
@@ -605,7 +624,9 @@
             _(b'pass option to comparison program'),
             _(b'OPT'),
         ),
-        (b'r', b'rev', [], _(b'revision'), _(b'REV')),
+        (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
+        (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
+        (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
         (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
         (
             b'',
@@ -628,14 +649,16 @@
 
 @command(
     b'extdiff',
-    [(b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),]
+    [
+        (b'p', b'program', b'', _(b'comparison program to run'), _(b'CMD')),
+    ]
     + extdiffopts,
     _(b'hg extdiff [OPT]... [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     inferrepo=True,
 )
 def extdiff(ui, repo, *pats, **opts):
-    '''use external program to diff repository (or selected files)
+    """use external program to diff repository (or selected files)
 
     Show differences between revisions for the specified files, using
     an external program. The default program used is diff, with
@@ -647,11 +670,8 @@
     additional options to the program, use -o/--option. These will be
     passed before the names of the directories or files to compare.
 
-    When two revision arguments are given, then changes are shown
-    between those revisions. If only one revision is specified then
-    that revision is compared to the working directory, and, when no
-    revisions are specified, the working directory files are compared
-    to its parent.
+    The --from, --to, and --change options work the same way they do for
+    :hg:`diff`.
 
     The --per-file option runs the external program repeatedly on each
     file to diff, instead of once on two directories. By default,
@@ -664,7 +684,7 @@
 
     The --confirm option will prompt the user before each invocation of
     the external program. It is ignored if --per-file isn't specified.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     program = opts.get(b'program')
     option = opts.get(b'option')
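
The extdiff rework above passes contexts rather than raw nodes and asks the base context for its status against the target (`ctx1a.status(ctx2, matcher, ...)`), and it adds --from/--to options that behave like those of :hg:`diff`. A minimal sketch of the context-based status call, assuming a local repository path given as bytes; the helper is hypothetical:

    from mercurial import hg, scmutil
    from mercurial import ui as uimod

    def changed_files(repopath, from_rev, to_rev):
        # Ask the "from" context for its status against the "to" context,
        # as the reworked diffrevs() does.
        ui = uimod.ui.load()
        repo = hg.repository(ui, repopath)
        ctx1 = scmutil.revsingle(repo, from_rev, None)
        ctx2 = scmutil.revsingle(repo, to_rev, None)
        st = ctx1.status(ctx2)
        return sorted(set(st.modified) | set(st.added) | set(st.removed))

    # Example: changed_files(b'.', b'.^', b'.')
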
--- a/hgext/factotum.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/factotum.py	Tue Jan 19 21:48:43 2021 +0530
@@ -70,13 +70,19 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'factotum', b'executable', default=b'/bin/auth/factotum',
+    b'factotum',
+    b'executable',
+    default=b'/bin/auth/factotum',
 )
 configitem(
-    b'factotum', b'mountpoint', default=b'/mnt/factotum',
+    b'factotum',
+    b'mountpoint',
+    default=b'/mnt/factotum',
 )
 configitem(
-    b'factotum', b'service', default=b'hg',
+    b'factotum',
+    b'service',
+    default=b'hg',
 )
 
 
--- a/hgext/fastannotate/context.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fastannotate/context.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,12 +17,16 @@
     open,
     setattr,
 )
+from mercurial.node import (
+    bin,
+    hex,
+    short,
+)
 from mercurial import (
     error,
     linelog as linelogmod,
     lock as lockmod,
     mdiff,
-    node,
     pycompat,
     scmutil,
     util,
@@ -150,7 +154,7 @@
     diffoptstr = stringutil.pprint(
         sorted((k, getattr(diffopts, k)) for k in mdiff.diffopts.defaults)
     )
-    return node.hex(hashutil.sha1(diffoptstr).digest())[:6]
+    return hex(hashutil.sha1(diffoptstr).digest())[:6]
 
 
 _defaultdiffopthash = hashdiffopts(mdiff.defaultopts)
@@ -308,7 +312,7 @@
         # command could give us a revision number even if the user passes a
         # commit hash.
         if isinstance(rev, int):
-            rev = node.hex(self.repo.changelog.node(rev))
+            rev = hex(self.repo.changelog.node(rev))
 
         # fast path: if rev is in the main branch already
         directly, revfctx = self.canannotatedirectly(rev)
@@ -493,7 +497,7 @@
         result = True
         f = None
         if not isinstance(rev, int) and rev is not None:
-            hsh = {20: bytes, 40: node.bin}.get(len(rev), lambda x: None)(rev)
+            hsh = {20: bytes, 40: bin}.get(len(rev), lambda x: None)(rev)
             if hsh is not None and (hsh, self.path) in self.revmap:
                 f = hsh
         if f is None:
@@ -598,7 +602,7 @@
                             self.ui.debug(
                                 b'fastannotate: reading %s line #%d '
                                 b'to resolve lines %r\n'
-                                % (node.short(hsh), linenum, idxs)
+                                % (short(hsh), linenum, idxs)
                             )
                         fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
                         lines = mdiff.splitnewlines(fctx.data())
@@ -610,8 +614,7 @@
 
             # run the annotate and the lines should match to the file content
             self.ui.debug(
-                b'fastannotate: annotate %s to resolve lines\n'
-                % node.short(hsh)
+                b'fastannotate: annotate %s to resolve lines\n' % short(hsh)
             )
             linelog.annotate(rev)
             fctx = self._resolvefctx(hsh, revmap.rev2path(rev))
@@ -640,12 +643,10 @@
             hsh = f.node()
         llrev = self.revmap.hsh2rev(hsh)
         if not llrev:
-            raise faerror.CorruptedFileError(
-                b'%s is not in revmap' % node.hex(hsh)
-            )
+            raise faerror.CorruptedFileError(b'%s is not in revmap' % hex(hsh))
         if (self.revmap.rev2flag(llrev) & revmapmod.sidebranchflag) != 0:
             raise faerror.CorruptedFileError(
-                b'%s is not in revmap mainbranch' % node.hex(hsh)
+                b'%s is not in revmap mainbranch' % hex(hsh)
             )
         self.linelog.annotate(llrev)
         result = [
--- a/hgext/fastannotate/formatter.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fastannotate/formatter.py	Tue Jan 19 21:48:43 2021 +0530
@@ -6,9 +6,12 @@
 # GNU General Public License version 2 or any later version.
 from __future__ import absolute_import
 
+from mercurial.node import (
+    hex,
+    short,
+)
 from mercurial import (
     encoding,
-    node,
     pycompat,
     templatefilters,
     util,
@@ -116,9 +119,9 @@
     @util.propertycache
     def _hexfunc(self):
         if self.ui.debugflag or self.opts.get(b'long_hash'):
-            return node.hex
+            return hex
         else:
-            return node.short
+            return short
 
     def end(self):
         pass
@@ -168,7 +171,7 @@
 
     @util.propertycache
     def _hexfunc(self):
-        return node.hex
+        return hex
 
     def end(self):
         self.ui.write(b'\n]\n')
--- a/hgext/fetch.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fetch.py	Tue Jan 19 21:48:43 2021 +0530
@@ -54,7 +54,7 @@
     helpcategory=command.CATEGORY_REMOTE_REPO_MANAGEMENT,
 )
 def fetch(ui, repo, source=b'default', **opts):
-    '''pull changes from a remote repository, merge new changes if needed.
+    """pull changes from a remote repository, merge new changes if needed.
 
     This finds all changes from the repository at the specified path
     or URL and adds them to the local repository.
@@ -71,7 +71,7 @@
     See :hg:`help dates` for a list of formats valid for -d/--date.
 
     Returns 0 on success.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
     date = opts.get(b'date')
--- a/hgext/fix.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fix.py	Tue Jan 19 21:48:43 2021 +0530
@@ -372,7 +372,7 @@
 
 
 def getworkqueue(ui, repo, pats, opts, revstofix, basectxs):
-    """"Constructs the list of files to be fixed at specific revisions
+    """Constructs the list of files to be fixed at specific revisions
 
     It is up to the caller how to consume the work items, and the only
     dependence between them is that replacement revisions must be committed in
@@ -417,7 +417,7 @@
         revs = repo.revs(b'(not public() and not obsolete()) or wdir()')
     elif opts[b'source']:
         source_revs = scmutil.revrange(repo, opts[b'source'])
-        revs = set(repo.revs(b'%ld::', source_revs))
+        revs = set(repo.revs(b'(%ld::) - obsolete()', source_revs))
         if wdirrev in source_revs:
             # `wdir()::` is currently empty, so manually add wdir
             revs.add(wdirrev)
@@ -427,8 +427,8 @@
         revs = set(scmutil.revrange(repo, opts[b'rev']))
         if opts.get(b'working_dir'):
             revs.add(wdirrev)
-    for rev in revs:
-        checkfixablectx(ui, repo, repo[rev])
+        for rev in revs:
+            checkfixablectx(ui, repo, repo[rev])
     # Allow fixing only wdir() even if there's an unfinished operation
     if not (len(revs) == 1 and wdirrev in revs):
         cmdutil.checkunfinished(repo)
@@ -439,7 +439,7 @@
         raise error.Abort(b'unresolved conflicts', hint=b"use 'hg resolve'")
     if not revs:
         raise error.Abort(
-            b'no changesets specified', hint=b'use --rev or --working-dir'
+            b'no changesets specified', hint=b'use --source or --working-dir'
         )
     return revs
 
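The --source branch above now subtracts obsolete() from the descendant set, so changesets that have already been rewritten are not fixed again. A hedged sketch of the effect; the helper name and the revision numbers in the comment are made up for illustration:

    def source_revs_sketch(repo, source_revs):
        # Old expression: repo.revs(b'%ld::', source_revs) returned every
        # descendant, obsolete or not.  The patched expression drops the
        # obsolete ones, e.g. if rev 2 is an obsolete descendant of rev 1,
        # the old form yields {1, 2, 3} while the new form yields {1, 3}.
        return set(repo.revs(b'(%ld::) - obsolete()', source_revs))
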
--- a/hgext/fsmonitor/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fsmonitor/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -154,25 +154,40 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fsmonitor', b'mode', default=b'on',
+    b'fsmonitor',
+    b'mode',
+    default=b'on',
 )
 configitem(
-    b'fsmonitor', b'walk_on_invalidate', default=False,
+    b'fsmonitor',
+    b'walk_on_invalidate',
+    default=False,
 )
 configitem(
-    b'fsmonitor', b'timeout', default=b'2',
+    b'fsmonitor',
+    b'timeout',
+    default=b'2',
 )
 configitem(
-    b'fsmonitor', b'blacklistusers', default=list,
+    b'fsmonitor',
+    b'blacklistusers',
+    default=list,
+)
+configitem(
+    b'fsmonitor',
+    b'watchman_exe',
+    default=b'watchman',
 )
 configitem(
-    b'fsmonitor', b'watchman_exe', default=b'watchman',
+    b'fsmonitor',
+    b'verbose',
+    default=True,
+    experimental=True,
 )
 configitem(
-    b'fsmonitor', b'verbose', default=True, experimental=True,
-)
-configitem(
-    b'experimental', b'fsmonitor.transaction_notify', default=False,
+    b'experimental',
+    b'fsmonitor.transaction_notify',
+    default=False,
 )
 
 # This extension is incompatible with the following blacklisted extensions
@@ -271,11 +286,11 @@
 
 
 def overridewalk(orig, self, match, subrepos, unknown, ignored, full=True):
-    '''Replacement for dirstate.walk, hooking into Watchman.
+    """Replacement for dirstate.walk, hooking into Watchman.
 
     Whenever full is False, ignored is False, and the Watchman client is
     available, use Watchman combined with saved state to possibly return only a
-    subset of files.'''
+    subset of files."""
 
     def bail(reason):
         self._ui.debug(b'fsmonitor: fallback to core status, %s\n' % reason)
@@ -731,8 +746,8 @@
 
 
 def wrapsymlink(orig, source, link_name):
-    ''' if we create a dangling symlink, also touch the parent dir
-    to encourage fsevents notifications to work more correctly '''
+    """if we create a dangling symlink, also touch the parent dir
+    to encourage fsevents notifications to work more correctly"""
     try:
         return orig(source, link_name)
     finally:
@@ -743,13 +758,13 @@
 
 
 class state_update(object):
-    ''' This context manager is responsible for dispatching the state-enter
-        and state-leave signals to the watchman service. The enter and leave
-        methods can be invoked manually (for scenarios where context manager
-        semantics are not possible). If parameters oldnode and newnode are None,
-        they will be populated based on current working copy in enter and
-        leave, respectively. Similarly, if the distance is none, it will be
-        calculated based on the oldnode and newnode in the leave method.'''
+    """This context manager is responsible for dispatching the state-enter
+    and state-leave signals to the watchman service. The enter and leave
+    methods can be invoked manually (for scenarios where context manager
+    semantics are not possible). If parameters oldnode and newnode are None,
+    they will be populated based on current working copy in enter and
+    leave, respectively. Similarly, if the distance is none, it will be
+    calculated based on the oldnode and newnode in the leave method."""
 
     def __init__(
         self,
--- a/hgext/fsmonitor/pywatchman/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fsmonitor/pywatchman/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -282,11 +282,11 @@
 
 class SocketTimeout(WatchmanError):
     """A specialized exception raised for socket timeouts during communication to/from watchman.
-       This makes it easier to implement non-blocking loops as callers can easily distinguish
-       between a routine timeout and an actual error condition.
+    This makes it easier to implement non-blocking loops as callers can easily distinguish
+    between a routine timeout and an actual error condition.
 
-       Note that catching WatchmanError will also catch this as it is a super-class, so backwards
-       compatibility in exception handling is preserved.
+    Note that catching WatchmanError will also catch this as it is a super-class, so backwards
+    compatibility in exception handling is preserved.
     """
 
 
@@ -323,7 +323,7 @@
         pass
 
     def readLine(self):
-        """ read a line
+        """read a line
         Maintains its own buffer, callers of the transport should not mix
         calls to readBytes and readLine.
         """
@@ -409,7 +409,7 @@
 
 
 def _get_overlapped_result_ex_impl(pipe, olap, nbytes, millis, alertable):
-    """ Windows 7 and earlier does not support GetOverlappedResultEx. The
+    """Windows 7 and earlier does not support GetOverlappedResultEx. The
     alternative is to use GetOverlappedResult and wait for read or write
     operation to complete. This is done be using CreateEvent and
     WaitForSingleObjectEx. CreateEvent, WaitForSingleObjectEx
@@ -510,9 +510,9 @@
         self.timeout = int(value * 1000)
 
     def readBytes(self, size):
-        """ A read can block for an unbounded amount of time, even if the
-            kernel reports that the pipe handle is signalled, so we need to
-            always perform our reads asynchronously
+        """A read can block for an unbounded amount of time, even if the
+        kernel reports that the pipe handle is signalled, so we need to
+        always perform our reads asynchronously
         """
 
         # try to satisfy the read from any buffered data
@@ -627,7 +627,7 @@
 
 
 class CLIProcessTransport(Transport):
-    """ open a pipe to the cli to talk to the service
+    """open a pipe to the cli to talk to the service
     This intended to be used only in the test harness!
 
     The CLI is an oddball because we only support JSON input
@@ -739,8 +739,8 @@
 
 
 class ImmutableBserCodec(BserCodec):
-    """ use the BSER encoding, decoding values using the newer
-        immutable object support """
+    """use the BSER encoding, decoding values using the newer
+    immutable object support"""
 
     def _loads(self, response):
         return bser.loads(
@@ -817,8 +817,8 @@
 
 
 class ImmutableBser2Codec(Bser2WithFallbackCodec, ImmutableBserCodec):
-    """ use the BSER encoding, decoding values using the newer
-        immutable object support """
+    """use the BSER encoding, decoding values using the newer
+    immutable object support"""
 
     pass
 
@@ -1050,7 +1050,7 @@
             self.sendConn = None
 
     def receive(self):
-        """ receive the next PDU from the watchman service
+        """receive the next PDU from the watchman service
 
         If the client has activated subscriptions or logs then
         this PDU may be a unilateral PDU sent by the service to
@@ -1098,7 +1098,7 @@
         return False
 
     def getLog(self, remove=True):
-        """ Retrieve buffered log data
+        """Retrieve buffered log data
 
         If remove is true the data will be removed from the buffer.
         Otherwise it will be left in the buffer
@@ -1109,7 +1109,7 @@
         return res
 
     def getSubscription(self, name, remove=True, root=None):
-        """ Retrieve the data associated with a named subscription
+        """Retrieve the data associated with a named subscription
 
         If remove is True (the default), the subscription data is removed
         from the buffer.  Otherwise the data is returned but left in
@@ -1144,7 +1144,7 @@
         return sub
 
     def query(self, *args):
-        """ Send a query to the watchman service and return the response
+        """Send a query to the watchman service and return the response
 
         This call will block until the response is returned.
         If any unilateral responses are sent by the service in between
--- a/hgext/fsmonitor/pywatchman/capabilities.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/fsmonitor/pywatchman/capabilities.py	Tue Jan 19 21:48:43 2021 +0530
@@ -55,8 +55,8 @@
 
 
 def synthesize(vers, opts):
-    """ Synthesize a capability enabled version response
-        This is a very limited emulation for relatively recent feature sets
+    """Synthesize a capability enabled version response
+    This is a very limited emulation for relatively recent feature sets
     """
     parsed_version = parse_version(vers["version"])
     vers["capabilities"] = {}
--- a/hgext/git/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -29,13 +29,24 @@
     index,
 )
 
+# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
+# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
+# be specifying the version(s) of Mercurial they are tested with, or
+# leave the attribute unspecified.
+testedwith = b'ships-with-hg-core'
+
 configtable = {}
 configitem = registrar.configitem(configtable)
 # git.log-index-cache-miss: internal knob for testing
 configitem(
-    b"git", b"log-index-cache-miss", default=False,
+    b"git",
+    b"log-index-cache-miss",
+    default=False,
 )
 
+getversion = gitutil.pygit2_version
+
+
 # TODO: extract an interface for this in core
 class gitstore(object):  # store.basicstore):
     def __init__(self, path, vfstype):
@@ -224,8 +235,7 @@
         return bname
 
     def applychanges(self, repo, tr, changes):
-        """Apply a list of changes to bookmarks
-        """
+        """Apply a list of changes to bookmarks"""
         # TODO: this should respect transactions, but that's going to
         # require enlarging the gitbmstore to know how to do in-memory
         # temporary writes and read those back prior to transaction
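
getversion above points at gitutil.pygit2_version (added later in this patch), so Mercurial's extension machinery can report which pygit2 the extension is running against, for example in `hg version -v`. A minimal sketch of the same defensive probe applied to any optional module; the function name is made up:

    def optional_module_version(mod, default="N/A"):
        # Same shape as pygit2_version(): tolerate the module being absent
        # entirely, or present without a __version__ attribute.
        if mod is None:
            return default
        return getattr(mod, "__version__", default)
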
--- a/hgext/git/dirstate.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/dirstate.py	Tue Jan 19 21:48:43 2021 +0530
@@ -4,11 +4,11 @@
 import errno
 import os
 
+from mercurial.node import nullid
 from mercurial import (
     error,
     extensions,
     match as matchmod,
-    node as nodemod,
     pycompat,
     scmutil,
     util,
@@ -81,14 +81,14 @@
         except pygit2.GitError:
             # Typically happens when peeling HEAD fails, as in an
             # empty repository.
-            return nodemod.nullid
+            return nullid
 
     def p2(self):
         # TODO: MERGE_HEAD? something like that, right?
-        return nodemod.nullid
+        return nullid
 
-    def setparents(self, p1, p2=nodemod.nullid):
-        assert p2 == nodemod.nullid, b'TODO merging support'
+    def setparents(self, p1, p2=nullid):
+        assert p2 == nullid, b'TODO merging support'
         self.git.head.set_target(gitutil.togitnode(p1))
 
     @util.propertycache
@@ -102,7 +102,7 @@
 
     def parents(self):
         # TODO how on earth do we find p2 if a merge is in flight?
-        return self.p1(), nodemod.nullid
+        return self.p1(), nullid
 
     def __iter__(self):
         return (pycompat.fsencode(f.path) for f in self.git.index)
--- a/hgext/git/gitlog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/gitlog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -2,6 +2,14 @@
 
 from mercurial.i18n import _
 
+from mercurial.node import (
+    bin,
+    hex,
+    nullhex,
+    nullid,
+    nullrev,
+    wdirhex,
+)
 from mercurial import (
     ancestor,
     changelog as hgchangelog,
@@ -9,7 +17,6 @@
     encoding,
     error,
     manifest,
-    node as nodemod,
     pycompat,
 )
 from mercurial.interfaces import (
@@ -39,7 +46,7 @@
         )
 
     def rev(self, n):
-        if n == nodemod.nullid:
+        if n == nullid:
             return -1
         t = self._db.execute(
             'SELECT rev FROM changelog WHERE node = ?', (gitutil.togitnode(n),)
@@ -49,14 +56,14 @@
         return t[0]
 
     def node(self, r):
-        if r == nodemod.nullrev:
-            return nodemod.nullid
+        if r == nullrev:
+            return nullid
         t = self._db.execute(
             'SELECT node FROM changelog WHERE rev = ?', (r,)
         ).fetchone()
         if t is None:
             raise error.LookupError(r, b'00changelog.i', _(b'no node'))
-        return nodemod.bin(t[0])
+        return bin(t[0])
 
     def hasnode(self, n):
         t = self._db.execute(
@@ -123,10 +130,10 @@
     @property
     def nodemap(self):
         r = {
-            nodemod.bin(v[0]): v[1]
+            bin(v[0]): v[1]
             for v in self._db.execute('SELECT node, rev FROM changelog')
         }
-        r[nodemod.nullid] = nodemod.nullrev
+        r[nullid] = nullrev
         return r
 
     def tip(self):
@@ -134,8 +141,8 @@
             'SELECT node FROM changelog ORDER BY rev DESC LIMIT 1'
         ).fetchone()
         if t:
-            return nodemod.bin(t[0])
-        return nodemod.nullid
+            return bin(t[0])
+        return nullid
 
     def revs(self, start=0, stop=None):
         if stop is None:
@@ -148,17 +155,23 @@
         )
         return (int(r[0]) for r in t)
 
+    def tiprev(self):
+        t = self._db.execute(
+            'SELECT rev FROM changelog ' 'ORDER BY REV DESC ' 'LIMIT 1'
+        )
+        return next(t)
+
     def _partialmatch(self, id):
-        if nodemod.wdirhex.startswith(id):
+        if wdirhex.startswith(id):
             raise error.WdirUnsupported
         candidates = [
-            nodemod.bin(x[0])
+            bin(x[0])
             for x in self._db.execute(
                 'SELECT node FROM changelog WHERE node LIKE ?', (id + b'%',)
             )
         ]
-        if nodemod.nullhex.startswith(id):
-            candidates.append(nodemod.nullid)
+        if nullhex.startswith(id):
+            candidates.append(nullid)
         if len(candidates) > 1:
             raise error.AmbiguousPrefixLookupError(
                 id, b'00changelog.i', _(b'ambiguous identifier')
@@ -171,7 +184,7 @@
         return 0
 
     def shortest(self, node, minlength=1):
-        nodehex = nodemod.hex(node)
+        nodehex = hex(node)
         for attempt in pycompat.xrange(minlength, len(nodehex) + 1):
             candidate = nodehex[:attempt]
             matches = int(
@@ -203,7 +216,7 @@
         else:
             n = nodeorrev
         # handle looking up nullid
-        if n == nodemod.nullid:
+        if n == nullid:
             return hgchangelog._changelogrevision(extra={})
         hn = gitutil.togitnode(n)
         # We've got a real commit!
@@ -220,7 +233,7 @@
             for r in self._db.execute(
                 'SELECT filename FROM changedfiles '
                 'WHERE node = ? and filenode = ?',
-                (hn, nodemod.nullhex),
+                (hn, nullhex),
             )
         ]
         c = self.gitrepo[hn]
@@ -261,7 +274,7 @@
         nullrev.
         """
         if common is None:
-            common = [nodemod.nullrev]
+            common = [nullrev]
 
         return ancestor.incrementalmissingancestors(self.parentrevs, common)
 
@@ -281,7 +294,7 @@
         not supplied, uses all of the revlog's heads.  If common is not
         supplied, uses nullid."""
         if common is None:
-            common = [nodemod.nullid]
+            common = [nullid]
         if heads is None:
             heads = self.heads()
 
@@ -296,12 +309,12 @@
         c = []
         p = self.rev(node)
         for r in self.revs(start=p + 1):
-            prevs = [pr for pr in self.parentrevs(r) if pr != nodemod.nullrev]
+            prevs = [pr for pr in self.parentrevs(r) if pr != nullrev]
             if prevs:
                 for pr in prevs:
                     if pr == p:
                         c.append(self.node(r))
-            elif p == nodemod.nullrev:
+            elif p == nullrev:
                 c.append(self.node(r))
         return c
 
@@ -317,7 +330,7 @@
 
     # Cleanup opportunity: this is *identical* to the revlog.py version
     def isancestorrev(self, a, b):
-        if a == nodemod.nullrev:
+        if a == nullrev:
             return True
         elif a == b:
             return True
@@ -331,8 +344,8 @@
         if hn != gitutil.nullgit:
             c = self.gitrepo[hn]
         else:
-            return nodemod.nullrev, nodemod.nullrev
-        p1 = p2 = nodemod.nullrev
+            return nullrev, nullrev
+        p1 = p2 = nullrev
         if c.parents:
             p1 = self.rev(c.parents[0].id.raw)
             if len(c.parents) > 2:
@@ -380,9 +393,9 @@
     ):
         parents = []
         hp1, hp2 = gitutil.togitnode(p1), gitutil.togitnode(p2)
-        if p1 != nodemod.nullid:
+        if p1 != nullid:
             parents.append(hp1)
-        if p2 and p2 != nodemod.nullid:
+        if p2 and p2 != nullid:
             parents.append(hp2)
         assert date is not None
         timestamp, tz = date
@@ -413,7 +426,7 @@
         return self.get(b'', node)
 
     def get(self, relpath, node):
-        if node == nodemod.nullid:
+        if node == nullid:
             # TODO: this should almost certainly be a memgittreemanifestctx
             return manifest.memtreemanifestctx(self, relpath)
         commit = self.gitrepo[gitutil.togitnode(node)]
@@ -434,7 +447,7 @@
         self.path = path
 
     def read(self, node):
-        if node == nodemod.nullid:
+        if node == nullid:
             return b''
         return self.gitrepo[gitutil.togitnode(node)].data
 
@@ -444,7 +457,7 @@
         if isinstance(node, int):
             assert False, b'todo revnums for nodes'
         if len(node) == 40:
-            node = nodemod.bin(node)
+            node = bin(node)
         hnode = gitutil.togitnode(node)
         if hnode in self.gitrepo:
             return node
@@ -494,7 +507,7 @@
         ).fetchone()
         if maybe is None:
             raise IndexError('gitlog %r out of range %d' % (self.path, rev))
-        return nodemod.bin(maybe[0])
+        return bin(maybe[0])
 
     def parents(self, node):
         gn = gitutil.togitnode(node)
@@ -519,7 +532,7 @@
                 index.fill_in_filelog(self.gitrepo, self._db, commit, gp, gn)
                 return self.parents(node)
             else:
-                ps.append(nodemod.bin(p))
+                ps.append(bin(p))
         return ps
 
     def renamed(self, node):
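
shortest() above probes progressively longer hex prefixes until the sqlite LIKE query counts exactly one match. The same idea over a plain in-memory list of hex ids, as a self-contained illustration (the function below is not part of the patch):

    def shortest_prefix(nodehex, all_hexes, minlength=1):
        # Return the shortest prefix of nodehex, at least minlength characters
        # long, that matches exactly one known node id.
        for length in range(minlength, len(nodehex) + 1):
            candidate = nodehex[:length]
            if sum(1 for h in all_hexes if h.startswith(candidate)) == 1:
                return candidate
        return nodehex
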
--- a/hgext/git/gitutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/gitutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -20,6 +20,19 @@
     return pygit2_module
 
 
+def pygit2_version():
+    mod = get_pygit2()
+    v = "N/A"
+
+    if mod:
+        try:
+            v = mod.__version__
+        except AttributeError:
+            pass
+
+    return b"(pygit2 %s)" % v.encode("utf-8")
+
+
 def togitnode(n):
     """Wrapper to convert a Mercurial binary node to a unicode hexlified node.
 
--- a/hgext/git/index.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/index.py	Tue Jan 19 21:48:43 2021 +0530
@@ -5,11 +5,14 @@
 import sqlite3
 
 from mercurial.i18n import _
+from mercurial.node import (
+    nullhex,
+    nullid,
+)
 
 from mercurial import (
     encoding,
     error,
-    node as nodemod,
     pycompat,
 )
 
@@ -278,7 +281,7 @@
     for pos, commit in enumerate(walker):
         if prog is not None:
             prog.update(pos)
-        p1 = p2 = nodemod.nullhex
+        p1 = p2 = nullhex
         if len(commit.parents) > 2:
             raise error.ProgrammingError(
                 (
@@ -315,9 +318,7 @@
                 )
             new_files = (p.delta.new_file for p in patchgen)
             files = {
-                nf.path: nf.id.hex
-                for nf in new_files
-                if nf.id.raw != nodemod.nullid
+                nf.path: nf.id.hex for nf in new_files if nf.id.raw != nullid
             }
             for p, n in files.items():
                 # We intentionally set NULLs for any file parentage
--- a/hgext/git/manifest.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/git/manifest.py	Tue Jan 19 21:48:43 2021 +0530
@@ -127,7 +127,7 @@
         return dir in self._dirs
 
     def diff(self, other, match=lambda x: True, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         The result is returned as a dict with filename as key and
         values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
@@ -135,7 +135,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         result = {}
 
         def _iterativediff(t1, t2, subdir):
--- a/hgext/githelp.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/githelp.py	Tue Jan 19 21:48:43 2021 +0530
@@ -59,10 +59,10 @@
     helpbasic=True,
 )
 def githelp(ui, repo, *args, **kwargs):
-    '''suggests the Mercurial equivalent of the given git command
+    """suggests the Mercurial equivalent of the given git command
 
     Usage: hg githelp -- <git command>
-    '''
+    """
 
     if len(args) == 0 or (len(args) == 1 and args[0] == b'git'):
         raise error.Abort(
--- a/hgext/gpg.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/gpg.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,12 +11,17 @@
 import os
 
 from mercurial.i18n import _
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+    short,
+)
 from mercurial import (
     cmdutil,
     error,
     help,
     match,
-    node as hgnode,
     pycompat,
     registrar,
 )
@@ -37,13 +42,20 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'gpg', b'cmd', default=b'gpg',
+    b'gpg',
+    b'cmd',
+    default=b'gpg',
 )
 configitem(
-    b'gpg', b'key', default=None,
+    b'gpg',
+    b'key',
+    default=None,
 )
 configitem(
-    b'gpg', b'.*', default=None, generic=True,
+    b'gpg',
+    b'.*',
+    default=None,
+    generic=True,
 )
 
 # Custom help category
@@ -78,7 +90,11 @@
             fp.close()
             gpgcmd = (
                 b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
-                % (self.path, sigfile, datafile,)
+                % (
+                    self.path,
+                    sigfile,
+                    datafile,
+                )
             )
             ret = procutil.filter(b"", gpgcmd)
         finally:
@@ -144,7 +160,7 @@
     # read the heads
     fl = repo.file(b".hgsigs")
     for r in reversed(fl.heads()):
-        fn = b".hgsigs|%s" % hgnode.short(r)
+        fn = b".hgsigs|%s" % short(r)
         for item in parsefile(fl.read(r).splitlines(), fn):
             yield item
     try:
@@ -161,7 +177,7 @@
     fn, ln = context
     node, version, sig = sigdata
     prefix = b"%s:%d" % (fn, ln)
-    node = hgnode.bin(node)
+    node = bin(node)
 
     data = node2txt(repo, node, version)
     sig = binascii.a2b_base64(sig)
@@ -212,7 +228,7 @@
         revs[r].extend(keys)
     for rev in sorted(revs, reverse=True):
         for k in revs[rev]:
-            r = b"%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
+            r = b"%5d:%s" % (rev, hex(repo.changelog.node(rev)))
             ui.write(b"%-30s %s\n" % (keystr(ui, k), r))
 
 
@@ -221,7 +237,7 @@
     """verify all the signatures there may be for a particular revision"""
     mygpg = newgpg(ui)
     rev = repo.lookup(rev)
-    hexrev = hgnode.hex(rev)
+    hexrev = hex(rev)
     keys = []
 
     for data, context in sigwalk(repo):
@@ -232,11 +248,11 @@
                 keys.extend(k)
 
     if not keys:
-        ui.write(_(b"no valid signature for %s\n") % hgnode.short(rev))
+        ui.write(_(b"no valid signature for %s\n") % short(rev))
         return
 
     # print summary
-    ui.write(_(b"%s is signed by:\n") % hgnode.short(rev))
+    ui.write(_(b"%s is signed by:\n") % short(rev))
     for key in keys:
         ui.write(b" %s\n" % keystr(ui, key))
 
@@ -298,9 +314,7 @@
     if revs:
         nodes = [repo.lookup(n) for n in revs]
     else:
-        nodes = [
-            node for node in repo.dirstate.parents() if node != hgnode.nullid
-        ]
+        nodes = [node for node in repo.dirstate.parents() if node != nullid]
         if len(nodes) > 1:
             raise error.Abort(
                 _(b'uncommitted merge - please provide a specific revision')
@@ -309,10 +323,8 @@
             nodes = [repo.changelog.tip()]
 
     for n in nodes:
-        hexnode = hgnode.hex(n)
-        ui.write(
-            _(b"signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n))
-        )
+        hexnode = hex(n)
+        ui.write(_(b"signing %d:%s\n") % (repo.changelog.rev(n), short(n)))
         # build data
         data = node2txt(repo, n, sigver)
         sig = mygpg.sign(data)
@@ -349,10 +361,7 @@
     if not message:
         # we don't translate commit messages
         message = b"\n".join(
-            [
-                b"Added signature for changeset %s" % hgnode.short(n)
-                for n in nodes
-            ]
+            [b"Added signature for changeset %s" % short(n) for n in nodes]
         )
     try:
         editor = cmdutil.getcommiteditor(
@@ -368,7 +377,7 @@
 def node2txt(repo, node, ver):
     """map a manifest into some text"""
     if ver == b"0":
-        return b"%s\n" % hgnode.hex(node)
+        return b"%s\n" % hex(node)
     else:
         raise error.Abort(_(b"unknown signature version"))
 
--- a/hgext/hgk.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/hgk.py	Tue Jan 19 21:48:43 2021 +0530
@@ -65,7 +65,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'hgk', b'path', default=b'hgk',
+    b'hgk',
+    b'path',
+    default=b'hgk',
 )
 
 
--- a/hgext/histedit.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/histedit.py	Tue Jan 19 21:48:43 2021 +0530
@@ -34,7 +34,7 @@
  #
  # Commands:
  #  p, pick = use commit
- #  e, edit = use commit, but stop for amending
+ #  e, edit = use commit, but allow edits before making new commit
  #  f, fold = use commit, but combine it with the one above
  #  r, roll = like fold, but discard this commit's description and date
  #  d, drop = remove commit from history
@@ -57,7 +57,7 @@
  #
  # Commands:
  #  p, pick = use commit
- #  e, edit = use commit, but stop for amending
+ #  e, edit = use commit, but allow edits before making new commit
  #  f, fold = use commit, but combine it with the one above
  #  r, roll = like fold, but discard this commit's description and date
  #  d, drop = remove commit from history
@@ -209,6 +209,11 @@
     getattr,
     open,
 )
+from mercurial.node import (
+    bin,
+    hex,
+    short,
+)
 from mercurial import (
     bundle2,
     cmdutil,
@@ -225,7 +230,6 @@
     merge as mergemod,
     mergestate as mergestatemod,
     mergeutil,
-    node,
     obsolete,
     pycompat,
     registrar,
@@ -247,22 +251,34 @@
 configtable = {}
 configitem = registrar.configitem(configtable)
 configitem(
-    b'experimental', b'histedit.autoverb', default=False,
+    b'experimental',
+    b'histedit.autoverb',
+    default=False,
 )
 configitem(
-    b'histedit', b'defaultrev', default=None,
+    b'histedit',
+    b'defaultrev',
+    default=None,
 )
 configitem(
-    b'histedit', b'dropmissing', default=False,
+    b'histedit',
+    b'dropmissing',
+    default=False,
 )
 configitem(
-    b'histedit', b'linelen', default=80,
+    b'histedit',
+    b'linelen',
+    default=80,
 )
 configitem(
-    b'histedit', b'singletransaction', default=False,
+    b'histedit',
+    b'singletransaction',
+    default=False,
 )
 configitem(
-    b'ui', b'interface.histedit', default=None,
+    b'ui',
+    b'interface.histedit',
+    default=None,
 )
 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
 
@@ -280,7 +296,7 @@
 
 
 def geteditcomment(ui, first, last):
-    """ construct the editor comment
+    """construct the editor comment
     The comment includes::
      - an intro
      - sorted primary commands
@@ -392,8 +408,8 @@
 
     def _write(self, fp):
         fp.write(b'v1\n')
-        fp.write(b'%s\n' % node.hex(self.parentctxnode))
-        fp.write(b'%s\n' % node.hex(self.topmost))
+        fp.write(b'%s\n' % hex(self.parentctxnode))
+        fp.write(b'%s\n' % hex(self.topmost))
         fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
         fp.write(b'%d\n' % len(self.actions))
         for action in self.actions:
@@ -403,8 +419,8 @@
             fp.write(
                 b'%s%s\n'
                 % (
-                    node.hex(replacement[0]),
-                    b''.join(node.hex(r) for r in replacement[1]),
+                    hex(replacement[0]),
+                    b''.join(hex(r) for r in replacement[1]),
                 )
             )
         backupfile = self.backupfile
@@ -420,10 +436,10 @@
         lines[index]  # version number
         index += 1
 
-        parentctxnode = node.bin(lines[index])
+        parentctxnode = bin(lines[index])
         index += 1
 
-        topmost = node.bin(lines[index])
+        topmost = bin(lines[index])
         index += 1
 
         keep = lines[index] == b'True'
@@ -446,9 +462,9 @@
         index += 1
         for i in pycompat.xrange(replacementlen):
             replacement = lines[index]
-            original = node.bin(replacement[:40])
+            original = bin(replacement[:40])
             succ = [
-                node.bin(replacement[i : i + 40])
+                bin(replacement[i : i + 40])
                 for i in range(40, len(replacement), 40)
             ]
             replacements.append((original, succ))
@@ -477,18 +493,17 @@
 
     @classmethod
     def fromrule(cls, state, rule):
-        """Parses the given rule, returning an instance of the histeditaction.
-        """
+        """Parses the given rule, returning an instance of the histeditaction."""
         ruleid = rule.strip().split(b' ', 1)[0]
         # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
         # Check for validation of rule ids and get the rulehash
         try:
-            rev = node.bin(ruleid)
+            rev = bin(ruleid)
         except TypeError:
             try:
                 _ctx = scmutil.revsingle(state.repo, ruleid)
                 rulehash = _ctx.hex()
-                rev = node.bin(rulehash)
+                rev = bin(rulehash)
             except error.RepoLookupError:
                 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
         return cls(state, rev)
@@ -496,7 +511,7 @@
     def verify(self, prev, expected, seen):
         """ Verifies semantic correctness of the rule"""
         repo = self.repo
-        ha = node.hex(self.node)
+        ha = hex(self.node)
         self.node = scmutil.resolvehexnodeidprefix(repo, ha)
         if self.node is None:
             raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
@@ -507,14 +522,13 @@
         if self.node not in expected:
             raise error.ParseError(
                 _(b'%s "%s" changeset was not a candidate')
-                % (self.verb, node.short(self.node)),
+                % (self.verb, short(self.node)),
                 hint=_(b'only use listed changesets'),
             )
         # and only one command per node
         if self.node in seen:
             raise error.ParseError(
-                _(b'duplicated command for changeset %s')
-                % node.short(self.node)
+                _(b'duplicated command for changeset %s') % short(self.node)
             )
 
     def torule(self):
@@ -525,13 +539,16 @@
         """
         ctx = self.repo[self.node]
         ui = self.repo.ui
-        summary = (
-            cmdutil.rendertemplate(
+        # We don't want color codes in the commit message template, so
+        # disable the label() template function while we render it.
+        with ui.configoverride(
+            {(b'templatealias', b'label(l,x)'): b"x"}, b'histedit'
+        ):
+            summary = cmdutil.rendertemplate(
                 ctx, ui.config(b'histedit', b'summary-template')
             )
-            or b''
-        )
-        summary = summary.splitlines()[0]
+        # Handle the fact that `''.splitlines() => []`
+        summary = summary.splitlines()[0] if summary else b''
         line = b'%s %s %s' % (self.verb, ctx, summary)
         # trim to 75 columns by default so it's not stupidly wide in my editor
         # (the 5 more are left for verb)
@@ -541,9 +558,9 @@
 
     def tostate(self):
         """Print an action in format used by histedit state files
-           (the first line is a verb, the remainder is the second)
+        (the first line is a verb, the remainder is the second)
         """
-        return b"%s\n%s" % (self.verb, node.hex(self.node))
+        return b"%s\n%s" % (self.verb, hex(self.node))
 
     def run(self):
         """Runs the action. The default behavior is simply apply the action's
@@ -564,8 +581,7 @@
         repo.dirstate.setbranch(rulectx.branch())
         if stats.unresolvedcount:
             raise error.InterventionRequired(
-                _(b'Fix up the change (%s %s)')
-                % (self.verb, node.short(self.node)),
+                _(b'Fix up the change (%s %s)') % (self.verb, short(self.node)),
                 hint=_(b'hg histedit --continue to resume'),
             )
 
@@ -600,8 +616,7 @@
         ctx = self.repo[b'.']
         if ctx.node() == self.state.parentctxnode:
             self.repo.ui.warn(
-                _(b'%s: skipping changeset (no changes)\n')
-                % node.short(self.node)
+                _(b'%s: skipping changeset (no changes)\n') % short(self.node)
             )
             return ctx, [(self.node, tuple())]
         if ctx.node() == self.node:
@@ -670,7 +685,7 @@
     for c in ctxs:
         if not c.mutable():
             raise error.ParseError(
-                _(b"cannot fold into public change %s") % node.short(c.node())
+                _(b"cannot fold into public change %s") % short(c.node())
             )
     base = firstctx.p1()
 
@@ -772,23 +787,28 @@
     def run(self):
         rulectx = self.repo[self.node]
         if rulectx.p1().node() == self.state.parentctxnode:
-            self.repo.ui.debug(b'node %s unchanged\n' % node.short(self.node))
+            self.repo.ui.debug(b'node %s unchanged\n' % short(self.node))
             return rulectx, []
 
         return super(pick, self).run()
 
 
-@action([b'edit', b'e'], _(b'use commit, but stop for amending'), priority=True)
+@action(
+    [b'edit', b'e'],
+    _(b'use commit, but allow edits before making new commit'),
+    priority=True,
+)
 class edit(histeditaction):
     def run(self):
         repo = self.repo
         rulectx = repo[self.node]
         hg.update(repo, self.state.parentctxnode, quietempty=True)
         applychanges(repo.ui, repo, rulectx, {})
+        hint = _(b'to edit %s, `hg histedit --continue` after making changes')
         raise error.InterventionRequired(
-            _(b'Editing (%s), you may commit or record as needed now.')
-            % node.short(self.node),
-            hint=_(b'hg histedit --continue to resume'),
+            _(b'Editing (%s), commit as needed now to split the change')
+            % short(self.node),
+            hint=hint % short(self.node),
         )
 
     def commiteditor(self):
@@ -809,7 +829,7 @@
             c = repo[prev.node]
         if not c.mutable():
             raise error.ParseError(
-                _(b"cannot fold into public change %s") % node.short(c.node())
+                _(b"cannot fold into public change %s") % short(c.node())
             )
 
     def continuedirty(self):
@@ -818,7 +838,7 @@
 
         commit = commitfuncfor(repo, rulectx)
         commit(
-            text=b'fold-temp-revision %s' % node.short(self.node),
+            text=b'fold-temp-revision %s' % short(self.node),
             user=rulectx.user(),
             date=rulectx.date(),
             extra=rulectx.extra(),
@@ -830,7 +850,7 @@
         rulectx = repo[self.node]
         parentctxnode = self.state.parentctxnode
         if ctx.node() == parentctxnode:
-            repo.ui.warn(_(b'%s: empty changeset\n') % node.short(self.node))
+            repo.ui.warn(_(b'%s: empty changeset\n') % short(self.node))
             return ctx, [(self.node, (parentctxnode,))]
 
         parentctx = repo[parentctxnode]
@@ -844,7 +864,7 @@
                     b'%s: cannot fold - working copy is not a '
                     b'descendant of previous commit %s\n'
                 )
-                % (node.short(self.node), node.short(parentctxnode))
+                % (short(self.node), short(parentctxnode))
             )
             return ctx, [(self.node, (ctx.node(),))]
 
@@ -958,7 +978,7 @@
         if self.node in expected:
             msg = _(b'%s "%s" changeset was an edited list candidate')
             raise error.ParseError(
-                msg % (self.verb, node.short(self.node)),
+                msg % (self.verb, short(self.node)),
                 hint=_(b'base must only use unlisted changesets'),
             )
 
@@ -1152,11 +1172,8 @@
 
     @util.propertycache
     def desc(self):
-        summary = (
-            cmdutil.rendertemplate(
-                self.ctx, self.ui.config(b'histedit', b'summary-template')
-            )
-            or b''
+        summary = cmdutil.rendertemplate(
+            self.ctx, self.ui.config(b'histedit', b'summary-template')
         )
         if summary:
             return summary
@@ -1178,8 +1195,8 @@
 
 # ============ EVENTS ===============
 def movecursor(state, oldpos, newpos):
-    '''Change the rule/changeset that the cursor is pointing to, regardless of
-    current mode (you can switch between patches from the view patch window).'''
+    """Change the rule/changeset that the cursor is pointing to, regardless of
+    current mode (you can switch between patches from the view patch window)."""
     state[b'pos'] = newpos
 
     mode, _ = state[b'mode']
@@ -1256,8 +1273,8 @@
 
 
 def changeview(state, delta, unit):
-    '''Change the region of whatever is being viewed (a patch or the list of
-    changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
+    """Change the region of whatever is being viewed (a patch or the list of
+    changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'."""
     mode, _ = state[b'mode']
     if mode != MODE_PATCH:
         return
@@ -1582,8 +1599,12 @@
         b'mode': (MODE_INIT, MODE_INIT),
         b'page_height': None,
         b'modes': {
-            MODE_RULES: {b'line_offset': 0,},
-            MODE_PATCH: {b'line_offset': 0,},
+            MODE_RULES: {
+                b'line_offset': 0,
+            },
+            MODE_PATCH: {
+                b'line_offset': 0,
+            },
         },
         b'repo': repo,
     }
@@ -1701,8 +1722,7 @@
         revs = between(repo, root, topmost, keep)
         if not revs:
             raise error.Abort(
-                _(b'%s is not an ancestor of working directory')
-                % node.short(root)
+                _(b'%s is not an ancestor of working directory') % short(root)
             )
 
         ctxs = []
@@ -2055,16 +2075,16 @@
     if mapping:
         for prec, succs in pycompat.iteritems(mapping):
             if not succs:
-                ui.debug(b'histedit: %s is dropped\n' % node.short(prec))
+                ui.debug(b'histedit: %s is dropped\n' % short(prec))
             else:
                 ui.debug(
                     b'histedit: %s is replaced by %s\n'
-                    % (node.short(prec), node.short(succs[0]))
+                    % (short(prec), short(succs[0]))
                 )
                 if len(succs) > 1:
                     m = b'histedit:                            %s'
                     for n in succs[1:]:
-                        ui.debug(m % node.short(n))
+                        ui.debug(m % short(n))
 
     if not state.keep:
         if mapping:
@@ -2109,7 +2129,7 @@
     try:
         state.read()
         __, leafs, tmpnodes, __ = processreplacement(state)
-        ui.debug(b'restore wc to old parent %s\n' % node.short(state.topmost))
+        ui.debug(b'restore wc to old parent %s\n' % short(state.topmost))
 
         # Recover our old commits if necessary
         if not state.topmost in repo and state.backupfile:
@@ -2163,7 +2183,7 @@
     state.read()
     if not rules:
         comment = geteditcomment(
-            ui, node.short(state.parentctxnode), node.short(state.topmost)
+            ui, short(state.parentctxnode), short(state.topmost)
         )
         rules = ruleeditor(repo, ui, state.actions, comment)
     else:
@@ -2204,7 +2224,7 @@
     revs = between(repo, root, topmost, state.keep)
     if not revs:
         raise error.Abort(
-            _(b'%s is not an ancestor of working directory') % node.short(root)
+            _(b'%s is not an ancestor of working directory') % short(root)
         )
 
     ctxs = [repo[r] for r in revs]
@@ -2241,7 +2261,7 @@
             )
 
     if not rules:
-        comment = geteditcomment(ui, node.short(root), node.short(topmost))
+        comment = geteditcomment(ui, short(root), short(topmost))
         actions = [pick(state, r) for r in revs]
         rules = ruleeditor(repo, ui, actions, comment)
     else:
@@ -2445,12 +2465,12 @@
         actions[:0] = drops
     elif missing:
         raise error.ParseError(
-            _(b'missing rules for changeset %s') % node.short(missing[0]),
+            _(b'missing rules for changeset %s') % short(missing[0]),
             hint=_(
                 b'use "drop %s" to discard, see also: '
                 b"'hg help -e histedit.config'"
             )
-            % node.short(missing[0]),
+            % short(missing[0]),
         )
 
 
@@ -2604,7 +2624,7 @@
         if common_nodes:
             raise error.Abort(
                 _(b"histedit in progress, can't strip %s")
-                % b', '.join(node.short(x) for x in common_nodes)
+                % b', '.join(short(x) for x in common_nodes)
             )
     return orig(ui, repo, nodelist, *args, **kwargs)
 
--- a/hgext/hooklib/changeset_obsoleted.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/hooklib/changeset_obsoleted.py	Tue Jan 19 21:48:43 2021 +0530
@@ -40,10 +40,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify_obsoleted', b'domain', default=None,
+    b'notify_obsoleted',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify_obsoleted', b'messageidseed', default=None,
+    b'notify_obsoleted',
+    b'messageidseed',
+    default=None,
 )
 configitem(
     b'notify_obsoleted',
--- a/hgext/hooklib/changeset_published.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/hooklib/changeset_published.py	Tue Jan 19 21:48:43 2021 +0530
@@ -39,10 +39,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify_published', b'domain', default=None,
+    b'notify_published',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify_published', b'messageidseed', default=None,
+    b'notify_published',
+    b'messageidseed',
+    default=None,
 )
 configitem(
     b'notify_published',
--- a/hgext/infinitepush/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/infinitepush/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -154,37 +154,59 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'infinitepush', b'server', default=False,
+    b'infinitepush',
+    b'server',
+    default=False,
 )
 configitem(
-    b'infinitepush', b'storetype', default=b'',
+    b'infinitepush',
+    b'storetype',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'indextype', default=b'',
+    b'infinitepush',
+    b'indextype',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'indexpath', default=b'',
+    b'infinitepush',
+    b'indexpath',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'storeallparts', default=False,
+    b'infinitepush',
+    b'storeallparts',
+    default=False,
 )
 configitem(
-    b'infinitepush', b'reponame', default=b'',
+    b'infinitepush',
+    b'reponame',
+    default=b'',
 )
 configitem(
-    b'scratchbranch', b'storepath', default=b'',
+    b'scratchbranch',
+    b'storepath',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'branchpattern', default=b'',
+    b'infinitepush',
+    b'branchpattern',
+    default=b'',
 )
 configitem(
-    b'infinitepush', b'pushtobundlestore', default=False,
+    b'infinitepush',
+    b'pushtobundlestore',
+    default=False,
 )
 configitem(
-    b'experimental', b'server-bundlestore-bookmark', default=b'',
+    b'experimental',
+    b'server-bundlestore-bookmark',
+    default=b'',
 )
 configitem(
-    b'experimental', b'infinitepush-scratchpush', default=False,
+    b'experimental',
+    b'infinitepush-scratchpush',
+    default=False,
 )
 
 experimental = b'experimental'
@@ -249,13 +271,13 @@
 
 
 def _tryhoist(ui, remotebookmark):
-    '''returns a bookmarks with hoisted part removed
+    """returns a bookmark with the hoisted part removed
 
     Remotenames extension has a 'hoist' config that allows to use remote
     bookmarks without specifying remote path. For example, 'hg update master'
     works as well as 'hg update remote/master'. We want to allow the same in
     infinitepush.
-    '''
+    """
 
     if common.isremotebooksenabled(ui):
         hoist = ui.config(b'remotenames', b'hoistedpeer') + b'/'
@@ -427,11 +449,11 @@
 
 
 def _includefilelogstobundle(bundlecaps, bundlerepo, bundlerevs, ui):
-    '''Tells remotefilelog to include all changed files to the changegroup
+    """Tells remotefilelog to include all changed files to the changegroup
 
     By default remotefilelog doesn't include file content to the changegroup.
     But we need to include it if we are fetching from bundlestore.
-    '''
+    """
     changedfiles = set()
     cl = bundlerepo.changelog
     for r in bundlerevs:
@@ -457,11 +479,11 @@
 
 
 def _rebundle(bundlerepo, bundleroots, unknownhead):
-    '''
+    """
     A bundle may include more revisions than the user requested; for example,
     the user asks for a revision but the bundle also contains its descendants.
     This function filters out every revision that the user did not request.
-    '''
+    """
     parts = []
 
     version = b'02'
@@ -499,10 +521,10 @@
 
 
 def _generateoutputparts(head, bundlerepo, bundleroots, bundlefile):
-    '''generates bundle that will be send to the user
+    """generates bundle that will be send to the user
 
     returns tuple with raw bundle string and bundle type
-    '''
+    """
     parts = []
     if not _needsrebundling(head, bundlerepo):
         with util.posixfile(bundlefile, b"rb") as f:
@@ -1022,7 +1044,12 @@
                         )
                         rpart.addparam(b'return', b'1', mandatory=False)
 
-            op.records.add(part.type, {b'return': 1,})
+            op.records.add(
+                part.type,
+                {
+                    b'return': 1,
+                },
+            )
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1112,7 +1139,12 @@
                     bundle2._processpart(op, part)
 
             if handleallparts:
-                op.records.add(part.type, {b'return': 1,})
+                op.records.add(
+                    part.type,
+                    {
+                        b'return': 1,
+                    },
+                )
             if bundlepart:
                 bundler.addpart(bundlepart)
 
@@ -1284,11 +1316,11 @@
 
 
 def bundle2pushkey(orig, op, part):
-    '''Wrapper of bundle2.handlepushkey()
+    """Wrapper of bundle2.handlepushkey()
 
     The only goal is to skip calling the original function if flag is set.
     It's set if infinitepush push is happening.
-    '''
+    """
     if op.records[scratchbranchparttype + b'_skippushkey']:
         if op.reply is not None:
             rpart = op.reply.newpart(b'reply:pushkey')
@@ -1300,11 +1332,11 @@
 
 
 def bundle2handlephases(orig, op, part):
-    '''Wrapper of bundle2.handlephases()
+    """Wrapper of bundle2.handlephases()
 
     The only goal is to skip calling the original function if flag is set.
     It's set if infinitepush push is happening.
-    '''
+    """
 
     if op.records[scratchbranchparttype + b'_skipphaseheads']:
         return
@@ -1313,11 +1345,11 @@
 
 
 def _asyncsavemetadata(root, nodes):
-    '''starts a separate process that fills metadata for the nodes
+    """starts a separate process that fills metadata for the nodes
 
     This function creates a separate process and doesn't wait for its
     completion. This was done to avoid slowing down pushes.
-    '''
+    """
 
     maxnodes = 50
     if len(nodes) > maxnodes:
--- a/hgext/infinitepush/bundleparts.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/infinitepush/bundleparts.py	Tue Jan 19 21:48:43 2021 +0530
@@ -6,13 +6,13 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
+from mercurial.node import hex
 
 from mercurial import (
     bundle2,
     changegroup,
     error,
     extensions,
-    node as nodemod,
     pycompat,
     revsetlang,
     util,
@@ -54,7 +54,7 @@
         params[b'bookprevnode'] = b''
         bookmarks = repo._bookmarks
         if bookmark in bookmarks:
-            params[b'bookprevnode'] = nodemod.hex(bookmarks[bookmark])
+            params[b'bookprevnode'] = hex(bookmarks[bookmark])
 
     # Do not send pushback bundle2 part with bookmarks if remotenames extension
     # is enabled. It will be handled manually in `_push()`
@@ -90,11 +90,11 @@
 
 
 def _handlelfs(repo, missing):
-    '''Special case if lfs is enabled
+    """Special case if lfs is enabled
 
     If lfs is enabled then we need to call prepush hook
     to make sure large files are uploaded to lfs
-    '''
+    """
     try:
         lfsmod = extensions.find(b'lfs')
         lfsmod.wrapper.uploadblobsfromrevs(repo, missing)
--- a/hgext/infinitepush/indexapi.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/infinitepush/indexapi.py	Tue Jan 19 21:48:43 2021 +0530
@@ -47,8 +47,7 @@
         raise NotImplementedError()
 
     def deletebookmarks(self, patterns):
-        """Accepts list of bookmarks and deletes them.
-        """
+        """Accepts list of bookmarks and deletes them."""
         raise NotImplementedError()
 
     def getbundle(self, node):
--- a/hgext/infinitepush/sqlindexapi.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/infinitepush/sqlindexapi.py	Tue Jan 19 21:48:43 2021 +0530
@@ -28,9 +28,9 @@
 
 
 class sqlindexapi(indexapi.indexapi):
-    '''
+    """
     Sql backend for infinitepush index. See schema.sql
-    '''
+    """
 
     def __init__(
         self,
--- a/hgext/infinitepush/store.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/infinitepush/store.py	Tue Jan 19 21:48:43 2021 +0530
@@ -9,11 +9,9 @@
 import os
 import subprocess
 
+from mercurial.node import hex
 from mercurial.pycompat import open
-from mercurial import (
-    node,
-    pycompat,
-)
+from mercurial import pycompat
 from mercurial.utils import (
     hashutil,
     procutil,
@@ -86,7 +84,7 @@
         return os.path.join(self._dirpath(filename), filename)
 
     def write(self, data):
-        filename = node.hex(hashutil.sha1(data).digest())
+        filename = hex(hashutil.sha1(data).digest())
         dirpath = self._dirpath(filename)
 
         if not os.path.exists(dirpath):
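
write() above names each stored blob after the hex SHA-1 of its contents (hashutil.sha1 is Mercurial's thin wrapper around hashlib.sha1). The same content-addressing idea using only the standard library; the two-level directory layout below is illustrative rather than the store's exact _dirpath:

    import hashlib
    import os

    def blob_path(root, data):
        # The file is named after the hex digest of its own bytes; a short
        # prefix subdirectory keeps any one directory from growing too large.
        digest = hashlib.sha1(data).hexdigest()
        return os.path.join(root, digest[:2], digest)
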
--- a/hgext/journal.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/journal.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,6 +19,11 @@
 import weakref
 
 from mercurial.i18n import _
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+)
 
 from mercurial import (
     bookmarks,
@@ -31,7 +36,6 @@
     localrepo,
     lock,
     logcmdutil,
-    node,
     pycompat,
     registrar,
     util,
@@ -113,8 +117,8 @@
     new = list(new)
     if util.safehasattr(dirstate, 'journalstorage'):
         # only record two hashes if there was a merge
-        oldhashes = old[:1] if old[1] == node.nullid else old
-        newhashes = new[:1] if new[1] == node.nullid else new
+        oldhashes = old[:1] if old[1] == nullid else old
+        newhashes = new[:1] if new[1] == nullid else new
         dirstate.journalstorage.record(
             wdirparenttype, b'.', oldhashes, newhashes
         )
@@ -127,7 +131,7 @@
     if util.safehasattr(repo, 'journal'):
         oldmarks = bookmarks.bmstore(repo)
         for mark, value in pycompat.iteritems(store):
-            oldvalue = oldmarks.get(mark, node.nullid)
+            oldvalue = oldmarks.get(mark, nullid)
             if value != oldvalue:
                 repo.journal.record(bookmarktype, mark, oldvalue, value)
     return orig(store, fp)
@@ -248,8 +252,8 @@
         ) = line.split(b'\n')
         timestamp, tz = time.split()
         timestamp, tz = float(timestamp), int(tz)
-        oldhashes = tuple(node.bin(hash) for hash in oldhashes.split(b','))
-        newhashes = tuple(node.bin(hash) for hash in newhashes.split(b','))
+        oldhashes = tuple(bin(hash) for hash in oldhashes.split(b','))
+        newhashes = tuple(bin(hash) for hash in newhashes.split(b','))
         return cls(
             (timestamp, tz),
             user,
@@ -263,8 +267,8 @@
     def __bytes__(self):
         """bytes representation for storage"""
         time = b' '.join(map(pycompat.bytestr, self.timestamp))
-        oldhashes = b','.join([node.hex(hash) for hash in self.oldhashes])
-        newhashes = b','.join([node.hex(hash) for hash in self.newhashes])
+        oldhashes = b','.join([hex(hash) for hash in self.oldhashes])
+        newhashes = b','.join([hex(hash) for hash in self.newhashes])
         return b'\n'.join(
             (
                 time,
--- a/hgext/keyword.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/keyword.py	Tue Jan 19 21:48:43 2021 +0530
@@ -158,13 +158,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'keywordset', b'svn', default=False,
+    b'keywordset',
+    b'svn',
+    default=False,
 )
 # date like in cvs' $Date
 @templatefilter(b'utcdate', intype=templateutil.date)
 def utcdate(date):
-    '''Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
-    '''
+    """Date. Returns a UTC-date in this format: "2009/08/18 11:00:13"."""
     dateformat = b'%Y/%m/%d %H:%M:%S'
     return dateutil.datestr((date[0], 0), dateformat)
 
@@ -172,18 +173,18 @@
 # date like in svn's $Date
 @templatefilter(b'svnisodate', intype=templateutil.date)
 def svnisodate(date):
-    '''Date. Returns a date in this format: "2009-08-18 13:00:13
+    """Date. Returns a date in this format: "2009-08-18 13:00:13
     +0200 (Tue, 18 Aug 2009)".
-    '''
+    """
     return dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
 
 
 # date like in svn's $Id
 @templatefilter(b'svnutcdate', intype=templateutil.date)
 def svnutcdate(date):
-    '''Date. Returns a UTC-date in this format: "2009-08-18
+    """Date. Returns a UTC-date in this format: "2009-08-18
     11:00:13Z".
-    '''
+    """
     dateformat = b'%Y-%m-%d %H:%M:%SZ'
     return dateutil.datestr((date[0], 0), dateformat)
 
@@ -221,25 +222,25 @@
 
 
 def _shrinktext(text, subfunc):
-    '''Helper for keyword expansion removal in text.
-    Depending on subfunc also returns number of substitutions.'''
+    """Helper for keyword expansion removal in text.
+    Depending on subfunc also returns number of substitutions."""
     return subfunc(br'$\1$', text)
 
 
 def _preselect(wstatus, changed):
-    '''Retrieves modified and added files from a working directory state
+    """Retrieves modified and added files from a working directory state
     and returns the subset of each contained in given changed files
-    retrieved from a change context.'''
+    retrieved from a change context."""
     modified = [f for f in wstatus.modified if f in changed]
     added = [f for f in wstatus.added if f in changed]
     return modified, added
 
 
 class kwtemplater(object):
-    '''
+    """
     Sets up keyword templates, corresponding keyword regex, and
     provides keyword substitution functions.
-    '''
+    """
 
     def __init__(self, ui, repo, inc, exc):
         self.ui = ui
@@ -304,8 +305,8 @@
         return data
 
     def iskwfile(self, cand, ctx):
-        '''Returns subset of candidates which are configured for keyword
-        expansion but are not symbolic links.'''
+        """Returns subset of candidates which are configured for keyword
+        expansion but are not symbolic links."""
         return [f for f in cand if self.match(f) and b'l' not in ctx.flags(f)]
 
     def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
@@ -374,18 +375,18 @@
         return lines
 
     def wread(self, fname, data):
-        '''If in restricted mode returns data read from wdir with
-        keyword substitutions removed.'''
+        """If in restricted mode returns data read from wdir with
+        keyword substitutions removed."""
         if self.restrict:
             return self.shrink(fname, data)
         return data
 
 
 class kwfilelog(filelog.filelog):
-    '''
+    """
     Subclass of filelog to hook into its read, add, cmp methods.
     Keywords are "stored" unexpanded, and processed on reading.
-    '''
+    """
 
     def __init__(self, opener, kwt, path):
         super(kwfilelog, self).__init__(opener, path)
@@ -411,8 +412,8 @@
 
 
 def _status(ui, repo, wctx, kwt, *pats, **opts):
-    '''Bails out if [keyword] configuration is not active.
-    Returns status of working directory.'''
+    """Bails out if [keyword] configuration is not active.
+    Returns status of working directory."""
     if kwt:
         opts = pycompat.byteskwargs(opts)
         return repo.status(
@@ -448,7 +449,7 @@
     optionalrepo=True,
 )
 def demo(ui, repo, *args, **opts):
-    '''print [keywordmaps] configuration and an expansion example
+    """print [keywordmaps] configuration and an expansion example
 
     Show current, custom, or default keyword template maps and their
     expansions.
@@ -459,7 +460,7 @@
     Use -d/--default to disable current configuration.
 
     See :hg:`help templates` for information on templates and filters.
-    '''
+    """
 
     def demoitems(section, items):
         ui.write(b'[%s]\n' % section)
@@ -547,12 +548,12 @@
     inferrepo=True,
 )
 def expand(ui, repo, *pats, **opts):
-    '''expand keywords in the working directory
+    """expand keywords in the working directory
 
     Run after (re)enabling keyword expansion.
 
     kwexpand refuses to run if given files contain local changes.
-    '''
+    """
     # 3rd argument sets expansion to True
     _kwfwrite(ui, repo, True, *pats, **opts)
 
@@ -569,7 +570,7 @@
     inferrepo=True,
 )
 def files(ui, repo, *pats, **opts):
-    '''show files configured for keyword expansion
+    """show files configured for keyword expansion
 
     List which files in the working directory are matched by the
     [keyword] configuration patterns.
@@ -588,7 +589,7 @@
       k = keyword expansion candidate (not tracked)
       I = ignored
       i = ignored (not tracked)
-    '''
+    """
     kwt = getattr(repo, '_keywordkwt', None)
     wctx = repo[None]
     status = _status(ui, repo, wctx, kwt, *pats, **opts)
@@ -634,12 +635,12 @@
     inferrepo=True,
 )
 def shrink(ui, repo, *pats, **opts):
-    '''revert expanded keywords in the working directory
+    """revert expanded keywords in the working directory
 
     Must be run before changing/disabling active keywords.
 
     kwshrink refuses to run if given files contain local changes.
-    '''
+    """
     # 3rd argument sets expansion to False
     _kwfwrite(ui, repo, False, *pats, **opts)
 
@@ -648,8 +649,8 @@
 
 
 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
-    '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
-    rejects or conflicts due to expanded keywords in working dir.'''
+    """Monkeypatch/wrap patch.patchfile.__init__ to avoid
+    rejects or conflicts due to expanded keywords in working dir."""
     orig(self, ui, gp, backend, store, eolmode)
     kwt = getattr(getattr(backend, 'repo', None), '_keywordkwt', None)
     if kwt:
@@ -702,7 +703,7 @@
 
 
 def kw_copy(orig, ui, repo, pats, opts, rename=False):
-    '''Wraps cmdutil.copy so that copy/rename destinations do not
+    """Wraps cmdutil.copy so that copy/rename destinations do not
     contain expanded keywords.
     Note that the source of a regular file destination may also be a
     symlink:
@@ -710,7 +711,7 @@
     cp sym x; hg cp -A sym x   -> x is file (maybe expanded keywords)
     For the latter we have to follow the symlink to find out whether its
     target is configured for expansion and we therefore must unexpand the
-    keywords in the destination.'''
+    keywords in the destination."""
     kwt = getattr(repo, '_keywordkwt', None)
     if kwt is None:
         return orig(ui, repo, pats, opts, rename)
@@ -722,9 +723,9 @@
         cwd = repo.getcwd()
 
         def haskwsource(dest):
-            '''Returns true if dest is a regular file and configured for
+            """Returns true if dest is a regular file and configured for
             expansion or a symlink which points to a file configured for
-            expansion. '''
+            expansion."""
             source = repo.dirstate.copied(dest)
             if b'l' in wctx.flags(source):
                 source = pathutil.canonpath(
@@ -785,12 +786,12 @@
 
 
 def uisetup(ui):
-    ''' Monkeypatches dispatch._parse to retrieve user command.
+    """Monkeypatches dispatch._parse to retrieve user command.
     Overrides file method to return kwfilelog instead of filelog
     if file matches user configuration.
     Wraps commit to overwrite configured files with updated
     keyword substitutions.
-    Monkeypatches patch and webcommands.'''
+    Monkeypatches patch and webcommands."""
 
     def kwdispatch_parse(orig, ui, args):
         '''Monkeypatch dispatch._parse to obtain running hg command.'''
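
The `_shrinktext` helper above collapses expanded keywords back to their bare form with a single regex substitution. A self-contained sketch of that idea; the pattern below is a hypothetical stand-in for the regex kwtemplater builds from the configured keyword maps:

import re

# Hypothetical keyword regex; the real one is derived from [keywordmaps].
kwre = re.compile(br'\$(Id|Date)[^$\n]*\$')

def shrinktext(text):
    # mirrors _shrinktext(): turn "$Id: details$" back into "$Id$",
    # returning the new text and the number of substitutions
    return kwre.subn(br'$\1$', text)

shrunk, count = shrinktext(b'# $Id: keyword.py,v 1.2 2009/08/18 joe $\n')
assert shrunk == b'# $Id$\n' and count == 1
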
--- a/hgext/largefiles/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -111,9 +111,7 @@
     extensions,
     exthelper,
     hg,
-    httppeer,
     localrepo,
-    sshpeer,
     wireprotov1server,
 )
 
@@ -136,13 +134,19 @@
 eh.merge(proto.eh)
 
 eh.configitem(
-    b'largefiles', b'minsize', default=eh.configitem.dynamicdefault,
+    b'largefiles',
+    b'minsize',
+    default=eh.configitem.dynamicdefault,
 )
 eh.configitem(
-    b'largefiles', b'patterns', default=list,
+    b'largefiles',
+    b'patterns',
+    default=list,
 )
 eh.configitem(
-    b'largefiles', b'usercache', default=None,
+    b'largefiles',
+    b'usercache',
+    default=None,
 )
 
 cmdtable = eh.cmdtable
@@ -184,13 +188,6 @@
     )
     # TODO also wrap wireproto.commandsv2 once heads is implemented there.
 
-    # can't do this in reposetup because it needs to have happened before
-    # wirerepo.__init__ is called
-    proto.ssholdcallstream = sshpeer.sshv1peer._callstream
-    proto.httpoldcallstream = httppeer.httppeer._callstream
-    sshpeer.sshv1peer._callstream = proto.sshrepocallstream
-    httppeer.httppeer._callstream = proto.httprepocallstream
-
     # override some extensions' stuff as well
     for name, module in extensions.extensions():
         if name == b'rebase':
--- a/hgext/largefiles/basestore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/basestore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,8 +17,8 @@
 
 
 class StoreError(Exception):
-    '''Raised when there is a problem getting files from or putting
-    files to a central store.'''
+    """Raised when there is a problem getting files from or putting
+    files to a central store."""
 
     def __init__(self, filename, hash, url, detail):
         self.filename = filename
@@ -49,19 +49,19 @@
         raise NotImplementedError(b'abstract method')
 
     def exists(self, hashes):
-        '''Check to see if the store contains the given hashes. Given an
-        iterable of hashes it returns a mapping from hash to bool.'''
+        """Check to see if the store contains the given hashes. Given an
+        iterable of hashes it returns a mapping from hash to bool."""
         raise NotImplementedError(b'abstract method')
 
     def get(self, files):
-        '''Get the specified largefiles from the store and write to local
+        """Get the specified largefiles from the store and write to local
         files under repo.root.  files is a list of (filename, hash)
         tuples.  Return (success, missing), lists of files successfully
         downloaded and those not found in the store.  success is a list
         of (filename, hash) tuples; missing is a list of filenames that
         we could not get.  (The detailed error message will already have
         been presented to the user, so missing is just supplied as a
-        summary.)'''
+        summary.)"""
         success = []
         missing = []
         ui = self.ui
@@ -123,9 +123,9 @@
         return True
 
     def verify(self, revs, contents=False):
-        '''Verify the existence (and, optionally, contents) of every big
+        """Verify the existence (and, optionally, contents) of every big
         file revision referenced by every changeset in revs.
-        Return 0 if all is well, non-zero on any errors.'''
+        Return 0 if all is well, non-zero on any errors."""
 
         self.ui.status(
             _(b'searching %d changesets for largefiles\n') % len(revs)
@@ -163,17 +163,17 @@
         return int(failed)
 
     def _getfile(self, tmpfile, filename, hash):
-        '''Fetch one revision of one file from the store and write it
+        """Fetch one revision of one file from the store and write it
         to tmpfile.  Compute the hash of the file on-the-fly as it
         downloads and return the hash.  Close tmpfile.  Raise
         StoreError if unable to download the file (e.g. it does not
-        exist in the store).'''
+        exist in the store)."""
         raise NotImplementedError(b'abstract method')
 
     def _verifyfiles(self, contents, filestocheck):
-        '''Perform the actual verification of files in the store.
+        """Perform the actual verification of files in the store.
         'contents' controls verification of content hash.
         'filestocheck' is list of files to check.
         Returns _true_ if any problems are found!
-        '''
+        """
         raise NotImplementedError(b'abstract method')
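
The docstrings above spell out the abstract store contract: exists() maps hashes to booleans and get() splits requested files into downloaded and missing. A toy in-memory implementation of just that shape (illustrative only, not one of the real store classes):

class memorystore(object):
    """Toy store keyed by hash, implementing the exists()/get() contract."""

    def __init__(self, blobs):
        self._blobs = blobs  # mapping: hash -> blob contents

    def exists(self, hashes):
        return {h: h in self._blobs for h in hashes}

    def get(self, files):
        success, missing = [], []
        for filename, hash in files:
            if hash in self._blobs:
                success.append((filename, hash))
            else:
                missing.append(filename)
        return success, missing

store = memorystore({b'aa' * 20: b'payload'})
assert store.exists([b'aa' * 20, b'bb' * 20]) == {b'aa' * 20: True, b'bb' * 20: False}
assert store.get([(b'big.dat', b'bb' * 20)]) == ([], [b'big.dat'])
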
--- a/hgext/largefiles/lfcommands.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/lfcommands.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,6 +14,11 @@
 import shutil
 
 from mercurial.i18n import _
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+)
 
 from mercurial import (
     cmdutil,
@@ -23,7 +28,6 @@
     hg,
     lock,
     match as matchmod,
-    node,
     pycompat,
     scmutil,
     util,
@@ -66,7 +70,7 @@
     inferrepo=True,
 )
 def lfconvert(ui, src, dest, *pats, **opts):
-    '''convert a normal repository to a largefiles repository
+    """convert a normal repository to a largefiles repository
 
     Convert repository SOURCE to a new repository DEST, identical to
     SOURCE except that certain files will be converted as largefiles:
@@ -82,7 +86,7 @@
     repository.
 
     Use --to-normal to convert largefiles back to normal files; after
-    this, the DEST repository can be used without largefiles at all.'''
+    this, the DEST repository can be used without largefiles at all."""
 
     opts = pycompat.byteskwargs(opts)
     if opts[b'to_normal']:
@@ -111,7 +115,7 @@
             rsrc[ctx]
             for ctx in rsrc.changelog.nodesbetween(None, rsrc.heads())[0]
         )
-        revmap = {node.nullid: node.nullid}
+        revmap = {nullid: nullid}
         if tolfile:
             # Lock destination to prevent modification while it is converted to.
             # Don't need to lock src because we are just reading from its
@@ -275,7 +279,7 @@
                 # largefile was modified, update standins
                 m = hashutil.sha1(b'')
                 m.update(ctx[f].data())
-                hash = node.hex(m.digest())
+                hash = hex(m.digest())
                 if f not in lfiletohash or lfiletohash[f] != hash:
                     rdst.wwrite(f, ctx[f].data(), ctx[f].flags())
                     executable = b'x' in ctx[f].flags()
@@ -336,7 +340,7 @@
 # Generate list of changed files
 def _getchangedfiles(ctx, parents):
     files = set(ctx.files())
-    if node.nullid not in parents:
+    if nullid not in parents:
         mc = ctx.manifest()
         for pctx in ctx.parents():
             for fn in pctx.manifest().diff(mc):
@@ -350,7 +354,7 @@
     for p in ctx.parents():
         parents.append(revmap[p.node()])
     while len(parents) < 2:
-        parents.append(node.nullid)
+        parents.append(nullid)
     return parents
 
 
@@ -380,12 +384,12 @@
             ui.warn(_(b'skipping incorrectly formatted tag %s\n') % line)
             continue
         try:
-            newid = node.bin(id)
+            newid = bin(id)
         except TypeError:
             ui.warn(_(b'skipping incorrectly formatted id %s\n') % id)
             continue
         try:
-            newdata.append(b'%s %s\n' % (node.hex(revmap[newid]), name))
+            newdata.append(b'%s %s\n' % (hex(revmap[newid]), name))
         except KeyError:
             ui.warn(_(b'no mapping for id %s\n') % id)
             continue
@@ -393,8 +397,8 @@
 
 
 def _islfile(file, ctx, matcher, size):
-    '''Return true if file should be considered a largefile, i.e.
-    matcher matches it or it is larger than size.'''
+    """Return true if file should be considered a largefile, i.e.
+    matcher matches it or it is larger than size."""
     # never store special .hg* files as largefiles
     if file == b'.hgtags' or file == b'.hgignore' or file == b'.hgsigs':
         return False
@@ -440,11 +444,11 @@
 
 
 def verifylfiles(ui, repo, all=False, contents=False):
-    '''Verify that every largefile revision in the current changeset
+    """Verify that every largefile revision in the current changeset
     exists in the central store.  With --contents, also verify that
     the contents of each local largefile file revision are correct (SHA-1 hash
     matches the revision ID).  With --all, check every changeset in
-    this repository.'''
+    this repository."""
     if all:
         revs = repo.revs(b'all()')
     else:
@@ -455,12 +459,12 @@
 
 
 def cachelfiles(ui, repo, node, filelist=None):
-    '''cachelfiles ensures that all largefiles needed by the specified revision
+    """cachelfiles ensures that all largefiles needed by the specified revision
     are present in the repository's largefile cache.
 
     returns a tuple (cached, missing).  cached is the list of files downloaded
     by this operation; missing is the list of files that were needed but could
-    not be found.'''
+    not be found."""
     lfiles = lfutil.listlfiles(repo, node)
     if filelist:
         lfiles = set(lfiles) & set(filelist)
@@ -502,11 +506,11 @@
 def updatelfiles(
     ui, repo, filelist=None, printmessage=None, normallookup=False
 ):
-    '''Update largefiles according to standins in the working directory
+    """Update largefiles according to standins in the working directory
 
     If ``printmessage`` is other than ``None``, it means "print (or
     ignore, for false) message forcibly".
-    '''
+    """
     statuswriter = lfutil.getstatuswriter(ui, repo, printmessage)
     with repo.wlock():
         lfdirstate = lfutil.openlfdirstate(ui, repo)
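
_convertparents above maps each source parent through the revmap and pads the list to exactly two entries with nullid. A minimal standalone sketch of that convention (nullid is the 20-byte null node id imported above; the revmap contents here are invented for illustration):

nullid = b'\0' * 20  # matches mercurial.node.nullid

def convertparents(parent_nodes, revmap):
    # map each source parent through revmap, then pad to two parents
    parents = [revmap[p] for p in parent_nodes]
    while len(parents) < 2:
        parents.append(nullid)
    return parents

revmap = {nullid: nullid, b'\x01' * 20: b'\x02' * 20}
assert convertparents([b'\x01' * 20], revmap) == [b'\x02' * 20, nullid]
assert convertparents([], revmap) == [nullid, nullid]
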
--- a/hgext/largefiles/lfutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/lfutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -15,7 +15,10 @@
 import stat
 
 from mercurial.i18n import _
-from mercurial.node import hex
+from mercurial.node import (
+    hex,
+    nullid,
+)
 from mercurial.pycompat import open
 
 from mercurial import (
@@ -24,7 +27,6 @@
     error,
     httpconnection,
     match as matchmod,
-    node,
     pycompat,
     scmutil,
     sparse,
@@ -80,10 +82,10 @@
 
 
 def usercachepath(ui, hash):
-    '''Return the correct location in the "global" largefiles cache for a file
+    """Return the correct location in the "global" largefiles cache for a file
     with the given hash.
     This cache is used for sharing of largefiles across repositories - both
-    to preserve download bandwidth and storage space.'''
+    to preserve download bandwidth and storage space."""
     return os.path.join(_usercachedir(ui), hash)
 
 
@@ -143,9 +145,9 @@
 
 
 def findfile(repo, hash):
-    '''Return store path of the largefile with the specified hash.
+    """Return store path of the largefile with the specified hash.
     As a side effect, the file might be linked from user cache.
-    Return None if the file can't be found locally.'''
+    Return None if the file can't be found locally."""
     path, exists = findstorepath(repo, hash)
     if exists:
         repo.ui.note(_(b'found %s in store\n') % hash)
@@ -191,10 +193,10 @@
 
 
 def openlfdirstate(ui, repo, create=True):
-    '''
+    """
     Return a dirstate object that tracks largefiles: i.e. its root is
     the repo root, but it is saved in .hg/largefiles/dirstate.
-    '''
+    """
     vfs = repo.vfs
     lfstoredir = longname
     opener = vfsmod.vfs(vfs.join(lfstoredir))
@@ -245,8 +247,8 @@
 
 
 def listlfiles(repo, rev=None, matcher=None):
-    '''return a list of largefiles in the working copy or the
-    specified changeset'''
+    """return a list of largefiles in the working copy or the
+    specified changeset"""
 
     if matcher is None:
         matcher = getstandinmatcher(repo)
@@ -265,18 +267,18 @@
 
 
 def storepath(repo, hash, forcelocal=False):
-    '''Return the correct location in the repository largefiles store for a
-    file with the given hash.'''
+    """Return the correct location in the repository largefiles store for a
+    file with the given hash."""
     if not forcelocal and repo.shared():
         return repo.vfs.reljoin(repo.sharedpath, longname, hash)
     return repo.vfs.join(longname, hash)
 
 
 def findstorepath(repo, hash):
-    '''Search through the local store path(s) to find the file for the given
+    """Search through the local store path(s) to find the file for the given
     hash.  If the file is not found, its path in the primary store is returned.
     The return value is a tuple of (path, exists(path)).
-    '''
+    """
     # For shared repos, the primary store is in the share source.  But for
     # backward compatibility, force a lookup in the local store if it wasn't
     # found in the share source.
@@ -291,11 +293,11 @@
 
 
 def copyfromcache(repo, hash, filename):
-    '''Copy the specified largefile from the repo or system cache to
+    """Copy the specified largefile from the repo or system cache to
     filename in the repository. Return true on success or false if the
     file was not found in either cache (which should not happened:
     this is meant to be called only after ensuring that the needed
-    largefile exists in the cache).'''
+    largefile exists in the cache)."""
     wvfs = repo.wvfs
     path = findfile(repo, hash)
     if path is None:
@@ -354,8 +356,8 @@
 
 
 def linktousercache(repo, hash):
-    '''Link / copy the largefile with the specified hash from the store
-    to the cache.'''
+    """Link / copy the largefile with the specified hash from the store
+    to the cache."""
     path = usercachepath(repo.ui, hash)
     link(storepath(repo, hash), path)
 
@@ -380,9 +382,9 @@
 
 
 def composestandinmatcher(repo, rmatcher):
-    '''Return a matcher that accepts standins corresponding to the
+    """Return a matcher that accepts standins corresponding to the
     files accepted by rmatcher. Pass the list of files in the matcher
-    as the paths specified by the user.'''
+    as the paths specified by the user."""
     smatcher = getstandinmatcher(repo, rmatcher)
     isstandin = smatcher.matchfn
 
@@ -395,8 +397,8 @@
 
 
 def standin(filename):
-    '''Return the repo-relative path to the standin for the specified big
-    file.'''
+    """Return the repo-relative path to the standin for the specified big
+    file."""
     # Notes:
     # 1) Some callers want an absolute path, but for instance addlargefiles
     #    needs it repo-relative so it can be passed to repo[None].add().  So
@@ -408,8 +410,8 @@
 
 
 def isstandin(filename):
-    '''Return true if filename is a big file standin. filename must be
-    in Mercurial's internal form (slash-separated).'''
+    """Return true if filename is a big file standin. filename must be
+    in Mercurial's internal form (slash-separated)."""
     return filename.startswith(shortnameslash)
 
 
@@ -439,9 +441,9 @@
 
 
 def readasstandin(fctx):
-    '''read hex hash from given filectx of standin file
+    """read hex hash from given filectx of standin file
 
-    This encapsulates how "standin" data is stored into storage layer.'''
+    This encapsulates how "standin" data is stored into storage layer."""
     return fctx.data().strip()
 
 
@@ -451,8 +453,8 @@
 
 
 def copyandhash(instream, outfile):
-    '''Read bytes from instream (iterable) and write them to outfile,
-    computing the SHA-1 hash of the data along the way. Return the hash.'''
+    """Read bytes from instream (iterable) and write them to outfile,
+    computing the SHA-1 hash of the data along the way. Return the hash."""
     hasher = hashutil.sha1(b'')
     for data in instream:
         hasher.update(data)
@@ -610,7 +612,7 @@
     ) as progress:
         for i, n in enumerate(missing):
             progress.update(i)
-            parents = [p for p in repo[n].parents() if p != node.nullid]
+            parents = [p for p in repo[n].parents() if p != nullid]
 
             with lfstatus(repo, value=False):
                 ctx = repo[n]
@@ -635,11 +637,11 @@
 
 
 def updatestandinsbymatch(repo, match):
-    '''Update standins in the working directory according to specified match
+    """Update standins in the working directory according to specified match
 
     This returns (possibly modified) ``match`` object to be used for
     subsequent commit process.
-    '''
+    """
 
     ui = repo.ui
 
@@ -741,7 +743,7 @@
 
 
 class automatedcommithook(object):
-    '''Stateful hook to update standins at the 1st commit of resuming
+    """Stateful hook to update standins at the 1st commit of resuming
 
     For efficiency, updating standins in the working directory should
     be avoided while automated committing (like rebase, transplant and
@@ -750,7 +752,7 @@
     But the 1st commit of resuming automated committing (e.g. ``rebase
     --continue``) should update them, because largefiles may be
     modified manually.
-    '''
+    """
 
     def __init__(self, resuming):
         self.resuming = resuming
@@ -764,14 +766,14 @@
 
 
 def getstatuswriter(ui, repo, forcibly=None):
-    '''Return the function to write largefiles specific status out
+    """Return the function to write largefiles specific status out
 
     If ``forcibly`` is ``None``, this returns the last element of
     ``repo._lfstatuswriters`` as "default" writer function.
 
     Otherwise, this returns the function to always write out (or
     ignore if ``not forcibly``) status.
-    '''
+    """
     if forcibly is None and util.safehasattr(repo, b'_largefilesenabled'):
         return repo._lfstatuswriters[-1]
     else:
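
Several helpers above deal with "standin" files, the small tracked placeholders stored under the largefiles short name (.hglf). A rough standalone model of the naming convention; treat these functions as illustrative rather than the extension's real API:

shortname = b'.hglf'
shortnameslash = shortname + b'/'

def standin(filename):
    # repo-relative path of the placeholder tracked in place of a largefile
    return shortnameslash + filename

def isstandin(filename):
    # filename must be in Mercurial's internal, slash-separated form
    return filename.startswith(shortnameslash)

def splitstandin(filename):
    return filename[len(shortnameslash):] if isstandin(filename) else None

assert standin(b'data/big.bin') == b'.hglf/data/big.bin'
assert splitstandin(b'.hglf/data/big.bin') == b'data/big.bin'
assert not isstandin(b'data/big.bin')
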
--- a/hgext/largefiles/localstore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/localstore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -20,9 +20,9 @@
 
 
 class localstore(basestore.basestore):
-    '''localstore first attempts to grab files out of the store in the remote
+    """localstore first attempts to grab files out of the store in the remote
     Mercurial repository.  Failing that, it attempts to grab the files from
-    the user cache.'''
+    the user cache."""
 
     def __init__(self, ui, repo, remote):
         self.remote = remote.local()
--- a/hgext/largefiles/overrides.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/overrides.py	Tue Jan 19 21:48:43 2021 +0530
@@ -37,11 +37,14 @@
     scmutil,
     smartset,
     subrepo,
-    upgrade,
     url as urlmod,
     util,
 )
 
+from mercurial.upgrade_utils import (
+    actions as upgrade_actions,
+)
+
 from . import (
     lfcommands,
     lfutil,
@@ -58,8 +61,8 @@
 
 
 def composelargefilematcher(match, manifest):
-    '''create a matcher that matches only the largefiles in the original
-    matcher'''
+    """create a matcher that matches only the largefiles in the original
+    matcher"""
     m = copy.copy(match)
     lfile = lambda f: lfutil.standin(f) in manifest
     m._files = [lf for lf in m._files if lfile(lf)]
@@ -586,11 +589,17 @@
                 mresult.addfile(lfile, b'k', None, b'replaces standin')
                 if branchmerge:
                     mresult.addfile(
-                        standin, b'k', None, b'replaced by non-standin',
+                        standin,
+                        b'k',
+                        None,
+                        b'replaced by non-standin',
                     )
                 else:
                     mresult.addfile(
-                        standin, b'r', None, b'replaced by non-standin',
+                        standin,
+                        b'r',
+                        None,
+                        b'replaced by non-standin',
                     )
         elif lm in (b'g', b'dc') and sm != b'r':
             if lm == b'dc':
@@ -610,7 +619,10 @@
                 if branchmerge:
                     # largefile can be restored from standin safely
                     mresult.addfile(
-                        lfile, b'k', None, b'replaced by standin',
+                        lfile,
+                        b'k',
+                        None,
+                        b'replaced by standin',
                     )
                     mresult.addfile(standin, b'k', None, b'replaces standin')
                 else:
@@ -628,7 +640,10 @@
             else:  # pick remote normal file
                 mresult.addfile(lfile, b'g', largs, b'replaces standin')
                 mresult.addfile(
-                    standin, b'r', None, b'replaced by non-standin',
+                    standin,
+                    b'r',
+                    None,
+                    b'replaced by non-standin',
                 )
 
     return mresult
@@ -1825,8 +1840,8 @@
     return result
 
 
-@eh.wrapfunction(upgrade, b'preservedrequirements')
-@eh.wrapfunction(upgrade, b'supporteddestrequirements')
+@eh.wrapfunction(upgrade_actions, b'preservedrequirements')
+@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
     if b'largefiles' in repo.requirements:
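
The hunk above re-points the upgraderequirements wrappers from the old upgrade module to upgrade_utils.actions. The wrapping idiom passes the original callable as the wrapper's first argument; below is a simplified model of that mechanism (this is not Mercurial's actual extensions.wrapfunction, and fakeactions is an invented stand-in for upgrade_utils.actions):

def wrapfunction(container, name, wrapper):
    """Replace container.name so that wrapper(orig, ...) runs instead."""
    orig = getattr(container, name)

    def wrapped(*args, **kwargs):
        return wrapper(orig, *args, **kwargs)

    setattr(container, name, wrapped)
    return orig

class fakeactions(object):
    @staticmethod
    def supporteddestrequirements(repo):
        return {b'revlogv1'}

def upgraderequirements(orig, repo):
    # simplified: the real wrapper only adds the requirement when the
    # repository actually uses largefiles
    reqs = orig(repo)
    reqs.add(b'largefiles')
    return reqs

wrapfunction(fakeactions, 'supporteddestrequirements', upgraderequirements)
assert b'largefiles' in fakeactions.supporteddestrequirements(None)
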
--- a/hgext/largefiles/proto.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/proto.py	Tue Jan 19 21:48:43 2021 +0530
@@ -5,7 +5,6 @@
 from __future__ import absolute_import
 
 import os
-import re
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
@@ -33,14 +32,10 @@
 
 eh = exthelper.exthelper()
 
-# these will all be replaced by largefiles.uisetup
-ssholdcallstream = None
-httpoldcallstream = None
-
 
 def putlfile(repo, proto, sha):
-    '''Server command for putting a largefile into a repository's local store
-    and into the user cache.'''
+    """Server command for putting a largefile into a repository's local store
+    and into the user cache."""
     with proto.mayberedirectstdio() as output:
         path = lfutil.storepath(repo, sha)
         util.makedirs(os.path.dirname(path))
@@ -69,8 +64,8 @@
 
 
 def getlfile(repo, proto, sha):
-    '''Server command for retrieving a largefile from the repository-local
-    cache or user cache.'''
+    """Server command for retrieving a largefile from the repository-local
+    cache or user cache."""
     filename = lfutil.findfile(repo, sha)
     if not filename:
         raise error.Abort(
@@ -93,12 +88,12 @@
 
 
 def statlfile(repo, proto, sha):
-    '''Server command for checking if a largefile is present - returns '2\n' if
+    """Server command for checking if a largefile is present - returns '2\n' if
     the largefile is missing, '0\n' if it seems to be in good condition.
 
     The value 1 is reserved for mismatched checksum, but that is too expensive
     to be verified on every stat and must be caught be running 'hg verify'
-    server side.'''
+    server side."""
     filename = lfutil.findfile(repo, sha)
     if not filename:
         return wireprototypes.bytesresponse(b'2\n')
@@ -106,7 +101,27 @@
 
 
 def wirereposetup(ui, repo):
+    orig_commandexecutor = repo.commandexecutor
+
     class lfileswirerepository(repo.__class__):
+        def commandexecutor(self):
+            executor = orig_commandexecutor()
+            if self.capable(b'largefiles'):
+                orig_callcommand = executor.callcommand
+
+                class lfscommandexecutor(executor.__class__):
+                    def callcommand(self, command, args):
+                        if command == b'heads':
+                            command = b'lheads'
+                        return orig_callcommand(command, args)
+
+                executor.__class__ = lfscommandexecutor
+            return executor
+
+        @wireprotov1peer.batchable
+        def lheads(self):
+            return self.heads.batchable(self)
+
         def putlfile(self, sha, fd):
             # unfortunately, httprepository._callpush tries to convert its
             # input file-like into a bundle before sending it, so we can't use
@@ -194,28 +209,9 @@
 
 
 def heads(orig, repo, proto):
-    '''Wrap server command - largefile capable clients will know to call
-    lheads instead'''
+    """Wrap server command - largefile capable clients will know to call
+    lheads instead"""
     if lfutil.islfilesrepo(repo):
         return wireprototypes.ooberror(LARGEFILES_REQUIRED_MSG)
 
     return orig(repo, proto)
-
-
-def sshrepocallstream(self, cmd, **args):
-    if cmd == b'heads' and self.capable(b'largefiles'):
-        cmd = b'lheads'
-    if cmd == b'batch' and self.capable(b'largefiles'):
-        args['cmds'] = args[r'cmds'].replace(b'heads ', b'lheads ')
-    return ssholdcallstream(self, cmd, **args)
-
-
-headsre = re.compile(br'(^|;)heads\b')
-
-
-def httprepocallstream(self, cmd, **args):
-    if cmd == b'heads' and self.capable(b'largefiles'):
-        cmd = b'lheads'
-    if cmd == b'batch' and self.capable(b'largefiles'):
-        args['cmds'] = headsre.sub(b'lheads', args['cmds'])
-    return httpoldcallstream(self, cmd, **args)
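
The new wirereposetup above swaps the peer's command executor class at runtime so that `heads` calls become `lheads` when the server advertises largefiles support, replacing the removed _callstream monkeypatching. A toy model of that class-swapping proxy (the executor below is a stand-in, not the real wire-protocol executor):

class fakeexecutor(object):
    # stand-in for the wire-protocol command executor
    def callcommand(self, command, args):
        return (command, args)  # pretend to issue the command

def wrap_executor(executor):
    orig_callcommand = executor.callcommand

    class lfscommandexecutor(executor.__class__):
        def callcommand(self, command, args):
            if command == b'heads':
                command = b'lheads'  # largefile-capable servers expect lheads
            return orig_callcommand(command, args)

    executor.__class__ = lfscommandexecutor
    return executor

e = wrap_executor(fakeexecutor())
assert e.callcommand(b'heads', {}) == (b'lheads', {})
assert e.callcommand(b'known', {}) == (b'known', {})
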
--- a/hgext/largefiles/remotestore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/remotestore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -146,8 +146,8 @@
         raise NotImplementedError(b'abstract method')
 
     def _stat(self, hashes):
-        '''Get information about availability of files specified by
+        """Get information about availability of files specified by
         hashes in the remote store. Return dictionary mapping hashes
         to return code where 0 means that file is available, other
-        values if not.'''
+        values if not."""
         raise NotImplementedError(b'abstract method')
--- a/hgext/largefiles/reposetup.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/reposetup.py	Tue Jan 19 21:48:43 2021 +0530
@@ -360,7 +360,7 @@
         # TODO: _subdirlfs should be moved into "lfutil.py", because
         # it is referred only from "lfutil.updatestandinsbymatch"
         def _subdirlfs(self, files, lfiles):
-            '''
+            """
             Adjust matched file list
             If we pass a directory to commit whose only committable files
             are largefiles, the core commit code aborts before finding
@@ -370,7 +370,7 @@
             we explicitly add the largefiles to the match list and remove
             the directory.
             In other cases, we leave the match list unmodified.
-            '''
+            """
             actualfiles = []
             dirs = []
             regulars = []
--- a/hgext/largefiles/wirestore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/largefiles/wirestore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -30,13 +30,23 @@
         return self.remote.getlfile(hash)
 
     def _stat(self, hashes):
-        '''For each hash, return 0 if it is available, other values if not.
+        """For each hash, return 0 if it is available, other values if not.
         It is usually 2 if the largefile is missing, but might be 1 the server
-        has a corrupted copy.'''
+        has a corrupted copy."""
 
         with self.remote.commandexecutor() as e:
             fs = []
             for hash in hashes:
-                fs.append((hash, e.callcommand(b'statlfile', {b'sha': hash,})))
+                fs.append(
+                    (
+                        hash,
+                        e.callcommand(
+                            b'statlfile',
+                            {
+                                b'sha': hash,
+                            },
+                        ),
+                    )
+                )
 
             return {hash: f.result() for hash, f in fs}
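
_stat above issues one statlfile call per hash through the command executor and then resolves the returned futures into a dict. Those futures behave much like concurrent.futures; a standalone sketch using that module as a stand-in (fake_statlfile is invented for illustration):

from concurrent.futures import ThreadPoolExecutor

def fake_statlfile(sha):
    # 0 means the largefile is available, 2 means it is missing
    return 0 if sha == b'aa' * 20 else 2

hashes = [b'aa' * 20, b'bb' * 20]
with ThreadPoolExecutor() as e:
    fs = [(sha, e.submit(fake_statlfile, sha)) for sha in hashes]
    stats = {sha: f.result() for sha, f in fs}

assert stats == {b'aa' * 20: 0, b'bb' * 20: 2}
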
--- a/hgext/lfs/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/lfs/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -125,19 +125,19 @@
 import sys
 
 from mercurial.i18n import _
+from mercurial.node import bin
 
 from mercurial import (
+    bundlecaches,
     config,
     context,
     error,
-    exchange,
     extensions,
     exthelper,
     filelog,
     filesetlang,
     localrepo,
     minifileset,
-    node,
     pycompat,
     revlog,
     scmutil,
@@ -172,33 +172,51 @@
 templatekeyword = eh.templatekeyword
 
 eh.configitem(
-    b'experimental', b'lfs.serve', default=True,
+    b'experimental',
+    b'lfs.serve',
+    default=True,
 )
 eh.configitem(
-    b'experimental', b'lfs.user-agent', default=None,
+    b'experimental',
+    b'lfs.user-agent',
+    default=None,
 )
 eh.configitem(
-    b'experimental', b'lfs.disableusercache', default=False,
+    b'experimental',
+    b'lfs.disableusercache',
+    default=False,
 )
 eh.configitem(
-    b'experimental', b'lfs.worker-enable', default=True,
+    b'experimental',
+    b'lfs.worker-enable',
+    default=True,
 )
 
 eh.configitem(
-    b'lfs', b'url', default=None,
+    b'lfs',
+    b'url',
+    default=None,
 )
 eh.configitem(
-    b'lfs', b'usercache', default=None,
+    b'lfs',
+    b'usercache',
+    default=None,
 )
 # Deprecated
 eh.configitem(
-    b'lfs', b'threshold', default=None,
+    b'lfs',
+    b'threshold',
+    default=None,
 )
 eh.configitem(
-    b'lfs', b'track', default=b'none()',
+    b'lfs',
+    b'track',
+    default=b'none()',
 )
 eh.configitem(
-    b'lfs', b'retry', default=5,
+    b'lfs',
+    b'retry',
+    default=5,
 )
 
 lfsprocessor = (
@@ -242,11 +260,10 @@
                 return 0
 
             last = kwargs.get('node_last')
-            _bin = node.bin
             if last:
-                s = repo.set(b'%n:%n', _bin(kwargs['node']), _bin(last))
+                s = repo.set(b'%n:%n', bin(kwargs['node']), bin(last))
             else:
-                s = repo.set(b'%n', _bin(kwargs['node']))
+                s = repo.set(b'%n', bin(kwargs['node']))
             match = repo._storenarrowmatch
             for ctx in s:
                 # TODO: is there a way to just walk the files in the commit?
@@ -351,7 +368,7 @@
     # Make bundle choose changegroup3 instead of changegroup2. This affects
     # "hg bundle" command. Note: it does not cover all bundle formats like
     # "packed1". Using "packed1" with lfs will likely cause trouble.
-    exchange._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
+    bundlecaches._bundlespeccontentopts[b"v2"][b"cg.version"] = b"03"
 
 
 @eh.filesetpredicate(b'lfs()')
--- a/hgext/lfs/blobstore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/lfs/blobstore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,12 +17,12 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import getattr
+from mercurial.node import hex
 
 from mercurial import (
     encoding,
     error,
     httpconnection as httpconnectionmod,
-    node,
     pathutil,
     pycompat,
     url as urlmod,
@@ -96,8 +96,7 @@
 
 
 class lfsuploadfile(httpconnectionmod.httpsendfile):
-    """a file-like object that supports keepalive.
-    """
+    """a file-like object that supports keepalive."""
 
     def __init__(self, ui, filename):
         super(lfsuploadfile, self).__init__(ui, filename, b'rb')
@@ -174,7 +173,7 @@
                 )
                 raise LfsRemoteError(_(msg) % (size, int(content_length)))
 
-            realoid = node.hex(sha256.digest())
+            realoid = hex(sha256.digest())
             if realoid != oid:
                 raise LfsCorruptionError(
                     _(b'corrupt remote lfs object: %s') % oid
@@ -225,7 +224,7 @@
             # Don't abort if corruption is detected, because `hg verify` will
             # give more useful info about the corruption- simply don't add the
             # hardlink.
-            if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
+            if verify or hex(hashlib.sha256(blob).digest()) == oid:
                 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
         else:
@@ -249,7 +248,7 @@
             for chunk in util.filechunkiter(fp, size=1048576):
                 sha256.update(chunk)
 
-        return oid == node.hex(sha256.digest())
+        return oid == hex(sha256.digest())
 
     def has(self, oid):
         """Returns True if the local blobstore contains the requested blob,
@@ -258,9 +257,9 @@
 
 
 def _urlerrorreason(urlerror):
-    '''Create a friendly message for the given URLError to be used in an
+    """Create a friendly message for the given URLError to be used in an
     LfsRemoteError message.
-    '''
+    """
     inst = urlerror
 
     if isinstance(urlerror.reason, Exception):
@@ -338,7 +337,10 @@
         ]
         requestdata = pycompat.bytesurl(
             json.dumps(
-                {'objects': objects, 'operation': pycompat.strurl(action),}
+                {
+                    'objects': objects,
+                    'operation': pycompat.strurl(action),
+                }
             )
         )
         url = b'%s/objects/batch' % self.baseurl
@@ -704,7 +706,7 @@
 
 
 def _verify(oid, content):
-    realoid = node.hex(hashlib.sha256(content).digest())
+    realoid = hex(hashlib.sha256(content).digest())
     if realoid != oid:
         raise LfsCorruptionError(
             _(b'detected corrupt lfs object: %s') % oid,
--- a/hgext/lfs/wrapper.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/lfs/wrapper.py	Tue Jan 19 21:48:43 2021 +0530
@@ -28,12 +28,16 @@
     pycompat,
     revlog,
     scmutil,
-    upgrade,
     util,
     vfs as vfsmod,
     wireprotov1server,
 )
 
+from mercurial.upgrade_utils import (
+    actions as upgrade_actions,
+    engine as upgrade_engine,
+)
+
 from mercurial.interfaces import repository
 
 from mercurial.utils import (
@@ -381,10 +385,10 @@
 
 
 def uploadblobsfromrevs(repo, revs):
-    '''upload lfs blobs introduced by revs
+    """upload lfs blobs introduced by revs
 
     Note: also used by other extensions e. g. infinitepush. avoid renaming.
-    '''
+    """
     if _canskipupload(repo):
         return
     pointers = extractpointers(repo, revs)
@@ -520,7 +524,7 @@
     remoteblob.writebatch(pointers, repo.svfs.lfslocalblobstore)
 
 
-@eh.wrapfunction(upgrade, b'_finishdatamigration')
+@eh.wrapfunction(upgrade_engine, b'finishdatamigration')
 def upgradefinishdatamigration(orig, ui, srcrepo, dstrepo, requirements):
     orig(ui, srcrepo, dstrepo, requirements)
 
@@ -537,8 +541,8 @@
                 lfutil.link(srclfsvfs.join(oid), dstlfsvfs.join(oid))
 
 
-@eh.wrapfunction(upgrade, b'preservedrequirements')
-@eh.wrapfunction(upgrade, b'supporteddestrequirements')
+@eh.wrapfunction(upgrade_actions, b'preservedrequirements')
+@eh.wrapfunction(upgrade_actions, b'supporteddestrequirements')
 def upgraderequirements(orig, repo):
     reqs = orig(repo)
     if b'lfs' in repo.requirements:
--- a/hgext/mq.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/mq.py	Tue Jan 19 21:48:43 2021 +0530
@@ -100,6 +100,7 @@
     revsetlang,
     scmutil,
     smartset,
+    strip,
     subrepoutil,
     util,
     vfs as vfsmod,
@@ -124,21 +125,29 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'mq', b'git', default=b'auto',
+    b'mq',
+    b'git',
+    default=b'auto',
 )
 configitem(
-    b'mq', b'keepchanges', default=False,
+    b'mq',
+    b'keepchanges',
+    default=False,
 )
 configitem(
-    b'mq', b'plain', default=False,
+    b'mq',
+    b'plain',
+    default=False,
 )
 configitem(
-    b'mq', b'secret', default=False,
+    b'mq',
+    b'secret',
+    default=False,
 )
 
 # force load strip extension formerly included in mq and import some utility
 try:
-    stripext = extensions.find(b'strip')
+    extensions.find(b'strip')
 except KeyError:
     # note: load is lazy so we could avoid the try-except,
     # but I (marmoute) prefer this explicit code.
@@ -149,14 +158,14 @@
         def log(self, event, msgfmt, *msgargs, **opts):
             pass
 
-    stripext = extensions.load(dummyui(), b'strip', b'')
-
-strip = stripext.strip
+    extensions.load(dummyui(), b'strip', b'')
+
+strip = strip.strip
 
 
 def checksubstate(repo, baserev=None):
-    '''return list of subrepos at a different revision than substate.
-    Abort if any subrepos have uncommitted changes.'''
+    """return list of subrepos at a different revision than substate.
+    Abort if any subrepos have uncommitted changes."""
     inclsubs = []
     wctx = repo[None]
     if baserev:
@@ -448,9 +457,9 @@
     __str__ = encoding.strmethod(__bytes__)
 
     def _delmsg(self):
-        '''Remove existing message, keeping the rest of the comments fields.
+        """Remove existing message, keeping the rest of the comments fields.
         If comments contains 'subject: ', message will prepend
-        the field and a blank line.'''
+        the field and a blank line."""
         if self.message:
             subj = b'subject: ' + self.message[0].lower()
             for i in pycompat.xrange(len(self.comments)):
@@ -948,8 +957,8 @@
         return (0, head)
 
     def patch(self, repo, patchfile):
-        '''Apply patchfile  to the working directory.
-        patchfile: name of patch file'''
+        """Apply patchfile  to the working directory.
+        patchfile: name of patch file"""
         files = set()
         try:
             fuzz = patchmod.patch(
@@ -1362,7 +1371,7 @@
 
     def new(self, repo, patchfn, *pats, **opts):
         """options:
-           msg: a string or a no-argument function returning a string
+        msg: a string or a no-argument function returning a string
         """
         opts = pycompat.byteskwargs(opts)
         msg = opts.get(b'msg')
@@ -1717,7 +1726,10 @@
             except:  # re-raises
                 self.ui.warn(_(b'cleaning up working directory...\n'))
                 cmdutil.revert(
-                    self.ui, repo, repo[b'.'], no_backup=True,
+                    self.ui,
+                    repo,
+                    repo[b'.'],
+                    no_backup=True,
                 )
                 # only remove unknown files that we know we touched or
                 # created while patching
@@ -2822,7 +2834,7 @@
     norepo=True,
 )
 def clone(ui, source, dest=None, **opts):
-    '''clone main and patch repository at same time
+    """clone main and patch repository at same time
 
     If source is local, destination will have no patches applied. If
     source is remote, this command can not check if patches are
@@ -2837,7 +2849,7 @@
     would be created by :hg:`init --mq`.
 
     Return 0 on success.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     def patchdir(repo):
@@ -2936,7 +2948,10 @@
 
 @command(
     b"qseries",
-    [(b'm', b'missing', None, _(b'print patches not in series')),] + seriesopts,
+    [
+        (b'm', b'missing', None, _(b'print patches not in series')),
+    ]
+    + seriesopts,
     _(b'hg qseries [-ms]'),
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
@@ -3281,9 +3296,9 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def goto(ui, repo, patch, **opts):
-    '''push or pop patches until named patch is at top of stack
-
-    Returns 0 on success.'''
+    """push or pop patches until named patch is at top of stack
+
+    Returns 0 on success."""
     opts = pycompat.byteskwargs(opts)
     opts = fixkeepchangesopts(ui, opts)
     q = repo.mq
@@ -3320,7 +3335,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def guard(ui, repo, *args, **opts):
-    '''set or print guards for a patch
+    """set or print guards for a patch
 
     Guards control whether a patch can be pushed. A patch with no
     guards is always pushed. A patch with a positive guard ("+foo") is
@@ -3340,7 +3355,7 @@
       hg qguard other.patch -- +2.6.17 -stable
 
     Returns 0 on success.
-    '''
+    """
 
     def status(idx):
         guards = q.seriesguards[idx] or [b'unguarded']
@@ -3711,7 +3726,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def select(ui, repo, *args, **opts):
-    '''set or print guarded patches to push
+    """set or print guarded patches to push
 
     Use the :hg:`qguard` command to set or print guards on patch, then use
     qselect to tell mq which guards to use. A patch will be pushed if
@@ -3743,7 +3758,7 @@
     Use -s/--series to print a list of all guards in the series file
     (no other arguments needed). Use -v for more information.
 
-    Returns 0 on success.'''
+    Returns 0 on success."""
 
     q = repo.mq
     opts = pycompat.byteskwargs(opts)
@@ -3887,7 +3902,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def qqueue(ui, repo, name=None, **opts):
-    '''manage multiple patch queues
+    """manage multiple patch queues
 
     Supports switching between different patch queues, as well as creating
     new patch queues and deleting existing ones.
@@ -3906,7 +3921,7 @@
     active queue.
 
     Returns 0 on success.
-    '''
+    """
     q = repo.mq
     _defaultqueue = b'patches'
     _allqueues = b'patches.queues'
@@ -4249,8 +4264,7 @@
 
 @revsetpredicate(b'mq()')
 def revsetmq(repo, subset, x):
-    """Changesets managed by MQ.
-    """
+    """Changesets managed by MQ."""
     revsetlang.getargs(x, 0, 0, _(b"mq takes no arguments"))
     applied = {repo[r.node].rev() for r in repo.mq.applied}
     return smartset.baseset([r for r in subset if r in applied])
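
The qguard/qselect help above describes how guards decide whether a patch is pushed: unguarded patches always push, a positive guard requires its name to be selected, and an active negative guard blocks the push. A toy evaluation of those rules as stated in the help text (simplified; the real series and guard parsing lives in the mq queue object):

def pushable(guards, selected):
    # guards: e.g. [b'+stable', b'-experimental']; selected: active guard names
    if any(g[1:] in selected for g in guards if g.startswith(b'-')):
        return False  # an active negative guard always blocks the push
    positives = [g[1:] for g in guards if g.startswith(b'+')]
    return not positives or any(g in selected for g in positives)

assert pushable([], {b'stable'})                 # unguarded: always pushed
assert pushable([b'+stable'], {b'stable'})
assert not pushable([b'+stable'], set())
assert not pushable([b'-stable'], {b'stable'})
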
--- a/hgext/narrow/narrowbundle2.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/narrow/narrowbundle2.py	Tue Jan 19 21:48:43 2021 +0530
@@ -78,7 +78,14 @@
 
 
 def generateellipsesbundle2(
-    bundler, repo, include, exclude, version, common, heads, depth,
+    bundler,
+    repo,
+    include,
+    exclude,
+    version,
+    common,
+    heads,
+    depth,
 ):
     match = narrowspec.match(repo.root, include=include, exclude=exclude)
     if depth is not None:
@@ -113,7 +120,13 @@
 
 
 def generate_ellipses_bundle2_for_widening(
-    bundler, repo, oldmatch, newmatch, version, common, known,
+    bundler,
+    repo,
+    oldmatch,
+    newmatch,
+    version,
+    common,
+    known,
 ):
     common = set(common or [nullid])
     # Steps:
--- a/hgext/narrow/narrowcommands.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/narrow/narrowcommands.py	Tue Jan 19 21:48:43 2021 +0530
@@ -10,6 +10,11 @@
 import os
 
 from mercurial.i18n import _
+from mercurial.node import (
+    hex,
+    nullid,
+    short,
+)
 from mercurial import (
     bundle2,
     cmdutil,
@@ -21,7 +26,6 @@
     extensions,
     hg,
     narrowspec,
-    node,
     pathutil,
     pycompat,
     registrar,
@@ -99,7 +103,7 @@
 
         includes, excludes, profiles = sparse.parseconfig(ui, fdata, b'narrow')
         if profiles:
-            raise error.Abort(
+            raise error.ConfigError(
                 _(
                     b"cannot specify other files using '%include' in"
                     b" narrowspec"
@@ -184,9 +188,9 @@
     # we have all the nodes
     if wireprototypes.ELLIPSESCAP1 in pullop.remote.capabilities():
         kwargs[b'known'] = [
-            node.hex(ctx.node())
+            hex(ctx.node())
             for ctx in repo.set(b'::%ln', pullop.common)
-            if ctx.node() != node.nullid
+            if ctx.node() != nullid
         ]
         if not kwargs[b'known']:
             # Mercurial serializes an empty list as '' and deserializes it as
@@ -239,16 +243,16 @@
         maxnodes = 10
         if ui.verbose or len(visibletostrip) <= maxnodes:
             for n in visibletostrip:
-                ui.status(b'%s\n' % node.short(n))
+                ui.status(b'%s\n' % short(n))
         else:
             for n in visibletostrip[:maxnodes]:
-                ui.status(b'%s\n' % node.short(n))
+                ui.status(b'%s\n' % short(n))
             ui.status(
                 _(b'...and %d more, use --verbose to list all\n')
                 % (len(visibletostrip) - maxnodes)
             )
         if not force:
-            raise error.Abort(
+            raise error.StateError(
                 _(b'local changes found'),
                 hint=_(b'use --force-delete-local-changes to ignore'),
             )
@@ -362,7 +366,7 @@
             ds = repo.dirstate
             p1, p2 = ds.p1(), ds.p2()
             with ds.parentchange():
-                ds.setparents(node.nullid, node.nullid)
+                ds.setparents(nullid, nullid)
         if isoldellipses:
             with wrappedextraprepare:
                 exchange.pull(repo, remote, heads=common)
@@ -372,7 +376,7 @@
                 known = [
                     ctx.node()
                     for ctx in repo.set(b'::%ln', common)
-                    if ctx.node() != node.nullid
+                    if ctx.node() != nullid
                 ]
             with remote.commandexecutor() as e:
                 bundle = e.callcommand(
@@ -483,7 +487,7 @@
     """
     opts = pycompat.byteskwargs(opts)
     if requirements.NARROW_REQUIREMENT not in repo.requirements:
-        raise error.Abort(
+        raise error.InputError(
             _(
                 b'the tracked command is only supported on '
                 b'repositories cloned with --narrow'
@@ -493,7 +497,7 @@
     # Before supporting, decide whether it "hg tracked --clear" should mean
     # tracking no paths or all paths.
     if opts[b'clear']:
-        raise error.Abort(_(b'the --clear option is not yet supported'))
+        raise error.InputError(_(b'the --clear option is not yet supported'))
 
     # import rules from a file
     newrules = opts.get(b'import_rules')
@@ -502,7 +506,7 @@
             filepath = os.path.join(encoding.getcwd(), newrules)
             fdata = util.readfile(filepath)
         except IOError as inst:
-            raise error.Abort(
+            raise error.StorageError(
                 _(b"cannot read narrowspecs from '%s': %s")
                 % (filepath, encoding.strtolocal(inst.strerror))
             )
@@ -510,7 +514,7 @@
             ui, fdata, b'narrow'
         )
         if profiles:
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b"including other spec files using '%include' "
                     b"is not supported in narrowspec"
--- a/hgext/narrow/narrowwirepeer.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/narrow/narrowwirepeer.py	Tue Jan 19 21:48:43 2021 +0530
@@ -120,7 +120,13 @@
             )
         else:
             narrowbundle2.generate_ellipses_bundle2_for_widening(
-                bundler, repo, oldmatch, newmatch, cgversion, common, known,
+                bundler,
+                repo,
+                oldmatch,
+                newmatch,
+                cgversion,
+                common,
+                known,
             )
     except error.Abort as exc:
         bundler = bundle2.bundle20(repo.ui)
--- a/hgext/notify.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/notify.py	Tue Jan 19 21:48:43 2021 +0530
@@ -190,64 +190,104 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'notify', b'changegroup', default=None,
+    b'notify',
+    b'changegroup',
+    default=None,
 )
 configitem(
-    b'notify', b'config', default=None,
+    b'notify',
+    b'config',
+    default=None,
 )
 configitem(
-    b'notify', b'diffstat', default=True,
+    b'notify',
+    b'diffstat',
+    default=True,
 )
 configitem(
-    b'notify', b'domain', default=None,
+    b'notify',
+    b'domain',
+    default=None,
 )
 configitem(
-    b'notify', b'messageidseed', default=None,
+    b'notify',
+    b'messageidseed',
+    default=None,
 )
 configitem(
-    b'notify', b'fromauthor', default=None,
+    b'notify',
+    b'fromauthor',
+    default=None,
 )
 configitem(
-    b'notify', b'incoming', default=None,
+    b'notify',
+    b'incoming',
+    default=None,
 )
 configitem(
-    b'notify', b'maxdiff', default=300,
+    b'notify',
+    b'maxdiff',
+    default=300,
 )
 configitem(
-    b'notify', b'maxdiffstat', default=-1,
+    b'notify',
+    b'maxdiffstat',
+    default=-1,
 )
 configitem(
-    b'notify', b'maxsubject', default=67,
+    b'notify',
+    b'maxsubject',
+    default=67,
 )
 configitem(
-    b'notify', b'mbox', default=None,
+    b'notify',
+    b'mbox',
+    default=None,
 )
 configitem(
-    b'notify', b'merge', default=True,
+    b'notify',
+    b'merge',
+    default=True,
 )
 configitem(
-    b'notify', b'outgoing', default=None,
+    b'notify',
+    b'outgoing',
+    default=None,
 )
 configitem(
-    b'notify', b'reply-to-predecessor', default=False,
+    b'notify',
+    b'reply-to-predecessor',
+    default=False,
 )
 configitem(
-    b'notify', b'sources', default=b'serve',
+    b'notify',
+    b'sources',
+    default=b'serve',
 )
 configitem(
-    b'notify', b'showfunc', default=None,
+    b'notify',
+    b'showfunc',
+    default=None,
 )
 configitem(
-    b'notify', b'strip', default=0,
+    b'notify',
+    b'strip',
+    default=0,
 )
 configitem(
-    b'notify', b'style', default=None,
+    b'notify',
+    b'style',
+    default=None,
 )
 configitem(
-    b'notify', b'template', default=None,
+    b'notify',
+    b'template',
+    default=None,
 )
 configitem(
-    b'notify', b'test', default=True,
+    b'notify',
+    b'test',
+    default=True,
 )
 
 # template for single changeset can include email headers.
@@ -539,10 +579,10 @@
 
 
 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
-    '''send email notifications to interested subscribers.
+    """send email notifications to interested subscribers.
 
     if used as changegroup hook, send one email for all changesets in
-    changegroup. else send one email per changeset.'''
+    changegroup. else send one email per changeset."""
 
     n = notifier(ui, repo, hooktype)
     ctx = repo.unfiltered()[node]
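
Every configitem(...) call in this file is merely reflowed to one argument per line. For reference, the registration pattern itself, as a minimal sketch (assumes a Mercurial installation is importable; the section and option names are hypothetical):

from mercurial import registrar

configtable = {}
configitem = registrar.configitem(configtable)

configitem(
    b'myext',        # hypothetical section name
    b'maxdiff',      # hypothetical option name
    default=300,
)

Registering the default is what lets later ui.config*() lookups fall back to it without repeating the value at every call site.
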
--- a/hgext/pager.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/pager.py	Tue Jan 19 21:48:43 2021 +0530
@@ -41,7 +41,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'pager', b'attend', default=lambda: attended,
+    b'pager',
+    b'attend',
+    default=lambda: attended,
 )
 
 
--- a/hgext/patchbomb.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/patchbomb.py	Tue Jan 19 21:48:43 2021 +0530
@@ -83,6 +83,7 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
+from mercurial.node import bin
 from mercurial import (
     cmdutil,
     commands,
@@ -91,7 +92,6 @@
     formatter,
     hg,
     mail,
-    node as nodemod,
     patch,
     pycompat,
     registrar,
@@ -110,34 +110,54 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'patchbomb', b'bundletype', default=None,
+    b'patchbomb',
+    b'bundletype',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'bcc', default=None,
+    b'patchbomb',
+    b'bcc',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'cc', default=None,
+    b'patchbomb',
+    b'cc',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'confirm', default=False,
+    b'patchbomb',
+    b'confirm',
+    default=False,
 )
 configitem(
-    b'patchbomb', b'flagtemplate', default=None,
+    b'patchbomb',
+    b'flagtemplate',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'from', default=None,
+    b'patchbomb',
+    b'from',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'intro', default=b'auto',
+    b'patchbomb',
+    b'intro',
+    default=b'auto',
 )
 configitem(
-    b'patchbomb', b'publicurl', default=None,
+    b'patchbomb',
+    b'publicurl',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'reply-to', default=None,
+    b'patchbomb',
+    b'reply-to',
+    default=None,
 )
 configitem(
-    b'patchbomb', b'to', default=None,
+    b'patchbomb',
+    b'to',
+    default=None,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -148,8 +168,7 @@
 
 
 def _addpullheader(seq, ctx):
-    """Add a header pointing to a public URL where the changeset is available
-    """
+    """Add a header pointing to a public URL where the changeset is available"""
     repo = ctx.repo()
     # experimental config: patchbomb.publicurl
     # waiting for some logic that checks that the changesets are available on the
@@ -287,7 +306,7 @@
         p = mail.mimetextpatch(
             b'\n'.join(patchlines), 'x-patch', opts.get(b'test')
         )
-        binnode = nodemod.bin(node)
+        binnode = bin(node)
         # if node is mq patch, it will have the patch file's name as a tag
         if not patchname:
             patchtags = [
@@ -656,7 +675,7 @@
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def email(ui, repo, *revs, **opts):
-    '''send changesets by email
+    """send changesets by email
 
     By default, diffs are sent in the format generated by
     :hg:`export`, one per message. The series starts with a "[PATCH 0
@@ -739,7 +758,7 @@
 
     Before using this command, you will need to enable email in your
     hgrc. See the [email] section in hgrc(5) for details.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     _charsets = mail._charsets(ui)
--- a/hgext/phabricator.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/phabricator.py	Tue Jan 19 21:48:43 2021 +0530
@@ -35,6 +35,20 @@
     # the internal library.
     curlcmd = curl --connect-timeout 2 --retry 3 --silent
 
+    # retry a failed command N times (default 0). Useful when using the
+    # extension over a flaky connection.
+    #
+    # We wait `retry.interval` seconds between each retry
+    # (default 1 second).
+    retry = 3
+    retry.interval = 10
+
+    # The retry option combines well with the http.timeout one.
+    #
+    # For example, to give up on an HTTP request after 20 seconds:
+    [http]
+    timeout=20
+
     [auth]
     example.schemes = https
     example.prefix = phab.example.com
@@ -53,6 +67,7 @@
 import mimetypes
 import operator
 import re
+import time
 
 from mercurial.node import bin, nullid, short
 from mercurial.i18n import _
@@ -108,42 +123,68 @@
 
 # developer config: phabricator.batchsize
 eh.configitem(
-    b'phabricator', b'batchsize', default=12,
+    b'phabricator',
+    b'batchsize',
+    default=12,
 )
 eh.configitem(
-    b'phabricator', b'callsign', default=None,
+    b'phabricator',
+    b'callsign',
+    default=None,
 )
 eh.configitem(
-    b'phabricator', b'curlcmd', default=None,
+    b'phabricator',
+    b'curlcmd',
+    default=None,
 )
 # developer config: phabricator.debug
 eh.configitem(
-    b'phabricator', b'debug', default=False,
+    b'phabricator',
+    b'debug',
+    default=False,
 )
 # developer config: phabricator.repophid
 eh.configitem(
-    b'phabricator', b'repophid', default=None,
+    b'phabricator',
+    b'repophid',
+    default=None,
 )
 eh.configitem(
-    b'phabricator', b'url', default=None,
+    b'phabricator',
+    b'retry',
+    default=0,
+)
+eh.configitem(
+    b'phabricator',
+    b'retry.interval',
+    default=1,
 )
 eh.configitem(
-    b'phabsend', b'confirm', default=False,
+    b'phabricator',
+    b'url',
+    default=None,
 )
 eh.configitem(
-    b'phabimport', b'secret', default=False,
+    b'phabsend',
+    b'confirm',
+    default=False,
 )
 eh.configitem(
-    b'phabimport', b'obsolete', default=False,
+    b'phabimport',
+    b'secret',
+    default=False,
+)
+eh.configitem(
+    b'phabimport',
+    b'obsolete',
+    default=False,
 )
 
 colortable = {
     b'phabricator.action.created': b'green',
     b'phabricator.action.skipped': b'magenta',
     b'phabricator.action.updated': b'magenta',
-    b'phabricator.desc': b'',
     b'phabricator.drev': b'bold',
-    b'phabricator.node': b'',
     b'phabricator.status.abandoned': b'magenta dim',
     b'phabricator.status.accepted': b'green bold',
     b'phabricator.status.closed': b'green',
@@ -168,8 +209,7 @@
 
 @eh.wrapfunction(localrepo, "loadhgrc")
 def _loadhgrc(orig, ui, wdirvfs, hgvfs, requirements, *args, **opts):
-    """Load ``.arcconfig`` content into a ui instance on repository open.
-    """
+    """Load ``.arcconfig`` content into a ui instance on repository open."""
     result = False
     arcconfig = {}
 
@@ -385,8 +425,25 @@
     else:
         urlopener = urlmod.opener(ui, authinfo)
         request = util.urlreq.request(pycompat.strurl(url), data=data)
-        with contextlib.closing(urlopener.open(request)) as rsp:
-            body = rsp.read()
+        max_try = ui.configint(b'phabricator', b'retry') + 1
+        timeout = ui.configwith(float, b'http', b'timeout')
+        for try_count in range(max_try):
+            try:
+                with contextlib.closing(
+                    urlopener.open(request, timeout=timeout)
+                ) as rsp:
+                    body = rsp.read()
+                break
+            except util.urlerr.urlerror as err:
+                if try_count == max_try - 1:
+                    raise
+                ui.debug(
+                    b'Conduit Request failed (try %d/%d): %r\n'
+                    % (try_count + 1, max_try, err)
+                )
+                # a failing request might come from an overloaded server
+                retry_interval = ui.configint(b'phabricator', b'retry.interval')
+                time.sleep(retry_interval)
     ui.debug(b'Conduit Response: %s\n' % body)
     parsed = pycompat.rapply(
         lambda x: encoding.unitolocal(x)
@@ -635,8 +692,7 @@
 
 @attr.s
 class phabhunk(dict):
-    """Represents a Differential hunk, which is owned by a Differential change
-    """
+    """Represents a Differential hunk, which is owned by a Differential change"""
 
     oldOffset = attr.ib(default=0)  # camelcase-required
     oldLength = attr.ib(default=0)  # camelcase-required
@@ -1220,9 +1276,8 @@
         b'phabricator.action.%s' % action,
     )
     drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev')
-    nodedesc = ui.label(bytes(ctx), b'phabricator.node')
-    desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
-    ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc))
+    summary = cmdutil.format_changeset_summary(ui, ctx, b'phabsend')
+    ui.write(_(b'%s - %s - %s\n') % (drevdesc, actiondesc, summary))
 
 
 def _amend_diff_properties(unfi, drevid, newnodes, diff):
@@ -1515,7 +1570,9 @@
                         mapping.get(old.p2().node(), (old.p2(),))[0],
                     ]
                     newdesc = rewriteutil.update_hash_refs(
-                        repo, newdesc, mapping,
+                        repo,
+                        newdesc,
+                        mapping,
                     )
                     new = context.metadataonlyctx(
                         repo,
@@ -1642,7 +1699,6 @@
     ui = repo.ui
     for rev in revs:
         ctx = repo[rev]
-        desc = ctx.description().splitlines()[0]
         oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
         if drevid:
             drevdesc = ui.label(b'D%d' % drevid, b'phabricator.drev')
@@ -1650,11 +1706,10 @@
             drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
 
         ui.write(
-            _(b'%s - %s: %s\n')
+            _(b'%s - %s\n')
             % (
                 drevdesc,
-                ui.label(bytes(ctx), b'phabricator.node'),
-                ui.label(desc, b'phabricator.desc'),
+                cmdutil.format_changeset_summary(ui, ctx, b'phabsend'),
             )
         )
 
@@ -2180,8 +2235,9 @@
         (b'', b'resign', False, _(b'resign as a reviewer from revisions')),
         (b'', b'commandeer', False, _(b'commandeer revisions')),
         (b'm', b'comment', b'', _(b'comment on the last revision')),
+        (b'r', b'rev', b'', _(b'local revision to update'), _(b'REV')),
     ],
-    _(b'DREVSPEC... [OPTIONS]'),
+    _(b'[DREVSPEC...| -r REV...] [OPTIONS]'),
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
     optionalrepo=True,
 )
@@ -2211,6 +2267,28 @@
     for f in flags:
         actions.append({b'type': f, b'value': True})
 
+    revs = opts.get(b'rev')
+    if revs:
+        if not repo:
+            raise error.InputError(_(b'--rev requires a repository'))
+
+        if specs:
+            raise error.InputError(_(b'cannot specify both DREVSPEC and --rev'))
+
+        drevmap = getdrevmap(repo, scmutil.revrange(repo, [revs]))
+        specs = []
+        unknown = []
+        for r, d in pycompat.iteritems(drevmap):
+            if d is None:
+                unknown.append(repo[r])
+            else:
+                specs.append(b'D%d' % d)
+        if unknown:
+            raise error.InputError(
+                _(b'selected revisions without a Differential: %s')
+                % scmutil.nodesummaries(repo, unknown)
+            )
+
     drevs = _getdrevs(ui, opts.get(b'stack'), specs)
     for i, drev in enumerate(drevs):
         if i + 1 == len(drevs) and opts.get(b'comment'):
@@ -2232,7 +2310,10 @@
     m = _differentialrevisiondescre.search(ctx.description())
     if m:
         return templateutil.hybriddict(
-            {b'url': m.group('url'), b'id': b"D%s" % m.group('id'),}
+            {
+                b'url': m.group('url'),
+                b'id': b"D%s" % m.group('id'),
+            }
         )
     else:
         tags = ctx.repo().nodetags(ctx.node())
@@ -2243,14 +2324,18 @@
                     url += b'/'
                 url += t
 
-                return templateutil.hybriddict({b'url': url, b'id': t,})
+                return templateutil.hybriddict(
+                    {
+                        b'url': url,
+                        b'id': t,
+                    }
+                )
     return None
 
 
 @eh.templatekeyword(b'phabstatus', requires={b'ctx', b'repo', b'ui'})
 def template_status(context, mapping):
-    """:phabstatus: String. Status of Phabricator differential.
-    """
+    """:phabstatus: String. Status of Phabricator differential."""
     ctx = context.resource(mapping, b'ctx')
     repo = context.resource(mapping, b'repo')
     ui = context.resource(mapping, b'ui')
@@ -2264,7 +2349,10 @@
     for drev in drevs:
         if int(drev[b'id']) == drevid:
             return templateutil.hybriddict(
-                {b'url': drev[b'uri'], b'status': drev[b'statusName'],}
+                {
+                    b'url': drev[b'uri'],
+                    b'status': drev[b'statusName'],
+                }
             )
     return None
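
The phabricator hunks above add both the documentation and the implementation of Conduit request retries. A minimal standalone sketch of the same pattern, assuming plain urllib (the function name and the default values below are illustrative, not Mercurial API):

    import time
    import urllib.error
    import urllib.request

    def call_with_retry(url, data=None, retries=3, interval=10.0, timeout=20.0):
        """Return the response body, retrying transient URL errors."""
        for attempt in range(retries + 1):
            try:
                with urllib.request.urlopen(url, data=data, timeout=timeout) as rsp:
                    return rsp.read()
            except urllib.error.URLError:
                if attempt == retries:
                    raise
                # The server may simply be overloaded; wait before retrying.
                time.sleep(interval)

In the extension itself, the retry count comes from ``phabricator.retry``, the pause from ``phabricator.retry.interval``, and the timeout from ``http.timeout``, as shown in the hunks above.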
 
--- a/hgext/purge.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/purge.py	Tue Jan 19 21:48:43 2021 +0530
@@ -67,7 +67,7 @@
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def purge(ui, repo, *dirs, **opts):
-    '''removes files not tracked by Mercurial
+    """removes files not tracked by Mercurial
 
     Delete files not known to Mercurial. This is useful to test local
     and uncommitted changes in an otherwise-clean source tree.
@@ -95,7 +95,7 @@
     you forgot to add to the repository. If you only want to print the
     list of files that this program would delete, use the --print
     option.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     cmdutil.check_at_most_one_arg(opts, b'all', b'ignored')
 
--- a/hgext/rebase.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/rebase.py	Tue Jan 19 21:48:43 2021 +0530
@@ -23,6 +23,7 @@
 from mercurial.node import (
     nullrev,
     short,
+    wdirrev,
 )
 from mercurial.pycompat import open
 from mercurial import (
@@ -37,7 +38,6 @@
     merge as mergemod,
     mergestate as mergestatemod,
     mergeutil,
-    node as nodemod,
     obsolete,
     obsutil,
     patch,
@@ -146,20 +146,9 @@
 
 def _ctxdesc(ctx):
     """short description for a context"""
-    desc = b'%d:%s "%s"' % (
-        ctx.rev(),
-        ctx,
-        ctx.description().split(b'\n', 1)[0],
+    return cmdutil.format_changeset_summary(
+        ctx.repo().ui, ctx, command=b'rebase'
     )
-    repo = ctx.repo()
-    names = []
-    for nsname, ns in pycompat.iteritems(repo.names):
-        if nsname == b'branches':
-            continue
-        names.extend(ns.names(repo, ctx.node()))
-    if names:
-        desc += b' (%s)' % b' '.join(names)
-    return desc
 
 
 class rebaseruntime(object):
@@ -518,14 +507,29 @@
         ui.note(_(b'rebase merging completed\n'))
 
     def _concludenode(self, rev, editor, commitmsg=None):
-        '''Commit the wd changes with parents p1 and p2.
+        """Commit the wd changes with parents p1 and p2.
 
         Reuse commit info from rev but also store useful information in extra.
-        Return node of committed revision.'''
+        Return node of committed revision."""
         repo = self.repo
         ctx = repo[rev]
         if commitmsg is None:
             commitmsg = ctx.description()
+
+        # Skip replacement if collapsing, as that degenerates to p1 for all
+        # nodes.
+        if not self.collapsef:
+            cl = repo.changelog
+            commitmsg = rewriteutil.update_hash_refs(
+                repo,
+                commitmsg,
+                {
+                    cl.node(oldrev): [cl.node(newrev)]
+                    for oldrev, newrev in self.state.items()
+                    if newrev != revtodo
+                },
+            )
+
         date = self.date
         if date is None:
             date = ctx.date()
@@ -1135,9 +1139,16 @@
         try:
             overrides = {(b'rebase', b'singletransaction'): True}
             with ui.configoverride(overrides, b'rebase'):
-                _origrebase(
-                    ui, repo, action, opts, rbsrt,
+                res = _origrebase(
+                    ui,
+                    repo,
+                    action,
+                    opts,
+                    rbsrt,
                 )
+                if res == _nothingtorebase():
+                    needsabort = False
+                    return res
         except error.ConflictResolutionRequired:
             ui.status(_(b'hit a merge conflict\n'))
             return 1
@@ -1366,7 +1377,7 @@
                 )
             return None
 
-    if nodemod.wdirrev in rebaseset:
+    if wdirrev in rebaseset:
         raise error.Abort(_(b'cannot rebase the working copy'))
     rebasingwcp = repo[b'.'].rev() in rebaseset
     ui.log(
@@ -1448,8 +1459,8 @@
 
 
 def commitmemorynode(repo, wctx, editor, extra, user, date, commitmsg):
-    '''Commit the memory changes with parents p1 and p2.
-    Return node of committed revision.'''
+    """Commit the memory changes with parents p1 and p2.
+    Return node of committed revision."""
     # By convention, ``extra['branch']`` (set by extrafn) clobbers
     # ``branch`` (used when passing ``--keepbranches``).
     branch = None
@@ -1476,8 +1487,8 @@
 
 
 def commitnode(repo, editor, extra, user, date, commitmsg):
-    '''Commit the wd changes with parents p1 and p2.
-    Return node of committed revision.'''
+    """Commit the wd changes with parents p1 and p2.
+    Return node of committed revision."""
     dsguard = util.nullcontextmanager()
     if not repo.ui.configbool(b'rebase', b'singletransaction'):
         dsguard = dirstateguard.dirstateguard(repo, b'rebase')
@@ -1966,11 +1977,11 @@
 
 
 def buildstate(repo, destmap, collapse):
-    '''Define which revisions are going to be rebased and where
+    """Define which revisions are going to be rebased and where
 
     repo: repo
     destmap: {srcrev: destrev}
-    '''
+    """
     rebaseset = destmap.keys()
     originalwd = repo[b'.'].rev()
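
The ``_concludenode`` hunk above rewrites hash references in the commit message using a mapping from already-rebased old nodes to their replacements. A toy illustration of how that mapping is built (``state`` and ``revtodo`` only mirror the names used in the hunk; the revision numbers and the node function are made up):

    revtodo = -1                           # sentinel: not rebased yet
    state = {10: 42, 11: revtodo, 12: 45}  # source rev -> new rev

    def fake_node(rev):
        # stand-in for repo.changelog.node(); returns a fake node id
        return b'%020d' % rev

    mapping = {
        fake_node(oldrev): [fake_node(newrev)]
        for oldrev, newrev in state.items()
        if newrev != revtodo
    }
    assert fake_node(11) not in mapping    # pending revisions are skipped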
 
--- a/hgext/record.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/record.py	Tue Jan 19 21:48:43 2021 +0530
@@ -39,7 +39,7 @@
     helpcategory=command.CATEGORY_COMMITTING,
 )
 def record(ui, repo, *pats, **opts):
-    '''interactively select changes to commit
+    """interactively select changes to commit
 
     If a list of files is omitted, all changes reported by :hg:`status`
     will be candidates for recording.
@@ -65,7 +65,7 @@
 
       ? - display help
 
-    This command is not available when committing a merge.'''
+    This command is not available when committing a merge."""
 
     if not ui.interactive():
         raise error.Abort(
@@ -106,11 +106,11 @@
     inferrepo=True,
 )
 def qrecord(ui, repo, patch, *pats, **opts):
-    '''interactively record a new patch
+    """interactively record a new patch
 
     See :hg:`help qnew` & :hg:`help record` for more information and
     usage.
-    '''
+    """
     return _qrecord(b'qnew', ui, repo, patch, *pats, **opts)
 
 
--- a/hgext/releasenotes.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/releasenotes.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,12 +19,12 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
+from mercurial.node import hex
 from mercurial import (
     cmdutil,
     config,
     error,
     minirst,
-    node,
     pycompat,
     registrar,
     scmutil,
@@ -381,7 +381,7 @@
             if not paragraphs:
                 repo.ui.warn(
                     _(b"error parsing releasenotes for revision: '%s'\n")
-                    % node.hex(ctx.node())
+                    % hex(ctx.node())
                 )
             if title:
                 notes.addtitleditem(directive, title, paragraphs)
--- a/hgext/remotefilelog/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -130,7 +130,10 @@
 import time
 import traceback
 
-from mercurial.node import hex
+from mercurial.node import (
+    hex,
+    wdirrev,
+)
 from mercurial.i18n import _
 from mercurial.pycompat import open
 from mercurial import (
@@ -150,7 +153,6 @@
     match as matchmod,
     merge,
     mergestate as mergestatemod,
-    node as nodemod,
     patch,
     pycompat,
     registrar,
@@ -758,10 +760,10 @@
     rcache = {}
 
     def getrenamed(fn, rev):
-        '''looks up all renames for a file (up to endrev) the first
+        """looks up all renames for a file (up to endrev) the first
         time the file is given. It indexes on the changerev and only
         parses the manifest if linkrev != changerev.
-        Returns rename info for fn at changerev rev.'''
+        Returns rename info for fn at changerev rev."""
         if rev in rcache.setdefault(fn, {}):
             return rcache[fn][rev]
 
@@ -822,8 +824,7 @@
 
 @command(b'gc', [], _(b'hg gc [REPO...]'), norepo=True)
 def gc(ui, *args, **opts):
-    '''garbage collect the client and server filelog caches
-    '''
+    """garbage collect the client and server filelog caches"""
     cachepaths = set()
 
     # get the system client cache
@@ -1092,7 +1093,7 @@
     if isenabled(repo):
         allfiles = []
         for rev, match in revmatches:
-            if rev == nodemod.wdirrev or rev is None:
+            if rev == wdirrev or rev is None:
                 continue
             ctx = repo[rev]
             mf = ctx.manifest()
@@ -1105,7 +1106,9 @@
 
 @command(
     b'debugremotefilelog',
-    [(b'd', b'decompress', None, _(b'decompress the filelog first')),],
+    [
+        (b'd', b'decompress', None, _(b'decompress the filelog first')),
+    ],
     _(b'hg debugremotefilelog <path>'),
     norepo=True,
 )
@@ -1115,7 +1118,9 @@
 
 @command(
     b'verifyremotefilelog',
-    [(b'd', b'decompress', None, _(b'decompress the filelogs first')),],
+    [
+        (b'd', b'decompress', None, _(b'decompress the filelogs first')),
+    ],
     _(b'hg verifyremotefilelogs <directory>'),
     norepo=True,
 )
--- a/hgext/remotefilelog/basepack.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/basepack.py	Tue Jan 19 21:48:43 2021 +0530
@@ -12,8 +12,8 @@
     getattr,
     open,
 )
+from mercurial.node import hex
 from mercurial import (
-    node as nodemod,
     policy,
     pycompat,
     util,
@@ -429,7 +429,7 @@
             return
 
         try:
-            sha = nodemod.hex(self.sha.digest())
+            sha = hex(self.sha.digest())
             self.packfp.close()
             self.writeindex()
 
--- a/hgext/remotefilelog/basestore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/basestore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -103,7 +103,7 @@
     def _cleanupdirectory(self, rootdir):
         """Removes the empty directories and unnecessary files within the root
         directory recursively. Note that this method does not remove the root
-        directory itself. """
+        directory itself."""
 
         oldfiles = set()
         otherfiles = set()
--- a/hgext/remotefilelog/contentstore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/contentstore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,8 +17,7 @@
 
 
 class ChainIndicies(object):
-    """A static class for easy reference to the delta chain indicies.
-    """
+    """A static class for easy reference to the delta chain indices."""
 
     # The filename of this revision delta
     NAME = 0
@@ -73,8 +72,7 @@
 
     @basestore.baseunionstore.retriable
     def getdelta(self, name, node):
-        """Return the single delta entry for the given name/node pair.
-        """
+        """Return the single delta entry for the given name/node pair."""
         for store in self.stores:
             try:
                 return store.getdelta(name, node)
--- a/hgext/remotefilelog/debugcommands.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/debugcommands.py	Tue Jan 19 21:48:43 2021 +0530
@@ -9,14 +9,18 @@
 import os
 import zlib
 
-from mercurial.node import bin, hex, nullid, short
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+    short,
+)
 from mercurial.i18n import _
 from mercurial.pycompat import open
 from mercurial import (
     error,
     filelog,
     lock as lockmod,
-    node as nodemod,
     pycompat,
     revlog,
 )
@@ -61,7 +65,7 @@
 
 def buildtemprevlog(repo, file):
     # get filename key
-    filekey = nodemod.hex(hashutil.sha1(file).digest())
+    filekey = hex(hashutil.sha1(file).digest())
     filedir = os.path.join(repo.path, b'store/data', filekey)
 
     # sort all entries based on linkrev
@@ -421,7 +425,7 @@
             % (
                 hashformatter(node),
                 hashformatter(deltabasenode),
-                nodemod.hex(hashutil.sha1(delta).digest()),
+                hex(hashutil.sha1(delta).digest()),
                 len(delta),
             )
         )
--- a/hgext/remotefilelog/fileserverclient.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/fileserverclient.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,6 @@
 from mercurial.node import bin, hex, nullid
 from mercurial import (
     error,
-    node,
     pycompat,
     revlog,
     sshpeer,
@@ -47,12 +46,12 @@
 
 
 def getcachekey(reponame, file, id):
-    pathhash = node.hex(hashutil.sha1(file).digest())
+    pathhash = hex(hashutil.sha1(file).digest())
     return os.path.join(reponame, pathhash[:2], pathhash[2:], id)
 
 
 def getlocalkey(file, id):
-    pathhash = node.hex(hashutil.sha1(file).digest())
+    pathhash = hex(hashutil.sha1(file).digest())
     return os.path.join(pathhash, id)
 
 
@@ -302,8 +301,7 @@
 
 
 class fileserverclient(object):
-    """A client for requesting files from the remote file server.
-    """
+    """A client for requesting files from the remote file server."""
 
     def __init__(self, repo):
         ui = repo.ui
@@ -568,8 +566,7 @@
     def prefetch(
         self, fileids, force=False, fetchdata=True, fetchhistory=False
     ):
-        """downloads the given file versions to the cache
-        """
+        """downloads the given file versions to the cache"""
         repo = self.repo
         idstocheck = []
         for file, id in fileids:
--- a/hgext/remotefilelog/remotefilectx.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/remotefilectx.py	Tue Jan 19 21:48:43 2021 +0530
@@ -63,8 +63,8 @@
             return self.linkrev()
 
     def filectx(self, fileid, changeid=None):
-        '''opens an arbitrary revision of the file without
-        opening a new filelog'''
+        """opens an arbitrary revision of the file without
+        opening a new filelog"""
         return remotefilectx(
             self._repo,
             self._path,
--- a/hgext/remotefilelog/remotefilelogserver.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/remotefilelogserver.py	Tue Jan 19 21:48:43 2021 +0530
@@ -40,8 +40,7 @@
 
 
 def setupserver(ui, repo):
-    """Sets up a normal Mercurial repo so it can serve files to shallow repos.
-    """
+    """Sets up a normal Mercurial repo so it can serve files to shallow repos."""
     onetimesetup(ui)
 
     # don't send files to shallow clients during pulls
@@ -79,8 +78,7 @@
 
 
 def onetimesetup(ui):
-    """Configures the wireprotocol for both clients and servers.
-    """
+    """Configures the wireprotocol for both clients and servers."""
     global onetime
     if onetime:
         return
@@ -281,8 +279,7 @@
 
 
 def getflogheads(repo, proto, path):
-    """A server api for requesting a filelog's heads
-    """
+    """A server api for requesting a filelog's heads"""
     flog = repo.file(path)
     heads = flog.heads()
     return b'\n'.join((hex(head) for head in heads if head != nullid))
@@ -309,8 +306,7 @@
 
 
 def getfiles(repo, proto):
-    """A server api for requesting particular versions of particular files.
-    """
+    """A server api for requesting particular versions of particular files."""
     if shallowutil.isenabled(repo):
         raise error.Abort(_(b'cannot fetch remote files from shallow repo'))
     if not isinstance(proto, _sshv1server):
--- a/hgext/remotefilelog/repack.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/repack.py	Tue Jan 19 21:48:43 2021 +0530
@@ -54,8 +54,7 @@
 
 
 def fullrepack(repo, options=None):
-    """If ``packsonly`` is True, stores creating only loose objects are skipped.
-    """
+    """If ``packsonly`` is True, stores creating only loose objects are skipped."""
     if util.safehasattr(repo, 'shareddatastores'):
         datasource = contentstore.unioncontentstore(*repo.shareddatastores)
         historysource = metadatastore.unionmetadatastore(
@@ -874,8 +873,7 @@
 
 
 class repackentry(object):
-    """Simple class representing a single revision entry in the repackledger.
-    """
+    """Simple class representing a single revision entry in the repackledger."""
 
     __slots__ = (
         'filename',
--- a/hgext/remotefilelog/shallowrepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/shallowrepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -161,11 +161,11 @@
             return path
 
         def maybesparsematch(self, *revs, **kwargs):
-            '''
+            """
             A wrapper that allows the remotefilelog to invoke sparsematch() if
             this is a sparse repository, or returns None if this is not a
             sparse repository.
-            '''
+            """
             if revs:
                 ret = sparse.matcher(repo, revs=revs)
             else:
@@ -217,8 +217,7 @@
         def backgroundprefetch(
             self, revs, base=None, repack=False, pats=None, opts=None
         ):
-            """Runs prefetch in background with optional repack
-            """
+            """Runs prefetch in background with optional repack"""
             cmd = [procutil.hgexecutable(), b'-R', repo.origroot, b'prefetch']
             if repack:
                 cmd.append(b'--repack')
--- a/hgext/remotefilelog/shallowutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotefilelog/shallowutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -15,9 +15,9 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
+from mercurial.node import hex
 from mercurial import (
     error,
-    node,
     pycompat,
     revlog,
     util,
@@ -39,12 +39,12 @@
 
 
 def getcachekey(reponame, file, id):
-    pathhash = node.hex(hashutil.sha1(file).digest())
+    pathhash = hex(hashutil.sha1(file).digest())
     return os.path.join(reponame, pathhash[:2], pathhash[2:], id)
 
 
 def getlocalkey(file, id):
-    pathhash = node.hex(hashutil.sha1(file).digest())
+    pathhash = hex(hashutil.sha1(file).digest())
     return os.path.join(pathhash, id)
 
 
--- a/hgext/remotenames.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/remotenames.py	Tue Jan 19 21:48:43 2021 +0530
@@ -66,13 +66,19 @@
 revsetpredicate = registrar.revsetpredicate()
 
 configitem(
-    b'remotenames', b'bookmarks', default=True,
+    b'remotenames',
+    b'bookmarks',
+    default=True,
 )
 configitem(
-    b'remotenames', b'branches', default=True,
+    b'remotenames',
+    b'branches',
+    default=True,
 )
 configitem(
-    b'remotenames', b'hoistedpeer', default=b'default',
+    b'remotenames',
+    b'hoistedpeer',
+    default=b'default',
 )
 
 
--- a/hgext/schemes.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/schemes.py	Tue Jan 19 21:48:43 2021 +0530
@@ -142,8 +142,7 @@
 
 @command(b'debugexpandscheme', norepo=True)
 def expandscheme(ui, url, **opts):
-    """given a repo path, provide the scheme-expanded path
-    """
+    """given a repo path, provide the scheme-expanded path"""
     repo = hg._peerlookup(url)
     if isinstance(repo, ShortRepository):
         url = repo.resolve(url)
--- a/hgext/share.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/share.py	Tue Jan 19 21:48:43 2021 +0530
@@ -44,6 +44,25 @@
     that was cloned before.
 
     The default naming mode is "identity".
+
+.. container:: verbose
+
+    Sharing requirements and configs of the source repository with shares:
+
+    By default, creating a shared repository only shares a common history;
+    it does not share requirements and configs between the repositories.
+    This may lead to problems in some cases, for example when you upgrade
+    the storage format in one repository but do not set the related configs
+    in its shares.
+
+    Setting `format.exp-share-safe = True` enables sharing configs and
+    requirements. This only applies to shares created after enabling
+    the config option.
+
+    To enable this in existing shares, enable the config option and reshare.
+
+    To reshare an existing share, make sure your working directory is clean and
+    there are no untracked files, then delete that share and create a new share.
 '''
 
 from __future__ import absolute_import
@@ -75,7 +94,12 @@
     [
         (b'U', b'noupdate', None, _(b'do not create a working directory')),
         (b'B', b'bookmarks', None, _(b'also share bookmarks')),
-        (b'', b'relative', None, _(b'point to source using a relative path'),),
+        (
+            b'',
+            b'relative',
+            None,
+            _(b'point to source using a relative path'),
+        ),
     ],
     _(b'[-U] [-B] SOURCE [DEST]'),
     helpcategory=command.CATEGORY_REPO_CREATION,
--- a/hgext/split.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/split.py	Tue Jan 19 21:48:43 2021 +0530
@@ -72,7 +72,7 @@
     with repo.wlock(), repo.lock(), repo.transaction(b'split') as tr:
         revs = scmutil.revrange(repo, revlist or [b'.'])
         if len(revs) > 1:
-            raise error.Abort(_(b'cannot split multiple revisions'))
+            raise error.InputError(_(b'cannot split multiple revisions'))
 
         rev = revs.first()
         ctx = repo[rev]
@@ -82,7 +82,7 @@
             ui.status(_(b'nothing to split\n'))
             return 1
         if ctx.node() is None:
-            raise error.Abort(_(b'cannot split working directory'))
+            raise error.InputError(_(b'cannot split working directory'))
 
         if opts.get(b'rebase'):
             # Skip obsoleted descendants and their descendants so the rebase
@@ -98,7 +98,7 @@
         rewriteutil.precheck(repo, [rev] + torebase, b'split')
 
         if len(ctx.parents()) > 1:
-            raise error.Abort(_(b'cannot split a merge changeset'))
+            raise error.InputError(_(b'cannot split a merge changeset'))
 
         cmdutil.bailifchanged(repo)
 
@@ -142,9 +142,14 @@
             header = _(
                 b'HG: Splitting %s. So far it has been split into:\n'
             ) % short(ctx.node())
-            for c in committed:
-                firstline = c.description().split(b'\n', 1)[0]
-                header += _(b'HG: - %s: %s\n') % (short(c.node()), firstline)
+            # We don't want color codes in the commit message template, so
+            # disable the label() template function while we render it.
+            with ui.configoverride(
+                {(b'templatealias', b'label(l,x)'): b"x"}, b'split'
+            ):
+                for c in committed:
+                    summary = cmdutil.format_changeset_summary(ui, c, b'split')
+                    header += _(b'HG: - %s\n') % summary
             header += _(
                 b'HG: Write commit message for the next split changeset.\n'
             )
@@ -165,7 +170,7 @@
         committed.append(newctx)
 
     if not committed:
-        raise error.Abort(_(b'cannot split an empty revision'))
+        raise error.InputError(_(b'cannot split an empty revision'))
 
     scmutil.cleanupnodes(
         repo,
--- a/hgext/sqlitestore.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/sqlitestore.py	Tue Jan 19 21:48:43 2021 +0530
@@ -674,9 +674,10 @@
         linkmapper,
         transaction,
         addrevisioncb=None,
+        duplicaterevisioncb=None,
         maybemissingparents=False,
     ):
-        nodes = []
+        empty = True
 
         for node, p1, p2, linknode, deltabase, delta, wireflags in deltas:
             storeflags = 0
@@ -715,8 +716,6 @@
 
             linkrev = linkmapper(linknode)
 
-            nodes.append(node)
-
             if node in self._revisions:
                 # Possibly reset parents to make them proper.
                 entry = self._revisions[node]
@@ -741,6 +740,9 @@
                         (self._nodetorev[p1], entry.flags, entry.rid),
                     )
 
+                if duplicaterevisioncb:
+                    duplicaterevisioncb(self, node)
+                empty = False
                 continue
 
             if deltabase == nullid:
@@ -763,8 +765,9 @@
 
             if addrevisioncb:
                 addrevisioncb(self, node)
+            empty = False
 
-        return nodes
+        return not empty
 
     def censorrevision(self, tr, censornode, tombstone=b''):
         tombstone = storageutil.packmeta({b'censored': tombstone}, b'')
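
The sqlitestore hunks above change the bulk-insert contract (presumably ``addgroup``; the method name sits outside the hunk): duplicates are now reported through ``duplicaterevisioncb`` and the method returns whether anything was processed rather than a list of nodes. A simplified sketch of that callback shape, with a plain set standing in for the real store:

    def addgroup(deltas, known, addrevisioncb=None, duplicaterevisioncb=None):
        """Apply (node, delta) pairs; report new and duplicate nodes via callbacks."""
        empty = True
        for node, delta in deltas:
            if node in known:
                # Already stored: only notify the duplicate callback.
                if duplicaterevisioncb:
                    duplicaterevisioncb(node)
                empty = False
                continue
            known.add(node)  # storing the actual revision data is elided
            if addrevisioncb:
                addrevisioncb(node)
            empty = False
        return not empty
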
--- a/hgext/strip.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/strip.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,287 +1,22 @@
-"""strip changesets and their descendants from history
+"""strip changesets and their descendants from history (DEPRECATED)
+
+The functionality of this extension has been included in core Mercurial
+since version 5.7. Please use :hg:`debugstrip ...` instead.
 
 This extension allows you to strip changesets and all their descendants from the
 repository. See the command help for details.
 """
 from __future__ import absolute_import
 
-from mercurial.i18n import _
-from mercurial.pycompat import getattr
-from mercurial import (
-    bookmarks as bookmarksmod,
-    cmdutil,
-    error,
-    hg,
-    lock as lockmod,
-    mergestate as mergestatemod,
-    node as nodemod,
-    pycompat,
-    registrar,
-    repair,
-    scmutil,
-    util,
-)
+from mercurial import commands
 
-nullid = nodemod.nullid
-release = lockmod.release
-
-cmdtable = {}
-command = registrar.command(cmdtable)
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
 # be specifying the version(s) of Mercurial they are tested with, or
 # leave the attribute unspecified.
 testedwith = b'ships-with-hg-core'
 
-
-def checklocalchanges(repo, force=False):
-    s = repo.status()
-    if not force:
-        cmdutil.checkunfinished(repo)
-        cmdutil.bailifchanged(repo)
-    else:
-        cmdutil.checkunfinished(repo, skipmerge=True)
-    return s
-
-
-def _findupdatetarget(repo, nodes):
-    unode, p2 = repo.changelog.parents(nodes[0])
-    currentbranch = repo[None].branch()
-
-    if (
-        util.safehasattr(repo, b'mq')
-        and p2 != nullid
-        and p2 in [x.node for x in repo.mq.applied]
-    ):
-        unode = p2
-    elif currentbranch != repo[unode].branch():
-        pwdir = b'parents(wdir())'
-        revset = b'max(((parents(%ln::%r) + %r) - %ln::%r) and branch(%s))'
-        branchtarget = repo.revs(
-            revset, nodes, pwdir, pwdir, nodes, pwdir, currentbranch
-        )
-        if branchtarget:
-            cl = repo.changelog
-            unode = cl.node(branchtarget.first())
-
-    return unode
-
-
-def strip(
-    ui,
-    repo,
-    revs,
-    update=True,
-    backup=True,
-    force=None,
-    bookmarks=None,
-    soft=False,
-):
-    with repo.wlock(), repo.lock():
-
-        if update:
-            checklocalchanges(repo, force=force)
-            urev = _findupdatetarget(repo, revs)
-            hg.clean(repo, urev)
-            repo.dirstate.write(repo.currenttransaction())
-
-        if soft:
-            repair.softstrip(ui, repo, revs, backup)
-        else:
-            repair.strip(ui, repo, revs, backup)
-
-        repomarks = repo._bookmarks
-        if bookmarks:
-            with repo.transaction(b'strip') as tr:
-                if repo._activebookmark in bookmarks:
-                    bookmarksmod.deactivate(repo)
-                repomarks.applychanges(repo, tr, [(b, None) for b in bookmarks])
-            for bookmark in sorted(bookmarks):
-                ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
-
-
-@command(
-    b"strip",
-    [
-        (
-            b'r',
-            b'rev',
-            [],
-            _(
-                b'strip specified revision (optional, '
-                b'can specify revisions without this '
-                b'option)'
-            ),
-            _(b'REV'),
-        ),
-        (
-            b'f',
-            b'force',
-            None,
-            _(
-                b'force removal of changesets, discard '
-                b'uncommitted changes (no backup)'
-            ),
-        ),
-        (b'', b'no-backup', None, _(b'do not save backup bundle')),
-        (b'', b'nobackup', None, _(b'do not save backup bundle (DEPRECATED)'),),
-        (b'n', b'', None, _(b'ignored  (DEPRECATED)')),
-        (
-            b'k',
-            b'keep',
-            None,
-            _(b"do not modify working directory during strip"),
-        ),
-        (
-            b'B',
-            b'bookmark',
-            [],
-            _(b"remove revs only reachable from given bookmark"),
-            _(b'BOOKMARK'),
-        ),
-        (
-            b'',
-            b'soft',
-            None,
-            _(b"simply drop changesets from visible history (EXPERIMENTAL)"),
-        ),
-    ],
-    _(b'hg strip [-k] [-f] [-B bookmark] [-r] REV...'),
-    helpcategory=command.CATEGORY_MAINTENANCE,
-)
-def stripcmd(ui, repo, *revs, **opts):
-    """strip changesets and all their descendants from the repository
-
-    The strip command removes the specified changesets and all their
-    descendants. If the working directory has uncommitted changes, the
-    operation is aborted unless the --force flag is supplied, in which
-    case changes will be discarded.
-
-    If a parent of the working directory is stripped, then the working
-    directory will automatically be updated to the most recent
-    available ancestor of the stripped parent after the operation
-    completes.
-
-    Any stripped changesets are stored in ``.hg/strip-backup`` as a
-    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
-    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
-    where BUNDLE is the bundle file created by the strip. Note that
-    the local revision numbers will in general be different after the
-    restore.
-
-    Use the --no-backup option to discard the backup bundle once the
-    operation completes.
-
-    Strip is not a history-rewriting operation and can be used on
-    changesets in the public phase. But if the stripped changesets have
-    been pushed to a remote repository you will likely pull them again.
-
-    Return 0 on success.
-    """
-    opts = pycompat.byteskwargs(opts)
-    backup = True
-    if opts.get(b'no_backup') or opts.get(b'nobackup'):
-        backup = False
-
-    cl = repo.changelog
-    revs = list(revs) + opts.get(b'rev')
-    revs = set(scmutil.revrange(repo, revs))
-
-    with repo.wlock():
-        bookmarks = set(opts.get(b'bookmark'))
-        if bookmarks:
-            repomarks = repo._bookmarks
-            if not bookmarks.issubset(repomarks):
-                raise error.Abort(
-                    _(b"bookmark '%s' not found")
-                    % b','.join(sorted(bookmarks - set(repomarks.keys())))
-                )
-
-            # If the requested bookmark is not the only one pointing to a
-            # a revision we have to only delete the bookmark and not strip
-            # anything. revsets cannot detect that case.
-            nodetobookmarks = {}
-            for mark, node in pycompat.iteritems(repomarks):
-                nodetobookmarks.setdefault(node, []).append(mark)
-            for marks in nodetobookmarks.values():
-                if bookmarks.issuperset(marks):
-                    rsrevs = scmutil.bookmarkrevs(repo, marks[0])
-                    revs.update(set(rsrevs))
-            if not revs:
-                with repo.lock(), repo.transaction(b'bookmark') as tr:
-                    bmchanges = [(b, None) for b in bookmarks]
-                    repomarks.applychanges(repo, tr, bmchanges)
-                for bookmark in sorted(bookmarks):
-                    ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
-
-        if not revs:
-            raise error.Abort(_(b'empty revision set'))
-
-        descendants = set(cl.descendants(revs))
-        strippedrevs = revs.union(descendants)
-        roots = revs.difference(descendants)
-
-        # if one of the wdir parent is stripped we'll need
-        # to update away to an earlier revision
-        update = any(
-            p != nullid and cl.rev(p) in strippedrevs
-            for p in repo.dirstate.parents()
-        )
-
-        rootnodes = {cl.node(r) for r in roots}
-
-        q = getattr(repo, 'mq', None)
-        if q is not None and q.applied:
-            # refresh queue state if we're about to strip
-            # applied patches
-            if cl.rev(repo.lookup(b'qtip')) in strippedrevs:
-                q.applieddirty = True
-                start = 0
-                end = len(q.applied)
-                for i, statusentry in enumerate(q.applied):
-                    if statusentry.node in rootnodes:
-                        # if one of the stripped roots is an applied
-                        # patch, only part of the queue is stripped
-                        start = i
-                        break
-                del q.applied[start:end]
-                q.savedirty()
-
-        revs = sorted(rootnodes)
-        if update and opts.get(b'keep'):
-            urev = _findupdatetarget(repo, revs)
-            uctx = repo[urev]
-
-            # only reset the dirstate for files that would actually change
-            # between the working context and uctx
-            descendantrevs = repo.revs(b"only(., %d)", uctx.rev())
-            changedfiles = []
-            for rev in descendantrevs:
-                # blindly reset the files, regardless of what actually changed
-                changedfiles.extend(repo[rev].files())
-
-            # reset files that only changed in the dirstate too
-            dirstate = repo.dirstate
-            dirchanges = [f for f in dirstate if dirstate[f] != b'n']
-            changedfiles.extend(dirchanges)
-
-            repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
-            repo.dirstate.write(repo.currenttransaction())
-
-            # clear resolve state
-            mergestatemod.mergestate.clean(repo)
-
-            update = False
-
-        strip(
-            ui,
-            repo,
-            revs,
-            backup=backup,
-            update=update,
-            force=opts.get(b'force'),
-            bookmarks=bookmarks,
-            soft=opts[b'soft'],
-        )
-
-    return 0
+# This is a bit ugly, but a uisetup function that defines strip as an
+# alias for debugstrip would override any user alias for strip,
+# including aliases like "strip = strip --no-backup".
+commands.command.rename(old=b'debugstrip', new=b'debugstrip|strip')
--- a/hgext/transplant.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/transplant.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,6 +19,12 @@
 
 from mercurial.i18n import _
 from mercurial.pycompat import open
+from mercurial.node import (
+    bin,
+    hex,
+    nullid,
+    short,
+)
 from mercurial import (
     bundlerepo,
     cmdutil,
@@ -28,11 +34,9 @@
     logcmdutil,
     match,
     merge,
-    node as nodemod,
     patch,
     pycompat,
     registrar,
-    revlog,
     revset,
     scmutil,
     smartset,
@@ -62,10 +66,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'transplant', b'filter', default=None,
+    b'transplant',
+    b'filter',
+    default=None,
 )
 configitem(
-    b'transplant', b'log', default=None,
+    b'transplant',
+    b'log',
+    default=None,
 )
 
 
@@ -91,7 +99,7 @@
         abspath = os.path.join(self.path, self.transplantfile)
         if self.transplantfile and os.path.exists(abspath):
             for line in self.opener.read(self.transplantfile).splitlines():
-                lnode, rnode = map(revlog.bin, line.split(b':'))
+                lnode, rnode = map(bin, line.split(b':'))
                 list = self.transplants.setdefault(rnode, [])
                 list.append(transplantentry(lnode, rnode))
 
@@ -102,7 +110,7 @@
             fp = self.opener(self.transplantfile, b'w')
             for list in pycompat.itervalues(self.transplants):
                 for t in list:
-                    l, r = map(nodemod.hex, (t.lnode, t.rnode))
+                    l, r = map(hex, (t.lnode, t.rnode))
                     fp.write(l + b':' + r + b'\n')
             fp.close()
         self.dirty = False
@@ -140,8 +148,8 @@
         self.getcommiteditor = getcommiteditor
 
     def applied(self, repo, node, parent):
-        '''returns True if a node is already an ancestor of parent
-        or is parent or has already been transplanted'''
+        """returns True if a node is already an ancestor of parent
+        or is parent or has already been transplanted"""
         if hasnode(repo, parent):
             parentrev = repo.changelog.rev(parent)
         if hasnode(repo, node):
@@ -179,7 +187,7 @@
             tr = repo.transaction(b'transplant')
             for rev in revs:
                 node = revmap[rev]
-                revstr = b'%d:%s' % (rev, nodemod.short(node))
+                revstr = b'%d:%s' % (rev, short(node))
 
                 if self.applied(repo, node, p1):
                     self.ui.warn(
@@ -212,11 +220,11 @@
                         exchange.pull(repo, source.peer(), heads=[node])
 
                 skipmerge = False
-                if parents[1] != revlog.nullid:
+                if parents[1] != nullid:
                     if not opts.get(b'parent'):
                         self.ui.note(
                             _(b'skipping merge changeset %d:%s\n')
-                            % (rev, nodemod.short(node))
+                            % (rev, short(node))
                         )
                         skipmerge = True
                     else:
@@ -224,7 +232,7 @@
                         if parent not in parents:
                             raise error.Abort(
                                 _(b'%s is not a parent of %s')
-                                % (nodemod.short(parent), nodemod.short(node))
+                                % (short(parent), short(node))
                             )
                 else:
                     parent = parents[0]
@@ -259,13 +267,12 @@
                             raise
                         if n and domerge:
                             self.ui.status(
-                                _(b'%s merged at %s\n')
-                                % (revstr, nodemod.short(n))
+                                _(b'%s merged at %s\n') % (revstr, short(n))
                             )
                         elif n:
                             self.ui.status(
                                 _(b'%s transplanted to %s\n')
-                                % (nodemod.short(node), nodemod.short(n))
+                                % (short(node), short(n))
                             )
                     finally:
                         if patchfile:
@@ -305,7 +312,7 @@
                 ),
                 environ={
                     b'HGUSER': changelog[1],
-                    b'HGREVISION': nodemod.hex(node),
+                    b'HGREVISION': hex(node),
                 },
                 onerr=error.Abort,
                 errprefix=_(b'filter failed'),
@@ -329,9 +336,9 @@
 
         if log:
             # we don't translate messages inserted into commits
-            message += b'\n(transplanted from %s)' % nodemod.hex(node)
+            message += b'\n(transplanted from %s)' % hex(node)
 
-        self.ui.status(_(b'applying %s\n') % nodemod.short(node))
+        self.ui.status(_(b'applying %s\n') % short(node))
         self.ui.note(b'%s %s\n%s\n' % (user, date, message))
 
         if not patchfile and not merge:
@@ -373,9 +380,7 @@
             editor=self.getcommiteditor(),
         )
         if not n:
-            self.ui.warn(
-                _(b'skipping emptied changeset %s\n') % nodemod.short(node)
-            )
+            self.ui.warn(_(b'skipping emptied changeset %s\n') % short(node))
             return None
         if not merge:
             self.transplants.set(n, node)
@@ -391,13 +396,11 @@
             n, node = self.recover(repo, source, opts)
             if n:
                 self.ui.status(
-                    _(b'%s transplanted as %s\n')
-                    % (nodemod.short(node), nodemod.short(n))
+                    _(b'%s transplanted as %s\n') % (short(node), short(n))
                 )
             else:
                 self.ui.status(
-                    _(b'%s skipped due to empty diff\n')
-                    % (nodemod.short(node),)
+                    _(b'%s skipped due to empty diff\n') % (short(node),)
                 )
         seriespath = os.path.join(self.path, b'series')
         if not os.path.exists(seriespath):
@@ -426,7 +429,7 @@
                 if parent not in parents:
                     raise error.Abort(
                         _(b'%s is not a parent of %s')
-                        % (nodemod.short(parent), nodemod.short(node))
+                        % (short(parent), short(node))
                     )
             else:
                 merge = True
@@ -437,7 +440,7 @@
             if p1 != parent:
                 raise error.Abort(
                     _(b'working directory not at transplant parent %s')
-                    % nodemod.hex(parent)
+                    % hex(parent)
                 )
             if merge:
                 repo.setparents(p1, parents[1])
@@ -490,7 +493,7 @@
             if line.startswith(b'# Merges'):
                 cur = merges
                 continue
-            cur.append(revlog.bin(line))
+            cur.append(bin(line))
 
         return (nodes, merges)
 
@@ -502,17 +505,17 @@
             os.mkdir(self.path)
         series = self.opener(b'series', b'w')
         for rev in sorted(revmap):
-            series.write(nodemod.hex(revmap[rev]) + b'\n')
+            series.write(hex(revmap[rev]) + b'\n')
         if merges:
             series.write(b'# Merges\n')
             for m in merges:
-                series.write(nodemod.hex(m) + b'\n')
+                series.write(hex(m) + b'\n')
         series.close()
 
     def parselog(self, fp):
         parents = []
         message = []
-        node = revlog.nullid
+        node = nullid
         inmsg = False
         user = None
         date = None
@@ -524,9 +527,9 @@
             elif line.startswith(b'# Date '):
                 date = line[7:]
             elif line.startswith(b'# Node ID '):
-                node = revlog.bin(line[10:])
+                node = bin(line[10:])
             elif line.startswith(b'# Parent '):
-                parents.append(revlog.bin(line[9:]))
+                parents.append(bin(line[9:]))
             elif not line.startswith(b'# '):
                 inmsg = True
                 message.append(line)
@@ -544,10 +547,10 @@
         fp = self.opener(b'journal', b'w')
         fp.write(b'# User %s\n' % user)
         fp.write(b'# Date %s\n' % date)
-        fp.write(b'# Node ID %s\n' % nodemod.hex(p2))
-        fp.write(b'# Parent ' + nodemod.hex(p1) + b'\n')
+        fp.write(b'# Node ID %s\n' % hex(p2))
+        fp.write(b'# Parent ' + hex(p1) + b'\n')
         if merge:
-            fp.write(b'# Parent ' + nodemod.hex(p2) + b'\n')
+            fp.write(b'# Parent ' + hex(p2) + b'\n')
         fp.write(message.rstrip() + b'\n')
         fp.close()
 
@@ -564,7 +567,7 @@
         def matchfn(node):
             if self.applied(repo, node, root):
                 return False
-            if source.changelog.parents(node)[1] != revlog.nullid:
+            if source.changelog.parents(node)[1] != nullid:
                 return False
             extra = source.changelog.read(node)[5]
             cnode = extra.get(b'transplant_source')
@@ -682,7 +685,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def transplant(ui, repo, *revs, **opts):
-    '''transplant changesets from another branch
+    """transplant changesets from another branch
 
     Selected changesets will be applied on top of the current working
     directory with the log of the original changeset. The changesets
@@ -731,7 +734,7 @@
     If a changeset application fails, you can fix the merge by hand
     and then resume where you left off by calling :hg:`transplant
     --continue/-c`.
-    '''
+    """
     with repo.wlock():
         return _dotransplant(ui, repo, *revs, **opts)
 
@@ -743,9 +746,9 @@
                 yield node
 
     def transplantwalk(repo, dest, heads, match=util.always):
-        '''Yield all nodes that are ancestors of a head but not ancestors
+        """Yield all nodes that are ancestors of a head but not ancestors
         of dest.
-        If no heads are specified, the heads of repo will be used.'''
+        If no heads are specified, the heads of repo will be used."""
         if not heads:
             heads = repo.heads()
         ancestors = []
@@ -800,14 +803,14 @@
     tp = transplanter(ui, repo, opts)
 
     p1 = repo.dirstate.p1()
-    if len(repo) > 0 and p1 == revlog.nullid:
+    if len(repo) > 0 and p1 == nullid:
         raise error.Abort(_(b'no revision checked out'))
     if opts.get(b'continue'):
         if not tp.canresume():
-            raise error.Abort(_(b'no transplant to continue'))
+            raise error.StateError(_(b'no transplant to continue'))
     elif opts.get(b'stop'):
         if not tp.canresume():
-            raise error.Abort(_(b'no interrupted transplant found'))
+            raise error.StateError(_(b'no interrupted transplant found'))
         return tp.stop(ui, repo)
     else:
         cmdutil.checkunfinished(repo)
@@ -886,8 +889,7 @@
 
 @revsetpredicate(b'transplanted([set])')
 def revsettransplanted(repo, subset, x):
-    """Transplanted changesets in set, or all transplanted changesets.
-    """
+    """Transplanted changesets in set, or all transplanted changesets."""
     if x:
         s = revset.getset(repo, subset, x)
     else:
@@ -906,7 +908,7 @@
     changeset if any."""
     ctx = context.resource(mapping, b'ctx')
     n = ctx.extra().get(b'transplant_source')
-    return n and nodemod.hex(n) or b''
+    return n and hex(n) or b''
 
 
 def extsetup(ui):
--- a/hgext/uncommit.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/uncommit.py	Tue Jan 19 21:48:43 2021 +0530
@@ -20,6 +20,7 @@
 from __future__ import absolute_import
 
 from mercurial.i18n import _
+from mercurial.node import nullid
 
 from mercurial import (
     cmdutil,
@@ -27,7 +28,6 @@
     context,
     copies as copiesmod,
     error,
-    node,
     obsutil,
     pathutil,
     pycompat,
@@ -43,10 +43,14 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'experimental', b'uncommitondirtywdir', default=False,
+    b'experimental',
+    b'uncommitondirtywdir',
+    default=False,
 )
 configitem(
-    b'experimental', b'uncommit.keep', default=False,
+    b'experimental',
+    b'uncommit.keep',
+    default=False,
 )
 
 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
@@ -109,7 +113,7 @@
 
     new = context.memctx(
         repo,
-        parents=[base.node(), node.nullid],
+        parents=[base.node(), nullid],
         text=message,
         files=files,
         filectxfn=filectxfn,
--- a/hgext/win32mbcs.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/win32mbcs.py	Tue Jan 19 21:48:43 2021 +0530
@@ -70,7 +70,9 @@
 # Encoding.encoding may be updated by --encoding option.
 # Use a lambda to delay the resolution.
 configitem(
-    b'win32mbcs', b'encoding', default=lambda: encoding.encoding,
+    b'win32mbcs',
+    b'encoding',
+    default=lambda: encoding.encoding,
 )
 
 _encoding = None  # see extsetup
--- a/hgext/win32text.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgext/win32text.py	Tue Jan 19 21:48:43 2021 +0530
@@ -62,7 +62,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'win32text', b'warn', default=True,
+    b'win32text',
+    b'warn',
+    default=True,
 )
 
 # regexp for single LF without CR preceding.
--- a/hgweb.cgi	Thu Dec 24 15:58:08 2020 +0900
+++ b/hgweb.cgi	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # An example hgweb CGI script, edit as necessary
 # See also https://mercurial-scm.org/wiki/PublishingRepositories
--- a/i18n/check-translation.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/i18n/check-translation.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # check-translation.py - check Mercurial specific translation problems
 from __future__ import absolute_import
@@ -33,8 +33,7 @@
 
 
 def match(checker, pe):
-    """Examine whether POEntry "pe" is target of specified checker or not
-    """
+    """Examine whether POEntry "pe" is target of specified checker or not"""
     if not checker.match(pe.msgid):
         return
     # examine suppression by translator comment
--- a/i18n/hggettext	Thu Dec 24 15:58:08 2020 +0900
+++ b/i18n/hggettext	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # hggettext - carefully extract docstrings for Mercurial
 #
--- a/i18n/posplit	Thu Dec 24 15:58:08 2020 +0900
+++ b/i18n/posplit	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # posplit - split messages in paragraphs on .po/.pot files
 #
--- a/mercurial/ancestor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/ancestor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -148,11 +148,11 @@
 
 
 class incrementalmissingancestors(object):
-    '''persistent state used to calculate missing ancestors incrementally
+    """persistent state used to calculate missing ancestors incrementally
 
     Although similar in spirit to lazyancestors below, this is a separate class
     because trying to support contains and missingancestors operations with the
-    same internal data structures adds needless complexity.'''
+    same internal data structures adds needless complexity."""
 
     def __init__(self, pfunc, bases):
         self.bases = set(bases)
@@ -198,12 +198,12 @@
                 break
 
     def missingancestors(self, revs):
-        '''return all the ancestors of revs that are not ancestors of self.bases
+        """return all the ancestors of revs that are not ancestors of self.bases
 
         This may include elements from revs.
 
         Equivalent to the revset (::revs - ::self.bases). Revs are returned in
-        revision number order, which is a topological order.'''
+        revision number order, which is a topological order."""
         revsvisit = set(revs)
         basesvisit = self.bases
         pfunc = self.pfunc
--- a/mercurial/archival.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/archival.py	Tue Jan 19 21:48:43 2021 +0530
@@ -37,8 +37,8 @@
 
 
 def tidyprefix(dest, kind, prefix):
-    '''choose prefix to use for names in archive.  make sure prefix is
-    safe for consumers.'''
+    """choose prefix to use for names in archive.  make sure prefix is
+    safe for consumers."""
 
     if prefix:
         prefix = util.normpath(prefix)
@@ -132,8 +132,8 @@
 
 
 class tarit(object):
-    '''write archive to tar file or stream.  can write uncompressed,
-    or compress with gzip or bzip2.'''
+    """write archive to tar file or stream.  can write uncompressed,
+    or compress with gzip or bzip2."""
 
     if pycompat.ispy3:
         GzipFileWithTime = gzip.GzipFile  # camelcase-required
@@ -185,8 +185,10 @@
                     mtime=mtime,
                 )
                 self.fileobj = gzfileobj
-                return tarfile.TarFile.taropen(  # pytype: disable=attribute-error
-                    name, pycompat.sysstr(mode), gzfileobj
+                return (
+                    tarfile.TarFile.taropen(  # pytype: disable=attribute-error
+                        name, pycompat.sysstr(mode), gzfileobj
+                    )
                 )
             else:
                 try:
@@ -224,8 +226,8 @@
 
 
 class zipit(object):
-    '''write archive to zip file or stream.  can write uncompressed,
-    or compressed with deflate.'''
+    """write archive to zip file or stream.  can write uncompressed,
+    or compressed with deflate."""
 
     def __init__(self, dest, mtime, compress=True):
         if isinstance(dest, bytes):
@@ -316,7 +318,7 @@
     mtime=None,
     subrepos=False,
 ):
-    '''create archive of repo as it was at node.
+    """create archive of repo as it was at node.
 
     dest can be name of directory, name of archive file, or file
     object to write archive to.
@@ -333,7 +335,7 @@
     mtime is the modified time, in seconds, or None to use the changeset time.
 
     subrepos tells whether to include subrepos.
-    '''
+    """
 
     if kind == b'txz' and not pycompat.ispy3:
         raise error.Abort(_(b'xz compression is only available in Python 3'))
--- a/mercurial/bookmarks.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/bookmarks.py	Tue Jan 19 21:48:43 2021 +0530
@@ -189,8 +189,7 @@
         return self._nodemap.get(node, [])
 
     def applychanges(self, repo, tr, changes):
-        """Apply a list of changes to bookmarks
-        """
+        """Apply a list of changes to bookmarks"""
         bmchanges = tr.changes.get(b'bookmarks')
         for name, node in changes:
             old = self._refmap.get(name)
@@ -422,8 +421,8 @@
 
 
 def calculateupdate(ui, repo):
-    '''Return a tuple (activemark, movemarkfrom) indicating the active bookmark
-    and where to move the active bookmark from, if needed.'''
+    """Return a tuple (activemark, movemarkfrom) indicating the active bookmark
+    and where to move the active bookmark from, if needed."""
     checkout, movemarkfrom = None, None
     activemark = repo._activebookmark
     if isactivewdirparent(repo):
@@ -509,7 +508,7 @@
 
 
 def comparebookmarks(repo, srcmarks, dstmarks, targets=None):
-    '''Compare bookmarks between srcmarks and dstmarks
+    """Compare bookmarks between srcmarks and dstmarks
 
     This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
     differ, invalid)", each are list of bookmarks below:
@@ -532,7 +531,7 @@
 
     If "targets" is specified, only bookmarks listed in it are
     examined.
-    '''
+    """
 
     if targets:
         bset = set(targets)
@@ -585,14 +584,14 @@
 
 
 def _diverge(ui, b, path, localmarks, remotenode):
-    '''Return appropriate diverged bookmark for specified ``path``
+    """Return appropriate diverged bookmark for specified ``path``
 
     This returns None, if it is failed to assign any divergent
     bookmark name.
 
     This reuses already existing one with "@number" suffix, if it
     refers ``remotenode``.
-    '''
+    """
     if b == b'@':
         b = b''
     # try to use an @pathalias suffix
@@ -762,13 +761,17 @@
 
 
 def incoming(ui, repo, peer):
-    '''Show bookmarks incoming from other to repo
-    '''
+    """Show bookmarks incoming from other to repo"""
     ui.status(_(b"searching for changed bookmarks\n"))
 
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
+            e.callcommand(
+                b'listkeys',
+                {
+                    b'namespace': b'bookmarks',
+                },
+            ).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -813,8 +816,7 @@
 
 
 def outgoing(ui, repo, other):
-    '''Show bookmarks outgoing from repo to other
-    '''
+    """Show bookmarks outgoing from repo to other"""
     ui.status(_(b"searching for changed bookmarks\n"))
 
     remotemarks = unhexlifybookmarks(other.listkeys(b'bookmarks'))
@@ -863,13 +865,18 @@
 
 
 def summary(repo, peer):
-    '''Compare bookmarks between repo and other for "hg summary" output
+    """Compare bookmarks between repo and other for "hg summary" output
 
     This returns "(# of incoming, # of outgoing)" tuple.
-    '''
+    """
     with peer.commandexecutor() as e:
         remotemarks = unhexlifybookmarks(
-            e.callcommand(b'listkeys', {b'namespace': b'bookmarks',}).result()
+            e.callcommand(
+                b'listkeys',
+                {
+                    b'namespace': b'bookmarks',
+                },
+            ).result()
         )
 
     r = comparebookmarks(repo, remotemarks, repo._bookmarks)
@@ -901,7 +908,7 @@
     """
     mark = mark.strip()
     if not mark:
-        raise error.Abort(
+        raise error.InputError(
             _(b"bookmark names cannot consist entirely of whitespace")
         )
     scmutil.checknewlabel(repo, mark, b'bookmark')
@@ -917,7 +924,7 @@
     changes = []
     for mark in names:
         if mark not in marks:
-            raise error.Abort(_(b"bookmark '%s' does not exist") % mark)
+            raise error.InputError(_(b"bookmark '%s' does not exist") % mark)
         if mark == repo._activebookmark:
             deactivate(repo)
         changes.append((mark, None))
@@ -937,7 +944,7 @@
     marks = repo._bookmarks
     mark = checkformat(repo, new)
     if old not in marks:
-        raise error.Abort(_(b"bookmark '%s' does not exist") % old)
+        raise error.InputError(_(b"bookmark '%s' does not exist") % old)
     changes = []
     for bm in marks.checkconflict(mark, force):
         changes.append((bm, None))
@@ -1041,7 +1048,7 @@
     bmarks = {}
     for bmark in names or marks:
         if bmark not in marks:
-            raise error.Abort(_(b"bookmark '%s' does not exist") % bmark)
+            raise error.InputError(_(b"bookmark '%s' does not exist") % bmark)
         active = repo._activebookmark
         if bmark == active:
             prefix, label = b'*', activebookmarklabel
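
The incoming() and summary() hunks above share the same wire-protocol pattern; a hedged, standalone sketch of just that pattern (``repo`` and ``peer`` are assumed to already exist)::

    from mercurial import bookmarks

    def fetchremotemarks(repo, peer):
        # ask the remote for its bookmarks pushkey namespace and turn the
        # hex values back into binary nodes, as incoming()/summary() do
        with peer.commandexecutor() as e:
            remotemarks = bookmarks.unhexlifybookmarks(
                e.callcommand(
                    b'listkeys',
                    {
                        b'namespace': b'bookmarks',
                    },
                ).result()
            )
        # compare with the local bookmarks; this returns the 7-tuple of
        # lists documented on comparebookmarks() above
        return bookmarks.comparebookmarks(repo, remotemarks, repo._bookmarks)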
--- a/mercurial/branchmap.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/branchmap.py	Tue Jan 19 21:48:43 2021 +0530
@@ -41,7 +41,17 @@
     )
 
     assert any(
-        (Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Union,)
+        (
+            Any,
+            Callable,
+            Dict,
+            Iterable,
+            List,
+            Optional,
+            Set,
+            Tuple,
+            Union,
+        )
     )
 
 subsettable = repoviewutil.subsettable
@@ -139,8 +149,7 @@
 
 
 def _unknownnode(node):
-    """ raises ValueError when branchcache found a node which does not exists
-    """
+    """raises ValueError when branchcache found a node which does not exists"""
     raise ValueError('node %s does not exist' % pycompat.sysstr(hex(node)))
 
 
@@ -183,9 +192,9 @@
         hasnode=None,
     ):
         # type: (Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes,  int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
-        """ hasnode is a function which can be used to verify whether changelog
+        """hasnode is a function which can be used to verify whether changelog
         has a given node or not. If it's not provided, we assume that every node
-        we have exists in changelog """
+        we have exists in changelog"""
         self.tipnode = tipnode
         self.tiprev = tiprev
         self.filteredhash = filteredhash
@@ -304,7 +313,7 @@
         return bcache
 
     def load(self, repo, lineiter):
-        """ fully loads the branchcache by reading from the file using the line
+        """fully loads the branchcache by reading from the file using the line
         iterator passed"""
         for line in lineiter:
             line = line.rstrip(b'\n')
@@ -340,8 +349,8 @@
             return False
 
     def _branchtip(self, heads):
-        '''Return tuple with last open head in heads and false,
-        otherwise return last closed head and true.'''
+        """Return tuple with last open head in heads and false,
+        otherwise return last closed head and true."""
         tip = heads[-1]
         closed = True
         for h in reversed(heads):
@@ -352,9 +361,9 @@
         return tip, closed
 
     def branchtip(self, branch):
-        '''Return the tipmost open head on branch head, otherwise return the
+        """Return the tipmost open head on branch head, otherwise return the
         tipmost closed head on branch.
-        Raise KeyError for unknown branch.'''
+        Raise KeyError for unknown branch."""
         return self._branchtip(self[branch])[0]
 
     def iteropen(self, nodes):
@@ -434,33 +443,82 @@
             if closesbranch:
                 self._closednodes.add(cl.node(r))
 
-        # fetch current topological heads to speed up filtering
-        topoheads = set(cl.headrevs())
-
         # new tip revision which we found after iterating items from new
         # branches
         ntiprev = self.tiprev
 
-        # if older branchheads are reachable from new ones, they aren't
-        # really branchheads. Note checking parents is insufficient:
-        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
+        # Delay fetching the topological heads until they are needed.
+        # A repository without non-continuous branches can skip this part.
+        topoheads = None
+
+        # If a changeset is visible, its parents must be visible too, so
+        # use the faster unfiltered parent accessor.
+        parentrevs = repo.unfiltered().changelog.parentrevs
+
         for branch, newheadrevs in pycompat.iteritems(newbranches):
+            # For every branch, compute the new branchheads.
+            # A branchhead is a revision such that no descendant is on
+            # the same branch.
+            #
+            # The branchheads are computed iteratively in revision order.
+            # This ensures topological order, i.e. parents are processed
+            # before their children. Ancestors are inclusive here, i.e.
+            # any revision is an ancestor of itself.
+            #
+            # Core observations:
+            # - The current revision is always a branchhead for the
+            #   repository up to that point.
+            # - It is the first revision of the branch if and only if
+            #   there was no branchhead before. In that case, it is the
+            #   only branchhead as there are no possible ancestors on
+            #   the same branch.
+            # - If a parent is on the same branch, a branchhead can
+            #   only be an ancestor of that parent if it is the parent
+            #   itself. Otherwise it would have been removed as an
+            #   ancestor of that parent before.
+            # - Therefore, if all parents are on the same branch, they
+            #   can just be removed from the branchhead set.
+            # - If one parent is on the same branch and the other is not
+            #   and there was exactly one branchhead known, the existing
+            #   branchhead can only be an ancestor if it is the parent.
+            #   Otherwise it would have been removed as ancestor of
+            #   the parent before. The other parent therefore can't have
+            #   a branchhead as ancestor.
+            # - In all other cases, the parents on different branches
+            #   could have a branchhead as ancestor. Those parents are
+            #   kept in the "uncertain" set. If all branchheads are also
+            #   topological heads, they can't have descendants and further
+            #   checks can be skipped. Otherwise, the ancestors of the
+            #   "uncertain" set are removed from branchheads.
+            #   This computation is heavy and avoided if at all possible.
             bheads = self._entries.setdefault(branch, [])
             bheadset = {cl.rev(node) for node in bheads}
-
-            # This have been tested True on all internal usage of this function.
-            # run it again in case of doubt
-            # assert not (set(bheadrevs) & set(newheadrevs))
-            bheadset.update(newheadrevs)
+            uncertain = set()
+            for newrev in sorted(newheadrevs):
+                if not bheadset:
+                    bheadset.add(newrev)
+                    continue
 
-            # This prunes out two kinds of heads - heads that are superseded by
-            # a head in newheadrevs, and newheadrevs that are not heads because
-            # an existing head is their descendant.
-            uncertain = bheadset - topoheads
+                parents = [p for p in parentrevs(newrev) if p != nullrev]
+                samebranch = set()
+                otherbranch = set()
+                for p in parents:
+                    if p in bheadset or getbranchinfo(p)[0] == branch:
+                        samebranch.add(p)
+                    else:
+                        otherbranch.add(p)
+                if otherbranch and not (len(bheadset) == len(samebranch) == 1):
+                    uncertain.update(otherbranch)
+                bheadset.difference_update(samebranch)
+                bheadset.add(newrev)
+
             if uncertain:
-                floorrev = min(uncertain)
-                ancestors = set(cl.ancestors(newheadrevs, floorrev))
-                bheadset -= ancestors
+                if topoheads is None:
+                    topoheads = set(cl.headrevs())
+                if bheadset - topoheads:
+                    floorrev = min(bheadset)
+                    ancestors = set(cl.ancestors(newheadrevs, floorrev))
+                    bheadset -= ancestors
             bheadrevs = sorted(bheadset)
             self[branch] = [cl.node(rev) for rev in bheadrevs]
             tiprev = bheadrevs[-1]
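
To make the comment block above more concrete, here is a hedged, standalone sketch of the same per-revision bookkeeping on a toy graph. It is illustrative only: the topological-heads/floorrev shortcut is replaced by a plain ancestor walk, and ``graph`` simply maps rev -> (parents, branch)::

    def toy_branchheads(graph):
        def ancestors(rev):
            # proper ancestors of rev in the toy graph
            seen, stack = set(), list(graph[rev][0])
            while stack:
                r = stack.pop()
                if r not in seen:
                    seen.add(r)
                    stack.extend(graph[r][0])
            return seen

        heads = {}
        for rev in sorted(graph):
            parents, branch = graph[rev]
            bheads = heads.setdefault(branch, set())
            if not bheads:
                # first revision of the branch: the only possible head
                bheads.add(rev)
                continue
            samebranch = {p for p in parents
                          if p in bheads or graph[p][1] == branch}
            otherbranch = set(parents) - samebranch
            uncertain = set()
            if otherbranch and not (len(bheads) == len(samebranch) == 1):
                uncertain.update(otherbranch)
            # parents on the same branch can never remain heads
            bheads.difference_update(samebranch)
            bheads.add(rev)
            if uncertain:
                # an older head may still be reachable through another
                # branch, so fall back to the expensive ancestor check
                bheads.difference_update(ancestors(rev))
        return heads

    print(toy_branchheads({
        1: ([], b'a'),
        2: ([1], b'b'),
        3: ([2], b'a'),
    }))
    # {b'a': {3}, b'b': {2}} -- the "1 (branch a) -> 2 (branch b) ->
    # 3 (branch a)" case: rev 1 stops being a head of branch a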
--- a/mercurial/bundle2.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/bundle2.py	Tue Jan 19 21:48:43 2021 +0530
@@ -156,12 +156,16 @@
 import sys
 
 from .i18n import _
+from .node import (
+    hex,
+    nullid,
+    short,
+)
 from . import (
     bookmarks,
     changegroup,
     encoding,
     error,
-    node as nodemod,
     obsolete,
     phases,
     pushkey,
@@ -489,7 +493,12 @@
 
 def _processchangegroup(op, cg, tr, source, url, **kwargs):
     ret = cg.apply(op.repo, tr, source, url, **kwargs)
-    op.records.add(b'changegroup', {b'return': ret,})
+    op.records.add(
+        b'changegroup',
+        {
+            b'return': ret,
+        },
+    )
     return ret
 
 
@@ -1647,8 +1656,7 @@
 
 
 def obsmarkersversion(caps):
-    """extract the list of supported obsmarkers versions from a bundle2caps dict
-    """
+    """extract the list of supported obsmarkers versions from a bundle2caps dict"""
     obscaps = caps.get(b'obsmarkers', ())
     return [int(c[1:]) for c in obscaps if c.startswith(b'V')]
 
@@ -1731,7 +1739,11 @@
 
     if opts.get(b'obsolescence', False):
         obsmarkers = repo.obsstore.relevantmarkers(outgoing.missing)
-        buildobsmarkerspart(bundler, obsmarkers)
+        buildobsmarkerspart(
+            bundler,
+            obsmarkers,
+            mandatory=opts.get(b'obsolescence-mandatory', True),
+        )
 
     if opts.get(b'phases', False):
         headsbyphase = phases.subsetphaseheads(repo, outgoing.missing)
@@ -1854,7 +1866,7 @@
     part.addparam(b'requirements', requirements, mandatory=True)
 
 
-def buildobsmarkerspart(bundler, markers):
+def buildobsmarkerspart(bundler, markers, mandatory=True):
     """add an obsmarker part to the bundler with <markers>
 
     No part is created if markers is empty.
@@ -1868,7 +1880,7 @@
     if version is None:
         raise ValueError(b'bundler does not support common obsmarker format')
     stream = obsolete.encodemarkers(markers, True, version=version)
-    return bundler.newpart(b'obsmarkers', data=stream)
+    return bundler.newpart(b'obsmarkers', data=stream, mandatory=mandatory)
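
A hedged sketch of the new ``mandatory`` knob from the caller side; ``repo``, ``bundler`` and ``nodes`` are assumed to exist. An advisory (non-mandatory) part can be skipped by receivers that do not understand obsolescence markers instead of aborting the exchange, which is what ``b'obsolescence-mandatory': False`` in the ``opts`` of ``writenewbundle()`` now enables::

    from mercurial import bundle2

    def addadvisoryobsmarkers(repo, bundler, nodes):
        # relevantmarkers() and buildobsmarkerspart() are combined the same
        # way in writenewbundle() above; mandatory=False only flips the
        # part from mandatory to advisory
        markers = repo.obsstore.relevantmarkers(nodes)
        return bundle2.buildobsmarkerspart(bundler, markers, mandatory=False)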
 
 
 def writebundle(
@@ -1948,11 +1960,7 @@
     ),
 )
 def handlechangegroup(op, inpart):
-    """apply a changegroup part on the repo
-
-    This is a very early implementation that will massive rework before being
-    inflicted to any end-user.
-    """
+    """apply a changegroup part on the repo"""
     from . import localrepo
 
     tr = op.gettransaction()
@@ -2127,14 +2135,14 @@
         currentnode = op.repo._bookmarks.get(book)
         if currentnode != node:
             if node is None:
-                finalmsg = msgexist % (book, nodemod.short(currentnode))
+                finalmsg = msgexist % (book, short(currentnode))
             elif currentnode is None:
-                finalmsg = msgmissing % (book, nodemod.short(node))
+                finalmsg = msgmissing % (book, short(node))
             else:
                 finalmsg = msgstandard % (
                     book,
-                    nodemod.short(node),
-                    nodemod.short(currentnode),
+                    short(node),
+                    short(currentnode),
                 )
             raise error.PushRaced(finalmsg)
 
@@ -2211,7 +2219,7 @@
             actualphase = phasecache.phase(unfi, cl.rev(n))
             if actualphase != expectedphase:
                 finalmsg = msg % (
-                    nodemod.short(n),
+                    short(n),
                     phases.phasenames[actualphase],
                     phases.phasenames[expectedphase],
                 )
@@ -2356,10 +2364,8 @@
                 hookargs[b'pushkeycompat'] = b'1'
                 hookargs[b'namespace'] = b'bookmarks'
                 hookargs[b'key'] = book
-                hookargs[b'old'] = nodemod.hex(bookstore.get(book, b''))
-                hookargs[b'new'] = nodemod.hex(
-                    node if node is not None else b''
-                )
+                hookargs[b'old'] = hex(bookstore.get(book, b''))
+                hookargs[b'new'] = hex(node if node is not None else b'')
                 allhooks.append(hookargs)
 
             for hookargs in allhooks:
@@ -2565,7 +2571,7 @@
             fullnodes=commonnodes,
         )
         cgdata = packer.generate(
-            {nodemod.nullid},
+            {nullid},
             list(commonnodes),
             False,
             b'narrow_widen',
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/bundlecaches.py	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,424 @@
+# bundlecaches.py - utility to deal with pre-computed bundle for servers
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from .i18n import _
+
+from .thirdparty import attr
+
+from . import (
+    error,
+    sslutil,
+    util,
+)
+from .utils import stringutil
+
+urlreq = util.urlreq
+
+CB_MANIFEST_FILE = b'clonebundles.manifest'
+
+
+@attr.s
+class bundlespec(object):
+    compression = attr.ib()
+    wirecompression = attr.ib()
+    version = attr.ib()
+    wireversion = attr.ib()
+    params = attr.ib()
+    contentopts = attr.ib()
+
+
+# Maps bundle version human names to changegroup versions.
+_bundlespeccgversions = {
+    b'v1': b'01',
+    b'v2': b'02',
+    b'packed1': b's1',
+    b'bundle2': b'02',  # legacy
+}
+
+# Maps bundle version with content opts to choose which part to bundle
+_bundlespeccontentopts = {
+    b'v1': {
+        b'changegroup': True,
+        b'cg.version': b'01',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': False,
+        b'revbranchcache': False,
+    },
+    b'v2': {
+        b'changegroup': True,
+        b'cg.version': b'02',
+        b'obsolescence': False,
+        b'phases': False,
+        b'tagsfnodescache': True,
+        b'revbranchcache': True,
+    },
+    b'packed1': {b'cg.version': b's1'},
+}
+_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
+
+_bundlespecvariants = {
+    b"streamv2": {
+        b"changegroup": False,
+        b"streamv2": True,
+        b"tagsfnodescache": False,
+        b"revbranchcache": False,
+    }
+}
+
+# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
+_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
+
+
+def parsebundlespec(repo, spec, strict=True):
+    """Parse a bundle string specification into parts.
+
+    Bundle specifications denote a well-defined bundle/exchange format.
+    The content of a given specification should not change over time in
+    order to ensure that bundles produced by a newer version of Mercurial are
+    readable from an older version.
+
+    The string currently has the form:
+
+       <compression>-<type>[;<parameter0>[;<parameter1>]]
+
+    Where <compression> is one of the supported compression formats
+    and <type> is (currently) a version string. A ";" can follow the type and
+    all text afterwards is interpreted as URI encoded, ";" delimited key=value
+    pairs.
+
+    If ``strict`` is True (the default) <compression> is required. Otherwise,
+    it is optional.
+
+    Returns a bundlespec object of (compression, version, parameters).
+    Compression will be ``None`` if not in strict mode and a compression isn't
+    defined.
+
+    An ``InvalidBundleSpecification`` is raised when the specification is
+    not syntactically well formed.
+
+    An ``UnsupportedBundleSpecification`` is raised when the compression or
+    bundle type/version is not recognized.
+
+    Note: this function will likely eventually return a more complex data
+    structure, including bundle2 part information.
+    """
+
+    def parseparams(s):
+        if b';' not in s:
+            return s, {}
+
+        params = {}
+        version, paramstr = s.split(b';', 1)
+
+        for p in paramstr.split(b';'):
+            if b'=' not in p:
+                raise error.InvalidBundleSpecification(
+                    _(
+                        b'invalid bundle specification: '
+                        b'missing "=" in parameter: %s'
+                    )
+                    % p
+                )
+
+            key, value = p.split(b'=', 1)
+            key = urlreq.unquote(key)
+            value = urlreq.unquote(value)
+            params[key] = value
+
+        return version, params
+
+    if strict and b'-' not in spec:
+        raise error.InvalidBundleSpecification(
+            _(
+                b'invalid bundle specification; '
+                b'must be prefixed with compression: %s'
+            )
+            % spec
+        )
+
+    if b'-' in spec:
+        compression, version = spec.split(b'-', 1)
+
+        if compression not in util.compengines.supportedbundlenames:
+            raise error.UnsupportedBundleSpecification(
+                _(b'%s compression is not supported') % compression
+            )
+
+        version, params = parseparams(version)
+
+        if version not in _bundlespeccgversions:
+            raise error.UnsupportedBundleSpecification(
+                _(b'%s is not a recognized bundle version') % version
+            )
+    else:
+        # Value could be just the compression or just the version, in which
+        # case some defaults are assumed (but only when not in strict mode).
+        assert not strict
+
+        spec, params = parseparams(spec)
+
+        if spec in util.compengines.supportedbundlenames:
+            compression = spec
+            version = b'v1'
+            # Generaldelta repos require v2.
+            if b'generaldelta' in repo.requirements:
+                version = b'v2'
+            # Modern compression engines require v2.
+            if compression not in _bundlespecv1compengines:
+                version = b'v2'
+        elif spec in _bundlespeccgversions:
+            if spec == b'packed1':
+                compression = b'none'
+            else:
+                compression = b'bzip2'
+            version = spec
+        else:
+            raise error.UnsupportedBundleSpecification(
+                _(b'%s is not a recognized bundle specification') % spec
+            )
+
+    # Bundle version 1 only supports a known set of compression engines.
+    if version == b'v1' and compression not in _bundlespecv1compengines:
+        raise error.UnsupportedBundleSpecification(
+            _(b'compression engine %s is not supported on v1 bundles')
+            % compression
+        )
+
+    # The specification for packed1 can optionally declare the data formats
+    # required to apply it. If we see this metadata, compare against what the
+    # repo supports and error if the bundle isn't compatible.
+    if version == b'packed1' and b'requirements' in params:
+        requirements = set(params[b'requirements'].split(b','))
+        missingreqs = requirements - repo.supportedformats
+        if missingreqs:
+            raise error.UnsupportedBundleSpecification(
+                _(b'missing support for repository features: %s')
+                % b', '.join(sorted(missingreqs))
+            )
+
+    # Compute contentopts based on the version
+    contentopts = _bundlespeccontentopts.get(version, {}).copy()
+
+    # Process the variants
+    if b"stream" in params and params[b"stream"] == b"v2":
+        variant = _bundlespecvariants[b"streamv2"]
+        contentopts.update(variant)
+
+    engine = util.compengines.forbundlename(compression)
+    compression, wirecompression = engine.bundletype()
+    wireversion = _bundlespeccgversions[version]
+
+    return bundlespec(
+        compression, wirecompression, version, wireversion, params, contentopts
+    )
+
+
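
The grammar in the docstring above is easiest to see on a concrete string. A standalone sketch of the same split (URI decoding of parameters is omitted; the spec value is only an example)::

    spec = b'zstd-v2;stream=v2'

    compression, rest = spec.split(b'-', 1)
    if b';' in rest:
        version, paramstr = rest.split(b';', 1)
        params = dict(p.split(b'=', 1) for p in paramstr.split(b';'))
    else:
        version, params = rest, {}

    print(compression, version, params)
    # b'zstd' b'v2' {b'stream': b'v2'}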
+def parseclonebundlesmanifest(repo, s):
+    """Parses the raw text of a clone bundles manifest.
+
+    Returns a list of dicts. The dicts have a ``URL`` key corresponding
+    to the URL; the other keys are the attributes for the entry.
+    """
+    m = []
+    for line in s.splitlines():
+        fields = line.split()
+        if not fields:
+            continue
+        attrs = {b'URL': fields[0]}
+        for rawattr in fields[1:]:
+            key, value = rawattr.split(b'=', 1)
+            key = util.urlreq.unquote(key)
+            value = util.urlreq.unquote(value)
+            attrs[key] = value
+
+            # Parse BUNDLESPEC into components. This makes client-side
+            # preferences easier to specify since you can prefer a single
+            # component of the BUNDLESPEC.
+            if key == b'BUNDLESPEC':
+                try:
+                    bundlespec = parsebundlespec(repo, value)
+                    attrs[b'COMPRESSION'] = bundlespec.compression
+                    attrs[b'VERSION'] = bundlespec.version
+                except error.InvalidBundleSpecification:
+                    pass
+                except error.UnsupportedBundleSpecification:
+                    pass
+
+        m.append(attrs)
+
+    return m
+
+
+def isstreamclonespec(bundlespec):
+    # Stream clone v1
+    if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
+        return True
+
+    # Stream clone v2
+    if (
+        bundlespec.wirecompression == b'UN'
+        and bundlespec.wireversion == b'02'
+        and bundlespec.contentopts.get(b'streamv2')
+    ):
+        return True
+
+    return False
+
+
+def filterclonebundleentries(repo, entries, streamclonerequested=False):
+    """Remove incompatible clone bundle manifest entries.
+
+    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
+    and returns a new list consisting of only the entries that this client
+    should be able to apply.
+
+    There is no guarantee we'll be able to apply all returned entries because
+    the metadata we use to filter on may be missing or wrong.
+    """
+    newentries = []
+    for entry in entries:
+        spec = entry.get(b'BUNDLESPEC')
+        if spec:
+            try:
+                bundlespec = parsebundlespec(repo, spec, strict=True)
+
+                # If a stream clone was requested, filter out non-streamclone
+                # entries.
+                if streamclonerequested and not isstreamclonespec(bundlespec):
+                    repo.ui.debug(
+                        b'filtering %s because not a stream clone\n'
+                        % entry[b'URL']
+                    )
+                    continue
+
+            except error.InvalidBundleSpecification as e:
+                repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
+                continue
+            except error.UnsupportedBundleSpecification as e:
+                repo.ui.debug(
+                    b'filtering %s because unsupported bundle '
+                    b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
+                )
+                continue
+        # If we don't have a spec and requested a stream clone, we don't know
+        # what the entry is so don't attempt to apply it.
+        elif streamclonerequested:
+            repo.ui.debug(
+                b'filtering %s because cannot determine if a stream '
+                b'clone bundle\n' % entry[b'URL']
+            )
+            continue
+
+        if b'REQUIRESNI' in entry and not sslutil.hassni:
+            repo.ui.debug(
+                b'filtering %s because SNI not supported\n' % entry[b'URL']
+            )
+            continue
+
+        if b'REQUIREDRAM' in entry:
+            try:
+                requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
+            except error.ParseError:
+                repo.ui.debug(
+                    b'filtering %s due to a bad REQUIREDRAM attribute\n'
+                    % entry[b'URL']
+                )
+                continue
+            actualram = repo.ui.estimatememory()
+            if actualram is not None and actualram * 0.66 < requiredram:
+                repo.ui.debug(
+                    b'filtering %s as it needs more than 2/3 of system memory\n'
+                    % entry[b'URL']
+                )
+                continue
+
+        newentries.append(entry)
+
+    return newentries
+
+
+class clonebundleentry(object):
+    """Represents an item in a clone bundles manifest.
+
+    This rich class is needed to support sorting since sorted() in Python 3
+    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
+    won't work.
+    """
+
+    def __init__(self, value, prefers):
+        self.value = value
+        self.prefers = prefers
+
+    def _cmp(self, other):
+        for prefkey, prefvalue in self.prefers:
+            avalue = self.value.get(prefkey)
+            bvalue = other.value.get(prefkey)
+
+            # Special case: b is missing the attribute and a matches exactly.
+            if avalue is not None and bvalue is None and avalue == prefvalue:
+                return -1
+
+            # Special case: a is missing the attribute and b matches exactly.
+            if bvalue is not None and avalue is None and bvalue == prefvalue:
+                return 1
+
+            # We can't compare unless attribute present on both.
+            if avalue is None or bvalue is None:
+                continue
+
+            # Same values should fall back to next attribute.
+            if avalue == bvalue:
+                continue
+
+            # Exact matches come first.
+            if avalue == prefvalue:
+                return -1
+            if bvalue == prefvalue:
+                return 1
+
+            # Fall back to next attribute.
+            continue
+
+        # If we got here we couldn't sort by attributes and prefers. Fall
+        # back to index order.
+        return 0
+
+    def __lt__(self, other):
+        return self._cmp(other) < 0
+
+    def __gt__(self, other):
+        return self._cmp(other) > 0
+
+    def __eq__(self, other):
+        return self._cmp(other) == 0
+
+    def __le__(self, other):
+        return self._cmp(other) <= 0
+
+    def __ge__(self, other):
+        return self._cmp(other) >= 0
+
+    def __ne__(self, other):
+        return self._cmp(other) != 0
+
+
+def sortclonebundleentries(ui, entries):
+    prefers = ui.configlist(b'ui', b'clonebundleprefers')
+    if not prefers:
+        return list(entries)
+
+    def _split(p):
+        if b'=' not in p:
+            hint = _(b"each comma separated item should be key=value pairs")
+            raise error.Abort(
+                _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
+            )
+        return p.split(b'=', 1)
+
+    prefers = [_split(p) for p in prefers]
+
+    items = sorted(clonebundleentry(v, prefers) for v in entries)
+    return [i.value for i in items]
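
A hedged end-to-end sketch of the helpers in this new module, run on a hand-written manifest; ``repo`` is assumed to exist and the URLs and attribute values are invented for illustration::

    from mercurial import bundlecaches

    manifest = (
        b'https://example.com/full.zstd.hg BUNDLESPEC=zstd-v2 REQUIREDRAM=2GB\n'
        b'https://example.com/full.gzip.hg BUNDLESPEC=gzip-v1\n'
    )

    entries = bundlecaches.parseclonebundlesmanifest(repo, manifest)
    entries = bundlecaches.filterclonebundleentries(repo, entries)
    # ordering honours e.g. [ui] clonebundleprefers = COMPRESSION=zstd, VERSION=v2
    entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
    for entry in entries:
        print(entry[b'URL'])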
--- a/mercurial/bundlerepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/bundlerepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,11 @@
 import shutil
 
 from .i18n import _
-from .node import nullid, nullrev
+from .node import (
+    hex,
+    nullid,
+    nullrev,
+)
 
 from . import (
     bundle2,
@@ -32,7 +36,6 @@
     localrepo,
     manifest,
     mdiff,
-    node as nodemod,
     pathutil,
     phases,
     pycompat,
@@ -63,11 +66,14 @@
             size = len(delta)
             start = cgunpacker.tell() - size
 
-            link = linkmapper(cs)
             if self.index.has_node(node):
                 # this can happen if two branches make the same change
                 self.bundlerevs.add(self.index.rev(node))
                 continue
+            if cs == node:
+                linkrev = nullrev
+            else:
+                linkrev = linkmapper(cs)
 
             for p in (p1, p2):
                 if not self.index.has_node(p):
@@ -87,7 +93,7 @@
                 size,
                 -1,
                 baserev,
-                link,
+                linkrev,
                 self.rev(p1),
                 self.rev(p2),
                 node,
@@ -325,8 +331,7 @@
         self._cgunpacker = changegroup.getunbundler(version, cgstream, b'UN')
 
     def _writetempbundle(self, readfn, suffix, header=b''):
-        """Write a temporary file to disk
-        """
+        """Write a temporary file to disk"""
         fdtemp, temp = self.vfs.mkstemp(prefix=b"hg-bundle-", suffix=suffix)
         self.tempfile = temp
 
@@ -435,9 +440,9 @@
         p2rev = self.changelog.rev(p2)
         msg = _(b"setting parent to node %s that only exists in the bundle\n")
         if self.changelog.repotiprev < p1rev:
-            self.ui.warn(msg % nodemod.hex(p1))
+            self.ui.warn(msg % hex(p1))
         if self.changelog.repotiprev < p2rev:
-            self.ui.warn(msg % nodemod.hex(p2))
+            self.ui.warn(msg % hex(p2))
         return super(bundlerepository, self).setparents(p1, p2)
 
 
@@ -527,7 +532,7 @@
 def getremotechanges(
     ui, repo, peer, onlyheads=None, bundlename=None, force=False
 ):
-    '''obtains a bundle of changes incoming from peer
+    """obtains a bundle of changes incoming from peer
 
     "onlyheads" restricts the returned changes to those reachable from the
       specified heads.
@@ -545,7 +550,7 @@
     "cleanupfn" must be called without arguments when you're done processing
       the changes; it closes both the original "peer" and the one returned
       here.
-    '''
+    """
     tmp = discovery.findcommonincoming(repo, peer, heads=onlyheads, force=force)
     common, incoming, rheads = tmp
     if not incoming:
@@ -608,7 +613,10 @@
                 with peer.commandexecutor() as e:
                     cg = e.callcommand(
                         b'changegroup',
-                        {b'nodes': incoming, b'source': b'incoming',},
+                        {
+                            b'nodes': incoming,
+                            b'source': b'incoming',
+                        },
                     ).result()
 
                 rheads = None
@@ -652,7 +660,10 @@
 
         with peer.commandexecutor() as e:
             remotephases = e.callcommand(
-                b'listkeys', {b'namespace': b'phases',}
+                b'listkeys',
+                {
+                    b'namespace': b'phases',
+                },
             ).result()
 
         pullop = exchange.pulloperation(bundlerepo, peer, heads=reponodes)
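
Taken together with the docstring above, a hedged sketch of how getremotechanges() is typically driven; ``repo`` is assumed to exist, the URL is a placeholder, and error handling is omitted::

    from mercurial import bundlerepo, hg

    peer = hg.peer(repo.ui, {}, b'https://example.com/repo')
    other, csets, cleanupfn = bundlerepo.getremotechanges(repo.ui, repo, peer)
    try:
        for node in csets:
            print(other[node].description())
    finally:
        # closes both the original peer and the bundle-backed one
        cleanupfn()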
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/base85.pyi	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,6 @@
+from typing import Optional
+
+version: int
+
+def b85encode(text: bytes, pad: Optional[int]) -> bytes: ...
+def b85decode(text: bytes) -> bytes: ...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/bdiff.pyi	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,12 @@
+from typing import (
+    List,
+    Tuple,
+)
+
+version: int
+
+def bdiff(a: bytes, b: bytes) -> bytes: ...
+def blocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]: ...
+def fixws(s: bytes, allws: bool) -> bytes: ...
+def splitnewlines(text: bytes) -> List[bytes]: ...
+def xdiffblocks(a: bytes, b: bytes) -> List[Tuple[int, int, int, int]]: ...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/mpatch.pyi	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,10 @@
+from typing import (
+    List,
+)
+
+version: int
+
+class mpatchError(Exception): ...
+
+def patches(text: bytes, bins: List[bytes]) -> bytes: ...
+def patchedsize(orig: int, data: bytes) -> int: ...
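
These stubs describe the low-level delta primitives; a hedged sketch of how they round-trip, assuming the C extensions are built and importable from ``mercurial.cext``::

    from mercurial.cext import bdiff, mpatch

    old = b'line 1\nline 2\nline 3\n'
    new = b'line 1\nline two\nline 3\nline 4\n'

    delta = bdiff.bdiff(old, new)            # opaque binary delta
    assert mpatch.patches(old, [delta]) == new

    # blocks() reports matching regions as (a1, a2, b1, b2) line ranges
    print(bdiff.blocks(old, new))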
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/osutil.pyi	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,26 @@
+from typing import (
+    AnyStr,
+    IO,
+    List,
+    Sequence,
+)
+
+version: int
+
+class stat:
+    st_dev: int
+    st_mode: int
+    st_nlink: int
+    st_size: int
+    st_mtime: int
+    st_ctime: int
+
+def listdir(path: bytes, st: bool, skip: bool) -> List[stat]: ...
+def posixfile(name: AnyStr, mode: bytes, buffering: int) -> IO: ...
+def statfiles(names: Sequence[bytes]) -> List[stat]: ...
+def recvfds(sockfd: int) -> List[int]: ...
+def setprocname(name: bytes) -> None: ...
+def getfstype(path: bytes) -> bytes: ...
+def getfsmountpoint(path: bytes) -> bytes: ...
+def unblocksignal(sig: int) -> None: ...
+def isgui() -> bool: ...
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/cext/parsers.pyi	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,78 @@
+from typing import (
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+)
+
+version: int
+versionerrortext: str
+
+class dirstatetuple:
+    __doc__: str
+
+    def __len__(self) -> int: ...
+    def __getitem__(self, key: int) -> Union[bytes, int]: ...
+
+# From dirs.c
+
+class dirs:
+    __doc__: str
+    def __init__(self, source, skipchar: bytes): ...
+    def __iter__(self) -> Iterator[bytes]: ...
+    def addpath(self, path: bytes) -> None: ...
+    def delpath(self, path: bytes) -> None: ...
+
+# From manifest.c
+class lazymanifest:
+    def __init__(self, data: bytes): ...
+    def __iter__(self) -> Iterator[bytes]: ...
+
+    def __len__(self) -> int: ...
+    def __getitem__(self, item: bytes) -> Optional[Tuple[bytes, bytes]]: ...
+    def __setitem__(self, key: bytes, value: Tuple[bytes, bytes]) -> None: ...
+    def __delitem__(self, key: bytes) -> None: ...
+
+    def iterkeys(self) -> Iterator[bytes]: ...
+    def iterentries(self) -> Iterator[Tuple[bytes, bytes, bytes]]: ...
+    def copy(self) -> lazymanifest: ...
+    def filtercopy(self, matchfn: Callable[[bytes], bool]) -> lazymanifest: ...
+    def diff(self, other: lazymanifest, clean: Optional[bool]) -> Dict[bytes, Tuple[bytes, Tuple]]: ...
+    def text(self) -> bytes: ...
+
+# From revlog.c
+
+class index:
+    __doc__: str
+
+    nodemap: Dict[bytes, int]
+
+    def ancestors(self, *args: int) -> Iterator[int]: ...
+    def commonancestorsheads(self, *args: int) -> List[int]: ...
+    def clearcaches(self) -> None: ...
+    def get(self, value: bytes) -> Optional[int]: ...
+    def get_rev(self, value: bytes) -> Optional[int]: ...
+    def has_node(self, value: Union[int, bytes]) -> bool: ...
+    def rev(self, node: bytes) -> int: ...
+    def computephasesmapsets(self, root: Dict[int, Set[bytes]]) -> Tuple[int, Dict[int, Set[bytes]]]: ...
+    def reachableroots2(self, minroot: int, heads: List[int], roots: List[int], includepath: bool) -> List[int]: ...
+    def headrevs(self, filteredrevs: Optional[List[int]]) -> List[int]: ...
+    def headrevsfiltered(self, filteredrevs: Optional[List[int]]) -> List[int]: ...
+    def issnapshot(self, value: int) -> bool: ...
+    def findsnapshots(self, cache: Dict[int, List[int]], start_rev: int) -> None: ...
+    def deltachain(self, rev: int, stop: int, generaldelta: bool) -> Tuple[List[int], bool]: ...
+    def slicechunktodensity(self, revs: List[int], targetdensity: float, mingapsize: int) -> List[List[int]]: ...
+    def append(self, value: Tuple[int, int, int, int, int, int, int, bytes]) -> None: ...
+    def partialmatch(self, node: bytes) -> bytes: ...
+    def shortest(self, value: bytes) -> int: ...
+    def stats(self) -> Dict[bytes, int]: ...
+
+class nodetree:
+    __doc__: str
+
+    def insert(self, rev: int) -> None: ...
+    def shortest(self, node: bytes) -> int: ...
--- a/mercurial/cext/revlog.c	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/cext/revlog.c	Tue Jan 19 21:48:43 2021 +0530
@@ -18,6 +18,7 @@
 
 #include "bitmanipulation.h"
 #include "charencode.h"
+#include "compat.h"
 #include "revlog.h"
 #include "util.h"
 
@@ -54,10 +55,11 @@
 typedef struct {
 	indexObject *index;
 	nodetreenode *nodes;
-	unsigned length;   /* # nodes in use */
-	unsigned capacity; /* # nodes allocated */
-	int depth;         /* maximum depth of tree */
-	int splits;        /* # splits performed */
+	Py_ssize_t nodelen;
+	size_t length;   /* # nodes in use */
+	size_t capacity; /* # nodes allocated */
+	int depth;       /* maximum depth of tree */
+	int splits;      /* # splits performed */
 } nodetree;
 
 typedef struct {
@@ -80,12 +82,14 @@
 	PyObject_HEAD
 	    /* Type-specific fields go here. */
 	    PyObject *data;     /* raw bytes of index */
+	Py_ssize_t nodelen;     /* digest size of the hash, 20 for SHA-1 */
+	PyObject *nullentry;    /* fast path for references to null */
 	Py_buffer buf;          /* buffer of data */
-	PyObject **cache;       /* cached tuples */
 	const char **offsets;   /* populated on demand */
-	Py_ssize_t raw_length;  /* original number of elements */
-	Py_ssize_t length;      /* current number of elements */
-	PyObject *added;        /* populated on demand */
+	Py_ssize_t length;      /* current on-disk number of elements */
+	unsigned new_length;    /* number of added elements */
+	unsigned added_length;  /* space reserved for added elements */
+	char *added;            /* populated on demand */
 	PyObject *headrevs;     /* cache, invalidated on changes */
 	PyObject *filteredrevs; /* filtered revs set */
 	nodetree nt;            /* base-16 trie */
@@ -98,19 +102,15 @@
 
 static Py_ssize_t index_length(const indexObject *self)
 {
-	if (self->added == NULL)
-		return self->length;
-	return self->length + PyList_GET_SIZE(self->added);
+	return self->length + self->new_length;
 }
 
-static PyObject *nullentry = NULL;
-static const char nullid[20] = {0};
+static const char nullid[32] = {0};
 static const Py_ssize_t nullrev = -1;
 
 static Py_ssize_t inline_scan(indexObject *self, const char **offsets);
 
-static int index_find_node(indexObject *self, const char *node,
-                           Py_ssize_t nodelen);
+static int index_find_node(indexObject *self, const char *node);
 
 #if LONG_MAX == 0x7fffffffL
 static const char *const tuple_format = PY23("Kiiiiiis#", "Kiiiiiiy#");
@@ -156,11 +156,14 @@
  */
 static const char *index_deref(indexObject *self, Py_ssize_t pos)
 {
+	if (pos >= self->length)
+		return self->added + (pos - self->length) * v1_hdrsize;
+
 	if (self->inlined && pos > 0) {
 		if (self->offsets == NULL) {
 			Py_ssize_t ret;
-			self->offsets = PyMem_Malloc(self->raw_length *
-			                             sizeof(*self->offsets));
+			self->offsets =
+			    PyMem_Malloc(self->length * sizeof(*self->offsets));
 			if (self->offsets == NULL)
 				return (const char *)PyErr_NoMemory();
 			ret = inline_scan(self, self->offsets);
@@ -183,23 +186,11 @@
 static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps,
                                     int maxrev)
 {
-	if (rev >= self->length) {
-		long tmp;
-		PyObject *tuple =
-		    PyList_GET_ITEM(self->added, rev - self->length);
-		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 5), &tmp)) {
-			return -1;
-		}
-		ps[0] = (int)tmp;
-		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 6), &tmp)) {
-			return -1;
-		}
-		ps[1] = (int)tmp;
-	} else {
-		const char *data = index_deref(self, rev);
-		ps[0] = getbe32(data + 24);
-		ps[1] = getbe32(data + 28);
-	}
+	const char *data = index_deref(self, rev);
+
+	ps[0] = getbe32(data + 24);
+	ps[1] = getbe32(data + 28);
+
 	/* If index file is corrupted, ps[] may point to invalid revisions. So
 	 * there is a risk of buffer overflow to trust them unconditionally. */
 	if (ps[0] < -1 || ps[0] > maxrev || ps[1] < -1 || ps[1] > maxrev) {
@@ -238,74 +229,41 @@
 
 static inline int64_t index_get_start(indexObject *self, Py_ssize_t rev)
 {
+	const char *data;
 	uint64_t offset;
-	if (rev == nullrev) {
+
+	if (rev == nullrev)
 		return 0;
-	}
-	if (rev >= self->length) {
-		PyObject *tuple;
-		PyObject *pylong;
-		PY_LONG_LONG tmp;
-		tuple = PyList_GET_ITEM(self->added, rev - self->length);
-		pylong = PyTuple_GET_ITEM(tuple, 0);
-		tmp = PyLong_AsLongLong(pylong);
-		if (tmp == -1 && PyErr_Occurred()) {
-			return -1;
-		}
-		if (tmp < 0) {
-			PyErr_Format(PyExc_OverflowError,
-			             "revlog entry size out of bound (%lld)",
-			             (long long)tmp);
-			return -1;
-		}
-		offset = (uint64_t)tmp;
+
+	data = index_deref(self, rev);
+	offset = getbe32(data + 4);
+	if (rev == 0) {
+		/* mask out version number for the first entry */
+		offset &= 0xFFFF;
 	} else {
-		const char *data = index_deref(self, rev);
-		offset = getbe32(data + 4);
-		if (rev == 0) {
-			/* mask out version number for the first entry */
-			offset &= 0xFFFF;
-		} else {
-			uint32_t offset_high = getbe32(data);
-			offset |= ((uint64_t)offset_high) << 32;
-		}
+		uint32_t offset_high = getbe32(data);
+		offset |= ((uint64_t)offset_high) << 32;
 	}
 	return (int64_t)(offset >> 16);
 }
 
 static inline int index_get_length(indexObject *self, Py_ssize_t rev)
 {
-	if (rev == nullrev) {
+	const char *data;
+	int tmp;
+
+	if (rev == nullrev)
 		return 0;
+
+	data = index_deref(self, rev);
+
+	tmp = (int)getbe32(data + 8);
+	if (tmp < 0) {
+		PyErr_Format(PyExc_OverflowError,
+		             "revlog entry size out of bound (%d)", tmp);
+		return -1;
 	}
-	if (rev >= self->length) {
-		PyObject *tuple;
-		PyObject *pylong;
-		long ret;
-		tuple = PyList_GET_ITEM(self->added, rev - self->length);
-		pylong = PyTuple_GET_ITEM(tuple, 1);
-		ret = PyInt_AsLong(pylong);
-		if (ret == -1 && PyErr_Occurred()) {
-			return -1;
-		}
-		if (ret < 0 || ret > (long)INT_MAX) {
-			PyErr_Format(PyExc_OverflowError,
-			             "revlog entry size out of bound (%ld)",
-			             ret);
-			return -1;
-		}
-		return (int)ret;
-	} else {
-		const char *data = index_deref(self, rev);
-		int tmp = (int)getbe32(data + 8);
-		if (tmp < 0) {
-			PyErr_Format(PyExc_OverflowError,
-			             "revlog entry size out of bound (%d)",
-			             tmp);
-			return -1;
-		}
-		return tmp;
-	}
+	return tmp;
 }
 
 /*
@@ -318,7 +276,7 @@
  *    4 bytes: link revision
  *    4 bytes: parent 1 revision
  *    4 bytes: parent 2 revision
- *   32 bytes: nodeid (only 20 bytes used)
+ *   32 bytes: nodeid (only 20 bytes used with SHA-1)
  */
 static PyObject *index_get(indexObject *self, Py_ssize_t pos)
 {
@@ -327,11 +285,10 @@
 	const char *c_node_id;
 	const char *data;
 	Py_ssize_t length = index_length(self);
-	PyObject *entry;
 
 	if (pos == nullrev) {
-		Py_INCREF(nullentry);
-		return nullentry;
+		Py_INCREF(self->nullentry);
+		return self->nullentry;
 	}
 
 	if (pos < 0 || pos >= length) {
@@ -339,30 +296,16 @@
 		return NULL;
 	}
 
-	if (pos >= self->length) {
-		PyObject *obj;
-		obj = PyList_GET_ITEM(self->added, pos - self->length);
-		Py_INCREF(obj);
-		return obj;
-	}
-
-	if (self->cache) {
-		if (self->cache[pos]) {
-			Py_INCREF(self->cache[pos]);
-			return self->cache[pos];
-		}
-	} else {
-		self->cache = calloc(self->raw_length, sizeof(PyObject *));
-		if (self->cache == NULL)
-			return PyErr_NoMemory();
-	}
-
 	data = index_deref(self, pos);
 	if (data == NULL)
 		return NULL;
 
 	offset_flags = getbe32(data + 4);
-	if (pos == 0) /* mask out version number for the first entry */
+	/*
+	 * The first entry on-disk needs the version number masked out,
+	 * but this doesn't apply if entries are added to an empty index.
+	 */
+	if (self->length && pos == 0)
 		offset_flags &= 0xFFFF;
 	else {
 		uint32_t offset_high = getbe32(data);
@@ -377,22 +320,13 @@
 	parent_2 = getbe32(data + 28);
 	c_node_id = data + 32;
 
-	entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
-	                      base_rev, link_rev, parent_1, parent_2, c_node_id,
-	                      (Py_ssize_t)20);
-
-	if (entry) {
-		PyObject_GC_UnTrack(entry);
-		Py_INCREF(entry);
-	}
-
-	self->cache[pos] = entry;
-
-	return entry;
+	return Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len,
+	                     base_rev, link_rev, parent_1, parent_2, c_node_id,
+	                     self->nodelen);
 }
 
 /*
- * Return the 20-byte SHA of the node corresponding to the given rev.
+ * Return the hash of node corresponding to the given rev.
  */
 static const char *index_node(indexObject *self, Py_ssize_t pos)
 {
@@ -405,19 +339,12 @@
 	if (pos >= length)
 		return NULL;
 
-	if (pos >= self->length) {
-		PyObject *tuple, *str;
-		tuple = PyList_GET_ITEM(self->added, pos - self->length);
-		str = PyTuple_GetItem(tuple, 7);
-		return str ? PyBytes_AS_STRING(str) : NULL;
-	}
-
 	data = index_deref(self, pos);
 	return data ? data + 32 : NULL;
 }
 
 /*
- * Return the 20-byte SHA of the node corresponding to the given rev. The
+ * Return the hash of the node corresponding to the given rev. The
  * rev is assumed to be existing. If not, an exception is set.
  */
 static const char *index_node_existing(indexObject *self, Py_ssize_t pos)
@@ -432,43 +359,62 @@
 
 static int nt_insert(nodetree *self, const char *node, int rev);
 
-static int node_check(PyObject *obj, char **node)
+static int node_check(Py_ssize_t nodelen, PyObject *obj, char **node)
 {
-	Py_ssize_t nodelen;
-	if (PyBytes_AsStringAndSize(obj, node, &nodelen) == -1)
+	Py_ssize_t thisnodelen;
+	if (PyBytes_AsStringAndSize(obj, node, &thisnodelen) == -1)
 		return -1;
-	if (nodelen == 20)
+	if (nodelen == thisnodelen)
 		return 0;
-	PyErr_SetString(PyExc_ValueError, "20-byte hash required");
+	PyErr_Format(PyExc_ValueError, "node len %zd != expected node len %zd",
+	             thisnodelen, nodelen);
 	return -1;
 }
 
 static PyObject *index_append(indexObject *self, PyObject *obj)
 {
-	char *node;
-	Py_ssize_t len;
+	uint64_t offset_flags;
+	int rev, comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
+	Py_ssize_t c_node_id_len;
+	const char *c_node_id;
+	char *data;
 
-	if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
+	if (!PyArg_ParseTuple(obj, tuple_format, &offset_flags, &comp_len,
+	                      &uncomp_len, &base_rev, &link_rev, &parent_1,
+	                      &parent_2, &c_node_id, &c_node_id_len)) {
 		PyErr_SetString(PyExc_TypeError, "8-tuple required");
 		return NULL;
 	}
+	if (c_node_id_len != self->nodelen) {
+		PyErr_SetString(PyExc_TypeError, "invalid node");
+		return NULL;
+	}
 
-	if (node_check(PyTuple_GET_ITEM(obj, 7), &node) == -1)
-		return NULL;
-
-	len = index_length(self);
-
-	if (self->added == NULL) {
-		self->added = PyList_New(0);
-		if (self->added == NULL)
-			return NULL;
+	if (self->new_length == self->added_length) {
+		size_t new_added_length =
+		    self->added_length ? self->added_length * 2 : 4096;
+		void *new_added =
+		    PyMem_Realloc(self->added, new_added_length * v1_hdrsize);
+		if (!new_added)
+			return PyErr_NoMemory();
+		self->added = new_added;
+		self->added_length = new_added_length;
 	}
-
-	if (PyList_Append(self->added, obj) == -1)
-		return NULL;
+	rev = self->length + self->new_length;
+	data = self->added + v1_hdrsize * self->new_length++;
+	putbe32(offset_flags >> 32, data);
+	putbe32(offset_flags & 0xffffffffU, data + 4);
+	putbe32(comp_len, data + 8);
+	putbe32(uncomp_len, data + 12);
+	putbe32(base_rev, data + 16);
+	putbe32(link_rev, data + 20);
+	putbe32(parent_1, data + 24);
+	putbe32(parent_2, data + 28);
+	memcpy(data + 32, c_node_id, c_node_id_len);
+	memset(data + 32 + c_node_id_len, 0, 32 - c_node_id_len);
 
 	if (self->ntinitialized)
-		nt_insert(&self->nt, node, (int)len);
+		nt_insert(&self->nt, c_node_id, rev);
 
 	Py_CLEAR(self->headrevs);
 	Py_RETURN_NONE;
@@ -495,20 +441,8 @@
 		Py_CLEAR(t);                                                   \
 	} while (0)
 
-	if (self->added) {
-		Py_ssize_t len = PyList_GET_SIZE(self->added);
-		s = PyBytes_FromString("index entries added");
-		t = PyInt_FromSsize_t(len);
-		if (!s || !t)
-			goto bail;
-		if (PyDict_SetItem(obj, s, t) == -1)
-			goto bail;
-		Py_CLEAR(s);
-		Py_CLEAR(t);
-	}
-
-	if (self->raw_length != self->length)
-		istat(raw_length, "revs on disk");
+	if (self->added_length)
+		istat(new_length, "index entries added");
 	istat(length, "revs in memory");
 	istat(ntlookups, "node trie lookups");
 	istat(ntmisses, "node trie misses");
@@ -564,7 +498,7 @@
 			return -1;
 		}
 
-		result = PyEval_CallObject(filter, arglist);
+		result = PyObject_Call(filter, arglist, NULL);
 		Py_DECREF(arglist);
 		if (!result) {
 			return -1;
@@ -766,9 +700,9 @@
 	if (iterator == NULL)
 		return -2;
 	while ((item = PyIter_Next(iterator))) {
-		if (node_check(item, &node) == -1)
+		if (node_check(self->nodelen, item, &node) == -1)
 			goto failed;
-		rev = index_find_node(self, node, 20);
+		rev = index_find_node(self, node);
 		/* null is implicitly public, so negative is invalid */
 		if (rev < 0 || rev >= len)
 			goto failed;
@@ -1020,22 +954,11 @@
 	const char *data;
 	int result;
 
-	if (rev >= self->length) {
-		PyObject *tuple =
-		    PyList_GET_ITEM(self->added, rev - self->length);
-		long ret;
-		if (!pylong_to_long(PyTuple_GET_ITEM(tuple, 3), &ret)) {
-			return -2;
-		}
-		result = (int)ret;
-	} else {
-		data = index_deref(self, rev);
-		if (data == NULL) {
-			return -2;
-		}
+	data = index_deref(self, rev);
+	if (data == NULL)
+		return -2;
+	result = getbe32(data + 16);
 
-		result = getbe32(data + 16);
-	}
 	if (result > rev) {
 		PyErr_Format(
 		    PyExc_ValueError,
@@ -1573,13 +1496,17 @@
 	int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
 	int level, maxlevel, off;
 
-	if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
+	/* If the input is binary, do a fast check for the nullid first. */
+	if (!hex && nodelen == self->nodelen && node[0] == '\0' &&
+	    node[1] == '\0' && memcmp(node, nullid, self->nodelen) == 0)
 		return -1;
 
 	if (hex)
-		maxlevel = nodelen > 40 ? 40 : (int)nodelen;
+		maxlevel = nodelen;
 	else
-		maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
+		maxlevel = 2 * nodelen;
+	if (maxlevel > 2 * self->nodelen)
+		maxlevel = 2 * self->nodelen;
 
 	for (level = off = 0; level < maxlevel; level++) {
 		int k = getnybble(node, level);
@@ -1610,10 +1537,10 @@
 static int nt_new(nodetree *self)
 {
 	if (self->length == self->capacity) {
-		unsigned newcapacity;
+		size_t newcapacity;
 		nodetreenode *newnodes;
 		newcapacity = self->capacity * 2;
-		if (newcapacity >= INT_MAX / sizeof(nodetreenode)) {
+		if (newcapacity >= SIZE_MAX / sizeof(nodetreenode)) {
 			PyErr_SetString(PyExc_MemoryError,
 			                "overflow in nt_new");
 			return -1;
@@ -1637,7 +1564,7 @@
 	int level = 0;
 	int off = 0;
 
-	while (level < 40) {
+	while (level < 2 * self->nodelen) {
 		int k = nt_level(node, level);
 		nodetreenode *n;
 		int v;
@@ -1656,7 +1583,7 @@
 
 			if (oldnode == NULL)
 				return -1;
-			if (!memcmp(oldnode, node, 20)) {
+			if (!memcmp(oldnode, node, self->nodelen)) {
 				n->children[k] = -rev - 2;
 				return 0;
 			}
@@ -1714,9 +1641,10 @@
 	/* The input capacity is in terms of revisions, while the field is in
 	 * terms of nodetree nodes. */
 	self->capacity = (capacity < 4 ? 4 : capacity / 2);
+	self->nodelen = index->nodelen;
 	self->depth = 0;
 	self->splits = 0;
-	if ((size_t)self->capacity > INT_MAX / sizeof(nodetreenode)) {
+	if (self->capacity > SIZE_MAX / sizeof(nodetreenode)) {
 		PyErr_SetString(PyExc_ValueError, "overflow in init_nt");
 		return -1;
 	}
@@ -1758,7 +1686,7 @@
 {
 	int level, off;
 
-	for (level = off = 0; level < 40; level++) {
+	for (level = off = 0; level < 2 * self->nodelen; level++) {
 		int k, v;
 		nodetreenode *n = &self->nodes[off];
 		k = nt_level(node, level);
@@ -1769,7 +1697,7 @@
 			n = index_node_existing(self->index, v);
 			if (n == NULL)
 				return -3;
-			if (memcmp(node, n, 20) != 0)
+			if (memcmp(node, n, self->nodelen) != 0)
 				/*
 				 * Found a unique prefix, but it wasn't for the
 				 * requested node (i.e the requested node does
@@ -1799,7 +1727,7 @@
 
 	if (!PyArg_ParseTuple(args, "O", &val))
 		return NULL;
-	if (node_check(val, &node) == -1)
+	if (node_check(self->nt.nodelen, val, &node) == -1)
 		return NULL;
 
 	length = nt_shortest(&self->nt, node);
@@ -1876,7 +1804,7 @@
 static int index_init_nt(indexObject *self)
 {
 	if (!self->ntinitialized) {
-		if (nt_init(&self->nt, self, (int)self->raw_length) == -1) {
+		if (nt_init(&self->nt, self, (int)self->length) == -1) {
 			nt_dealloc(&self->nt);
 			return -1;
 		}
@@ -1899,8 +1827,7 @@
  *   -2: not found (no exception set)
  * rest: valid rev
  */
-static int index_find_node(indexObject *self, const char *node,
-                           Py_ssize_t nodelen)
+static int index_find_node(indexObject *self, const char *node)
 {
 	int rev;
 
@@ -1908,7 +1835,7 @@
 		return -3;
 
 	self->ntlookups++;
-	rev = nt_find(&self->nt, node, nodelen, 0);
+	rev = nt_find(&self->nt, node, self->nodelen, 0);
 	if (rev >= -1)
 		return rev;
 
@@ -1926,7 +1853,7 @@
 			const char *n = index_node_existing(self, rev);
 			if (n == NULL)
 				return -3;
-			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+			if (memcmp(node, n, self->nodelen) == 0) {
 				if (nt_insert(&self->nt, n, rev) == -1)
 					return -3;
 				break;
@@ -1941,7 +1868,7 @@
 				self->ntrev = rev + 1;
 				return -3;
 			}
-			if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
+			if (memcmp(node, n, self->nodelen) == 0) {
 				break;
 			}
 		}
@@ -1966,9 +1893,9 @@
 		return index_get(self, idx);
 	}
 
-	if (node_check(value, &node) == -1)
+	if (node_check(self->nodelen, value, &node) == -1)
 		return NULL;
-	rev = index_find_node(self, node, 20);
+	rev = index_find_node(self, node);
 	if (rev >= -1)
 		return PyInt_FromLong(rev);
 	if (rev == -2)
@@ -2010,7 +1937,7 @@
 		return NULL;
 	}
 
-	if (nodelen > 40) {
+	if (nodelen > 2 * self->nodelen) {
 		PyErr_SetString(PyExc_ValueError, "key too long");
 		return NULL;
 	}
@@ -2036,14 +1963,14 @@
 	case -2:
 		Py_RETURN_NONE;
 	case -1:
-		return PyBytes_FromStringAndSize(nullid, 20);
+		return PyBytes_FromStringAndSize(nullid, self->nodelen);
 	}
 
 	fullnode = index_node_existing(self, rev);
 	if (fullnode == NULL) {
 		return NULL;
 	}
-	return PyBytes_FromStringAndSize(fullnode, 20);
+	return PyBytes_FromStringAndSize(fullnode, self->nodelen);
 }
 
 static PyObject *index_shortest(indexObject *self, PyObject *args)
@@ -2054,7 +1981,7 @@
 
 	if (!PyArg_ParseTuple(args, "O", &val))
 		return NULL;
-	if (node_check(val, &node) == -1)
+	if (node_check(self->nodelen, val, &node) == -1)
 		return NULL;
 
 	self->ntlookups++;
@@ -2080,9 +2007,9 @@
 
 	if (!PyArg_ParseTuple(args, "O", &val))
 		return NULL;
-	if (node_check(val, &node) == -1)
+	if (node_check(self->nodelen, val, &node) == -1)
 		return NULL;
-	rev = index_find_node(self, node, 20);
+	rev = index_find_node(self, node);
 	if (rev == -3)
 		return NULL;
 	if (rev == -2)
@@ -2102,10 +2029,10 @@
 		return rev >= -1 && rev < index_length(self);
 	}
 
-	if (node_check(value, &node) == -1)
+	if (node_check(self->nodelen, value, &node) == -1)
 		return -1;
 
-	switch (index_find_node(self, node, 20)) {
+	switch (index_find_node(self, node)) {
 	case -3:
 		return -1;
 	case -2:
@@ -2128,9 +2055,9 @@
 	char *node;
 	int rev;
 
-	if (node_check(val, &node) == -1)
+	if (node_check(self->nodelen, val, &node) == -1)
 		return NULL;
-	rev = index_find_node(self, node, 20);
+	rev = index_find_node(self, node);
 	if (rev >= -1)
 		return PyInt_FromLong(rev);
 	if (rev == -2)
@@ -2501,17 +2428,17 @@
  */
 static void index_invalidate_added(indexObject *self, Py_ssize_t start)
 {
-	Py_ssize_t i, len = PyList_GET_SIZE(self->added);
-
-	for (i = start; i < len; i++) {
-		PyObject *tuple = PyList_GET_ITEM(self->added, i);
-		PyObject *node = PyTuple_GET_ITEM(tuple, 7);
+	Py_ssize_t i, len;
 
-		nt_delete_node(&self->nt, PyBytes_AS_STRING(node));
-	}
+	len = self->length + self->new_length;
+	i = start - self->length;
+	if (i < 0)
+		return;
 
-	if (start == 0)
-		Py_CLEAR(self->added);
+	for (i = start; i < len; i++)
+		nt_delete_node(&self->nt, index_deref(self, i) + 32);
+
+	self->new_length = start - self->length;
 }
 
 /*
@@ -2569,34 +2496,25 @@
 
 				nt_delete_node(&self->nt, node);
 			}
-			if (self->added)
-				index_invalidate_added(self, 0);
+			if (self->new_length)
+				index_invalidate_added(self, self->length);
 			if (self->ntrev > start)
 				self->ntrev = (int)start;
-		} else if (self->added) {
-			Py_CLEAR(self->added);
+		} else if (self->new_length) {
+			self->new_length = 0;
 		}
 
 		self->length = start;
-		if (start < self->raw_length) {
-			if (self->cache) {
-				Py_ssize_t i;
-				for (i = start; i < self->raw_length; i++)
-					Py_CLEAR(self->cache[i]);
-			}
-			self->raw_length = start;
-		}
 		goto done;
 	}
 
 	if (self->ntinitialized) {
-		index_invalidate_added(self, start - self->length);
+		index_invalidate_added(self, start);
 		if (self->ntrev > start)
 			self->ntrev = (int)start;
+	} else {
+		self->new_length = start - self->length;
 	}
-	if (self->added)
-		ret = PyList_SetSlice(self->added, start - self->length,
-		                      PyList_GET_SIZE(self->added), NULL);
 done:
 	Py_CLEAR(self->headrevs);
 	return ret;
@@ -2618,7 +2536,7 @@
 	if (PySlice_Check(item) && value == NULL)
 		return index_slice_del(self, item);
 
-	if (node_check(item, &node) == -1)
+	if (node_check(self->nodelen, item, &node) == -1)
 		return -1;
 
 	if (value == NULL)
@@ -2675,9 +2593,9 @@
 
 	/* Initialize before argument-checking to avoid index_dealloc() crash.
 	 */
-	self->raw_length = 0;
 	self->added = NULL;
-	self->cache = NULL;
+	self->new_length = 0;
+	self->added_length = 0;
 	self->data = NULL;
 	memset(&self->buf, 0, sizeof(self->buf));
 	self->headrevs = NULL;
@@ -2685,6 +2603,8 @@
 	Py_INCREF(Py_None);
 	self->ntinitialized = 0;
 	self->offsets = NULL;
+	self->nodelen = 20;
+	self->nullentry = NULL;
 
 	if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
 		return -1;
@@ -2693,6 +2613,16 @@
 		                "data does not support buffer interface");
 		return -1;
 	}
+	if (self->nodelen < 20 || self->nodelen > (Py_ssize_t)sizeof(nullid)) {
+		PyErr_SetString(PyExc_RuntimeError, "unsupported node size");
+		return -1;
+	}
+
+	self->nullentry = Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0,
+	                                -1, -1, -1, -1, nullid, self->nodelen);
+	if (!self->nullentry)
+		return -1;
+	PyObject_GC_UnTrack(self->nullentry);
 
 	if (PyObject_GetBuffer(data_obj, &self->buf, PyBUF_SIMPLE) == -1)
 		return -1;
@@ -2709,15 +2639,13 @@
 		Py_ssize_t len = inline_scan(self, NULL);
 		if (len == -1)
 			goto bail;
-		self->raw_length = len;
 		self->length = len;
 	} else {
 		if (size % v1_hdrsize) {
 			PyErr_SetString(PyExc_ValueError, "corrupt index file");
 			goto bail;
 		}
-		self->raw_length = size / v1_hdrsize;
-		self->length = self->raw_length;
+		self->length = size / v1_hdrsize;
 	}
 
 	return 0;
@@ -2733,14 +2661,6 @@
 
 static void _index_clearcaches(indexObject *self)
 {
-	if (self->cache) {
-		Py_ssize_t i;
-
-		for (i = 0; i < self->raw_length; i++)
-			Py_CLEAR(self->cache[i]);
-		free(self->cache);
-		self->cache = NULL;
-	}
 	if (self->offsets) {
 		PyMem_Free((void *)self->offsets);
 		self->offsets = NULL;
@@ -2769,7 +2689,8 @@
 		memset(&self->buf, 0, sizeof(self->buf));
 	}
 	Py_XDECREF(self->data);
-	Py_XDECREF(self->added);
+	PyMem_Free(self->added);
+	Py_XDECREF(self->nullentry);
 	PyObject_Del(self);
 }
 
@@ -2940,14 +2861,6 @@
 	Py_INCREF(&nodetreeType);
 	PyModule_AddObject(mod, "nodetree", (PyObject *)&nodetreeType);
 
-	if (!nullentry) {
-		nullentry =
-		    Py_BuildValue(PY23("iiiiiiis#", "iiiiiiiy#"), 0, 0, 0, -1,
-		                  -1, -1, -1, nullid, (Py_ssize_t)20);
-	}
-	if (nullentry)
-		PyObject_GC_UnTrack(nullentry);
-
 	caps = PyCapsule_New(&CAPI, "mercurial.cext.parsers.revlog_CAPI", NULL);
 	if (caps != NULL)
 		PyModule_AddObject(mod, "revlog_CAPI", caps);
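
A minimal sketch of how the reworked index behaves when driven from Python, assuming the cext module is built; parse_index2() and the 8-tuple entry layout are the existing parsers API, while the node value and sizes below are made up:

    from mercurial.cext import parsers

    # An empty, non-inline index; appended entries now live in a raw C-side
    # byte buffer instead of a Python list of cached tuples.
    index, cache = parsers.parse_index2(b'', False)

    node = b'\x01' * 20                      # made-up 20-byte node id
    entry = (0, 10, 10, 0, 0, -1, -1, node)  # offset/flags, sizes, base, link, p1, p2, node
    index.append(entry)

    assert len(index) == 1
    assert index[0][7] == node               # tuples are rebuilt from the raw bytes on access

    try:
        index.append((0, 1, 1, 1, 1, 0, -1, b'\x02' * 21))
    except TypeError:
        pass                                 # node length is now checked against the index's nodelen
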
--- a/mercurial/changegroup.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/changegroup.py	Tue Jan 19 21:48:43 2021 +0530
@@ -316,20 +316,29 @@
             self.callback = progress.increment
 
             efilesset = set()
+            cgnodes = []
+
+            def ondupchangelog(cl, node):
+                if cl.rev(node) < clstart:
+                    cgnodes.append(node)
 
             def onchangelog(cl, node):
                 efilesset.update(cl.readfiles(node))
 
             self.changelogheader()
             deltas = self.deltaiter()
-            cgnodes = cl.addgroup(deltas, csmap, trp, addrevisioncb=onchangelog)
-            efiles = len(efilesset)
-
-            if not cgnodes:
+            if not cl.addgroup(
+                deltas,
+                csmap,
+                trp,
+                addrevisioncb=onchangelog,
+                duplicaterevisioncb=ondupchangelog,
+            ):
                 repo.ui.develwarn(
                     b'applied empty changelog from changegroup',
                     config=b'warn-empty-changegroup',
                 )
+            efiles = len(efilesset)
             clend = len(cl)
             changesets = clend - clstart
             progress.complete()
@@ -356,7 +365,7 @@
                 for cset in pycompat.xrange(clstart, clend):
                     mfnode = cl.changelogrevision(cset).manifest
                     mfest = ml[mfnode].readdelta()
-                    # store file cgnodes we must see
+                    # store file nodes we must see
                     for f, n in pycompat.iteritems(mfest):
                         needfiles.setdefault(f, set()).add(n)
 
@@ -414,7 +423,7 @@
                     **pycompat.strkwargs(hookargs)
                 )
 
-            added = [cl.node(r) for r in pycompat.xrange(clstart, clend)]
+            added = pycompat.xrange(clstart, clend)
             phaseall = None
             if srctype in (b'push', b'serve'):
                 # Old servers can not push the boundary themselves.
@@ -436,7 +445,8 @@
             if added:
                 phases.registernew(repo, tr, targetphase, added)
             if phaseall is not None:
-                phases.advanceboundary(repo, tr, phaseall, cgnodes)
+                phases.advanceboundary(repo, tr, phaseall, cgnodes, revs=added)
+                cgnodes = []
 
             if changesets > 0:
 
@@ -449,9 +459,9 @@
 
                     repo.hook(b"changegroup", **pycompat.strkwargs(hookargs))
 
-                    for n in added:
+                    for rev in added:
                         args = hookargs.copy()
-                        args[b'node'] = hex(n)
+                        args[b'node'] = hex(cl.node(rev))
                         del args[b'node_last']
                         repo.hook(b"incoming", **pycompat.strkwargs(args))
 
--- a/mercurial/changelog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/changelog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -91,8 +91,8 @@
 
 
 class appender(object):
-    '''the changelog index must be updated last on disk, so we use this class
-    to delay writes to it'''
+    """the changelog index must be updated last on disk, so we use this class
+    to delay writes to it"""
 
     def __init__(self, vfs, name, mode, buf):
         self.data = buf
@@ -601,7 +601,7 @@
 
         This function exists because creating a changectx object
         just to access this is costly."""
-        extra = self.read(rev)[5]
+        extra = self.changelogrevision(rev).extra
         return encoding.tolocal(extra.get(b"branch")), b'close' in extra
 
     def _nodeduplicatecallback(self, transaction, node):
--- a/mercurial/chgserver.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/chgserver.py	Tue Jan 19 21:48:43 2021 +0530
@@ -54,13 +54,13 @@
     getattr,
     setattr,
 )
+from .node import hex
 
 from . import (
     commandserver,
     encoding,
     error,
     extensions,
-    node,
     pycompat,
     util,
 )
@@ -74,7 +74,7 @@
 
 def _hashlist(items):
     """return sha1 hexdigest for a list"""
-    return node.hex(hashutil.sha1(stringutil.pprint(items)).digest())
+    return hex(hashutil.sha1(stringutil.pprint(items)).digest())
 
 
 # sensitive config sections affecting confighash
@@ -409,14 +409,23 @@
             # be unbuffered no matter if it is a tty or not.
             if fn == b'ferr':
                 newfp = fp
+            elif pycompat.ispy3:
+                # On Python 3, the standard library doesn't offer line-buffered
+                # binary streams, so wrap/unwrap it.
+                if fp.isatty():
+                    newfp = procutil.make_line_buffered(fp)
+                else:
+                    newfp = procutil.unwrap_line_buffered(fp)
             else:
-                # make it line buffered explicitly because the default is
-                # decided on first write(), where fout could be a pager.
+                # Python 2 uses the I/O streams provided by the C library, so
+                # make it line-buffered explicitly. Otherwise the default would
+                # be decided on first write(), where fout could be a pager.
                 if fp.isatty():
                     bufsize = 1  # line buffered
                 else:
                     bufsize = -1  # system default
                 newfp = os.fdopen(fp.fileno(), mode, bufsize)
+            if newfp is not fp:
                 setattr(ui, fn, newfp)
             setattr(self, cn, newfp)
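
The Python 3 branch above is needed because, as the comment notes, the standard library offers no line-buffered binary streams; procutil.make_line_buffered()/unwrap_line_buffered() provide the wrapping. Roughly, such a wrapper only has to flush on newlines, along these lines (an illustrative standalone class, not Mercurial's implementation):

    import io

    class LineBufferedBinaryWriter(io.BufferedIOBase):
        """Flush the wrapped binary stream whenever a newline is written."""

        def __init__(self, raw):
            self.raw = raw

        def writable(self):
            return True

        def write(self, data):
            n = self.raw.write(data)
            if b'\n' in data:
                self.raw.flush()
            return n

        def flush(self):
            self.raw.flush()
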
 
@@ -440,13 +449,16 @@
         ui = self.ui
         for (ch, fp, fd), (cn, fn, mode) in zip(self._oldios, _iochannels):
             newfp = getattr(ui, fn)
-            # close newfp while it's associated with client; otherwise it
-            # would be closed when newfp is deleted
-            if newfp is not fp:
+            # On Python 2, newfp and fp may be separate file objects associated
+            # with the same fd, so we must close newfp while it's associated
+            # with the client. Otherwise the new associated fd would be closed
+            # when newfp gets deleted. On Python 3, newfp is just a wrapper
+            # around fp even if newfp is not fp, so deleting newfp is safe.
+            if not (pycompat.ispy3 or newfp is fp):
                 newfp.close()
             # restore original fd: fp is open again
             try:
-                if newfp is fp and 'w' in mode:
+                if (pycompat.ispy3 or newfp is fp) and 'w' in mode:
                     # Discard buffered data which couldn't be flushed because
                     # of EPIPE. The data should belong to the current session
                     # and should never persist.
@@ -491,22 +503,32 @@
         list, the client can continue with this server after completing all
         the instructions.
         """
-        from . import dispatch  # avoid cycle
-
         args = self._readlist()
+        errorraised = False
+        detailed_exit_code = 255
         try:
             self.ui, lui = _loadnewui(self.ui, args, self.cdebug)
-        except error.ParseError as inst:
-            dispatch._formatparse(self.ui.warn, inst)
-            self.ui.flush()
-            self.cresult.write(b'exit 255')
-            return
-        except error.Abort as inst:
-            self.ui.error(_(b"abort: %s\n") % inst.message)
+        except error.RepoError as inst:
+            # RepoError can be raised while trying to read shared source
+            # configuration
+            self.ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
             if inst.hint:
                 self.ui.error(_(b"(%s)\n") % inst.hint)
+            errorraised = True
+        except error.Abort as inst:
+            if isinstance(inst, error.InputError):
+                detailed_exit_code = 10
+            elif isinstance(inst, error.ConfigError):
+                detailed_exit_code = 30
+            self.ui.error(inst.format())
+            errorraised = True
+
+        if errorraised:
             self.ui.flush()
-            self.cresult.write(b'exit 255')
+            exit_code = 255
+            if self.ui.configbool(b'ui', b'detailed-exit-code'):
+                exit_code = detailed_exit_code
+            self.cresult.write(b'exit %d' % exit_code)
             return
         newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
         insts = []
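
The detailed code computed above is only reported when the ui.detailed-exit-code option it consults is enabled, e.g.:

    [ui]
    detailed-exit-code = yes

With that set, chg sessions that fail while loading the new ui exit with 10 for input errors and 30 for config errors instead of the blanket 255.
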
--- a/mercurial/cmdutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/cmdutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -279,7 +279,7 @@
     for x in args:
         if opts.get(x):
             if previous:
-                raise error.Abort(
+                raise error.InputError(
                     _(b'cannot specify both --%s and --%s')
                     % (to_display(previous), to_display(x))
                 )
@@ -332,9 +332,9 @@
         return
 
     if len(note) > 255:
-        raise error.Abort(_(b"cannot store a note of more than 255 bytes"))
+        raise error.InputError(_(b"cannot store a note of more than 255 bytes"))
     if b'\n' in note:
-        raise error.Abort(_(b"note cannot contain a newline"))
+        raise error.InputError(_(b"note cannot contain a newline"))
 
 
 def ishunk(x):
@@ -359,7 +359,17 @@
 
 
 def parsealiases(cmd):
-    return cmd.split(b"|")
+    base_aliases = cmd.split(b"|")
+    all_aliases = set(base_aliases)
+    extra_aliases = []
+    for alias in base_aliases:
+        if b'-' in alias:
+            folded_alias = alias.replace(b'-', b'')
+            if folded_alias not in all_aliases:
+                all_aliases.add(folded_alias)
+                extra_aliases.append(folded_alias)
+    base_aliases.extend(extra_aliases)
+    return base_aliases
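
The folding above lets any command or alias declared with hyphens also be invoked by its squashed spelling, without listing both forms in the table. For illustration, with hypothetical declarations:

    parsealiases(b"purge|clean")
    # -> [b'purge', b'clean']
    parsealiases(b"debug-revlog-index|dri")
    # -> [b'debug-revlog-index', b'dri', b'debugrevlogindex']
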
 
 
 def setupwrapcolorwrite(ui):
@@ -399,7 +409,7 @@
 
 
 def recordfilter(ui, originalhunks, match, operation=None):
-    """ Prompts the user to filter the originalhunks and return a list of
+    """Prompts the user to filter the originalhunks and return a list of
     selected hunks.
     *operation* is used to build ui messages to indicate to the user what
     kind of filtering they are doing: reverting, committing, shelving, etc.
@@ -426,7 +436,7 @@
             msg = _(b'running non-interactively, use %s instead') % cmdsuggest
         else:
             msg = _(b'running non-interactively')
-        raise error.Abort(msg)
+        raise error.InputError(msg)
 
     # make sure username is set before going interactive
     if not opts.get(b'user'):
@@ -451,7 +461,7 @@
         wctx = repo[None]
         merge = len(wctx.parents()) > 1
         if merge:
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b'cannot partially commit a merge '
                     b'(use "hg commit" instead)'
@@ -459,7 +469,7 @@
             )
 
         def fail(f, msg):
-            raise error.Abort(b'%s: %s' % (f, msg))
+            raise error.InputError(b'%s: %s' % (f, msg))
 
         force = opts.get(b'force')
         if not force:
@@ -510,7 +520,7 @@
         try:
             chunks, newopts = filterfn(ui, originalchunks, match)
         except error.PatchError as err:
-            raise error.Abort(_(b'error parsing patch: %s') % err)
+            raise error.InputError(_(b'error parsing patch: %s') % err)
         opts.update(newopts)
 
         # We need to keep a backup of files that have been newly added and
@@ -600,7 +610,7 @@
                     ui.debug(fp.getvalue())
                     patch.internalpatch(ui, repo, fp, 1, eolmode=None)
                 except error.PatchError as err:
-                    raise error.Abort(pycompat.bytestr(err))
+                    raise error.InputError(pycompat.bytestr(err))
             del fp
 
             # 4. We prepared working directory according to filtered
@@ -762,7 +772,7 @@
     # checking the argument validity
     for s in pycompat.bytestr(terseargs):
         if s not in allst:
-            raise error.Abort(_(b"'%s' not recognized") % s)
+            raise error.InputError(_(b"'%s' not recognized") % s)
 
     # creating a dirnode object for the root of the repo
     rootobj = dirnode(b'')
@@ -968,10 +978,10 @@
         bailifchanged(repo)
         revs = scmutil.revrange(repo, revs)
         if not revs:
-            raise error.Abort(b"empty revision set")
+            raise error.InputError(b"empty revision set")
         roots = repo.revs(b'roots(%ld)', revs)
         if len(roots) > 1:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"cannot change branch of non-linear revisions")
             )
         rewriteutil.precheck(repo, revs, b'change branch of')
@@ -983,16 +993,20 @@
             and label not in rpb
             and label in repo.branchmap()
         ):
-            raise error.Abort(_(b"a branch of the same name already exists"))
+            raise error.InputError(
+                _(b"a branch of the same name already exists")
+            )
 
         if repo.revs(b'obsolete() and %ld', revs):
-            raise error.Abort(
+            raise error.InputError(
                 _(b"cannot change branch of an obsolete changeset")
             )
 
         # make sure only topological heads
         if repo.revs(b'heads(%ld) - head()', revs):
-            raise error.Abort(_(b"cannot change branch in middle of a stack"))
+            raise error.InputError(
+                _(b"cannot change branch in middle of a stack")
+            )
 
         replacements = {}
         # avoid import cycle mercurial.cmdutil -> mercurial.context ->
@@ -1074,7 +1088,7 @@
 
 
 def bailifchanged(repo, merge=True, hint=None):
-    """ enforce the precondition that working directory must be clean.
+    """enforce the precondition that working directory must be clean.
 
     'merge' can be set to false if a pending uncommitted merge should be
     ignored (such as when 'update --check' runs).
@@ -1083,10 +1097,10 @@
     """
 
     if merge and repo.dirstate.p2() != nullid:
-        raise error.Abort(_(b'outstanding uncommitted merge'), hint=hint)
+        raise error.StateError(_(b'outstanding uncommitted merge'), hint=hint)
     st = repo.status()
     if st.modified or st.added or st.removed or st.deleted:
-        raise error.Abort(_(b'uncommitted changes'), hint=hint)
+        raise error.StateError(_(b'uncommitted changes'), hint=hint)
     ctx = repo[None]
     for s in sorted(ctx.substate):
         ctx.sub(s).bailifchanged(hint=hint)
@@ -1210,6 +1224,30 @@
     return t.renderdefault(mapping)
 
 
+def format_changeset_summary(ui, ctx, command=None, default_spec=None):
+    """Format a changeset summary (one line)."""
+    spec = None
+    if command:
+        spec = ui.config(
+            b'command-templates', b'oneline-summary.%s' % command, None
+        )
+    if not spec:
+        spec = ui.config(b'command-templates', b'oneline-summary')
+    if not spec:
+        spec = default_spec
+    if not spec:
+        spec = (
+            b'{separate(" ", '
+            b'label("oneline-summary.changeset", "{rev}:{node|short}")'
+            b', '
+            b'join(filter(namespaces % "{ifeq(namespace, "branches", "", join(names % "{label("oneline-summary.{namespace}", name)}", " "))}"), " ")'
+            b')} '
+            b'"{label("oneline-summary.desc", desc|firstline)}"'
+        )
+    text = rendertemplate(ctx, spec)
+    return text.split(b'\n')[0]
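
The lookup above prefers a per-command template, then the generic key, then the caller-supplied default, and finally the built-in spec. In hgrc terms (the command name and template bodies here are only examples):

    [command-templates]
    oneline-summary.rebase = {rev}:{node|short} {desc|firstline}
    oneline-summary = {rev} {desc|firstline}
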
+
+
 def _buildfntemplate(pat, total=None, seqno=None, revwidth=None, pathname=None):
     r"""Convert old-style filename format string to template string
 
@@ -1349,7 +1387,7 @@
                 b'without a repository'
             )
     if msg:
-        raise error.Abort(msg)
+        raise error.InputError(msg)
 
     r = None
     if repo:
@@ -1357,7 +1395,7 @@
             r = repo.unfiltered().changelog
         elif dir:
             if not scmutil.istreemanifest(repo):
-                raise error.Abort(
+                raise error.InputError(
                     _(
                         b"--dir can only be used on repos with "
                         b"treemanifest enabled"
@@ -1383,16 +1421,18 @@
             elif util.safehasattr(r, b'_revlog'):
                 r = r._revlog  # pytype: disable=attribute-error
             elif r is not None:
-                raise error.Abort(_(b'%r does not appear to be a revlog') % r)
+                raise error.InputError(
+                    _(b'%r does not appear to be a revlog') % r
+                )
 
     if not r:
         if not returnrevlog:
-            raise error.Abort(_(b'cannot give path to non-revlog'))
+            raise error.InputError(_(b'cannot give path to non-revlog'))
 
         if not file_:
             raise error.CommandError(cmd, _(b'invalid arguments'))
         if not os.path.isfile(file_):
-            raise error.Abort(_(b"revlog '%s' not found") % file_)
+            raise error.InputError(_(b"revlog '%s' not found") % file_)
         r = revlog.revlog(
             vfsmod.vfs(encoding.getcwd(), audit=False), file_[:-2] + b".i"
         )
@@ -1429,10 +1469,12 @@
         if not forget and not after:
             # TODO: Remove this restriction and make it also create the copy
             #       targets (and remove the rename source if rename==True).
-            raise error.Abort(_(b'--at-rev requires --after'))
+            raise error.InputError(_(b'--at-rev requires --after'))
         ctx = scmutil.revsingle(repo, rev)
         if len(ctx.parents()) > 1:
-            raise error.Abort(_(b'cannot mark/unmark copy in merge commit'))
+            raise error.InputError(
+                _(b'cannot mark/unmark copy in merge commit')
+            )
     else:
         ctx = repo[None]
 
@@ -1445,7 +1487,7 @@
             new_ctx = ctx
         else:
             if len(ctx.parents()) > 1:
-                raise error.Abort(_(b'cannot unmark copy in merge commit'))
+                raise error.InputError(_(b'cannot unmark copy in merge commit'))
             # avoid cycle context -> subrepo -> cmdutil
             from . import context
 
@@ -1488,9 +1530,9 @@
 
     pats = scmutil.expandpats(pats)
     if not pats:
-        raise error.Abort(_(b'no source or destination specified'))
+        raise error.InputError(_(b'no source or destination specified'))
     if len(pats) == 1:
-        raise error.Abort(_(b'no destination specified'))
+        raise error.InputError(_(b'no destination specified'))
     dest = pats.pop()
 
     def walkpat(pat):
@@ -1530,12 +1572,12 @@
         rewriteutil.precheck(repo, [ctx.rev()], b'uncopy')
         absdest = pathutil.canonpath(repo.root, cwd, dest)
         if ctx.hasdir(absdest):
-            raise error.Abort(
+            raise error.InputError(
                 _(b'%s: --at-rev does not support a directory as destination')
                 % uipathfn(absdest)
             )
         if absdest not in ctx:
-            raise error.Abort(
+            raise error.InputError(
                 _(b'%s: copy destination does not exist in %s')
                 % (uipathfn(absdest), ctx)
             )
@@ -1552,12 +1594,12 @@
                 copylist.append(abs)
 
         if not copylist:
-            raise error.Abort(_(b'no files to copy'))
+            raise error.InputError(_(b'no files to copy'))
         # TODO: Add support for `hg cp --at-rev . foo bar dir` and
         # `hg cp --at-rev . dir1 dir2`, preferably unifying the code with the
         # existing functions below.
         if len(copylist) != 1:
-            raise error.Abort(_(b'--at-rev requires a single source'))
+            raise error.InputError(_(b'--at-rev requires a single source'))
 
         new_ctx = context.overlayworkingctx(repo)
         new_ctx.setbase(ctx.p1())
@@ -1785,14 +1827,16 @@
     destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
     if not destdirexists:
         if len(pats) > 1 or matchmod.patkind(pats[0]):
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b'with multiple sources, destination must be an '
                     b'existing directory'
                 )
             )
         if util.endswithsep(dest):
-            raise error.Abort(_(b'destination %s is not a directory') % dest)
+            raise error.InputError(
+                _(b'destination %s is not a directory') % dest
+            )
 
     tfn = targetpathfn
     if after:
@@ -1804,7 +1848,7 @@
             continue
         copylist.append((tfn(pat, dest, srcs), srcs))
     if not copylist:
-        raise error.Abort(_(b'no files to copy'))
+        raise error.InputError(_(b'no files to copy'))
 
     errors = 0
     for targetpath, srcs in copylist:
@@ -1895,7 +1939,7 @@
         parents.append(repo[nullid])
     if opts.get(b'exact'):
         if not nodeid or not p1:
-            raise error.Abort(_(b'not a Mercurial patch'))
+            raise error.InputError(_(b'not a Mercurial patch'))
         p1 = repo[p1]
         p2 = repo[p2 or nullid]
     elif p2:
@@ -2150,7 +2194,7 @@
     opts=None,
     match=None,
 ):
-    '''export changesets as hg patches
+    """export changesets as hg patches
 
     Args:
       repo: The repository from which we're exporting revisions.
@@ -2171,7 +2215,7 @@
         fntemplate specified: Each rev is written to a unique file named using
                             the given template.
         Otherwise: All revs will be written to basefm.
-    '''
+    """
     _prefetchchangedfiles(repo, revs, match)
 
     if not fntemplate:
@@ -2231,7 +2275,7 @@
     try:
         rev = mrevs.max()
     except ValueError:
-        raise error.Abort(_(b"revision matching date not found"))
+        raise error.InputError(_(b"revision matching date not found"))
 
     ui.status(
         _(b"found revision %d from %s\n")
@@ -2314,7 +2358,9 @@
     ui, repo, match, prefix, uipathfn, explicitonly, dryrun, interactive
 ):
     if dryrun and interactive:
-        raise error.Abort(_(b"cannot specify both --dry-run and --interactive"))
+        raise error.InputError(
+            _(b"cannot specify both --dry-run and --interactive")
+        )
     bad = []
     badfn = lambda x, y: bad.append(x) or match.bad(x, y)
     wctx = repo[None]
@@ -3026,9 +3072,9 @@
     if finishdesc:
         text = finishdesc(text)
     if not text.strip():
-        raise error.Abort(_(b"empty commit message"))
+        raise error.InputError(_(b"empty commit message"))
     if unchangedmessagedetection and editortext == templatetext:
-        raise error.Abort(_(b"commit message unchanged"))
+        raise error.InputError(_(b"commit message unchanged"))
 
     return text
 
@@ -3089,13 +3135,18 @@
     return b"\n".join(edittext)
 
 
-def commitstatus(repo, node, branch, bheads=None, opts=None):
+def commitstatus(repo, node, branch, bheads=None, tip=None, opts=None):
     if opts is None:
         opts = {}
     ctx = repo[node]
     parents = ctx.parents()
 
-    if (
+    if tip is not None and repo.changelog.tip() == tip:
+        # avoid reporting something like "committed new head" when
+        # recommitting old changesets, and issue a helpful warning
+        # for most instances
+        repo.ui.warn(_(b"warning: commit already existed in the repository!\n"))
+    elif (
         not opts.get(b'amend')
         and bheads
         and node not in bheads
@@ -3435,7 +3486,8 @@
                 repo, [f for sublist in oplist for f in sublist]
             )
             prefetch(
-                repo, [(ctx.rev(), matchfiles)],
+                repo,
+                [(ctx.rev(), matchfiles)],
             )
             match = scmutil.match(repo[None], pats)
             _performrevert(
@@ -3683,10 +3735,10 @@
 
 
 def checkunfinished(repo, commit=False, skipmerge=False):
-    '''Look for an unfinished multistep operation, like graft, and abort
+    """Look for an unfinished multistep operation, like graft, and abort
     if found. It's probably good to check this right before
     bailifchanged().
-    '''
+    """
     # Check for non-clearable states first, so things like rebase will take
     # precedence over update.
     for state in statemod._unfinishedstates:
@@ -3697,7 +3749,7 @@
         ):
             continue
         if state.isunfinished(repo):
-            raise error.Abort(state.msg(), hint=state.hint())
+            raise error.StateError(state.msg(), hint=state.hint())
 
     for s in statemod._unfinishedstates:
         if (
@@ -3708,18 +3760,18 @@
         ):
             continue
         if s.isunfinished(repo):
-            raise error.Abort(s.msg(), hint=s.hint())
+            raise error.StateError(s.msg(), hint=s.hint())
 
 
 def clearunfinished(repo):
-    '''Check for unfinished operations (as above), and clear the ones
+    """Check for unfinished operations (as above), and clear the ones
     that are clearable.
-    '''
+    """
     for state in statemod._unfinishedstates:
         if state._reportonly:
             continue
         if not state._clearable and state.isunfinished(repo):
-            raise error.Abort(state.msg(), hint=state.hint())
+            raise error.StateError(state.msg(), hint=state.hint())
 
     for s in statemod._unfinishedstates:
         if s._opname == b'merge' or state._reportonly:
@@ -3729,8 +3781,8 @@
 
 
 def getunfinishedstate(repo):
-    ''' Checks for unfinished operations and returns statecheck object
-        for it'''
+    """Checks for unfinished operations and returns statecheck object
+    for it"""
     for state in statemod._unfinishedstates:
         if state.isunfinished(repo):
             return state
@@ -3738,7 +3790,7 @@
 
 
 def howtocontinue(repo):
-    '''Check for an unfinished operation and return the command to finish
+    """Check for an unfinished operation and return the command to finish
     it.
 
     statemod._unfinishedstates list is checked for an unfinished operation
@@ -3747,7 +3799,7 @@
 
     Returns a (msg, warning) tuple. 'msg' is a string and 'warning' is
     a boolean.
-    '''
+    """
     contmsg = _(b"continue: %s")
     for state in statemod._unfinishedstates:
         if not state._continueflag:
@@ -3760,13 +3812,13 @@
 
 
 def checkafterresolved(repo):
-    '''Inform the user about the next action after completing hg resolve
+    """Inform the user about the next action after completing hg resolve
 
     If there's a an unfinished operation that supports continue flag,
     howtocontinue will yield repo.ui.warn as the reporter.
 
     Otherwise, it will yield repo.ui.note.
-    '''
+    """
     msg, warning = howtocontinue(repo)
     if msg is not None:
         if warning:
@@ -3776,26 +3828,26 @@
 
 
 def wrongtooltocontinue(repo, task):
-    '''Raise an abort suggesting how to properly continue if there is an
+    """Raise an abort suggesting how to properly continue if there is an
     active task.
 
     Uses howtocontinue() to find the active task.
 
     If there's no task (repo.ui.note for 'hg commit'), it does not offer
     a hint.
-    '''
+    """
     after = howtocontinue(repo)
     hint = None
     if after[1]:
         hint = after[0]
-    raise error.Abort(_(b'no %s in progress') % task, hint=hint)
+    raise error.StateError(_(b'no %s in progress') % task, hint=hint)
 
 
 def abortgraft(ui, repo, graftstate):
     """abort the interrupted graft and rollbacks to the state before interrupted
     graft"""
     if not graftstate.exists():
-        raise error.Abort(_(b"no interrupted graft to abort"))
+        raise error.StateError(_(b"no interrupted graft to abort"))
     statedata = readgraftstate(repo, graftstate)
     newnodes = statedata.get(b'newnodes')
     if newnodes is None:
--- a/mercurial/commands.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/commands.py	Tue Jan 19 21:48:43 2021 +0530
@@ -26,6 +26,7 @@
     archival,
     bookmarks,
     bundle2,
+    bundlecaches,
     changegroup,
     cmdutil,
     copies,
@@ -186,9 +187,9 @@
     dryrun = opts.get('dry_run')
     abortstate = cmdutil.getunfinishedstate(repo)
     if not abortstate:
-        raise error.Abort(_(b'no operation in progress'))
+        raise error.StateError(_(b'no operation in progress'))
     if not abortstate.abortfunc:
-        raise error.Abort(
+        raise error.InputError(
             (
                 _(b"%s in progress but does not support 'hg abort'")
                 % (abortstate._opname)
@@ -417,7 +418,9 @@
     """
     opts = pycompat.byteskwargs(opts)
     if not pats:
-        raise error.Abort(_(b'at least one filename or pattern is required'))
+        raise error.InputError(
+            _(b'at least one filename or pattern is required')
+        )
 
     if opts.get(b'follow'):
         # --follow is deprecated and now just an alias for -f/--file
@@ -438,7 +441,7 @@
         and (not opts.get(b'changeset'))
         and (not opts.get(b'number'))
     ):
-        raise error.Abort(_(b'at least one of -n/-c is required for -l'))
+        raise error.InputError(_(b'at least one of -n/-c is required for -l'))
 
     rev = opts.get(b'rev')
     if rev:
@@ -602,7 +605,7 @@
     helpcategory=command.CATEGORY_IMPORT_EXPORT,
 )
 def archive(ui, repo, dest, **opts):
-    '''create an unversioned archive of a repository revision
+    """create an unversioned archive of a repository revision
 
     By default, the revision used is the parent of the working
     directory; use -r/--rev to specify a different revision.
@@ -641,7 +644,7 @@
     removed.
 
     Returns 0 on success.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
     rev = opts.get(b'rev')
@@ -649,18 +652,20 @@
         repo = scmutil.unhidehashlikerevs(repo, [rev], b'nowarn')
     ctx = scmutil.revsingle(repo, rev)
     if not ctx:
-        raise error.Abort(_(b'no working directory: please specify a revision'))
+        raise error.InputError(
+            _(b'no working directory: please specify a revision')
+        )
     node = ctx.node()
     dest = cmdutil.makefilename(ctx, dest)
     if os.path.realpath(dest) == repo.root:
-        raise error.Abort(_(b'repository root cannot be destination'))
+        raise error.InputError(_(b'repository root cannot be destination'))
 
     kind = opts.get(b'type') or archival.guesskind(dest) or b'files'
     prefix = opts.get(b'prefix')
 
     if dest == b'-':
         if kind == b'files':
-            raise error.Abort(_(b'cannot archive plain files to stdout'))
+            raise error.InputError(_(b'cannot archive plain files to stdout'))
         dest = cmdutil.makefileobj(ctx, dest)
         if not prefix:
             prefix = os.path.basename(repo.root) + b'-%h'
@@ -713,7 +718,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def backout(ui, repo, node=None, rev=None, **opts):
-    '''reverse effect of earlier changeset
+    """reverse effect of earlier changeset
 
     Prepare a new changeset with the effect of REV undone in the
     current working directory. If no conflicts were encountered,
@@ -763,7 +768,7 @@
 
     Returns 0 on success, 1 if nothing to backout or there are unresolved
     files.
-    '''
+    """
     with repo.wlock(), repo.lock():
         return _dobackout(ui, repo, node, rev, **opts)
 
@@ -773,13 +778,13 @@
     opts = pycompat.byteskwargs(opts)
 
     if rev and node:
-        raise error.Abort(_(b"please specify just one revision"))
+        raise error.InputError(_(b"please specify just one revision"))
 
     if not rev:
         rev = node
 
     if not rev:
-        raise error.Abort(_(b"please specify a revision to backout"))
+        raise error.InputError(_(b"please specify a revision to backout"))
 
     date = opts.get(b'date')
     if date:
@@ -792,23 +797,27 @@
 
     op1, op2 = repo.dirstate.parents()
     if not repo.changelog.isancestor(node, op1):
-        raise error.Abort(_(b'cannot backout change that is not an ancestor'))
+        raise error.InputError(
+            _(b'cannot backout change that is not an ancestor')
+        )
 
     p1, p2 = repo.changelog.parents(node)
     if p1 == nullid:
-        raise error.Abort(_(b'cannot backout a change with no parents'))
+        raise error.InputError(_(b'cannot backout a change with no parents'))
     if p2 != nullid:
         if not opts.get(b'parent'):
-            raise error.Abort(_(b'cannot backout a merge changeset'))
+            raise error.InputError(_(b'cannot backout a merge changeset'))
         p = repo.lookup(opts[b'parent'])
         if p not in (p1, p2):
-            raise error.Abort(
+            raise error.InputError(
                 _(b'%s is not a parent of %s') % (short(p), short(node))
             )
         parent = p
     else:
         if opts.get(b'parent'):
-            raise error.Abort(_(b'cannot use --parent on non-merge changeset'))
+            raise error.InputError(
+                _(b'cannot use --parent on non-merge changeset')
+            )
         parent = p1
 
     # the backout should appear on the same branch
@@ -850,11 +859,14 @@
             message, opts.get(b'user'), opts.get(b'date'), match, editor=e
         )
 
+    # save to detect changes
+    tip = repo.changelog.tip()
+
     newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
     if not newnode:
         ui.status(_(b"nothing changed\n"))
         return 1
-    cmdutil.commitstatus(repo, newnode, branch, bheads)
+    cmdutil.commitstatus(repo, newnode, branch, bheads, tip)
 
     def nice(node):
         return b'%d:%s' % (repo.changelog.rev(node), short(node))
@@ -895,8 +907,8 @@
 def bisect(
     ui,
     repo,
-    rev=None,
-    extra=None,
+    positional_1=None,
+    positional_2=None,
     command=None,
     reset=None,
     good=None,
@@ -986,18 +998,22 @@
 
     Returns 0 on success.
     """
+    rev = []
     # backward compatibility
-    if rev in b"good bad reset init".split():
+    if positional_1 in (b"good", b"bad", b"reset", b"init"):
         ui.warn(_(b"(use of 'hg bisect <cmd>' is deprecated)\n"))
-        cmd, rev, extra = rev, extra, None
+        cmd = positional_1
+        rev.append(positional_2)
         if cmd == b"good":
             good = True
         elif cmd == b"bad":
             bad = True
         else:
             reset = True
-    elif extra:
-        raise error.Abort(_(b'incompatible arguments'))
+    elif positional_2:
+        raise error.InputError(_(b'incompatible arguments'))
+    elif positional_1 is not None:
+        rev.append(positional_1)
 
     incompatibles = {
         b'--bad': bad,
@@ -1011,7 +1027,7 @@
     enabled = [x for x in incompatibles if incompatibles[x]]
 
     if len(enabled) > 1:
-        raise error.Abort(
+        raise error.InputError(
             _(b'%s and %s are incompatible') % tuple(sorted(enabled)[0:2])
         )
 
@@ -1021,12 +1037,13 @@
 
     state = hbisect.load_state(repo)
 
+    if rev:
+        nodes = [repo[i].node() for i in scmutil.revrange(repo, rev)]
+    else:
+        nodes = [repo.lookup(b'.')]
+
     # update state
     if good or bad or skip:
-        if rev:
-            nodes = [repo[i].node() for i in scmutil.revrange(repo, [rev])]
-        else:
-            nodes = [repo.lookup(b'.')]
         if good:
             state[b'good'] += nodes
         elif bad:
@@ -1053,7 +1070,7 @@
             try:
                 node = state[b'current'][0]
             except LookupError:
-                raise error.Abort(
+                raise error.StateError(
                     _(
                         b'current bisect revision is unknown - '
                         b'start a new bisect to fix'
@@ -1062,9 +1079,11 @@
         else:
             node, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise error.Abort(_(b'current bisect revision is a merge'))
+                raise error.StateError(_(b'current bisect revision is a merge'))
         if rev:
-            node = repo[scmutil.revsingle(repo, rev, node)].node()
+            if not nodes:
+                raise error.Abort(_(b'empty revision set'))
+            node = repo[nodes[-1]].node()
         with hbisect.restore_state(repo, state, node):
             while changesets:
                 # update state
@@ -1115,7 +1134,7 @@
                 state[b'current'] = [extendnode.node()]
                 hbisect.save_state(repo, state)
                 return mayupdate(repo, extendnode.node())
-        raise error.Abort(_(b"nothing to extend"))
+        raise error.StateError(_(b"nothing to extend"))
 
     if changesets == 0:
         hbisect.printresult(ui, repo, state, displayer, nodes, good)
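
Because the good/bad/skip arguments are resolved through scmutil.revrange() above, a single argument may be a revset that flags several changesets in one step, for instance (illustrative invocation):

    $ hg bisect --skip "merge()"
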
@@ -1154,7 +1173,7 @@
     helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
 )
 def bookmark(ui, repo, *names, **opts):
-    '''create a new bookmark or list existing bookmarks
+    """create a new bookmark or list existing bookmarks
 
     Bookmarks are labels on changesets to help track lines of development.
     Bookmarks are unversioned and can be moved, renamed and deleted.
@@ -1212,7 +1231,7 @@
       - print only the active bookmark name::
 
           hg book -ql .
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     force = opts.get(b'force')
     rev = opts.get(b'rev')
@@ -1232,7 +1251,7 @@
         opts, b'inactive', [b'delete', b'list']
     )
     if not names and action in {b'add', b'delete'}:
-        raise error.Abort(_(b"bookmark name required"))
+        raise error.InputError(_(b"bookmark name required"))
 
     if action in {b'add', b'delete', b'rename', b'inactive'}:
         with repo.wlock(), repo.lock(), repo.transaction(b'bookmark') as tr:
@@ -1241,9 +1260,11 @@
                 bookmarks.delete(repo, tr, names)
             elif action == b'rename':
                 if not names:
-                    raise error.Abort(_(b"new bookmark name required"))
+                    raise error.InputError(_(b"new bookmark name required"))
                 elif len(names) > 1:
-                    raise error.Abort(_(b"only one new bookmark name allowed"))
+                    raise error.InputError(
+                        _(b"only one new bookmark name allowed")
+                    )
                 oldname = repo._bookmarks.expandname(opts[b'rename'])
                 bookmarks.rename(repo, tr, oldname, names[0], force, inactive)
             elif action == b'add':
@@ -1319,7 +1340,9 @@
 
     if not opts.get(b'clean') and not label:
         if revs:
-            raise error.Abort(_(b"no branch name specified for the revisions"))
+            raise error.InputError(
+                _(b"no branch name specified for the revisions")
+            )
         ui.write(b"%s\n" % repo.dirstate.branch())
         return
 
@@ -1336,7 +1359,7 @@
 
             if not opts.get(b'force') and label in repo.branchmap():
                 if label not in [p.branch() for p in repo[None].parents()]:
-                    raise error.Abort(
+                    raise error.InputError(
                         _(b'a branch of the same name already exists'),
                         # i18n: "it" refers to an existing branch
                         hint=_(b"use 'hg update' to switch to it"),
@@ -1538,13 +1561,15 @@
         revstrings = opts[b'rev']
         revs = scmutil.revrange(repo, revstrings)
         if revstrings and not revs:
-            raise error.Abort(_(b'no commits to bundle'))
+            raise error.InputError(_(b'no commits to bundle'))
 
     bundletype = opts.get(b'type', b'bzip2').lower()
     try:
-        bundlespec = exchange.parsebundlespec(repo, bundletype, strict=False)
+        bundlespec = bundlecaches.parsebundlespec(
+            repo, bundletype, strict=False
+        )
     except error.UnsupportedBundleSpecification as e:
-        raise error.Abort(
+        raise error.InputError(
             pycompat.bytestr(e),
             hint=_(b"see 'hg help bundlespec' for supported values for --type"),
         )
@@ -1552,14 +1577,14 @@
 
     # Packed bundles are a pseudo bundle format for now.
     if cgversion == b's1':
-        raise error.Abort(
+        raise error.InputError(
             _(b'packed bundles cannot be produced by "hg bundle"'),
             hint=_(b"use 'hg debugcreatestreamclonebundle'"),
         )
 
     if opts.get(b'all'):
         if dest:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"--all is incompatible with specifying a destination")
             )
         if opts.get(b'base'):
@@ -1574,7 +1599,7 @@
 
     if base:
         if dest:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"--base is incompatible with specifying a destination")
             )
         common = [repo[rev].node() for rev in base]
@@ -1623,12 +1648,17 @@
     if complevel is not None:
         compopts[b'level'] = complevel
 
-    # Allow overriding the bundling of obsmarker in phases through
-    # configuration while we don't have a bundle version that include them
-    if repo.ui.configbool(b'experimental', b'evolution.bundle-obsmarker'):
-        bundlespec.contentopts[b'obsolescence'] = True
-    if repo.ui.configbool(b'experimental', b'bundle-phases'):
-        bundlespec.contentopts[b'phases'] = True
+    # Bundling of obsmarker and phases is optional as not all clients
+    # support the necessary features.
+    cfg = ui.configbool
+    contentopts = {
+        b'obsolescence': cfg(b'experimental', b'evolution.bundle-obsmarker'),
+        b'obsolescence-mandatory': cfg(
+            b'experimental', b'evolution.bundle-obsmarker:mandatory'
+        ),
+        b'phases': cfg(b'experimental', b'bundle-phases'),
+    }
+    bundlespec.contentopts.update(contentopts)
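
The three knobs consulted above correspond to the following hgrc settings (values shown purely as an example); obsolescence markers and phase information are only bundled when the matching option is enabled:

    [experimental]
    evolution.bundle-obsmarker = yes
    evolution.bundle-obsmarker:mandatory = no
    bundle-phases = yes
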
 
     bundle2.writenewbundle(
         ui,
@@ -2013,8 +2043,7 @@
 
     opts = pycompat.byteskwargs(opts)
     if opts.get(b'subrepos'):
-        if opts.get(b'amend'):
-            raise error.Abort(_(b'cannot amend with --subrepos'))
+        cmdutil.check_incompatible_arguments(opts, b'subrepos', [b'amend'])
         # Let --subrepos on the command line override config setting.
         ui.setconfig(b'ui', b'commitsubrepos', True, b'commit')
 
@@ -2022,17 +2051,20 @@
 
     branch = repo[None].branch()
     bheads = repo.branchheads(branch)
+    tip = repo.changelog.tip()
 
     extra = {}
     if opts.get(b'close_branch') or opts.get(b'force_close_branch'):
         extra[b'close'] = b'1'
 
         if repo[b'.'].closesbranch():
-            raise error.Abort(
+            raise error.InputError(
                 _(b'current revision is already a branch closing head')
             )
         elif not bheads:
-            raise error.Abort(_(b'branch "%s" has no heads to close') % branch)
+            raise error.InputError(
+                _(b'branch "%s" has no heads to close') % branch
+            )
         elif (
             branch == repo[b'.'].branch()
             and repo[b'.'].node() not in bheads
@@ -2042,17 +2074,19 @@
                 b'use --force-close-branch to close branch from a non-head'
                 b' changeset'
             )
-            raise error.Abort(_(b'can only close branch heads'), hint=hint)
+            raise error.InputError(_(b'can only close branch heads'), hint=hint)
         elif opts.get(b'amend'):
             if (
                 repo[b'.'].p1().branch() != branch
                 and repo[b'.'].p2().branch() != branch
             ):
-                raise error.Abort(_(b'can only close branch heads'))
+                raise error.InputError(_(b'can only close branch heads'))
 
     if opts.get(b'amend'):
         if ui.configbool(b'ui', b'commitsubrepos'):
-            raise error.Abort(_(b'cannot amend with ui.commitsubrepos enabled'))
+            raise error.InputError(
+                _(b'cannot amend with ui.commitsubrepos enabled')
+            )
 
         old = repo[b'.']
         rewriteutil.precheck(repo, [old.rev()], b'amend')
@@ -2111,7 +2145,7 @@
                 ui.status(_(b"nothing changed\n"))
             return 1
 
-    cmdutil.commitstatus(repo, node, branch, bheads, opts)
+    cmdutil.commitstatus(repo, node, branch, bheads, tip, opts)
 
     if not ui.quiet and ui.configbool(b'commands', b'commit.post-status'):
         status(
@@ -2138,6 +2172,7 @@
             None,
             _(b'edit shared source repository config (EXPERIMENTAL)'),
         ),
+        (b'', b'non-shared', None, _(b'edit non shared config (EXPERIMENTAL)')),
         (b'g', b'global', None, _(b'edit global config')),
     ]
     + formatteropts,
@@ -2168,6 +2203,9 @@
 
     .. container:: verbose
 
+      The --non-shared flag is used to edit the `.hg/hgrc-not-shared` config
+      file. This file is not shared across shares when in share-safe mode.
+
       Template:
 
       The following keywords are supported. See also :hg:`help templates`.
@@ -2185,28 +2223,32 @@
     """
 
     opts = pycompat.byteskwargs(opts)
-    editopts = (b'edit', b'local', b'global', b'shared')
+    editopts = (b'edit', b'local', b'global', b'shared', b'non_shared')
     if any(opts.get(o) for o in editopts):
         cmdutil.check_at_most_one_arg(opts, *editopts[1:])
         if opts.get(b'local'):
             if not repo:
-                raise error.Abort(_(b"can't use --local outside a repository"))
+                raise error.InputError(
+                    _(b"can't use --local outside a repository")
+                )
             paths = [repo.vfs.join(b'hgrc')]
         elif opts.get(b'global'):
             paths = rcutil.systemrcpath()
         elif opts.get(b'shared'):
             if not repo.shared():
-                raise error.Abort(
+                raise error.InputError(
                     _(b"repository is not shared; can't use --shared")
                 )
-                if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
-                    raise error.Abort(
-                        _(
-                            b"share safe feature not unabled; "
-                            b"unable to edit shared source repository config"
-                        )
+            if requirements.SHARESAFE_REQUIREMENT not in repo.requirements:
+                raise error.InputError(
+                    _(
+                        b"share safe feature not enabled; "
+                        b"unable to edit shared source repository config"
                     )
+                )
             paths = [vfsmod.vfs(repo.sharedpath).join(b'hgrc')]
+        elif opts.get(b'non_shared'):
+            paths = [repo.vfs.join(b'hgrc-not-shared')]
         else:
             paths = rcutil.userrcpath()
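
Taken together, the branches above map each flag of `hg config --edit` to one
concrete file. A condensed illustration of that mapping with the vfs and
rcutil plumbing replaced by literal paths (the global and user locations are
simplified examples, not the full search lists):

    def config_edit_targets(flag, repo_root=".hg", shared_source=None):
        # mirrors the --local/--shared/--non-shared/--global/default branches
        if flag == "local":
            return ["%s/hgrc" % repo_root]
        if flag == "shared":
            # only valid for a share-safe shared repository
            return ["%s/hgrc" % shared_source]
        if flag == "non-shared":
            return ["%s/hgrc-not-shared" % repo_root]
        if flag == "global":
            return ["/etc/mercurial/hgrc"]  # systemrcpath(), simplified
        return ["~/.hgrc"]  # userrcpath(), simplified


    assert config_edit_targets("non-shared") == [".hg/hgrc-not-shared"]
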
 
@@ -2229,7 +2271,7 @@
         editor = ui.geteditor()
         ui.system(
             b"%s \"%s\"" % (editor, f),
-            onerr=error.Abort,
+            onerr=error.InputError,
             errprefix=_(b"edit failed"),
             blockedtag=b'config_edit',
         )
@@ -2305,9 +2347,9 @@
     dryrun = opts.get('dry_run')
     contstate = cmdutil.getunfinishedstate(repo)
     if not contstate:
-        raise error.Abort(_(b'no operation in progress'))
+        raise error.StateError(_(b'no operation in progress'))
     if not contstate.continuefunc:
-        raise error.Abort(
+        raise error.StateError(
             (
                 _(b"%s in progress but does not support 'hg continue'")
                 % (contstate._opname)
@@ -2341,7 +2383,7 @@
     ]
     + walkopts
     + dryrunopts,
-    _(b'[OPTION]... SOURCE... DEST'),
+    _(b'[OPTION]... (SOURCE... DEST | --forget DEST...)'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
 )
 def copy(ui, repo, *pats, **opts):
@@ -2418,14 +2460,16 @@
 @command(
     b'diff',
     [
-        (b'r', b'rev', [], _(b'revision'), _(b'REV')),
+        (b'r', b'rev', [], _(b'revision (DEPRECATED)'), _(b'REV')),
+        (b'', b'from', b'', _(b'revision to diff from'), _(b'REV1')),
+        (b'', b'to', b'', _(b'revision to diff to'), _(b'REV2')),
         (b'c', b'change', b'', _(b'change made by revision'), _(b'REV')),
     ]
     + diffopts
     + diffopts2
     + walkopts
     + subrepoopts,
-    _(b'[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
+    _(b'[OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...'),
     helpcategory=command.CATEGORY_FILE_CONTENTS,
     helpbasic=True,
     inferrepo=True,
@@ -2444,14 +2488,17 @@
        default to comparing against the working directory's first
        parent changeset if no revisions are specified.
 
-    When two revision arguments are given, then changes are shown
-    between those revisions. If only one revision is specified then
-    that revision is compared to the working directory, and, when no
-    revisions are specified, the working directory files are compared
-    to its first parent.
-
-    Alternatively you can specify -c/--change with a revision to see
-    the changes in that changeset relative to its first parent.
+    By default, the working directory files are compared to its first parent. To
+    see the differences from another revision, use --from. To see the difference
+    to another revision, use --to. For example, :hg:`diff --from .^` will show
+    the differences from the working copy's grandparent to the working copy,
+    :hg:`diff --to .` will show the diff from the working copy to its parent
+    (i.e. the reverse of the default), and :hg:`diff --from 1.0 --to 1.2` will
+    show the diff between those two revisions.
+
+    Alternatively you can specify -c/--change with a revision to see the changes
+    in that changeset relative to its first parent (i.e. :hg:`diff -c 42` is
+    equivalent to :hg:`diff --from 42^ --to 42`)
 
     Without the -a/--text option, diff will avoid generating diffs of
     files it detects as binary. With -a, diff will generate a diff
@@ -2470,11 +2517,11 @@
 
       - compare two historical versions of a directory, with rename info::
 
-          hg diff --git -r 1.0:1.2 lib/
+          hg diff --git --from 1.0 --to 1.2 lib/
 
       - get change stats relative to the last change on some date::
 
-          hg diff --stat -r "date('may 2')"
+          hg diff --stat --from "date('may 2')"
 
       - diff all newly-added files that contain a keyword::
 
@@ -2482,9 +2529,9 @@
 
       - compare a revision and its parents::
 
-          hg diff -c 9353         # compare against first parent
-          hg diff -r 9353^:9353   # same using revset syntax
-          hg diff -r 9353^2:9353  # compare against the second parent
+          hg diff -c 9353                  # compare against first parent
+          hg diff --from 9353^ --to 9353   # same using revset syntax
+          hg diff --from 9353^2 --to 9353  # compare against the second parent
 
     Returns 0 on success.
     """
@@ -2493,13 +2540,23 @@
     opts = pycompat.byteskwargs(opts)
     revs = opts.get(b'rev')
     change = opts.get(b'change')
+    from_rev = opts.get(b'from')
+    to_rev = opts.get(b'to')
     stat = opts.get(b'stat')
     reverse = opts.get(b'reverse')
 
+    cmdutil.check_incompatible_arguments(opts, b'from', [b'rev', b'change'])
+    cmdutil.check_incompatible_arguments(opts, b'to', [b'rev', b'change'])
     if change:
         repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
         ctx2 = scmutil.revsingle(repo, change, None)
         ctx1 = ctx2.p1()
+    elif from_rev or to_rev:
+        repo = scmutil.unhidehashlikerevs(
+            repo, [from_rev] + [to_rev], b'nowarn'
+        )
+        ctx1 = scmutil.revsingle(repo, from_rev, None)
+        ctx2 = scmutil.revsingle(repo, to_rev, None)
     else:
         repo = scmutil.unhidehashlikerevs(repo, revs, b'nowarn')
         ctx1, ctx2 = scmutil.revpair(repo, revs)
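
The branches above reduce to a simple rule for which pair of revisions ends up
being compared. The helper below is a hypothetical summary of that rule, not
Mercurial code; "wdir()" stands for the working directory and "." for its
first parent:

    def diff_endpoints(change=None, from_rev=None, to_rev=None):
        if change:
            # -c REV compares REV against its first parent
            return ("%s^" % change, change)
        if from_rev or to_rev:
            # an omitted endpoint defaults to the working directory
            return (from_rev or "wdir()", to_rev or "wdir()")
        # default: working directory against its first parent
        return (".", "wdir()")


    assert diff_endpoints(change="9353") == ("9353^", "9353")
    assert diff_endpoints(to_rev=".") == ("wdir()", ".")
    assert diff_endpoints() == (".", "wdir()")
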
@@ -2637,7 +2694,7 @@
 
     if bookmark:
         if bookmark not in repo._bookmarks:
-            raise error.Abort(_(b"bookmark '%s' not found") % bookmark)
+            raise error.InputError(_(b"bookmark '%s' not found") % bookmark)
 
         revs = scmutil.bookmarkrevs(repo, bookmark)
     else:
@@ -2648,7 +2705,7 @@
         revs = scmutil.revrange(repo, changesets)
 
     if not revs:
-        raise error.Abort(_(b"export requires at least one changeset"))
+        raise error.InputError(_(b"export requires at least one changeset"))
     if len(revs) > 1:
         ui.note(_(b'exporting patches:\n'))
     else:
@@ -2774,7 +2831,9 @@
 
 @command(
     b'forget',
-    [(b'i', b'interactive', None, _(b'use interactive mode')),]
+    [
+        (b'i', b'interactive', None, _(b'use interactive mode')),
+    ]
     + walkopts
     + dryrunopts,
     _(b'[OPTION]... FILE...'),
@@ -2813,7 +2872,7 @@
 
     opts = pycompat.byteskwargs(opts)
     if not pats:
-        raise error.Abort(_(b'no files specified'))
+        raise error.InputError(_(b'no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
     dryrun, interactive = opts.get(b'dry_run'), opts.get(b'interactive')
@@ -2874,7 +2933,7 @@
     helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
 )
 def graft(ui, repo, *revs, **opts):
-    '''copy changes from other branches onto the current branch
+    """copy changes from other branches onto the current branch
 
     This command uses Mercurial's merge logic to copy individual
     changes from other branches without merging branches in the
@@ -2913,7 +2972,7 @@
 
       is thus pretty much the same as::
 
-        hg diff -r 234 -r 345 | hg import
+        hg diff --from 234 --to 345 | hg import
 
       but using merge to resolve conflicts and track moved files.
 
@@ -2967,7 +3026,7 @@
     See :hg:`help revisions` for more about specifying revisions.
 
     Returns 0 on successful completion, 1 if there are unresolved files.
-    '''
+    """
     with repo.wlock():
         return _dograft(ui, repo, *revs, **opts)
 
@@ -3040,7 +3099,7 @@
     elif opts.get(b'continue'):
         cont = True
         if revs:
-            raise error.Abort(_(b"can't specify --continue and revisions"))
+            raise error.InputError(_(b"can't specify --continue and revisions"))
         # read in unfinished revisions
         if graftstate.exists():
             statedata = cmdutil.readgraftstate(repo, graftstate)
@@ -3060,7 +3119,7 @@
             cmdutil.wrongtooltocontinue(repo, _(b'graft'))
     else:
         if not revs:
-            raise error.Abort(_(b'no revisions specified'))
+            raise error.InputError(_(b'no revisions specified'))
         cmdutil.checkunfinished(repo)
         cmdutil.bailifchanged(repo)
         revs = scmutil.revrange(repo, revs)
@@ -3078,7 +3137,7 @@
     if not revs:
         return -1
     if basectx is not None and len(revs) != 1:
-        raise error.Abort(_(b'only one revision allowed with --base '))
+        raise error.InputError(_(b'only one revision allowed with --base'))
 
     # Don't check in the --continue case, in effect retaining --force across
     # --continues. That's because without --force, any revisions we decided to
@@ -3240,7 +3299,7 @@
 def _stopgraft(ui, repo, graftstate):
     """stop the interrupted graft"""
     if not graftstate.exists():
-        raise error.Abort(_(b"no interrupted graft found"))
+        raise error.StateError(_(b"no interrupted graft found"))
     pctx = repo[b'.']
     mergemod.clean_update(pctx)
     graftstate.delete()
@@ -3753,7 +3812,7 @@
 
     opts = pycompat.byteskwargs(opts)
     if not repo and not source:
-        raise error.Abort(
+        raise error.InputError(
             _(b"there is no Mercurial repository here (.hg not found)")
         )
 
@@ -3772,7 +3831,7 @@
 
     if not repo:
         if num or branch or tags:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"can't query remote revision number, branch, or tags")
             )
         if not rev and revs:
@@ -4044,7 +4103,7 @@
     cmdutil.check_incompatible_arguments(opts, 'exact', ['edit', 'prefix'])
     opts = pycompat.byteskwargs(opts)
     if not patch1:
-        raise error.Abort(_(b'need at least one patch to import'))
+        raise error.InputError(_(b'need at least one patch to import'))
 
     patches = (patch1,) + patches
 
@@ -4057,11 +4116,11 @@
     try:
         sim = float(opts.get(b'similarity') or 0)
     except ValueError:
-        raise error.Abort(_(b'similarity must be a number'))
+        raise error.InputError(_(b'similarity must be a number'))
     if sim < 0 or sim > 100:
-        raise error.Abort(_(b'similarity must be between 0 and 100'))
+        raise error.InputError(_(b'similarity must be between 0 and 100'))
     if sim and not update:
-        raise error.Abort(_(b'cannot use --similarity with --bypass'))
+        raise error.InputError(_(b'cannot use --similarity with --bypass'))
 
     base = opts[b"base"]
     msgs = []
@@ -4118,7 +4177,7 @@
                         break
 
                 if not haspatch:
-                    raise error.Abort(_(b'%s: no diffs found') % patchurl)
+                    raise error.InputError(_(b'%s: no diffs found') % patchurl)
 
             if msgs:
                 repo.savecommitmessage(b'\n* * *\n'.join(msgs))
@@ -4426,6 +4485,13 @@
             _(b'BRANCH'),
         ),
         (
+            b'B',
+            b'bookmark',
+            [],
+            _(b"show changesets within the given bookmark"),
+            _(b'BOOKMARK'),
+        ),
+        (
             b'P',
             b'prune',
             [],
@@ -4564,7 +4630,7 @@
 
     See :hg:`help templates` for more about pre-packaged styles and
     specifying custom templates. The default template used by the log
-    command can be customized via the ``ui.logtemplate`` configuration
+    command can be customized via the ``command-templates.log`` configuration
     setting.
 
     Returns 0 on success.
@@ -4574,18 +4640,17 @@
     linerange = opts.get(b'line_range')
 
     if linerange and not opts.get(b'follow'):
-        raise error.Abort(_(b'--line-range requires --follow'))
+        raise error.InputError(_(b'--line-range requires --follow'))
 
     if linerange and pats:
         # TODO: take pats as patterns with no line-range filter
-        raise error.Abort(
+        raise error.InputError(
             _(b'FILE arguments are not compatible with --line-range option')
         )
 
     repo = scmutil.unhidehashlikerevs(repo, opts.get(b'rev'), b'nowarn')
-    revs, differ = logcmdutil.getrevs(
-        repo, logcmdutil.parseopts(ui, pats, opts)
-    )
+    walk_opts = logcmdutil.parseopts(ui, pats, opts)
+    revs, differ = logcmdutil.getrevs(repo, walk_opts)
     if linerange:
         # TODO: should follow file history from logcmdutil._initialrevs(),
         # then filter the result by logcmdutil._makerevset() and --limit
@@ -4640,7 +4705,7 @@
 
     if opts.get(b'all'):
         if rev or node:
-            raise error.Abort(_(b"can't specify a revision with --all"))
+            raise error.InputError(_(b"can't specify a revision with --all"))
 
         res = set()
         for rev in repo:
@@ -4655,7 +4720,7 @@
         return
 
     if rev and node:
-        raise error.Abort(_(b"please specify just one revision"))
+        raise error.InputError(_(b"please specify just one revision"))
 
     if not node:
         node = rev
@@ -4737,16 +4802,16 @@
     if abort:
         state = cmdutil.getunfinishedstate(repo)
         if state and state._opname != b'merge':
-            raise error.Abort(
+            raise error.StateError(
                 _(b'cannot abort merge with %s in progress') % (state._opname),
                 hint=state.hint(),
             )
         if node:
-            raise error.Abort(_(b"cannot specify a node with --abort"))
+            raise error.InputError(_(b"cannot specify a node with --abort"))
         return hg.abortmerge(repo.ui, repo)
 
     if opts.get(b'rev') and node:
-        raise error.Abort(_(b"please specify just one revision"))
+        raise error.InputError(_(b"please specify just one revision"))
     if not node:
         node = opts.get(b'rev')
 
@@ -4754,7 +4819,7 @@
         ctx = scmutil.revsingle(repo, node)
     else:
         if ui.configbool(b'commands', b'merge.require-rev'):
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b'configuration requires specifying revision to merge '
                     b'with'
@@ -4763,7 +4828,9 @@
         ctx = repo[destutil.destmerge(repo)]
 
     if ctx.node() is None:
-        raise error.Abort(_(b'merging with the working copy has no effect'))
+        raise error.InputError(
+            _(b'merging with the working copy has no effect')
+        )
 
     if opts.get(b'preview'):
         # find nodes that are ancestors of p2 but not of p1
@@ -4871,7 +4938,7 @@
     # style URLs, so don't overwrite dest.
     path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
-        raise error.Abort(
+        raise error.ConfigError(
             _(b'default repository not configured!'),
             hint=_(b"see 'hg help config.paths'"),
         )
@@ -4956,7 +5023,7 @@
     if file_:
         m = scmutil.match(ctx, (file_,), opts)
         if m.anypats() or len(m.files()) != 1:
-            raise error.Abort(_(b'can only specify an explicit filename'))
+            raise error.InputError(_(b'can only specify an explicit filename'))
         file_ = m.files()[0]
         filenodes = []
         for cp in ctx.parents():
@@ -4967,7 +5034,7 @@
             except error.LookupError:
                 pass
         if not filenodes:
-            raise error.Abort(_(b"'%s' not found in manifest!") % file_)
+            raise error.InputError(_(b"'%s' not found in manifest") % file_)
         p = []
         for fn in filenodes:
             fctx = repo.filectx(file_, fileid=fn)
@@ -5109,7 +5176,7 @@
     for idx, name in enumerate(phases.cmdphasenames):
         if opts[name]:
             if targetphase is not None:
-                raise error.Abort(_(b'only one phase can be specified'))
+                raise error.InputError(_(b'only one phase can be specified'))
             targetphase = idx
 
     # look for specified revision
@@ -5132,7 +5199,7 @@
         with repo.lock(), repo.transaction(b"phase") as tr:
             # set phase
             if not revs:
-                raise error.Abort(_(b'empty revision set'))
+                raise error.InputError(_(b'empty revision set'))
             nodes = [repo[r].node() for r in revs]
             # moving revision from public to draft may hide them
             # We have to check result on an unfiltered repository
@@ -5217,7 +5284,12 @@
             None,
             _(b'run even when remote repository is unrelated'),
         ),
-        (b'', b'confirm', None, _(b'confirm pull before applying changes'),),
+        (
+            b'',
+            b'confirm',
+            None,
+            _(b'confirm pull before applying changes'),
+        ),
         (
             b'r',
             b'rev',
@@ -5275,10 +5347,11 @@
     ):
         msg = _(b'update destination required by configuration')
         hint = _(b'use hg pull followed by hg update DEST')
-        raise error.Abort(msg, hint=hint)
+        raise error.InputError(msg, hint=hint)
 
     source, branches = hg.parseurl(ui.expandpath(source), opts.get(b'branch'))
     ui.status(_(b'pulling from %s\n') % util.hidepassword(source))
+    ui.flush()
     other = hg.peer(repo, opts, source)
     try:
         revs, checkout = hg.addbranchrevs(
@@ -5314,7 +5387,9 @@
             for b in opts.get(b'bookmark', []):
                 b = repo._bookmarks.expandname(b)
                 if b not in remotebookmarks:
-                    raise error.Abort(_(b'remote bookmark %s not found!') % b)
+                    raise error.InputError(
+                        _(b'remote bookmark %s not found!') % b
+                    )
                 nodes.append(remotebookmarks[b])
             for i, rev in enumerate(revs):
                 node = fnodes[i].result()
@@ -5383,6 +5458,7 @@
             _(b'REV'),
         ),
         (b'B', b'bookmark', [], _(b"bookmark to push"), _(b'BOOKMARK')),
+        (b'', b'all-bookmarks', None, _(b"push all bookmarks (EXPERIMENTAL)")),
         (
             b'b',
             b'branch',
@@ -5439,7 +5515,8 @@
     If -B/--bookmark is used, the specified bookmarked revision, its
     ancestors, and the bookmark will be pushed to the remote
     repository. Specifying ``.`` is equivalent to specifying the active
-    bookmark's name.
+    bookmark's name. Use the --all-bookmarks option for pushing all
+    current bookmarks.
 
     Please see :hg:`help urls` for important details about ``ssh://``
     URLs. If DESTINATION is omitted, a default path will be used.
@@ -5467,6 +5544,15 @@
     """
 
     opts = pycompat.byteskwargs(opts)
+
+    if opts.get(b'all_bookmarks'):
+        cmdutil.check_incompatible_arguments(
+            opts,
+            b'all_bookmarks',
+            [b'bookmark', b'rev'],
+        )
+        opts[b'bookmark'] = list(repo._bookmarks)
+
     if opts.get(b'bookmark'):
         ui.setconfig(b'bookmarks', b'pushing', opts[b'bookmark'], b'push')
         for b in opts[b'bookmark']:
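
With --all-bookmarks the command simply pre-populates the -B/--bookmark list
with every local bookmark name before the existing bookmark-push path runs,
after rejecting combinations with -B and -r. A small sketch of that expansion,
with the bookmark store reduced to a dict and ValueError standing in for
error.InputError:

    def expand_all_bookmarks(opts, bookmarks):
        # `bookmarks` maps bookmark names to nodes; iterating yields the names
        if opts.get(b'all_bookmarks'):
            if opts.get(b'bookmark') or opts.get(b'rev'):
                raise ValueError("--all-bookmarks is incompatible with -B/-r")
            opts[b'bookmark'] = list(bookmarks)
        return opts


    opts = expand_all_bookmarks({b'all_bookmarks': True}, {b'feature': b'0' * 40})
    assert opts[b'bookmark'] == [b'feature']
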
@@ -5481,7 +5567,7 @@
 
     path = ui.paths.getpath(dest, default=(b'default-push', b'default'))
     if not path:
-        raise error.Abort(
+        raise error.ConfigError(
             _(b'default repository not configured!'),
             hint=_(b"see 'hg help config.paths'"),
         )
@@ -5494,7 +5580,7 @@
     if revs:
         revs = [repo[r].node() for r in scmutil.revrange(repo, revs)]
         if not revs:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"specified revisions evaluate to an empty set"),
                 hint=_(b"use different revision arguments"),
             )
@@ -5505,11 +5591,11 @@
         revs = scmutil.revrange(repo, [expr])
         revs = [repo[rev].node() for rev in revs]
         if not revs:
-            raise error.Abort(
+            raise error.InputError(
                 _(b'default push revset for path evaluates to an empty set')
             )
     elif ui.configbool(b'commands', b'push.require-revs'):
-        raise error.Abort(
+        raise error.InputError(
             _(b'no revisions specified to push'),
             hint=_(b'did you mean "hg push -r ."?'),
         )
@@ -5553,7 +5639,9 @@
 
 @command(
     b'recover',
-    [(b'', b'verify', False, b"run `hg verify` after successful recover"),],
+    [
+        (b'', b'verify', False, b"run `hg verify` after successful recover"),
+    ],
     helpcategory=command.CATEGORY_MAINTENANCE,
 )
 def recover(ui, repo, **opts):
@@ -5638,7 +5726,7 @@
     after, force = opts.get(b'after'), opts.get(b'force')
     dryrun = opts.get(b'dry_run')
     if not pats and not after:
-        raise error.Abort(_(b'no files specified'))
+        raise error.InputError(_(b'no files specified'))
 
     m = scmutil.match(repo[None], pats, opts)
     subrepos = opts.get(b'subrepos')
@@ -5768,16 +5856,16 @@
 
     actioncount = len(list(filter(None, [show, mark, unmark, remerge])))
     if actioncount > 1:
-        raise error.Abort(_(b"too many actions specified"))
+        raise error.InputError(_(b"too many actions specified"))
     elif actioncount == 0 and ui.configbool(
         b'commands', b'resolve.explicit-re-merge'
     ):
         hint = _(b'use --mark, --unmark, --list or --re-merge')
-        raise error.Abort(_(b'no action specified'), hint=hint)
+        raise error.InputError(_(b'no action specified'), hint=hint)
     if pats and all:
-        raise error.Abort(_(b"can't specify --all and patterns"))
+        raise error.InputError(_(b"can't specify --all and patterns"))
     if not (all or pats or show or mark or unmark):
-        raise error.Abort(
+        raise error.InputError(
             _(b'no files or directories specified'),
             hint=b'use --all to re-merge all unresolved files',
         )
@@ -5787,7 +5875,7 @@
             if ui.promptchoice(
                 _(b're-merge all unresolved files (yn)?$$ &Yes $$ &No')
             ):
-                raise error.Abort(_(b'user quit'))
+                raise error.CanceledError(_(b'user quit'))
         if mark and not pats:
             if ui.promptchoice(
                 _(
@@ -5795,7 +5883,7 @@
                     b'$$ &Yes $$ &No'
                 )
             ):
-                raise error.Abort(_(b'user quit'))
+                raise error.CanceledError(_(b'user quit'))
         if unmark and not pats:
             if ui.promptchoice(
                 _(
@@ -5803,7 +5891,7 @@
                     b'$$ &Yes $$ &No'
                 )
             ):
-                raise error.Abort(_(b'user quit'))
+                raise error.CanceledError(_(b'user quit'))
 
     uipathfn = scmutil.getuipathfn(repo)
 
@@ -5850,7 +5938,7 @@
         ms = mergestatemod.mergestate.read(repo)
 
         if not (ms.active() or repo.dirstate.p2() != nullid):
-            raise error.Abort(
+            raise error.StateError(
                 _(b'resolve command not applicable when not merging')
             )
 
@@ -5942,7 +6030,7 @@
                 )
             )
             if markcheck == b'abort' and not all and not pats:
-                raise error.Abort(
+                raise error.StateError(
                     _(b'conflict markers detected'),
                     hint=_(b'use --all to mark anyway'),
                 )
@@ -6061,7 +6149,7 @@
     parent, p2 = repo.dirstate.parents()
     if not opts.get(b'rev') and p2 != nullid:
         # revert after merge is a trap for new users (issue2915)
-        raise error.Abort(
+        raise error.InputError(
             _(b'uncommitted merge with no revision specified'),
             hint=_(b"use 'hg update' or see 'hg help revert'"),
         )
@@ -6084,7 +6172,7 @@
                 b"uncommitted merge, use --all to discard all changes,"
                 b" or 'hg update -C .' to abort the merge"
             )
-            raise error.Abort(msg, hint=hint)
+            raise error.InputError(msg, hint=hint)
         dirty = any(repo.status())
         node = ctx.node()
         if node != parent:
@@ -6108,7 +6196,7 @@
             hint = _(b"uncommitted changes, use --all to discard all changes")
         else:
             hint = _(b"use --all to revert all files")
-        raise error.Abort(msg, hint=hint)
+        raise error.InputError(msg, hint=hint)
 
     return cmdutil.revert(ui, repo, ctx, *pats, **pycompat.strkwargs(opts))
 
@@ -6318,7 +6406,7 @@
     cmdutil.check_incompatible_arguments(opts, 'stdio', ['cmdserver'])
     opts = pycompat.byteskwargs(opts)
     if opts[b"print_url"] and ui.verbose:
-        raise error.Abort(_(b"cannot use --print-url with --verbose"))
+        raise error.InputError(_(b"cannot use --print-url with --verbose"))
 
     if opts[b"stdio"]:
         if repo is None:
@@ -6327,6 +6415,7 @@
             )
         s = wireprotoserver.sshserver(ui, repo)
         s.serve_forever()
+        return
 
     service = server.createservice(ui, repo, opts)
     return server.runservice(opts, initfn=service.init, runfn=service.run)
@@ -6392,7 +6481,7 @@
     helpcategory=command.CATEGORY_WORKING_DIRECTORY,
 )
 def shelve(ui, repo, *pats, **opts):
-    '''save and set aside changes from the working directory
+    """save and set aside changes from the working directory
 
     Shelving takes files that "hg status" reports as not clean, saves
     the modifications to a bundle (a shelved change), and reverts the
@@ -6423,7 +6512,7 @@
 
     To delete specific shelved changes, use ``--delete``. To delete
     all shelved changes, use ``--cleanup``.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
     allowables = [
         (b'addremove', {b'create'}),  # 'create' is pseudo action
@@ -6444,7 +6533,7 @@
         if opts.get(opt):
             for i, allowable in allowables:
                 if opts[i] and opt not in allowable:
-                    raise error.Abort(
+                    raise error.InputError(
                         _(
                             b"options '--%s' and '--%s' may not be "
                             b"used together"
@@ -6455,7 +6544,9 @@
 
     if checkopt(b'cleanup'):
         if pats:
-            raise error.Abort(_(b"cannot specify names when using '--cleanup'"))
+            raise error.InputError(
+                _(b"cannot specify names when using '--cleanup'")
+            )
         return shelvemod.cleanupcmd(ui, repo)
     elif checkopt(b'delete'):
         return shelvemod.deletecmd(ui, repo, pats)
@@ -6621,7 +6712,7 @@
 
     if revs and terse:
         msg = _(b'cannot use --terse with --rev')
-        raise error.Abort(msg)
+        raise error.InputError(msg)
     elif change:
         repo = scmutil.unhidehashlikerevs(repo, [change], b'nowarn')
         ctx2 = scmutil.revsingle(repo, change, None)
@@ -6695,8 +6786,10 @@
 
     morestatus = None
     if (
-        ui.verbose or ui.configbool(b'commands', b'status.verbose')
-    ) and not ui.plain():
+        (ui.verbose or ui.configbool(b'commands', b'status.verbose'))
+        and not ui.plain()
+        and not opts.get(b'print0')
+    ):
         morestatus = cmdutil.readmorestatus(repo)
 
     ui.pager(b'status')
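
The added `not opts.get(b'print0')` guard keeps the verbose unfinished-state
footer out of NUL-separated output, presumably so that -0/--print0 stays
machine-parseable. A condensed restatement of the condition, with plain
booleans in place of the ui and config lookups:

    def want_morestatus(verbose, config_verbose, plain, print0):
        return (verbose or config_verbose) and not plain and not print0


    assert want_morestatus(True, False, plain=False, print0=True) is False
    assert want_morestatus(False, True, plain=False, print0=False) is True
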
@@ -7092,11 +7185,11 @@
         rev_ = b"."
         names = [t.strip() for t in (name1,) + names]
         if len(names) != len(set(names)):
-            raise error.Abort(_(b'tag names must be unique'))
+            raise error.InputError(_(b'tag names must be unique'))
         for n in names:
             scmutil.checknewlabel(repo, n, b'tag')
             if not n:
-                raise error.Abort(
+                raise error.InputError(
                     _(b'tag names cannot consist entirely of whitespace')
                 )
         if opts.get(b'rev'):
@@ -7112,16 +7205,20 @@
                 if repo.tagtype(n) == b'global':
                     alltags = tagsmod.findglobaltags(ui, repo)
                     if alltags[n][0] == nullid:
-                        raise error.Abort(_(b"tag '%s' is already removed") % n)
+                        raise error.InputError(
+                            _(b"tag '%s' is already removed") % n
+                        )
                 if not repo.tagtype(n):
-                    raise error.Abort(_(b"tag '%s' does not exist") % n)
+                    raise error.InputError(_(b"tag '%s' does not exist") % n)
                 if repo.tagtype(n) != expectedtype:
                     if expectedtype == b'global':
-                        raise error.Abort(
+                        raise error.InputError(
                             _(b"tag '%s' is not a global tag") % n
                         )
                     else:
-                        raise error.Abort(_(b"tag '%s' is not a local tag") % n)
+                        raise error.InputError(
+                            _(b"tag '%s' is not a local tag") % n
+                        )
             rev_ = b'null'
             if not message:
                 # we don't translate commit messages
@@ -7129,16 +7226,16 @@
         elif not opts.get(b'force'):
             for n in names:
                 if n in repo.tags():
-                    raise error.Abort(
+                    raise error.InputError(
                         _(b"tag '%s' already exists (use -f to force)") % n
                     )
         if not opts.get(b'local'):
             p1, p2 = repo.dirstate.parents()
             if p2 != nullid:
-                raise error.Abort(_(b'uncommitted merge'))
+                raise error.StateError(_(b'uncommitted merge'))
             bheads = repo.branchheads()
             if not opts.get(b'force') and bheads and p1 not in bheads:
-                raise error.Abort(
+                raise error.InputError(
                     _(
                         b'working directory is not at a branch head '
                         b'(use -f to force)'
@@ -7170,7 +7267,7 @@
             not opts.get(b'remove')
             and scmutil.revsingle(repo, rev_).rev() == nullrev
         ):
-            raise error.Abort(_(b"cannot tag null revision"))
+            raise error.InputError(_(b"cannot tag null revision"))
 
         tagsmod.tag(
             repo,
@@ -7219,10 +7316,11 @@
     for t, n in reversed(repo.tagslist()):
         hn = hexfunc(n)
         label = b'tags.normal'
-        tagtype = b''
-        if repo.tagtype(t) == b'local':
-            label = b'tags.local'
-            tagtype = b'local'
+        tagtype = repo.tagtype(t)
+        if not tagtype or tagtype == b'global':
+            tagtype = b''
+        else:
+            label = b'tags.' + tagtype
 
         fm.startitem()
         fm.context(repo=repo)
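
The rewritten branch derives the color label from the tag type itself, so any
non-global tag type is rendered with a `tags.<type>` label rather than only
the hard-coded local case. A short sketch that mirrors those lines:

    def tag_label(tagtype):
        # returns (formatter label, tagtype field) for one tag entry
        label = b'tags.normal'
        if not tagtype or tagtype == b'global':
            tagtype = b''
        else:
            label = b'tags.' + tagtype
        return label, tagtype


    assert tag_label(b'global') == (b'tags.normal', b'')
    assert tag_label(b'local') == (b'tags.local', b'local')
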
@@ -7302,7 +7400,7 @@
             f = hg.openpath(ui, fname)
             gen = exchange.readbundle(ui, f, fname)
             if isinstance(gen, streamclone.streamcloneapplier):
-                raise error.Abort(
+                raise error.InputError(
                     _(
                         b'packed bundles cannot be applied with '
                         b'"hg unbundle"'
@@ -7497,11 +7595,11 @@
     check = opts.get('check')
     merge = opts.get('merge')
     if rev and node:
-        raise error.Abort(_(b"please specify just one revision"))
+        raise error.InputError(_(b"please specify just one revision"))
 
     if ui.configbool(b'commands', b'update.requiredest'):
         if not node and not rev and not date:
-            raise error.Abort(
+            raise error.InputError(
                 _(b'you must specify a destination'),
                 hint=_(b'for example: hg update ".::"'),
             )
@@ -7510,7 +7608,7 @@
         rev = node
 
     if date and rev is not None:
-        raise error.Abort(_(b"you can't specify a revision and a date"))
+        raise error.InputError(_(b"you can't specify a revision and a date"))
 
     updatecheck = None
     if check:
@@ -7645,8 +7743,7 @@
 
 
 def loadcmdtable(ui, name, cmdtable):
-    """Load command functions from specified cmdtable
-    """
+    """Load command functions from specified cmdtable"""
     overrides = [cmd for cmd in cmdtable if cmd in table]
     if overrides:
         ui.warn(
--- a/mercurial/commandserver.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/commandserver.py	Tue Jan 19 21:48:43 2021 +0530
@@ -316,8 +316,8 @@
         return -1
 
     def runcommand(self):
-        """ reads a list of \0 terminated arguments, executes
-        and writes the return code to the result channel """
+        """reads a list of \0 terminated arguments, executes
+        and writes the return code to the result channel"""
         from . import dispatch  # avoid cycle
 
         args = self._readlist()
--- a/mercurial/commit.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/commit.py	Tue Jan 19 21:48:43 2021 +0530
@@ -98,7 +98,11 @@
         )
         xp1, xp2 = p1.hex(), p2 and p2.hex() or b''
         repo.hook(
-            b'pretxncommit', throw=True, node=hex(n), parent1=xp1, parent2=xp2,
+            b'pretxncommit',
+            throw=True,
+            node=hex(n),
+            parent1=xp1,
+            parent2=xp2,
         )
         # set the new commit is proper phase
         targetphase = subrepoutil.newcommitphase(repo.ui, ctx)
@@ -112,7 +116,7 @@
             # be compliant anyway
             #
             # if minimal phase was 0 we don't need to retract anything
-            phases.registernew(repo, tr, targetphase, [n])
+            phases.registernew(repo, tr, targetphase, [repo[n].rev()])
         return n
 
 
@@ -154,10 +158,10 @@
 
 
 def _get_salvaged(repo, ms, ctx):
-    """ returns a list of salvaged files
+    """returns a list of salvaged files
 
     returns empty list if config option which process salvaged files are
-    not enabled """
+    not enabled"""
     salvaged = []
     copy_sd = repo.filecopiesmode == b'changeset-sidedata'
     if copy_sd and len(ctx.parents()) > 1:
@@ -238,7 +242,14 @@
 
 
 def _filecommit(
-    repo, fctx, manifest1, manifest2, linkrev, tr, includecopymeta, ms,
+    repo,
+    fctx,
+    manifest1,
+    manifest2,
+    linkrev,
+    tr,
+    includecopymeta,
+    ms,
 ):
     """
     commit an individual file as part of a larger transaction
--- a/mercurial/config.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/config.py	Tue Jan 19 21:48:43 2021 +0530
@@ -165,7 +165,7 @@
                     include(expanded, remap=remap, sections=sections)
                 except IOError as inst:
                     if inst.errno != errno.ENOENT:
-                        raise error.ParseError(
+                        raise error.ConfigError(
                             _(b"cannot include %s (%s)")
                             % (expanded, encoding.strtolocal(inst.strerror)),
                             b"%s:%d" % (src, line),
@@ -200,14 +200,19 @@
                 self._unset.append((section, name))
                 continue
 
-            raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line)))
+            message = l.rstrip()
+            if l.startswith(b' '):
+                message = b"unexpected leading whitespace: %s" % message
+            raise error.ConfigError(message, (b"%s:%d" % (src, line)))
 
     def read(self, path, fp=None, sections=None, remap=None):
         if not fp:
             fp = util.posixfile(path, b'rb')
-        assert getattr(fp, 'mode', 'rb') == 'rb', (
-            b'config files must be opened in binary mode, got fp=%r mode=%r'
-            % (fp, fp.mode,)
+        assert (
+            getattr(fp, 'mode', 'rb') == 'rb'
+        ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
+            fp,
+            fp.mode,
         )
 
         dir = os.path.dirname(path)
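
Unparseable config lines now raise ConfigError rather than ParseError, and a
line that starts with whitespace gets a dedicated hint, since an accidentally
indented hgrc line is a common mistake. A self-contained sketch of the message
construction; ConfigError below is a local stub and the source/line pair is
made up for illustration:

    class ConfigError(Exception):
        pass


    def parse_failure(line, src=b'/tmp/hgrc', lineno=3):
        message = line.rstrip()
        if line.startswith(b' '):
            message = b"unexpected leading whitespace: %s" % message
        return ConfigError(message, b"%s:%d" % (src, lineno))


    err = parse_failure(b'   indented = oops\n')
    assert b'unexpected leading whitespace' in err.args[0]
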
--- a/mercurial/configitems.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/configitems.py	Tue Jan 19 21:48:43 2021 +0530
@@ -133,78 +133,127 @@
 
 def _registerdiffopts(section, configprefix=b''):
     coreconfigitem(
-        section, configprefix + b'nodates', default=False,
+        section,
+        configprefix + b'nodates',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'showfunc', default=False,
+        section,
+        configprefix + b'showfunc',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'unified', default=None,
+        section,
+        configprefix + b'unified',
+        default=None,
     )
     coreconfigitem(
-        section, configprefix + b'git', default=False,
+        section,
+        configprefix + b'git',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorews', default=False,
+        section,
+        configprefix + b'ignorews',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorewsamount', default=False,
+        section,
+        configprefix + b'ignorewsamount',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignoreblanklines', default=False,
+        section,
+        configprefix + b'ignoreblanklines',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'ignorewseol', default=False,
+        section,
+        configprefix + b'ignorewseol',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'nobinary', default=False,
+        section,
+        configprefix + b'nobinary',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'noprefix', default=False,
+        section,
+        configprefix + b'noprefix',
+        default=False,
     )
     coreconfigitem(
-        section, configprefix + b'word-diff', default=False,
+        section,
+        configprefix + b'word-diff',
+        default=False,
     )
 
 
 coreconfigitem(
-    b'alias', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'auth', b'cookiefile', default=None,
+    b'alias',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'auth',
+    b'cookiefile',
+    default=None,
 )
 _registerdiffopts(section=b'annotate')
 # bookmarks.pushing: internal hack for discovery
 coreconfigitem(
-    b'bookmarks', b'pushing', default=list,
+    b'bookmarks',
+    b'pushing',
+    default=list,
 )
 # bundle.mainreporoot: internal hack for bundlerepo
 coreconfigitem(
-    b'bundle', b'mainreporoot', default=b'',
-)
-coreconfigitem(
-    b'censor', b'policy', default=b'abort', experimental=True,
-)
-coreconfigitem(
-    b'chgserver', b'idletimeout', default=3600,
-)
-coreconfigitem(
-    b'chgserver', b'skiphash', default=False,
-)
-coreconfigitem(
-    b'cmdserver', b'log', default=None,
-)
-coreconfigitem(
-    b'cmdserver', b'max-log-files', default=7,
-)
-coreconfigitem(
-    b'cmdserver', b'max-log-size', default=b'1 MB',
-)
-coreconfigitem(
-    b'cmdserver', b'max-repo-cache', default=0, experimental=True,
-)
-coreconfigitem(
-    b'cmdserver', b'message-encodings', default=list,
+    b'bundle',
+    b'mainreporoot',
+    default=b'',
+)
+coreconfigitem(
+    b'censor',
+    b'policy',
+    default=b'abort',
+    experimental=True,
+)
+coreconfigitem(
+    b'chgserver',
+    b'idletimeout',
+    default=3600,
+)
+coreconfigitem(
+    b'chgserver',
+    b'skiphash',
+    default=False,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'log',
+    default=None,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-log-files',
+    default=7,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-log-size',
+    default=b'1 MB',
+)
+coreconfigitem(
+    b'cmdserver',
+    b'max-repo-cache',
+    default=0,
+    experimental=True,
+)
+coreconfigitem(
+    b'cmdserver',
+    b'message-encodings',
+    default=list,
 )
 coreconfigitem(
     b'cmdserver',
@@ -212,340 +261,634 @@
     default=lambda: [b'chgserver', b'cmdserver', b'repocache'],
 )
 coreconfigitem(
-    b'cmdserver', b'shutdown-on-interrupt', default=True,
-)
-coreconfigitem(
-    b'color', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'color', b'mode', default=b'auto',
-)
-coreconfigitem(
-    b'color', b'pagermode', default=dynamicdefault,
+    b'cmdserver',
+    b'shutdown-on-interrupt',
+    default=True,
+)
+coreconfigitem(
+    b'color',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'color',
+    b'mode',
+    default=b'auto',
+)
+coreconfigitem(
+    b'color',
+    b'pagermode',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'command-templates',
+    b'graphnode',
+    default=None,
+    alias=[(b'ui', b'graphnodetemplate')],
+)
+coreconfigitem(
+    b'command-templates',
+    b'log',
+    default=None,
+    alias=[(b'ui', b'logtemplate')],
+)
+coreconfigitem(
+    b'command-templates',
+    b'mergemarker',
+    default=(
+        b'{node|short} '
+        b'{ifeq(tags, "tip", "", '
+        b'ifeq(tags, "", "", "{tags} "))}'
+        b'{if(bookmarks, "{bookmarks} ")}'
+        b'{ifeq(branch, "default", "", "{branch} ")}'
+        b'- {author|user}: {desc|firstline}'
+    ),
+    alias=[(b'ui', b'mergemarkertemplate')],
+)
+coreconfigitem(
+    b'command-templates',
+    b'pre-merge-tool-output',
+    default=None,
+    alias=[(b'ui', b'pre-merge-tool-output-template')],
+)
+coreconfigitem(
+    b'command-templates',
+    b'oneline-summary',
+    default=None,
+)
+coreconfigitem(
+    b'command-templates',
+    b'oneline-summary.*',
+    default=dynamicdefault,
+    generic=True,
 )
 _registerdiffopts(section=b'commands', configprefix=b'commit.interactive.')
 coreconfigitem(
-    b'commands', b'commit.post-status', default=False,
-)
-coreconfigitem(
-    b'commands', b'grep.all-files', default=False, experimental=True,
-)
-coreconfigitem(
-    b'commands', b'merge.require-rev', default=False,
-)
-coreconfigitem(
-    b'commands', b'push.require-revs', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.confirm', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.explicit-re-merge', default=False,
-)
-coreconfigitem(
-    b'commands', b'resolve.mark-check', default=b'none',
+    b'commands',
+    b'commit.post-status',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'grep.all-files',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'commands',
+    b'merge.require-rev',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'push.require-revs',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.confirm',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.explicit-re-merge',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'resolve.mark-check',
+    default=b'none',
 )
 _registerdiffopts(section=b'commands', configprefix=b'revert.interactive.')
 coreconfigitem(
-    b'commands', b'show.aliasprefix', default=list,
-)
-coreconfigitem(
-    b'commands', b'status.relative', default=False,
-)
-coreconfigitem(
-    b'commands', b'status.skipstates', default=[], experimental=True,
-)
-coreconfigitem(
-    b'commands', b'status.terse', default=b'',
-)
-coreconfigitem(
-    b'commands', b'status.verbose', default=False,
-)
-coreconfigitem(
-    b'commands', b'update.check', default=None,
-)
-coreconfigitem(
-    b'commands', b'update.requiredest', default=False,
-)
-coreconfigitem(
-    b'committemplate', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'convert', b'bzr.saverev', default=True,
-)
-coreconfigitem(
-    b'convert', b'cvsps.cache', default=True,
-)
-coreconfigitem(
-    b'convert', b'cvsps.fuzz', default=60,
-)
-coreconfigitem(
-    b'convert', b'cvsps.logencoding', default=None,
-)
-coreconfigitem(
-    b'convert', b'cvsps.mergefrom', default=None,
-)
-coreconfigitem(
-    b'convert', b'cvsps.mergeto', default=None,
-)
-coreconfigitem(
-    b'convert', b'git.committeractions', default=lambda: [b'messagedifferent'],
-)
-coreconfigitem(
-    b'convert', b'git.extrakeys', default=list,
-)
-coreconfigitem(
-    b'convert', b'git.findcopiesharder', default=False,
-)
-coreconfigitem(
-    b'convert', b'git.remoteprefix', default=b'remote',
-)
-coreconfigitem(
-    b'convert', b'git.renamelimit', default=400,
-)
-coreconfigitem(
-    b'convert', b'git.saverev', default=True,
-)
-coreconfigitem(
-    b'convert', b'git.similarity', default=50,
-)
-coreconfigitem(
-    b'convert', b'git.skipsubmodules', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.clonebranches', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.ignoreerrors', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.preserve-hash', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.revs', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.saverev', default=False,
-)
-coreconfigitem(
-    b'convert', b'hg.sourcename', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.startrev', default=None,
-)
-coreconfigitem(
-    b'convert', b'hg.tagsbranch', default=b'default',
-)
-coreconfigitem(
-    b'convert', b'hg.usebranchnames', default=True,
-)
-coreconfigitem(
-    b'convert', b'ignoreancestorcheck', default=False, experimental=True,
-)
-coreconfigitem(
-    b'convert', b'localtimezone', default=False,
-)
-coreconfigitem(
-    b'convert', b'p4.encoding', default=dynamicdefault,
-)
-coreconfigitem(
-    b'convert', b'p4.startrev', default=0,
-)
-coreconfigitem(
-    b'convert', b'skiptags', default=False,
-)
-coreconfigitem(
-    b'convert', b'svn.debugsvnlog', default=True,
-)
-coreconfigitem(
-    b'convert', b'svn.trunk', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.tags', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.branches', default=None,
-)
-coreconfigitem(
-    b'convert', b'svn.startrev', default=0,
-)
-coreconfigitem(
-    b'debug', b'dirstate.delaywrite', default=0,
-)
-coreconfigitem(
-    b'defaults', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'devel', b'all-warnings', default=False,
-)
-coreconfigitem(
-    b'devel', b'bundle2.debug', default=False,
-)
-coreconfigitem(
-    b'devel', b'bundle.delta', default=b'',
-)
-coreconfigitem(
-    b'devel', b'cache-vfs', default=None,
-)
-coreconfigitem(
-    b'devel', b'check-locks', default=False,
-)
-coreconfigitem(
-    b'devel', b'check-relroot', default=False,
-)
-coreconfigitem(
-    b'devel', b'default-date', default=None,
-)
-coreconfigitem(
-    b'devel', b'deprec-warn', default=False,
-)
-coreconfigitem(
-    b'devel', b'disableloaddefaultcerts', default=False,
-)
-coreconfigitem(
-    b'devel', b'warn-empty-changegroup', default=False,
-)
-coreconfigitem(
-    b'devel', b'legacy.exchange', default=list,
-)
-coreconfigitem(
-    b'devel', b'persistent-nodemap', default=False,
-)
-coreconfigitem(
-    b'devel', b'servercafile', default=b'',
-)
-coreconfigitem(
-    b'devel', b'serverexactprotocol', default=b'',
-)
-coreconfigitem(
-    b'devel', b'serverrequirecert', default=False,
-)
-coreconfigitem(
-    b'devel', b'strip-obsmarkers', default=True,
-)
-coreconfigitem(
-    b'devel', b'warn-config', default=None,
-)
-coreconfigitem(
-    b'devel', b'warn-config-default', default=None,
-)
-coreconfigitem(
-    b'devel', b'user.obsmarker', default=None,
-)
-coreconfigitem(
-    b'devel', b'warn-config-unknown', default=None,
-)
-coreconfigitem(
-    b'devel', b'debug.copies', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.extensions', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.repo-filters', default=False,
-)
-coreconfigitem(
-    b'devel', b'debug.peer-request', default=False,
-)
-coreconfigitem(
-    b'devel', b'discovery.randomize', default=True,
+    b'commands',
+    b'show.aliasprefix',
+    default=list,
+)
+coreconfigitem(
+    b'commands',
+    b'status.relative',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'status.skipstates',
+    default=[],
+    experimental=True,
+)
+coreconfigitem(
+    b'commands',
+    b'status.terse',
+    default=b'',
+)
+coreconfigitem(
+    b'commands',
+    b'status.verbose',
+    default=False,
+)
+coreconfigitem(
+    b'commands',
+    b'update.check',
+    default=None,
+)
+coreconfigitem(
+    b'commands',
+    b'update.requiredest',
+    default=False,
+)
+coreconfigitem(
+    b'committemplate',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'convert',
+    b'bzr.saverev',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.cache',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.fuzz',
+    default=60,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.logencoding',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.mergefrom',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'cvsps.mergeto',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'git.committeractions',
+    default=lambda: [b'messagedifferent'],
+)
+coreconfigitem(
+    b'convert',
+    b'git.extrakeys',
+    default=list,
+)
+coreconfigitem(
+    b'convert',
+    b'git.findcopiesharder',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'git.remoteprefix',
+    default=b'remote',
+)
+coreconfigitem(
+    b'convert',
+    b'git.renamelimit',
+    default=400,
+)
+coreconfigitem(
+    b'convert',
+    b'git.saverev',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'git.similarity',
+    default=50,
+)
+coreconfigitem(
+    b'convert',
+    b'git.skipsubmodules',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.clonebranches',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.ignoreerrors',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.preserve-hash',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.revs',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.saverev',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.sourcename',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.startrev',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'hg.tagsbranch',
+    default=b'default',
+)
+coreconfigitem(
+    b'convert',
+    b'hg.usebranchnames',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'ignoreancestorcheck',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'convert',
+    b'localtimezone',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'p4.encoding',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'convert',
+    b'p4.startrev',
+    default=0,
+)
+coreconfigitem(
+    b'convert',
+    b'skiptags',
+    default=False,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.debugsvnlog',
+    default=True,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.trunk',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.tags',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.branches',
+    default=None,
+)
+coreconfigitem(
+    b'convert',
+    b'svn.startrev',
+    default=0,
+)
+coreconfigitem(
+    b'debug',
+    b'dirstate.delaywrite',
+    default=0,
+)
+coreconfigitem(
+    b'defaults',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'devel',
+    b'all-warnings',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'bundle2.debug',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'bundle.delta',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'cache-vfs',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'check-locks',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'check-relroot',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'default-date',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'deprec-warn',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'disableloaddefaultcerts',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-empty-changegroup',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'legacy.exchange',
+    default=list,
+)
+# When True, revlogs use a special reference version of the nodemap, which is
+# not performant but is "known" to behave properly.
+coreconfigitem(
+    b'devel',
+    b'persistent-nodemap',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'servercafile',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'serverexactprotocol',
+    default=b'',
+)
+coreconfigitem(
+    b'devel',
+    b'serverrequirecert',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'strip-obsmarkers',
+    default=True,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config-default',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'user.obsmarker',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'warn-config-unknown',
+    default=None,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.copies',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.extensions',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.repo-filters',
+    default=False,
+)
+coreconfigitem(
+    b'devel',
+    b'debug.peer-request',
+    default=False,
+)
+# If discovery.exchange-heads is False, the discovery will not start with
+# remote head fetching and local head querying.
+coreconfigitem(
+    b'devel',
+    b'discovery.exchange-heads',
+    default=True,
+)
+# If discovery.grow-sample is False, the sample size used in set discovery will
+# not be increased through the process
+coreconfigitem(
+    b'devel',
+    b'discovery.grow-sample',
+    default=True,
+)
+# discovery.grow-sample.rate controls the rate at which the sample grows
+coreconfigitem(
+    b'devel',
+    b'discovery.grow-sample.rate',
+    default=1.05,
+)
+# If discovery.randomize is False, random sampling during discovery is
+# deterministic. It is meant for integration tests.
+coreconfigitem(
+    b'devel',
+    b'discovery.randomize',
+    default=True,
 )
 _registerdiffopts(section=b'diff')
 coreconfigitem(
-    b'email', b'bcc', default=None,
-)
-coreconfigitem(
-    b'email', b'cc', default=None,
-)
-coreconfigitem(
-    b'email', b'charsets', default=list,
-)
-coreconfigitem(
-    b'email', b'from', default=None,
-)
-coreconfigitem(
-    b'email', b'method', default=b'smtp',
-)
-coreconfigitem(
-    b'email', b'reply-to', default=None,
-)
-coreconfigitem(
-    b'email', b'to', default=None,
-)
-coreconfigitem(
-    b'experimental', b'archivemetatemplate', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'auto-publish', default=b'publish',
-)
-coreconfigitem(
-    b'experimental', b'bundle-phases', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2-advertise', default=True,
-)
-coreconfigitem(
-    b'experimental', b'bundle2-output-capture', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2.pushback', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundle2lazylocking', default=False,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.bzip2', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.gzip', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.none', default=None,
-)
-coreconfigitem(
-    b'experimental', b'bundlecomplevel.zstd', default=None,
-)
-coreconfigitem(
-    b'experimental', b'changegroup3', default=False,
-)
-coreconfigitem(
-    b'experimental', b'cleanup-as-archived', default=False,
-)
-coreconfigitem(
-    b'experimental', b'clientcompressionengines', default=list,
-)
-coreconfigitem(
-    b'experimental', b'copytrace', default=b'on',
-)
-coreconfigitem(
-    b'experimental', b'copytrace.movecandidateslimit', default=100,
-)
-coreconfigitem(
-    b'experimental', b'copytrace.sourcecommitlimit', default=100,
-)
-coreconfigitem(
-    b'experimental', b'copies.read-from', default=b"filelog-only",
-)
-coreconfigitem(
-    b'experimental', b'copies.write-to', default=b'filelog-only',
-)
-coreconfigitem(
-    b'experimental', b'crecordtest', default=None,
-)
-coreconfigitem(
-    b'experimental', b'directaccess', default=False,
-)
-coreconfigitem(
-    b'experimental', b'directaccess.revnums', default=False,
-)
-coreconfigitem(
-    b'experimental', b'editortmpinhg', default=False,
-)
-coreconfigitem(
-    b'experimental', b'evolution', default=list,
+    b'email',
+    b'bcc',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'cc',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'charsets',
+    default=list,
+)
+coreconfigitem(
+    b'email',
+    b'from',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'method',
+    default=b'smtp',
+)
+coreconfigitem(
+    b'email',
+    b'reply-to',
+    default=None,
+)
+coreconfigitem(
+    b'email',
+    b'to',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'archivemetatemplate',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'auto-publish',
+    default=b'publish',
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle-phases',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2-advertise',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2-output-capture',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2.pushback',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundle2lazylocking',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.bzip2',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.gzip',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.none',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'bundlecomplevel.zstd',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'changegroup3',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'cleanup-as-archived',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'clientcompressionengines',
+    default=list,
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace',
+    default=b'on',
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace.movecandidateslimit',
+    default=100,
+)
+coreconfigitem(
+    b'experimental',
+    b'copytrace.sourcecommitlimit',
+    default=100,
+)
+coreconfigitem(
+    b'experimental',
+    b'copies.read-from',
+    default=b"filelog-only",
+)
+coreconfigitem(
+    b'experimental',
+    b'copies.write-to',
+    default=b'filelog-only',
+)
+coreconfigitem(
+    b'experimental',
+    b'crecordtest',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'directaccess',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'directaccess.revnums',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'editortmpinhg',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution',
+    default=list,
 )
 coreconfigitem(
     b'experimental',
@@ -554,10 +897,14 @@
     alias=[(b'experimental', b'allowdivergence')],
 )
 coreconfigitem(
-    b'experimental', b'evolution.allowunstable', default=None,
-)
-coreconfigitem(
-    b'experimental', b'evolution.createmarkers', default=None,
+    b'experimental',
+    b'evolution.allowunstable',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.createmarkers',
+    default=None,
 )
 coreconfigitem(
     b'experimental',
@@ -566,109 +913,178 @@
     alias=[(b'experimental', b'effect-flags')],
 )
 coreconfigitem(
-    b'experimental', b'evolution.exchange', default=None,
-)
-coreconfigitem(
-    b'experimental', b'evolution.bundle-obsmarker', default=False,
-)
-coreconfigitem(
-    b'experimental', b'log.topo', default=False,
-)
-coreconfigitem(
-    b'experimental', b'evolution.report-instabilities', default=True,
-)
-coreconfigitem(
-    b'experimental', b'evolution.track-operation', default=True,
+    b'experimental',
+    b'evolution.exchange',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.bundle-obsmarker',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.bundle-obsmarker:mandatory',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'log.topo',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.report-instabilities',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'evolution.track-operation',
+    default=True,
 )
 # repo-level config to exclude a revset visibility
 #
 # The target use case is to use `share` to expose different subset of the same
 # repository, especially server side. See also `server.view`.
 coreconfigitem(
-    b'experimental', b'extra-filter-revs', default=None,
-)
-coreconfigitem(
-    b'experimental', b'maxdeltachainspan', default=-1,
+    b'experimental',
+    b'extra-filter-revs',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'maxdeltachainspan',
+    default=-1,
 )
 # tracks files which were undeleted (merge might delete them but we explicitly
 # kept/undeleted them) and creates new filenodes for them
 coreconfigitem(
-    b'experimental', b'merge-track-salvaged', default=False,
-)
-coreconfigitem(
-    b'experimental', b'mergetempdirprefix', default=None,
-)
-coreconfigitem(
-    b'experimental', b'mmapindexthreshold', default=None,
-)
-coreconfigitem(
-    b'experimental', b'narrow', default=False,
-)
-coreconfigitem(
-    b'experimental', b'nonnormalparanoidcheck', default=False,
-)
-coreconfigitem(
-    b'experimental', b'exportableenviron', default=list,
-)
-coreconfigitem(
-    b'experimental', b'extendedheader.index', default=None,
-)
-coreconfigitem(
-    b'experimental', b'extendedheader.similarity', default=False,
-)
-coreconfigitem(
-    b'experimental', b'graphshorten', default=False,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.parent', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.missing', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'graphstyle.grandparent', default=dynamicdefault,
-)
-coreconfigitem(
-    b'experimental', b'hook-track-tags', default=False,
-)
-coreconfigitem(
-    b'experimental', b'httppeer.advertise-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'httppeer.v2-encoder-order', default=None,
-)
-coreconfigitem(
-    b'experimental', b'httppostargs', default=False,
+    b'experimental',
+    b'merge-track-salvaged',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'mergetempdirprefix',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'mmapindexthreshold',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'narrow',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'nonnormalparanoidcheck',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'exportableenviron',
+    default=list,
+)
+coreconfigitem(
+    b'experimental',
+    b'extendedheader.index',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'extendedheader.similarity',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphshorten',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.parent',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.missing',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'graphstyle.grandparent',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'experimental',
+    b'hook-track-tags',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppeer.advertise-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppeer.v2-encoder-order',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'httppostargs',
+    default=False,
 )
 coreconfigitem(b'experimental', b'nointerrupt', default=False)
 coreconfigitem(b'experimental', b'nointerrupt-interactiveonly', default=True)
 
 coreconfigitem(
-    b'experimental', b'obsmarkers-exchange-debug', default=False,
-)
-coreconfigitem(
-    b'experimental', b'remotenames', default=False,
-)
-coreconfigitem(
-    b'experimental', b'removeemptydirs', default=True,
-)
-coreconfigitem(
-    b'experimental', b'revert.interactive.select-to-keep', default=False,
-)
-coreconfigitem(
-    b'experimental', b'revisions.prefixhexnode', default=False,
-)
-coreconfigitem(
-    b'experimental', b'revlogv2', default=None,
-)
-coreconfigitem(
-    b'experimental', b'revisions.disambiguatewithin', default=None,
-)
-coreconfigitem(
-    b'experimental', b'rust.index', default=False,
-)
-coreconfigitem(
-    b'experimental', b'server.filesdata.recommended-batch-size', default=50000,
+    b'experimental',
+    b'obsmarkers-exchange-debug',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'remotenames',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'removeemptydirs',
+    default=True,
+)
+coreconfigitem(
+    b'experimental',
+    b'revert.interactive.select-to-keep',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'revisions.prefixhexnode',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'revlogv2',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'revisions.disambiguatewithin',
+    default=None,
+)
+coreconfigitem(
+    b'experimental',
+    b'rust.index',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'server.filesdata.recommended-batch-size',
+    default=50000,
 )
 coreconfigitem(
     b'experimental',
@@ -676,10 +1092,14 @@
     default=100000,
 )
 coreconfigitem(
-    b'experimental', b'server.stream-narrow-clones', default=False,
-)
-coreconfigitem(
-    b'experimental', b'single-head-per-branch', default=False,
+    b'experimental',
+    b'server.stream-narrow-clones',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'single-head-per-branch',
+    default=False,
 )
 coreconfigitem(
     b'experimental',
@@ -687,73 +1107,130 @@
     default=False,
 )
 coreconfigitem(
-    b'experimental', b'sshserver.support-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read.density-threshold', default=0.50,
-)
-coreconfigitem(
-    b'experimental', b'sparse-read.min-gap-size', default=b'65K',
-)
-coreconfigitem(
-    b'experimental', b'treemanifest', default=False,
-)
-coreconfigitem(
-    b'experimental', b'update.atomic-file', default=False,
-)
-coreconfigitem(
-    b'experimental', b'sshpeer.advertise-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.apiserver', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.api.http-v2', default=False,
-)
-coreconfigitem(
-    b'experimental', b'web.api.debugreflect', default=False,
-)
-coreconfigitem(
-    b'experimental', b'worker.wdir-get-thread-safe', default=False,
-)
-coreconfigitem(
-    b'experimental', b'worker.repository-upgrade', default=False,
-)
-coreconfigitem(
-    b'experimental', b'xdiff', default=False,
-)
-coreconfigitem(
-    b'extensions', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'extdata', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'format', b'bookmarks-in-store', default=False,
-)
-coreconfigitem(
-    b'format', b'chunkcachesize', default=None, experimental=True,
-)
-coreconfigitem(
-    b'format', b'dotencode', default=True,
-)
-coreconfigitem(
-    b'format', b'generaldelta', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'manifestcachesize', default=None, experimental=True,
-)
-coreconfigitem(
-    b'format', b'maxchainlen', default=dynamicdefault, experimental=True,
-)
-coreconfigitem(
-    b'format', b'obsstore-version', default=None,
-)
-coreconfigitem(
-    b'format', b'sparse-revlog', default=True,
+    b'experimental',
+    b'single-head-per-branch:public-changes-only',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sshserver.support-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read.density-threshold',
+    default=0.50,
+)
+coreconfigitem(
+    b'experimental',
+    b'sparse-read.min-gap-size',
+    default=b'65K',
+)
+coreconfigitem(
+    b'experimental',
+    b'treemanifest',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'update.atomic-file',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'sshpeer.advertise-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.apiserver',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.api.http-v2',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'web.api.debugreflect',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'worker.wdir-get-thread-safe',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'worker.repository-upgrade',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'xdiff',
+    default=False,
+)
+coreconfigitem(
+    b'extensions',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'extdata',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'format',
+    b'bookmarks-in-store',
+    default=False,
+)
+coreconfigitem(
+    b'format',
+    b'chunkcachesize',
+    default=None,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'dotencode',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'generaldelta',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'manifestcachesize',
+    default=None,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'maxchainlen',
+    default=dynamicdefault,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'obsstore-version',
+    default=None,
+)
+coreconfigitem(
+    b'format',
+    b'sparse-revlog',
+    default=True,
 )
 coreconfigitem(
     b'format',
@@ -762,19 +1239,24 @@
     alias=[(b'experimental', b'format.compression')],
 )
 coreconfigitem(
-    b'format', b'usefncache', default=True,
-)
-coreconfigitem(
-    b'format', b'usegeneraldelta', default=True,
-)
-coreconfigitem(
-    b'format', b'usestore', default=True,
-)
-# Right now, the only efficient implement of the nodemap logic is in Rust, so
-# the persistent nodemap feature needs to stay experimental as long as the Rust
-# extensions are an experimental feature.
-coreconfigitem(
-    b'format', b'use-persistent-nodemap', default=False, experimental=True
+    b'format',
+    b'usefncache',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'usegeneraldelta',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'usestore',
+    default=True,
+)
+coreconfigitem(
+    b'format',
+    b'use-persistent-nodemap',
+    default=False,
 )
 coreconfigitem(
     b'format',
@@ -783,43 +1265,76 @@
     experimental=True,
 )
 coreconfigitem(
-    b'format', b'exp-use-side-data', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'exp-share-safe', default=False, experimental=True,
-)
-coreconfigitem(
-    b'format', b'internal-phase', default=False, experimental=True,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_when_unused', default=True,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_update_file_count', default=50000,
-)
-coreconfigitem(
-    b'fsmonitor', b'warn_update_file_count_rust', default=400000,
-)
-coreconfigitem(
-    b'help', br'hidden-command\..*', default=False, generic=True,
-)
-coreconfigitem(
-    b'help', br'hidden-topic\..*', default=False, generic=True,
-)
-coreconfigitem(
-    b'hooks', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'hgweb-paths', b'.*', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostfingerprints', b'.*', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'ciphers', default=None,
-)
-coreconfigitem(
-    b'hostsecurity', b'minimumprotocol', default=dynamicdefault,
+    b'format',
+    b'exp-use-side-data',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'format',
+    b'use-share-safe',
+    default=False,
+)
+coreconfigitem(
+    b'format',
+    b'internal-phase',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_when_unused',
+    default=True,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_update_file_count',
+    default=50000,
+)
+coreconfigitem(
+    b'fsmonitor',
+    b'warn_update_file_count_rust',
+    default=400000,
+)
+coreconfigitem(
+    b'help',
+    br'hidden-command\..*',
+    default=False,
+    generic=True,
+)
+coreconfigitem(
+    b'help',
+    br'hidden-topic\..*',
+    default=False,
+    generic=True,
+)
+coreconfigitem(
+    b'hooks',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'hgweb-paths',
+    b'.*',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostfingerprints',
+    b'.*',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'ciphers',
+    default=None,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'minimumprotocol',
+    default=dynamicdefault,
 )
 coreconfigitem(
     b'hostsecurity',
@@ -828,73 +1343,122 @@
     generic=True,
 )
 coreconfigitem(
-    b'hostsecurity', b'.*:ciphers$', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'.*:fingerprints$', default=list, generic=True,
-)
-coreconfigitem(
-    b'hostsecurity', b'.*:verifycertsfile$', default=None, generic=True,
+    b'hostsecurity',
+    b'.*:ciphers$',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'.*:fingerprints$',
+    default=list,
+    generic=True,
+)
+coreconfigitem(
+    b'hostsecurity',
+    b'.*:verifycertsfile$',
+    default=None,
+    generic=True,
 )
 
 coreconfigitem(
-    b'http_proxy', b'always', default=False,
-)
-coreconfigitem(
-    b'http_proxy', b'host', default=None,
-)
-coreconfigitem(
-    b'http_proxy', b'no', default=list,
-)
-coreconfigitem(
-    b'http_proxy', b'passwd', default=None,
-)
-coreconfigitem(
-    b'http_proxy', b'user', default=None,
+    b'http_proxy',
+    b'always',
+    default=False,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'host',
+    default=None,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'no',
+    default=list,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'passwd',
+    default=None,
+)
+coreconfigitem(
+    b'http_proxy',
+    b'user',
+    default=None,
 )
 
 coreconfigitem(
-    b'http', b'timeout', default=None,
+    b'http',
+    b'timeout',
+    default=None,
 )
 
 coreconfigitem(
-    b'logtoprocess', b'commandexception', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'commandfinish', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'command', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'develwarn', default=None,
-)
-coreconfigitem(
-    b'logtoprocess', b'uiblocked', default=None,
-)
-coreconfigitem(
-    b'merge', b'checkunknown', default=b'abort',
-)
-coreconfigitem(
-    b'merge', b'checkignored', default=b'abort',
-)
-coreconfigitem(
-    b'experimental', b'merge.checkpathconflicts', default=False,
-)
-coreconfigitem(
-    b'merge', b'followcopies', default=True,
-)
-coreconfigitem(
-    b'merge', b'on-failure', default=b'continue',
-)
-coreconfigitem(
-    b'merge', b'preferancestor', default=lambda: [b'*'], experimental=True,
-)
-coreconfigitem(
-    b'merge', b'strict-capability-check', default=False,
-)
-coreconfigitem(
-    b'merge-tools', b'.*', default=None, generic=True,
+    b'logtoprocess',
+    b'commandexception',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'commandfinish',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'command',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'develwarn',
+    default=None,
+)
+coreconfigitem(
+    b'logtoprocess',
+    b'uiblocked',
+    default=None,
+)
+coreconfigitem(
+    b'merge',
+    b'checkunknown',
+    default=b'abort',
+)
+coreconfigitem(
+    b'merge',
+    b'checkignored',
+    default=b'abort',
+)
+coreconfigitem(
+    b'experimental',
+    b'merge.checkpathconflicts',
+    default=False,
+)
+coreconfigitem(
+    b'merge',
+    b'followcopies',
+    default=True,
+)
+coreconfigitem(
+    b'merge',
+    b'on-failure',
+    default=b'continue',
+)
+coreconfigitem(
+    b'merge',
+    b'preferancestor',
+    default=lambda: [b'*'],
+    experimental=True,
+)
+coreconfigitem(
+    b'merge',
+    b'strict-capability-check',
+    default=False,
+)
+coreconfigitem(
+    b'merge-tools',
+    b'.*',
+    default=None,
+    generic=True,
 )
 coreconfigitem(
     b'merge-tools',
@@ -904,10 +1468,18 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.binary$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'merge-tools', br'.*\.check$', default=list, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.binary$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'merge-tools',
+    br'.*\.check$',
+    default=list,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -924,10 +1496,18 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.fixeol$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'merge-tools', br'.*\.gui$', default=False, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.fixeol$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'merge-tools',
+    br'.*\.gui$',
+    default=False,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -939,12 +1519,16 @@
 coreconfigitem(
     b'merge-tools',
     br'.*\.mergemarkertemplate$',
-    default=dynamicdefault,  # take from ui.mergemarkertemplate
+    default=dynamicdefault,  # take from command-templates.mergemarker
     generic=True,
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.priority$', default=0, generic=True, priority=-1,
+    b'merge-tools',
+    br'.*\.priority$',
+    default=0,
+    generic=True,
+    priority=-1,
 )
 coreconfigitem(
     b'merge-tools',
@@ -954,100 +1538,168 @@
     priority=-1,
 )
 coreconfigitem(
-    b'merge-tools', br'.*\.symlink$', default=False, generic=True, priority=-1,
-)
-coreconfigitem(
-    b'pager', b'attend-.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'pager', b'ignore', default=list,
-)
-coreconfigitem(
-    b'pager', b'pager', default=dynamicdefault,
-)
-coreconfigitem(
-    b'patch', b'eol', default=b'strict',
-)
-coreconfigitem(
-    b'patch', b'fuzz', default=2,
-)
-coreconfigitem(
-    b'paths', b'default', default=None,
-)
-coreconfigitem(
-    b'paths', b'default-push', default=None,
-)
-coreconfigitem(
-    b'paths', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'phases', b'checksubrepos', default=b'follow',
-)
-coreconfigitem(
-    b'phases', b'new-commit', default=b'draft',
-)
-coreconfigitem(
-    b'phases', b'publish', default=True,
-)
-coreconfigitem(
-    b'profiling', b'enabled', default=False,
-)
-coreconfigitem(
-    b'profiling', b'format', default=b'text',
-)
-coreconfigitem(
-    b'profiling', b'freq', default=1000,
-)
-coreconfigitem(
-    b'profiling', b'limit', default=30,
-)
-coreconfigitem(
-    b'profiling', b'nested', default=0,
-)
-coreconfigitem(
-    b'profiling', b'output', default=None,
-)
-coreconfigitem(
-    b'profiling', b'showmax', default=0.999,
-)
-coreconfigitem(
-    b'profiling', b'showmin', default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling', b'showtime', default=True,
-)
-coreconfigitem(
-    b'profiling', b'sort', default=b'inlinetime',
-)
-coreconfigitem(
-    b'profiling', b'statformat', default=b'hotpath',
-)
-coreconfigitem(
-    b'profiling', b'time-track', default=dynamicdefault,
-)
-coreconfigitem(
-    b'profiling', b'type', default=b'stat',
-)
-coreconfigitem(
-    b'progress', b'assume-tty', default=False,
-)
-coreconfigitem(
-    b'progress', b'changedelay', default=1,
-)
-coreconfigitem(
-    b'progress', b'clear-complete', default=True,
-)
-coreconfigitem(
-    b'progress', b'debug', default=False,
-)
-coreconfigitem(
-    b'progress', b'delay', default=3,
-)
-coreconfigitem(
-    b'progress', b'disable', default=False,
-)
-coreconfigitem(
-    b'progress', b'estimateinterval', default=60.0,
+    b'merge-tools',
+    br'.*\.symlink$',
+    default=False,
+    generic=True,
+    priority=-1,
+)
+coreconfigitem(
+    b'pager',
+    b'attend-.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'pager',
+    b'ignore',
+    default=list,
+)
+coreconfigitem(
+    b'pager',
+    b'pager',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'patch',
+    b'eol',
+    default=b'strict',
+)
+coreconfigitem(
+    b'patch',
+    b'fuzz',
+    default=2,
+)
+coreconfigitem(
+    b'paths',
+    b'default',
+    default=None,
+)
+coreconfigitem(
+    b'paths',
+    b'default-push',
+    default=None,
+)
+coreconfigitem(
+    b'paths',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'phases',
+    b'checksubrepos',
+    default=b'follow',
+)
+coreconfigitem(
+    b'phases',
+    b'new-commit',
+    default=b'draft',
+)
+coreconfigitem(
+    b'phases',
+    b'publish',
+    default=True,
+)
+coreconfigitem(
+    b'profiling',
+    b'enabled',
+    default=False,
+)
+coreconfigitem(
+    b'profiling',
+    b'format',
+    default=b'text',
+)
+coreconfigitem(
+    b'profiling',
+    b'freq',
+    default=1000,
+)
+coreconfigitem(
+    b'profiling',
+    b'limit',
+    default=30,
+)
+coreconfigitem(
+    b'profiling',
+    b'nested',
+    default=0,
+)
+coreconfigitem(
+    b'profiling',
+    b'output',
+    default=None,
+)
+coreconfigitem(
+    b'profiling',
+    b'showmax',
+    default=0.999,
+)
+coreconfigitem(
+    b'profiling',
+    b'showmin',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'profiling',
+    b'showtime',
+    default=True,
+)
+coreconfigitem(
+    b'profiling',
+    b'sort',
+    default=b'inlinetime',
+)
+coreconfigitem(
+    b'profiling',
+    b'statformat',
+    default=b'hotpath',
+)
+coreconfigitem(
+    b'profiling',
+    b'time-track',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'profiling',
+    b'type',
+    default=b'stat',
+)
+coreconfigitem(
+    b'progress',
+    b'assume-tty',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'changedelay',
+    default=1,
+)
+coreconfigitem(
+    b'progress',
+    b'clear-complete',
+    default=True,
+)
+coreconfigitem(
+    b'progress',
+    b'debug',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'delay',
+    default=3,
+)
+coreconfigitem(
+    b'progress',
+    b'disable',
+    default=False,
+)
+coreconfigitem(
+    b'progress',
+    b'estimateinterval',
+    default=60.0,
 )
 coreconfigitem(
     b'progress',
@@ -1055,16 +1707,24 @@
     default=lambda: [b'topic', b'bar', b'number', b'estimate'],
 )
 coreconfigitem(
-    b'progress', b'refresh', default=0.1,
-)
-coreconfigitem(
-    b'progress', b'width', default=dynamicdefault,
-)
-coreconfigitem(
-    b'pull', b'confirm', default=False,
-)
-coreconfigitem(
-    b'push', b'pushvars.server', default=False,
+    b'progress',
+    b'refresh',
+    default=0.1,
+)
+coreconfigitem(
+    b'progress',
+    b'width',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'pull',
+    b'confirm',
+    default=False,
+)
+coreconfigitem(
+    b'push',
+    b'pushvars.server',
+    default=False,
 )
 coreconfigitem(
     b'rewrite',
@@ -1073,13 +1733,21 @@
     alias=[(b'ui', b'history-editing-backup')],
 )
 coreconfigitem(
-    b'rewrite', b'update-timestamp', default=False,
-)
-coreconfigitem(
-    b'rewrite', b'empty-successor', default=b'skip', experimental=True,
-)
-coreconfigitem(
-    b'storage', b'new-repo-backend', default=b'revlogv1', experimental=True,
+    b'rewrite',
+    b'update-timestamp',
+    default=False,
+)
+coreconfigitem(
+    b'rewrite',
+    b'empty-successor',
+    default=b'skip',
+    experimental=True,
+)
+coreconfigitem(
+    b'storage',
+    b'new-repo-backend',
+    default=b'revlogv1',
+    experimental=True,
 )
 coreconfigitem(
     b'storage',
@@ -1089,44 +1757,71 @@
 )
 # experimental as long as rust is experimental (or a C version is implemented)
 coreconfigitem(
-    b'storage', b'revlog.nodemap.mmap', default=True, experimental=True
+    b'storage',
+    b'revlog.persistent-nodemap.mmap',
+    default=True,
 )
 # experimental as long as format.use-persistent-nodemap is.
 coreconfigitem(
-    b'storage', b'revlog.nodemap.mode', default=b'compat', experimental=True
-)
-coreconfigitem(
-    b'storage', b'revlog.reuse-external-delta', default=True,
-)
-coreconfigitem(
-    b'storage', b'revlog.reuse-external-delta-parent', default=None,
-)
-coreconfigitem(
-    b'storage', b'revlog.zlib.level', default=None,
-)
-coreconfigitem(
-    b'storage', b'revlog.zstd.level', default=None,
-)
-coreconfigitem(
-    b'server', b'bookmarks-pushkey-compat', default=True,
-)
-coreconfigitem(
-    b'server', b'bundle1', default=True,
-)
-coreconfigitem(
-    b'server', b'bundle1gd', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1.pull', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1gd.pull', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1.push', default=None,
-)
-coreconfigitem(
-    b'server', b'bundle1gd.push', default=None,
+    b'storage',
+    b'revlog.persistent-nodemap.slow-path',
+    default=b"abort",
+)
+
+coreconfigitem(
+    b'storage',
+    b'revlog.reuse-external-delta',
+    default=True,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.reuse-external-delta-parent',
+    default=None,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.zlib.level',
+    default=None,
+)
+coreconfigitem(
+    b'storage',
+    b'revlog.zstd.level',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bookmarks-pushkey-compat',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1.pull',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd.pull',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1.push',
+    default=None,
+)
+coreconfigitem(
+    b'server',
+    b'bundle1gd.push',
+    default=None,
 )
 coreconfigitem(
     b'server',
@@ -1135,73 +1830,140 @@
     alias=[(b'experimental', b'bundle2.stream')],
 )
 coreconfigitem(
-    b'server', b'compressionengines', default=list,
-)
-coreconfigitem(
-    b'server', b'concurrent-push-mode', default=b'check-related',
-)
-coreconfigitem(
-    b'server', b'disablefullbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'maxhttpheaderlen', default=1024,
-)
-coreconfigitem(
-    b'server', b'pullbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'preferuncompressed', default=False,
-)
-coreconfigitem(
-    b'server', b'streamunbundle', default=False,
-)
-coreconfigitem(
-    b'server', b'uncompressed', default=True,
-)
-coreconfigitem(
-    b'server', b'uncompressedallowsecret', default=False,
-)
-coreconfigitem(
-    b'server', b'view', default=b'served',
-)
-coreconfigitem(
-    b'server', b'validate', default=False,
-)
-coreconfigitem(
-    b'server', b'zliblevel', default=-1,
-)
-coreconfigitem(
-    b'server', b'zstdlevel', default=3,
-)
-coreconfigitem(
-    b'share', b'pool', default=None,
-)
-coreconfigitem(
-    b'share', b'poolnaming', default=b'identity',
-)
-coreconfigitem(
-    b'shelve', b'maxbackups', default=10,
-)
-coreconfigitem(
-    b'smtp', b'host', default=None,
-)
-coreconfigitem(
-    b'smtp', b'local_hostname', default=None,
-)
-coreconfigitem(
-    b'smtp', b'password', default=None,
-)
-coreconfigitem(
-    b'smtp', b'port', default=dynamicdefault,
-)
-coreconfigitem(
-    b'smtp', b'tls', default=b'none',
-)
-coreconfigitem(
-    b'smtp', b'username', default=None,
-)
-coreconfigitem(
-    b'sparse', b'missingwarning', default=True, experimental=True,
+    b'server',
+    b'compressionengines',
+    default=list,
+)
+coreconfigitem(
+    b'server',
+    b'concurrent-push-mode',
+    default=b'check-related',
+)
+coreconfigitem(
+    b'server',
+    b'disablefullbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'maxhttpheaderlen',
+    default=1024,
+)
+coreconfigitem(
+    b'server',
+    b'pullbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'preferuncompressed',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'streamunbundle',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'uncompressed',
+    default=True,
+)
+coreconfigitem(
+    b'server',
+    b'uncompressedallowsecret',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'view',
+    default=b'served',
+)
+coreconfigitem(
+    b'server',
+    b'validate',
+    default=False,
+)
+coreconfigitem(
+    b'server',
+    b'zliblevel',
+    default=-1,
+)
+coreconfigitem(
+    b'server',
+    b'zstdlevel',
+    default=3,
+)
+coreconfigitem(
+    b'share',
+    b'pool',
+    default=None,
+)
+coreconfigitem(
+    b'share',
+    b'poolnaming',
+    default=b'identity',
+)
+coreconfigitem(
+    b'share',
+    b'safe-mismatch.source-not-safe',
+    default=b'abort',
+)
+coreconfigitem(
+    b'share',
+    b'safe-mismatch.source-safe',
+    default=b'abort',
+)
+coreconfigitem(
+    b'share',
+    b'safe-mismatch.source-not-safe.warn',
+    default=True,
+)
+coreconfigitem(
+    b'share',
+    b'safe-mismatch.source-safe.warn',
+    default=True,
+)
+coreconfigitem(
+    b'shelve',
+    b'maxbackups',
+    default=10,
+)
+coreconfigitem(
+    b'smtp',
+    b'host',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'local_hostname',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'password',
+    default=None,
+)
+coreconfigitem(
+    b'smtp',
+    b'port',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'smtp',
+    b'tls',
+    default=b'none',
+)
+coreconfigitem(
+    b'smtp',
+    b'username',
+    default=None,
+)
+coreconfigitem(
+    b'sparse',
+    b'missingwarning',
+    default=True,
+    experimental=True,
 )
 coreconfigitem(
     b'subrepos',
@@ -1209,385 +1971,612 @@
     default=dynamicdefault,  # to make backporting simpler
 )
 coreconfigitem(
-    b'subrepos', b'hg:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos', b'git:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'subrepos', b'svn:allowed', default=dynamicdefault,
-)
-coreconfigitem(
-    b'templates', b'.*', default=None, generic=True,
-)
-coreconfigitem(
-    b'templateconfig', b'.*', default=dynamicdefault, generic=True,
-)
-coreconfigitem(
-    b'trusted', b'groups', default=list,
-)
-coreconfigitem(
-    b'trusted', b'users', default=list,
-)
-coreconfigitem(
-    b'ui', b'_usedassubrepo', default=False,
-)
-coreconfigitem(
-    b'ui', b'allowemptycommit', default=False,
-)
-coreconfigitem(
-    b'ui', b'archivemeta', default=True,
-)
-coreconfigitem(
-    b'ui', b'askusername', default=False,
-)
-coreconfigitem(
-    b'ui', b'available-memory', default=None,
+    b'subrepos',
+    b'hg:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'subrepos',
+    b'git:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'subrepos',
+    b'svn:allowed',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'templates',
+    b'.*',
+    default=None,
+    generic=True,
+)
+coreconfigitem(
+    b'templateconfig',
+    b'.*',
+    default=dynamicdefault,
+    generic=True,
+)
+coreconfigitem(
+    b'trusted',
+    b'groups',
+    default=list,
+)
+coreconfigitem(
+    b'trusted',
+    b'users',
+    default=list,
+)
+coreconfigitem(
+    b'ui',
+    b'_usedassubrepo',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'allowemptycommit',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'archivemeta',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'askusername',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'available-memory',
+    default=None,
 )
 
 coreconfigitem(
-    b'ui', b'clonebundlefallback', default=False,
-)
-coreconfigitem(
-    b'ui', b'clonebundleprefers', default=list,
-)
-coreconfigitem(
-    b'ui', b'clonebundles', default=True,
-)
-coreconfigitem(
-    b'ui', b'color', default=b'auto',
-)
-coreconfigitem(
-    b'ui', b'commitsubrepos', default=False,
-)
-coreconfigitem(
-    b'ui', b'debug', default=False,
-)
-coreconfigitem(
-    b'ui', b'debugger', default=None,
-)
-coreconfigitem(
-    b'ui', b'editor', default=dynamicdefault,
-)
-coreconfigitem(
-    b'ui', b'fallbackencoding', default=None,
-)
-coreconfigitem(
-    b'ui', b'forcecwd', default=None,
-)
-coreconfigitem(
-    b'ui', b'forcemerge', default=None,
-)
-coreconfigitem(
-    b'ui', b'formatdebug', default=False,
-)
-coreconfigitem(
-    b'ui', b'formatjson', default=False,
-)
-coreconfigitem(
-    b'ui', b'formatted', default=None,
-)
-coreconfigitem(
-    b'ui', b'graphnodetemplate', default=None,
-)
-coreconfigitem(
-    b'ui', b'interactive', default=None,
-)
-coreconfigitem(
-    b'ui', b'interface', default=None,
-)
-coreconfigitem(
-    b'ui', b'interface.chunkselector', default=None,
-)
-coreconfigitem(
-    b'ui', b'large-file-limit', default=10000000,
-)
-coreconfigitem(
-    b'ui', b'logblockedtimes', default=False,
-)
-coreconfigitem(
-    b'ui', b'logtemplate', default=None,
-)
-coreconfigitem(
-    b'ui', b'merge', default=None,
-)
-coreconfigitem(
-    b'ui', b'mergemarkers', default=b'basic',
+    b'ui',
+    b'clonebundlefallback',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'clonebundleprefers',
+    default=list,
+)
+coreconfigitem(
+    b'ui',
+    b'clonebundles',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'color',
+    default=b'auto',
+)
+coreconfigitem(
+    b'ui',
+    b'commitsubrepos',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'debug',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'debugger',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'editor',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'ui',
+    b'detailed-exit-code',
+    default=False,
+    experimental=True,
+)
+coreconfigitem(
+    b'ui',
+    b'fallbackencoding',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'forcecwd',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'forcemerge',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'formatdebug',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'formatjson',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'formatted',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interactive',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interface',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'interface.chunkselector',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'large-file-limit',
+    default=10000000,
+)
+coreconfigitem(
+    b'ui',
+    b'logblockedtimes',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'merge',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'mergemarkers',
+    default=b'basic',
+)
+coreconfigitem(
+    b'ui',
+    b'message-output',
+    default=b'stdio',
+)
+coreconfigitem(
+    b'ui',
+    b'nontty',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'origbackuppath',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'paginate',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'patch',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'portablefilenames',
+    default=b'warn',
+)
+coreconfigitem(
+    b'ui',
+    b'promptecho',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'quiet',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'quietbookmarkmove',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'relative-paths',
+    default=b'legacy',
+)
+coreconfigitem(
+    b'ui',
+    b'remotecmd',
+    default=b'hg',
+)
+coreconfigitem(
+    b'ui',
+    b'report_untrusted',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'rollback',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'signal-safe-lock',
+    default=True,
+)
+coreconfigitem(
+    b'ui',
+    b'slash',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'ssh',
+    default=b'ssh',
+)
+coreconfigitem(
+    b'ui',
+    b'ssherrorhint',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'statuscopies',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'strict',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'style',
+    default=b'',
+)
+coreconfigitem(
+    b'ui',
+    b'supportcontact',
+    default=None,
+)
+coreconfigitem(
+    b'ui',
+    b'textwidth',
+    default=78,
+)
+coreconfigitem(
+    b'ui',
+    b'timeout',
+    default=b'600',
 )
 coreconfigitem(
     b'ui',
-    b'mergemarkertemplate',
-    default=(
-        b'{node|short} '
-        b'{ifeq(tags, "tip", "", '
-        b'ifeq(tags, "", "", "{tags} "))}'
-        b'{if(bookmarks, "{bookmarks} ")}'
-        b'{ifeq(branch, "default", "", "{branch} ")}'
-        b'- {author|user}: {desc|firstline}'
-    ),
-)
-coreconfigitem(
-    b'ui', b'message-output', default=b'stdio',
-)
-coreconfigitem(
-    b'ui', b'nontty', default=False,
-)
-coreconfigitem(
-    b'ui', b'origbackuppath', default=None,
-)
-coreconfigitem(
-    b'ui', b'paginate', default=True,
-)
-coreconfigitem(
-    b'ui', b'patch', default=None,
-)
-coreconfigitem(
-    b'ui', b'pre-merge-tool-output-template', default=None,
-)
-coreconfigitem(
-    b'ui', b'portablefilenames', default=b'warn',
-)
-coreconfigitem(
-    b'ui', b'promptecho', default=False,
-)
-coreconfigitem(
-    b'ui', b'quiet', default=False,
-)
-coreconfigitem(
-    b'ui', b'quietbookmarkmove', default=False,
-)
-coreconfigitem(
-    b'ui', b'relative-paths', default=b'legacy',
-)
-coreconfigitem(
-    b'ui', b'remotecmd', default=b'hg',
-)
-coreconfigitem(
-    b'ui', b'report_untrusted', default=True,
-)
-coreconfigitem(
-    b'ui', b'rollback', default=True,
-)
-coreconfigitem(
-    b'ui', b'signal-safe-lock', default=True,
-)
-coreconfigitem(
-    b'ui', b'slash', default=False,
-)
-coreconfigitem(
-    b'ui', b'ssh', default=b'ssh',
-)
-coreconfigitem(
-    b'ui', b'ssherrorhint', default=None,
-)
-coreconfigitem(
-    b'ui', b'statuscopies', default=False,
-)
-coreconfigitem(
-    b'ui', b'strict', default=False,
-)
-coreconfigitem(
-    b'ui', b'style', default=b'',
-)
-coreconfigitem(
-    b'ui', b'supportcontact', default=None,
-)
-coreconfigitem(
-    b'ui', b'textwidth', default=78,
-)
-coreconfigitem(
-    b'ui', b'timeout', default=b'600',
-)
-coreconfigitem(
-    b'ui', b'timeout.warn', default=0,
-)
-coreconfigitem(
-    b'ui', b'timestamp-output', default=False,
-)
-coreconfigitem(
-    b'ui', b'traceback', default=False,
-)
-coreconfigitem(
-    b'ui', b'tweakdefaults', default=False,
+    b'timeout.warn',
+    default=0,
+)
+coreconfigitem(
+    b'ui',
+    b'timestamp-output',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'traceback',
+    default=False,
+)
+coreconfigitem(
+    b'ui',
+    b'tweakdefaults',
+    default=False,
 )
 coreconfigitem(b'ui', b'username', alias=[(b'ui', b'user')])
 coreconfigitem(
-    b'ui', b'verbose', default=False,
-)
-coreconfigitem(
-    b'verify', b'skipflags', default=None,
-)
-coreconfigitem(
-    b'web', b'allowbz2', default=False,
-)
-coreconfigitem(
-    b'web', b'allowgz', default=False,
-)
-coreconfigitem(
-    b'web', b'allow-pull', alias=[(b'web', b'allowpull')], default=True,
-)
-coreconfigitem(
-    b'web', b'allow-push', alias=[(b'web', b'allow_push')], default=list,
-)
-coreconfigitem(
-    b'web', b'allowzip', default=False,
-)
-coreconfigitem(
-    b'web', b'archivesubrepos', default=False,
-)
-coreconfigitem(
-    b'web', b'cache', default=True,
-)
-coreconfigitem(
-    b'web', b'comparisoncontext', default=5,
-)
-coreconfigitem(
-    b'web', b'contact', default=None,
-)
-coreconfigitem(
-    b'web', b'deny_push', default=list,
-)
-coreconfigitem(
-    b'web', b'guessmime', default=False,
-)
-coreconfigitem(
-    b'web', b'hidden', default=False,
-)
-coreconfigitem(
-    b'web', b'labels', default=list,
-)
-coreconfigitem(
-    b'web', b'logoimg', default=b'hglogo.png',
-)
-coreconfigitem(
-    b'web', b'logourl', default=b'https://mercurial-scm.org/',
-)
-coreconfigitem(
-    b'web', b'accesslog', default=b'-',
-)
-coreconfigitem(
-    b'web', b'address', default=b'',
-)
-coreconfigitem(
-    b'web', b'allow-archive', alias=[(b'web', b'allow_archive')], default=list,
-)
-coreconfigitem(
-    b'web', b'allow_read', default=list,
-)
-coreconfigitem(
-    b'web', b'baseurl', default=None,
-)
-coreconfigitem(
-    b'web', b'cacerts', default=None,
-)
-coreconfigitem(
-    b'web', b'certificate', default=None,
-)
-coreconfigitem(
-    b'web', b'collapse', default=False,
-)
-coreconfigitem(
-    b'web', b'csp', default=None,
-)
-coreconfigitem(
-    b'web', b'deny_read', default=list,
-)
-coreconfigitem(
-    b'web', b'descend', default=True,
-)
-coreconfigitem(
-    b'web', b'description', default=b"",
-)
-coreconfigitem(
-    b'web', b'encoding', default=lambda: encoding.encoding,
-)
-coreconfigitem(
-    b'web', b'errorlog', default=b'-',
-)
-coreconfigitem(
-    b'web', b'ipv6', default=False,
-)
-coreconfigitem(
-    b'web', b'maxchanges', default=10,
-)
-coreconfigitem(
-    b'web', b'maxfiles', default=10,
-)
-coreconfigitem(
-    b'web', b'maxshortchanges', default=60,
-)
-coreconfigitem(
-    b'web', b'motd', default=b'',
-)
-coreconfigitem(
-    b'web', b'name', default=dynamicdefault,
-)
-coreconfigitem(
-    b'web', b'port', default=8000,
-)
-coreconfigitem(
-    b'web', b'prefix', default=b'',
-)
-coreconfigitem(
-    b'web', b'push_ssl', default=True,
-)
-coreconfigitem(
-    b'web', b'refreshinterval', default=20,
-)
-coreconfigitem(
-    b'web', b'server-header', default=None,
-)
-coreconfigitem(
-    b'web', b'static', default=None,
-)
-coreconfigitem(
-    b'web', b'staticurl', default=None,
-)
-coreconfigitem(
-    b'web', b'stripes', default=1,
-)
-coreconfigitem(
-    b'web', b'style', default=b'paper',
-)
-coreconfigitem(
-    b'web', b'templates', default=None,
-)
-coreconfigitem(
-    b'web', b'view', default=b'served', experimental=True,
-)
-coreconfigitem(
-    b'worker', b'backgroundclose', default=dynamicdefault,
+    b'ui',
+    b'verbose',
+    default=False,
+)
+coreconfigitem(
+    b'verify',
+    b'skipflags',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'allowbz2',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'allowgz',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'allow-pull',
+    alias=[(b'web', b'allowpull')],
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'allow-push',
+    alias=[(b'web', b'allow_push')],
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'allowzip',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'archivesubrepos',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'cache',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'comparisoncontext',
+    default=5,
+)
+coreconfigitem(
+    b'web',
+    b'contact',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'deny_push',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'guessmime',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'hidden',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'labels',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'logoimg',
+    default=b'hglogo.png',
+)
+coreconfigitem(
+    b'web',
+    b'logourl',
+    default=b'https://mercurial-scm.org/',
+)
+coreconfigitem(
+    b'web',
+    b'accesslog',
+    default=b'-',
+)
+coreconfigitem(
+    b'web',
+    b'address',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'allow-archive',
+    alias=[(b'web', b'allow_archive')],
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'allow_read',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'baseurl',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'cacerts',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'certificate',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'collapse',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'csp',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'deny_read',
+    default=list,
+)
+coreconfigitem(
+    b'web',
+    b'descend',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'description',
+    default=b"",
+)
+coreconfigitem(
+    b'web',
+    b'encoding',
+    default=lambda: encoding.encoding,
+)
+coreconfigitem(
+    b'web',
+    b'errorlog',
+    default=b'-',
+)
+coreconfigitem(
+    b'web',
+    b'ipv6',
+    default=False,
+)
+coreconfigitem(
+    b'web',
+    b'maxchanges',
+    default=10,
+)
+coreconfigitem(
+    b'web',
+    b'maxfiles',
+    default=10,
+)
+coreconfigitem(
+    b'web',
+    b'maxshortchanges',
+    default=60,
+)
+coreconfigitem(
+    b'web',
+    b'motd',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'name',
+    default=dynamicdefault,
+)
+coreconfigitem(
+    b'web',
+    b'port',
+    default=8000,
+)
+coreconfigitem(
+    b'web',
+    b'prefix',
+    default=b'',
+)
+coreconfigitem(
+    b'web',
+    b'push_ssl',
+    default=True,
+)
+coreconfigitem(
+    b'web',
+    b'refreshinterval',
+    default=20,
+)
+coreconfigitem(
+    b'web',
+    b'server-header',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'static',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'staticurl',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'stripes',
+    default=1,
+)
+coreconfigitem(
+    b'web',
+    b'style',
+    default=b'paper',
+)
+coreconfigitem(
+    b'web',
+    b'templates',
+    default=None,
+)
+coreconfigitem(
+    b'web',
+    b'view',
+    default=b'served',
+    experimental=True,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundclose',
+    default=dynamicdefault,
 )
 # Windows defaults to a limit of 512 open files. A buffer of 128
 # should give us enough headway.
 coreconfigitem(
-    b'worker', b'backgroundclosemaxqueue', default=384,
-)
-coreconfigitem(
-    b'worker', b'backgroundcloseminfilecount', default=2048,
-)
-coreconfigitem(
-    b'worker', b'backgroundclosethreadcount', default=4,
-)
-coreconfigitem(
-    b'worker', b'enabled', default=True,
-)
-coreconfigitem(
-    b'worker', b'numcpus', default=None,
+    b'worker',
+    b'backgroundclosemaxqueue',
+    default=384,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundcloseminfilecount',
+    default=2048,
+)
+coreconfigitem(
+    b'worker',
+    b'backgroundclosethreadcount',
+    default=4,
+)
+coreconfigitem(
+    b'worker',
+    b'enabled',
+    default=True,
+)
+coreconfigitem(
+    b'worker',
+    b'numcpus',
+    default=None,
 )
 
 # Rebase related configuration moved to core because other extension are doing
 # strange things. For example, shelve import the extensions to reuse some bit
 # without formally loading it.
 coreconfigitem(
-    b'commands', b'rebase.requiredest', default=False,
-)
-coreconfigitem(
-    b'experimental', b'rebaseskipobsolete', default=True,
-)
-coreconfigitem(
-    b'rebase', b'singletransaction', default=False,
-)
-coreconfigitem(
-    b'rebase', b'experimental.inmemory', default=False,
-)
+    b'commands',
+    b'rebase.requiredest',
+    default=False,
+)
+coreconfigitem(
+    b'experimental',
+    b'rebaseskipobsolete',
+    default=True,
+)
+coreconfigitem(
+    b'rebase',
+    b'singletransaction',
+    default=False,
+)
+coreconfigitem(
+    b'rebase',
+    b'experimental.inmemory',
+    default=False,
+)
--- a/mercurial/context.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/context.py	Tue Jan 19 21:48:43 2021 +0530
@@ -316,9 +316,9 @@
         return subrepo.nullsubrepo(self, path, pctx)
 
     def workingsub(self, path):
-        '''return a subrepo for the stored revision, or wdir if this is a wdir
+        """return a subrepo for the stored revision, or wdir if this is a wdir
         context.
-        '''
+        """
         return subrepo.subrepo(self, path, allowwdir=True)
 
     def match(
@@ -398,7 +398,17 @@
 
         If other is None, compare this node with working directory.
 
-        returns (modified, added, removed, deleted, unknown, ignored, clean)
+        ctx1.status(ctx2) returns the status of change from ctx1 to ctx2
+
+        Returns a mercurial.scmutil.status object.
+
+        Data can be accessed using either tuple notation:
+
+            (modified, added, removed, deleted, unknown, ignored, clean)
+
+        or direct attribute access:
+
+            s.modified, s.added, ...
         """
 
         ctx1 = self
@@ -1044,8 +1054,7 @@
         return lkr
 
     def isintroducedafter(self, changelogrev):
-        """True if a filectx has been introduced after a given floor revision
-        """
+        """True if a filectx has been introduced after a given floor revision"""
         if self.linkrev() >= changelogrev:
             return True
         introrev = self._introrev(stoprev=changelogrev)
@@ -1222,7 +1231,7 @@
 
 class filectx(basefilectx):
     """A filecontext object makes access to data related to a particular
-       filerevision convenient."""
+    filerevision convenient."""
 
     def __init__(
         self,
@@ -1234,15 +1243,16 @@
         changectx=None,
     ):
         """changeid must be a revision number, if specified.
-           fileid can be a file revision or node."""
+        fileid can be a file revision or node."""
         self._repo = repo
         self._path = path
 
         assert (
             changeid is not None or fileid is not None or changectx is not None
-        ), (
-            b"bad args: changeid=%r, fileid=%r, changectx=%r"
-            % (changeid, fileid, changectx,)
+        ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
+            changeid,
+            fileid,
+            changectx,
         )
 
         if filelog is not None:
@@ -1279,8 +1289,8 @@
             return self._repo.unfiltered()[self._changeid]
 
     def filectx(self, fileid, changeid=None):
-        '''opens an arbitrary revision of the file without
-        opening a new filelog'''
+        """opens an arbitrary revision of the file without
+        opening a new filelog"""
         return filectx(
             self._repo,
             self._path,
@@ -2091,7 +2101,7 @@
 
 class workingfilectx(committablefilectx):
     """A workingfilectx object makes access to data related to a particular
-       file in the working directory convenient."""
+    file in the working directory convenient."""
 
     def __init__(self, repo, path, filelog=None, workingctx=None):
         super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
@@ -2692,8 +2702,7 @@
 
     @propertycache
     def _changedset(self):
-        """Return the set of files changed in this context
-        """
+        """Return the set of files changed in this context"""
         changed = set(self._status.modified)
         changed.update(self._status.added)
         changed.update(self._status.removed)
@@ -2867,8 +2876,7 @@
 
     @propertycache
     def _status(self):
-        """Calculate exact status from ``files`` specified at construction
-        """
+        """Calculate exact status from ``files`` specified at construction"""
         man1 = self.p1().manifest()
         p2 = self._parents[1]
         # "1 < len(self._parents)" can't be used for checking
--- a/mercurial/copies.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/copies.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,3 +1,4 @@
+# coding: utf8
 # copies.py - copy detection for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
@@ -11,12 +12,15 @@
 import os
 
 from .i18n import _
-
+from .node import (
+    nullid,
+    nullrev,
+)
 
 from . import (
     match as matchmod,
-    node,
     pathutil,
+    policy,
     pycompat,
     util,
 )
@@ -24,7 +28,12 @@
 
 from .utils import stringutil
 
-from .revlogutils import flagutil
+from .revlogutils import (
+    flagutil,
+    sidedata as sidedatamod,
+)
+
+rustmod = policy.importrust("copy_tracing")
 
 
 def _filter(src, dst, t):
@@ -32,7 +41,7 @@
 
     # When _chain()'ing copies in 'a' (from 'src' via some other commit 'mid')
     # with copies in 'b' (from 'mid' to 'dst'), we can get the different cases
-    # in the following table (not including trivial cases). For example, case 2
+    # in the following table (not including trivial cases). For example, case 6
     # is where a file existed in 'src' and remained under that name in 'mid' and
     # then was renamed between 'mid' and 'dst'.
     #
@@ -141,7 +150,7 @@
     # optimization, since the ctx.files() for a merge commit is not correct for
     # this comparison.
     forwardmissingmatch = match
-    if b.p1() == a and b.p2().node() == node.nullid:
+    if b.p1() == a and b.p2().node() == nullid:
         filesmatcher = matchmod.exact(b.files())
         forwardmissingmatch = matchmod.intersectmatchers(match, filesmatcher)
     missing = _computeforwardmissing(a, b, match=forwardmissingmatch)
@@ -172,7 +181,7 @@
     return cm
 
 
-def _revinfo_getter(repo):
+def _revinfo_getter(repo, match):
     """returns a function that returns the following data given a <rev>"
 
     * p1: revision number of first parent
@@ -187,92 +196,142 @@
 
     changelogrevision = cl.changelogrevision
 
-    # A small cache to avoid doing the work twice for merges
-    #
-    # In the vast majority of cases, if we ask information for a revision
-    # about 1 parent, we'll later ask it for the other. So it make sense to
-    # keep the information around when reaching the first parent of a merge
-    # and dropping it after it was provided for the second parents.
-    #
-    # It exists cases were only one parent of the merge will be walked. It
-    # happens when the "destination" the copy tracing is descendant from a
-    # new root, not common with the "source". In that case, we will only walk
-    # through merge parents that are descendant of changesets common
-    # between "source" and "destination".
-    #
-    # With the current case implementation if such changesets have a copy
-    # information, we'll keep them in memory until the end of
-    # _changesetforwardcopies. We don't expect the case to be frequent
-    # enough to matters.
-    #
-    # In addition, it would be possible to reach pathological case, were
-    # many first parent are met before any second parent is reached. In
-    # that case the cache could grow. If this even become an issue one can
-    # safely introduce a maximum cache size. This would trade extra CPU/IO
-    # time to save memory.
-    merge_caches = {}
+    if rustmod is not None:
+
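+        # the Rust implementation works from the raw sidedata block, so return
+        # the unparsed bytes instead of a parsed `changes` object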
+        def revinfo(rev):
+            p1, p2 = parents(rev)
+            if flags(rev) & HASCOPIESINFO:
+                raw = changelogrevision(rev)._sidedata.get(sidedatamod.SD_FILES)
+            else:
+                raw = None
+            return (p1, p2, raw)
 
-    def revinfo(rev):
-        p1, p2 = parents(rev)
-        value = None
-        e = merge_caches.pop(rev, None)
-        if e is not None:
-            return e
-        changes = None
-        if flags(rev) & HASCOPIESINFO:
-            changes = changelogrevision(rev).changes
-        value = (p1, p2, changes)
-        if p1 != node.nullrev and p2 != node.nullrev:
-            # XXX some case we over cache, IGNORE
-            merge_caches[rev] = value
-        return value
+    else:
+
+        def revinfo(rev):
+            p1, p2 = parents(rev)
+            if flags(rev) & HASCOPIESINFO:
+                changes = changelogrevision(rev).changes
+            else:
+                changes = None
+            return (p1, p2, changes)
 
     return revinfo
 
 
+def cached_is_ancestor(is_ancestor):
+    """return a cached version of is_ancestor"""
+    cache = {}
+
+    def _is_ancestor(anc, desc):
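+        # a revision is always numbered higher than its ancestors, so anything
+        # numbered above `desc` cannot be an ancestor of it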
+        if anc > desc:
+            return False
+        elif anc == desc:
+            return True
+        key = (anc, desc)
+        ret = cache.get(key)
+        if ret is None:
+            ret = cache[key] = is_ancestor(anc, desc)
+        return ret
+
+    return _is_ancestor
+
+
 def _changesetforwardcopies(a, b, match):
-    if a.rev() in (node.nullrev, b.rev()):
+    if a.rev() in (nullrev, b.rev()):
         return {}
 
     repo = a.repo().unfiltered()
     children = {}
 
     cl = repo.changelog
-    isancestor = cl.isancestorrev  # XXX we should had chaching to this.
-    missingrevs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
-    mrset = set(missingrevs)
+    isancestor = cl.isancestorrev
+
+    # To track a rename from "A" to "B", we need to gather all parent →
+    # children edges that are contained in `::B` but not in `::A`.
+    #
+    # To do so, we need to gather all revisions exclusive¹ to "B" (i.e. `::b -
+    # ::a`) and also all the "root points", i.e. the parents of the exclusive
+    # set that belong to ::a. These are exactly all the revisions needed to
+    # express the parent → children edges we need to combine.
+    #
+    # [1] actually, we need to gather all the edges within `(::a)::b`, i.e.
+    # excluding paths that lead to roots that are not ancestors of `a`. We
+    # keep this out of the explanation because it is hard enough without this
+    # special case.
+
+    parents = cl._uncheckedparentrevs
+    graph_roots = (nullrev, nullrev)
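+    # the parents tuple of a revision that has no parents at all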
+
+    ancestors = cl.ancestors([a.rev()], inclusive=True)
+    revs = cl.findmissingrevs(common=[a.rev()], heads=[b.rev()])
     roots = set()
-    for r in missingrevs:
-        for p in cl.parentrevs(r):
-            if p == node.nullrev:
-                continue
-            if p not in children:
-                children[p] = [r]
-            else:
-                children[p].append(r)
-            if p not in mrset:
-                roots.add(p)
+    has_graph_roots = False
+
+    # iterate over `only(B, A)`
+    for r in revs:
+        ps = parents(r)
+        if ps == graph_roots:
+            has_graph_roots = True
+        else:
+            p1, p2 = ps
+
+            # find all the "root points" (see larger comment above)
+            if p1 != nullrev and p1 in ancestors:
+                roots.add(p1)
+            if p2 != nullrev and p2 in ancestors:
+                roots.add(p2)
     if not roots:
         # no common revision to track copies from
         return {}
-    min_root = min(roots)
-
-    from_head = set(
-        cl.reachableroots(min_root, [b.rev()], list(roots), includepath=True)
-    )
-
-    iterrevs = set(from_head)
-    iterrevs &= mrset
-    iterrevs.update(roots)
-    iterrevs.remove(b.rev())
-    revs = sorted(iterrevs)
+    if has_graph_roots:
+        # this deals with the special case mentioned in the [1] footnote. We
+        # must filter out revisions that lead to non-common graph roots.
+        roots = list(roots)
+        m = min(roots)
+        h = [b.rev()]
+        roots_to_head = cl.reachableroots(m, h, roots, includepath=True)
+        roots_to_head = set(roots_to_head)
+        revs = [r for r in revs if r in roots_to_head]
 
     if repo.filecopiesmode == b'changeset-sidedata':
-        revinfo = _revinfo_getter(repo)
+        # When using side-data, we will process the edges "from" the children.
+        # We iterate over the children, gathering previously collected data
+        # for the parents. To know when a parent's data is no longer
+        # necessary, we keep a counter of how many children each revision has.
+        #
+        # An interesting property of `children_count` is that it only contains
+        # revisions that will be relevant for an edge of the graph. So if a
+        # child has a parent not in `children_count`, that edge should not be
+        # processed.
+        children_count = dict((r, 0) for r in roots)
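+        # each registered revision starts at 0; the count is then bumped once
+        # for every child visited later in the iteration over `revs`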
+        for r in revs:
+            for p in cl.parentrevs(r):
+                if p == nullrev:
+                    continue
+                children_count[r] = 0
+                if p in children_count:
+                    children_count[p] += 1
+        revinfo = _revinfo_getter(repo, match)
         return _combine_changeset_copies(
-            revs, children, b.rev(), revinfo, match, isancestor
+            revs, children_count, b.rev(), revinfo, match, isancestor
         )
     else:
+        # When not using side-data, we will process the edges "from" the
+        # parents, so we need a full mapping of the parent -> children relation.
+        children = dict((r, []) for r in roots)
+        for r in revs:
+            for p in cl.parentrevs(r):
+                if p == nullrev:
+                    continue
+                children[r] = []
+                if p in children:
+                    children[p].append(r)
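+        # rebuild the revision list the "extra" code path expects: drop the
+        # target revision `b` (the last entry) and add the roots back in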
+        x = revs.pop()
+        assert x == b.rev()
+        revs.extend(roots)
+        revs.sort()
+
         revinfo = _revinfo_getter_extra(repo)
         return _combine_changeset_copies_extra(
             revs, children, b.rev(), revinfo, match, isancestor
@@ -280,87 +339,116 @@
 
 
 def _combine_changeset_copies(
-    revs, children, targetrev, revinfo, match, isancestor
+    revs, children_count, targetrev, revinfo, match, isancestor
 ):
     """combine the copies information for each item of iterrevs
 
     revs: sorted iterable of revision to visit
-    children: a {parent: [children]} mapping.
+    children_count: a {parent: <number-of-relevant-children>} mapping.
     targetrev: the final copies destination revision (not in iterrevs)
     revinfo(rev): a function that return (p1, p2, p1copies, p2copies, removed)
     match: a matcher
 
     It returns the aggregated copies information for `targetrev`.
     """
-    all_copies = {}
+
     alwaysmatch = match.always()
-    for r in revs:
-        copies = all_copies.pop(r, None)
-        if copies is None:
-            # this is a root
-            copies = {}
-        for i, c in enumerate(children[r]):
-            p1, p2, changes = revinfo(c)
-            childcopies = {}
-            if r == p1:
-                parent = 1
-                if changes is not None:
-                    childcopies = changes.copied_from_p1
-            else:
-                assert r == p2
-                parent = 2
+
+    if rustmod is not None:
+        final_copies = rustmod.combine_changeset_copies(
+            list(revs), children_count, targetrev, revinfo, isancestor
+        )
+    else:
+        isancestor = cached_is_ancestor(isancestor)
+
+        all_copies = {}
+        # iterate over the "child" side of each copy-tracing "edge"
+        for current_rev in revs:
+            p1, p2, changes = revinfo(current_rev)
+            current_copies = None
+            # iterate over all parents to chain the existing data with the
+            # data from the parent → child edge.
+            for parent, parent_rev in ((1, p1), (2, p2)):
+                if parent_rev == nullrev:
+                    continue
+                remaining_children = children_count.get(parent_rev)
+                if remaining_children is None:
+                    continue
+                remaining_children -= 1
+                children_count[parent_rev] = remaining_children
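+                # keep the parent's data cached while other children still
+                # need it; after the last child, pop it so the dict can be
+                # reused (and mutated) in place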
+                if remaining_children:
+                    copies = all_copies.get(parent_rev, None)
+                else:
+                    copies = all_copies.pop(parent_rev, None)
+
+                if copies is None:
+                    # this is a root
+                    newcopies = copies = {}
+                elif remaining_children:
+                    newcopies = copies.copy()
+                else:
+                    newcopies = copies
+                # chain the data in the edge with the existing data
                 if changes is not None:
-                    childcopies = changes.copied_from_p2
-            if not alwaysmatch:
-                childcopies = {
-                    dst: src for dst, src in childcopies.items() if match(dst)
-                }
-            newcopies = copies
-            if childcopies:
-                newcopies = copies.copy()
-                for dest, source in pycompat.iteritems(childcopies):
-                    prev = copies.get(source)
-                    if prev is not None and prev[1] is not None:
-                        source = prev[1]
-                    newcopies[dest] = (c, source)
-                assert newcopies is not copies
-            if changes is not None:
-                for f in changes.removed:
-                    if f in newcopies:
-                        if newcopies is copies:
-                            # copy on write to avoid affecting potential other
-                            # branches.  when there are no other branches, this
-                            # could be avoided.
-                            newcopies = copies.copy()
-                        newcopies[f] = (c, None)
-            othercopies = all_copies.get(c)
-            if othercopies is None:
-                all_copies[c] = newcopies
-            else:
-                # we are the second parent to work on c, we need to merge our
-                # work with the other.
-                #
-                # In case of conflict, parent 1 take precedence over parent 2.
-                # This is an arbitrary choice made anew when implementing
-                # changeset based copies. It was made without regards with
-                # potential filelog related behavior.
-                if parent == 1:
-                    _merge_copies_dict(
-                        othercopies, newcopies, isancestor, changes
+                    childcopies = {}
+                    if parent == 1:
+                        childcopies = changes.copied_from_p1
+                    elif parent == 2:
+                        childcopies = changes.copied_from_p2
+
+                    if childcopies:
+                        newcopies = copies.copy()
+                        for dest, source in pycompat.iteritems(childcopies):
+                            prev = copies.get(source)
+                            if prev is not None and prev[1] is not None:
+                                source = prev[1]
+                            newcopies[dest] = (current_rev, source)
+                        assert newcopies is not copies
+                    if changes.removed:
+                        for f in changes.removed:
+                            if f in newcopies:
+                                if newcopies is copies:
+                                    # copy on write to avoid affecting potential other
+                                    # branches.  when there are no other branches, this
+                                    # could be avoided.
+                                    newcopies = copies.copy()
+                                newcopies[f] = (current_rev, None)
+                # check potential need to combine the data from another parent (for
+                # that child). See comment below for details.
+                if current_copies is None:
+                    current_copies = newcopies
+                else:
+                    # we are the second parent to work on this child, we need
+                    # to merge our work with the other parent.
+                    #
+                    # In case of conflict, parent 1 takes precedence over parent 2.
+                    # This is an arbitrary choice made anew when implementing
+                    # changeset-based copies. It was made without regard to
+                    # potential filelog-related behavior.
+                    assert parent == 2
+                    current_copies = _merge_copies_dict(
+                        newcopies, current_copies, isancestor, changes
                     )
-                else:
-                    _merge_copies_dict(
-                        newcopies, othercopies, isancestor, changes
-                    )
-                    all_copies[c] = newcopies
+            all_copies[current_rev] = current_copies
 
-    final_copies = {}
-    for dest, (tt, source) in all_copies[targetrev].items():
-        if source is not None:
-            final_copies[dest] = source
+        # filter out internal details and return a {dest: source mapping}
+        final_copies = {}
+        for dest, (tt, source) in all_copies[targetrev].items():
+            if source is not None:
+                final_copies[dest] = source
+    if not alwaysmatch:
+        for filename in list(final_copies.keys()):
+            if not match(filename):
+                del final_copies[filename]
     return final_copies
 
 
+# constants used to decide which side to pick with _merge_copies_dict
+PICK_MINOR = 0
+PICK_MAJOR = 1
+PICK_EITHER = 2
+
+
 def _merge_copies_dict(minor, major, isancestor, changes):
     """merge two copies-mapping together, minor and major
 
@@ -371,36 +459,47 @@
 
     - `ismerged(path)`: callable return True if `path` have been merged in the
                         current revision,
+
+    return the resulting dict (in practice, the "minor" object, updated)
     """
     for dest, value in major.items():
         other = minor.get(dest)
         if other is None:
             minor[dest] = value
         else:
-            new_tt = value[0]
-            other_tt = other[0]
-            if value[1] == other[1]:
-                continue
-            # content from "major" wins, unless it is older
-            # than the branch point or there is a merge
-            if new_tt == other_tt:
+            pick = _compare_values(changes, isancestor, dest, other, value)
+            if pick == PICK_MAJOR:
                 minor[dest] = value
-            elif (
-                changes is not None
-                and value[1] is None
-                and dest in changes.salvaged
-            ):
-                pass
-            elif (
-                changes is not None
-                and other[1] is None
-                and dest in changes.salvaged
-            ):
-                minor[dest] = value
-            elif changes is not None and dest in changes.merged:
-                minor[dest] = value
-            elif not isancestor(new_tt, other_tt):
-                minor[dest] = value
+    return minor
+
+
+def _compare_values(changes, isancestor, dest, minor, major):
+    """compare two value within a _merge_copies_dict loop iteration"""
+    major_tt, major_value = major
+    minor_tt, minor_value = minor
+
+    # get some simple cases out of the way first:
+    if major_tt == minor_tt:
+        # if it comes from the same revision it must be the same value
+        assert major_value == minor_value
+        return PICK_EITHER
+    elif major[1] == minor[1]:
+        return PICK_EITHER
+
+    # actual merging needed: content from "major" wins, unless it is older than
+    # the branch point or there is a merge
+    elif changes is not None and major[1] is None and dest in changes.salvaged:
+        return PICK_MINOR
+    elif changes is not None and minor[1] is None and dest in changes.salvaged:
+        return PICK_MAJOR
+    elif changes is not None and dest in changes.merged:
+        return PICK_MAJOR
+    elif not isancestor(major_tt, minor_tt):
+        if major[1] is not None:
+            return PICK_MAJOR
+        elif isancestor(minor_tt, major_tt):
+            return PICK_MAJOR
+    return PICK_MINOR
 
 
 def _revinfo_getter_extra(repo):
@@ -426,7 +525,7 @@
             parents = fctx._filelog.parents(fctx._filenode)
             nb_parents = 0
             for n in parents:
-                if n != node.nullid:
+                if n != nullid:
                     nb_parents += 1
             return nb_parents >= 2
 
@@ -600,7 +699,7 @@
         if debug:
             repo.ui.debug(b'debug.copies: search mode: combined\n')
         base = None
-        if a.rev() != node.nullrev:
+        if a.rev() != nullrev:
             base = x
         copies = _chain(
             _backwardrenames(x, a, match=match),
@@ -681,7 +780,7 @@
 
 
 def _isfullcopytraceable(repo, c1, base):
-    """ Checks that if base, source and destination are all no-public branches,
+    """Checks that if base, source and destination are all no-public branches,
     if yes let's use the full copytrace algorithm for increased capabilities
     since it will be fast enough.
 
@@ -749,14 +848,16 @@
         self.movewithdir = {} if movewithdir is None else movewithdir
 
     def __repr__(self):
-        return (
-            '<branch_copies\n  copy=%r\n  renamedelete=%r\n  dirmove=%r\n  movewithdir=%r\n>'
-            % (self.copy, self.renamedelete, self.dirmove, self.movewithdir,)
+        return '<branch_copies\n  copy=%r\n  renamedelete=%r\n  dirmove=%r\n  movewithdir=%r\n>' % (
+            self.copy,
+            self.renamedelete,
+            self.dirmove,
+            self.movewithdir,
         )
 
 
 def _fullcopytracing(repo, c1, c2, base):
-    """ The full copytracing algorithm which finds all the new files that were
+    """The full copytracing algorithm which finds all the new files that were
     added from merge base up to the top commit and for each file it checks if
     this file was copied from another file.
 
@@ -826,18 +927,33 @@
             )
 
     # find interesting file sets from manifests
-    addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
-    addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
-    u1 = sorted(addedinm1 - addedinm2)
-    u2 = sorted(addedinm2 - addedinm1)
+    cache = []
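+    # computing the added-file sets can be expensive, so do it lazily and at
+    # most once, sharing the result between the callers below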
+
+    def _get_addedfiles(idx):
+        if not cache:
+            addedinm1 = m1.filesnotin(mb, repo.narrowmatch())
+            addedinm2 = m2.filesnotin(mb, repo.narrowmatch())
+            u1 = sorted(addedinm1 - addedinm2)
+            u2 = sorted(addedinm2 - addedinm1)
+            cache.extend((u1, u2))
+        return cache[idx]
 
-    header = b"  unmatched files in %s"
-    if u1:
-        repo.ui.debug(b"%s:\n   %s\n" % (header % b'local', b"\n   ".join(u1)))
-    if u2:
-        repo.ui.debug(b"%s:\n   %s\n" % (header % b'other', b"\n   ".join(u2)))
+    u1fn = lambda: _get_addedfiles(0)
+    u2fn = lambda: _get_addedfiles(1)
+    if repo.ui.debugflag:
+        u1 = u1fn()
+        u2 = u2fn()
 
-    if repo.ui.debugflag:
+        header = b"  unmatched files in %s"
+        if u1:
+            repo.ui.debug(
+                b"%s:\n   %s\n" % (header % b'local', b"\n   ".join(u1))
+            )
+        if u2:
+            repo.ui.debug(
+                b"%s:\n   %s\n" % (header % b'other', b"\n   ".join(u2))
+            )
+
         renamedeleteset = set()
         divergeset = set()
         for dsts in diverge.values():
@@ -871,8 +987,8 @@
 
     repo.ui.debug(b"  checking for directory renames\n")
 
-    dirmove1, movewithdir2 = _dir_renames(repo, c1, copy1, copies1, u2)
-    dirmove2, movewithdir1 = _dir_renames(repo, c2, copy2, copies2, u1)
+    dirmove1, movewithdir2 = _dir_renames(repo, c1, copy1, copies1, u2fn)
+    dirmove2, movewithdir1 = _dir_renames(repo, c2, copy2, copies2, u1fn)
 
     branch_copies1 = branch_copies(copy1, renamedelete1, dirmove1, movewithdir1)
     branch_copies2 = branch_copies(copy2, renamedelete2, dirmove2, movewithdir2)
@@ -880,17 +996,17 @@
     return branch_copies1, branch_copies2, diverge
 
 
-def _dir_renames(repo, ctx, copy, fullcopy, addedfiles):
+def _dir_renames(repo, ctx, copy, fullcopy, addedfilesfn):
     """Finds moved directories and files that should move with them.
 
     ctx: the context for one of the sides
     copy: files copied on the same side (as ctx)
     fullcopy: files copied on the same side (as ctx), including those that
               merge.manifestmerge() won't care about
-    addedfiles: added files on the other side (compared to ctx)
+    addedfilesfn: function returning added files on the other side (compared to
+                  ctx)
     """
     # generate a directory move map
-    d = ctx.dirs()
     invalid = set()
     dirmove = {}
 
@@ -901,7 +1017,7 @@
         if dsrc in invalid:
             # already seen to be uninteresting
             continue
-        elif dsrc in d and ddst in d:
+        elif ctx.hasdir(dsrc) and ctx.hasdir(ddst):
             # directory wasn't entirely moved locally
             invalid.add(dsrc)
         elif dsrc in dirmove and dirmove[dsrc] != ddst:
@@ -914,7 +1030,7 @@
     for i in invalid:
         if i in dirmove:
             del dirmove[i]
-    del d, invalid
+    del invalid
 
     if not dirmove:
         return {}, {}
@@ -928,7 +1044,7 @@
 
     movewithdir = {}
     # check unaccounted nonoverlapping files against directory moves
-    for f in addedfiles:
+    for f in addedfilesfn():
         if f not in fullcopy:
             for d in dirmove:
                 if f.startswith(d):
@@ -946,7 +1062,7 @@
 
 
 def _heuristicscopytracing(repo, c1, c2, base):
-    """ Fast copytracing using filename heuristics
+    """Fast copytracing using filename heuristics
 
     Assumes that moves or renames are of following two types:
 
--- a/mercurial/crecord.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/crecord.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1000,7 +1000,7 @@
 
     def toggleallbetween(self):
         """toggle applied on or off for all items in range [lastapplied,
-        current]. """
+        current]."""
         if (
             not self.lastapplieditem
             or self.currentselecteditem == self.lastapplieditem
@@ -1894,7 +1894,7 @@
         elif keypressed in ["H", "KEY_SLEFT"]:
             self.leftarrowshiftevent()
         elif keypressed in ["q"]:
-            raise error.Abort(_(b'user quit'))
+            raise error.CanceledError(_(b'user quit'))
         elif keypressed in ['a']:
             self.flipselections()
         elif keypressed in ["c"]:
--- a/mercurial/dagop.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/dagop.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,10 +11,10 @@
 
 from .node import nullrev
 from .thirdparty import attr
+from .node import nullrev
 from . import (
     error,
     mdiff,
-    node,
     patch,
     pycompat,
     scmutil,
@@ -77,7 +77,7 @@
         pdepth = curdepth + 1
         if foundnew and pdepth < stopdepth:
             for prev in pfunc(currev):
-                if prev != node.nullrev:
+                if prev != nullrev:
                     heapq.heappush(pendingheap, (heapsign * prev, pdepth))
 
 
@@ -183,7 +183,6 @@
 
     cl = repo.changelog
     first = revs.min()
-    nullrev = node.nullrev
     if first == nullrev:
         # Are there nodes with a null first parent and a non-null
         # second one? Maybe. Do we care? Probably not.
@@ -206,7 +205,6 @@
 def _builddescendantsmap(repo, startrev, followfirst):
     """Build map of 'rev -> child revs', offset from startrev"""
     cl = repo.changelog
-    nullrev = node.nullrev
     descmap = [[] for _rev in pycompat.xrange(startrev, len(cl))]
     for currev in cl.revs(startrev + 1):
         p1rev, p2rev = cl.parentrevs(currev)
@@ -682,7 +680,7 @@
 
 
 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
-    r'''
+    r"""
     Given parent and child fctxes and annotate data for parents, for all lines
     in either parent that match the child, annotate the child with the parent's
     data.
@@ -691,7 +689,7 @@
     annotate data as well such that child is never blamed for any lines.
 
     See test-annotate.py for unit tests.
-    '''
+    """
     pblocks = [
         (parent, mdiff.allblocks(parent.text, child.text, opts=diffopts))
         for parent in parents
@@ -971,7 +969,7 @@
             if rev == currentrev:  # only display stuff in rev
                 gr[0].append(rev)
             gr[1].remove(rev)
-            parents = [p for p in parentsfunc(rev) if p > node.nullrev]
+            parents = [p for p in parentsfunc(rev) if p > nullrev]
             gr[1].update(parents)
             for p in parents:
                 if p not in pendingset:
@@ -1030,7 +1028,7 @@
     the input set.
     """
     headrevs = set(revs)
-    parents = {node.nullrev}
+    parents = {nullrev}
     up = parents.update
 
     for rev in revs:
@@ -1104,7 +1102,7 @@
             visit.append(-rev - 1)
 
             for prev in parentsfn(rev):
-                if prev == node.nullrev or prev not in revs or prev in finished:
+                if prev == nullrev or prev not in revs or prev in finished:
                     continue
 
                 visit.append(prev)
--- a/mercurial/dagparser.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/dagparser.py	Tue Jan 19 21:48:43 2021 +0530
@@ -425,7 +425,7 @@
     usedots=False,
     maxlinewidth=70,
 ):
-    '''generates lines of a textual representation for a dag event stream
+    """generates lines of a textual representation for a dag event stream
 
     events should generate what parsedag() does, so:
 
@@ -501,7 +501,7 @@
         >>> dagtext(parsedag(b'+1 :f +1 :p2 *f */p2'))
         '+1 :f +1 :p2 *f */p2'
 
-    '''
+    """
     return b"\n".join(
         dagtextlines(
             dag,
--- a/mercurial/debugcommands.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/debugcommands.py	Tue Jan 19 21:48:43 2021 +0530
@@ -78,6 +78,7 @@
     sshpeer,
     sslutil,
     streamclone,
+    strip,
     tags as tagsmod,
     templater,
     treediscovery,
@@ -105,7 +106,9 @@
 
 release = lockmod.release
 
-command = registrar.command()
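+# reuse the command table from the strip module so the commands it defines are
+# registered alongside the debug commands below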
+table = {}
+table.update(strip.command._table)
+command = registrar.command(table)
 
 
 @command(b'debugancestor', [], _(b'[INDEX] REV1 REV2'), optionalrepo=True)
@@ -975,6 +978,7 @@
     # make sure tests are repeatable
     random.seed(int(opts[b'seed']))
 
+    data = {}
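+    # the discovery code fills this in when passed as `audit` (e.g. with the
+    # total number of round-trips)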
     if opts.get(b'old'):
 
         def doit(pushedrevs, remoteheads, remote=remote):
@@ -982,7 +986,7 @@
                 # enable in-client legacy support
                 remote = localrepo.locallegacypeer(remote.local())
             common, _in, hds = treediscovery.findcommonincoming(
-                repo, remote, force=True
+                repo, remote, force=True, audit=data
             )
             common = set(common)
             if not opts.get(b'nonheads'):
@@ -1004,7 +1008,7 @@
                 revs = scmutil.revrange(repo, pushedrevs)
                 nodes = [repo[r].node() for r in revs]
             common, any, hds = setdiscovery.findcommonheads(
-                ui, repo, remote, ancestorsof=nodes
+                ui, repo, remote, ancestorsof=nodes, audit=data
             )
             return common, hds
 
@@ -1014,44 +1018,97 @@
         common, hds = doit(localrevs, remoterevs)
 
     # compute all statistics
-    common = set(common)
-    rheads = set(hds)
-    lheads = set(repo.heads())
-
-    data = {}
+    heads_common = set(common)
+    heads_remote = set(hds)
+    heads_local = set(repo.heads())
+    # note: there cannot be a local or remote head that is in common and not
+    # itself a head of common.
+    heads_common_local = heads_common & heads_local
+    heads_common_remote = heads_common & heads_remote
+    heads_common_both = heads_common & heads_remote & heads_local
+
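+    # compute revset-based statistics about the common, missing and initially
+    # undecided sets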
+    all = repo.revs(b'all()')
+    common = repo.revs(b'::%ln', common)
+    roots_common = repo.revs(b'roots(::%ld)', common)
+    missing = repo.revs(b'not ::%ld', common)
+    heads_missing = repo.revs(b'heads(%ld)', missing)
+    roots_missing = repo.revs(b'roots(%ld)', missing)
+    assert len(common) + len(missing) == len(all)
+
+    initial_undecided = repo.revs(
+        b'not (::%ln or %ln::)', heads_common_remote, heads_common_local
+    )
+    heads_initial_undecided = repo.revs(b'heads(%ld)', initial_undecided)
+    roots_initial_undecided = repo.revs(b'roots(%ld)', initial_undecided)
+    common_initial_undecided = initial_undecided & common
+    missing_initial_undecided = initial_undecided & missing
+
     data[b'elapsed'] = t.elapsed
-    data[b'nb-common'] = len(common)
-    data[b'nb-common-local'] = len(common & lheads)
-    data[b'nb-common-remote'] = len(common & rheads)
-    data[b'nb-common-both'] = len(common & rheads & lheads)
-    data[b'nb-local'] = len(lheads)
-    data[b'nb-local-missing'] = data[b'nb-local'] - data[b'nb-common-local']
-    data[b'nb-remote'] = len(rheads)
-    data[b'nb-remote-unknown'] = data[b'nb-remote'] - data[b'nb-common-remote']
-    data[b'nb-revs'] = len(repo.revs(b'all()'))
-    data[b'nb-revs-common'] = len(repo.revs(b'::%ln', common))
-    data[b'nb-revs-missing'] = data[b'nb-revs'] - data[b'nb-revs-common']
+    data[b'nb-common-heads'] = len(heads_common)
+    data[b'nb-common-heads-local'] = len(heads_common_local)
+    data[b'nb-common-heads-remote'] = len(heads_common_remote)
+    data[b'nb-common-heads-both'] = len(heads_common_both)
+    data[b'nb-common-roots'] = len(roots_common)
+    data[b'nb-head-local'] = len(heads_local)
+    data[b'nb-head-local-missing'] = len(heads_local) - len(heads_common_local)
+    data[b'nb-head-remote'] = len(heads_remote)
+    data[b'nb-head-remote-unknown'] = len(heads_remote) - len(
+        heads_common_remote
+    )
+    data[b'nb-revs'] = len(all)
+    data[b'nb-revs-common'] = len(common)
+    data[b'nb-revs-missing'] = len(missing)
+    data[b'nb-missing-heads'] = len(heads_missing)
+    data[b'nb-missing-roots'] = len(roots_missing)
+    data[b'nb-ini_und'] = len(initial_undecided)
+    data[b'nb-ini_und-heads'] = len(heads_initial_undecided)
+    data[b'nb-ini_und-roots'] = len(roots_initial_undecided)
+    data[b'nb-ini_und-common'] = len(common_initial_undecided)
+    data[b'nb-ini_und-missing'] = len(missing_initial_undecided)
 
     # display discovery summary
     ui.writenoi18n(b"elapsed time:  %(elapsed)f seconds\n" % data)
+    ui.writenoi18n(b"round-trips:           %(total-roundtrips)9d\n" % data)
     ui.writenoi18n(b"heads summary:\n")
-    ui.writenoi18n(b"  total common heads:  %(nb-common)9d\n" % data)
-    ui.writenoi18n(b"    also local heads:  %(nb-common-local)9d\n" % data)
-    ui.writenoi18n(b"    also remote heads: %(nb-common-remote)9d\n" % data)
-    ui.writenoi18n(b"    both:              %(nb-common-both)9d\n" % data)
-    ui.writenoi18n(b"  local heads:         %(nb-local)9d\n" % data)
-    ui.writenoi18n(b"    common:            %(nb-common-local)9d\n" % data)
-    ui.writenoi18n(b"    missing:           %(nb-local-missing)9d\n" % data)
-    ui.writenoi18n(b"  remote heads:        %(nb-remote)9d\n" % data)
-    ui.writenoi18n(b"    common:            %(nb-common-remote)9d\n" % data)
-    ui.writenoi18n(b"    unknown:           %(nb-remote-unknown)9d\n" % data)
+    ui.writenoi18n(b"  total common heads:  %(nb-common-heads)9d\n" % data)
+    ui.writenoi18n(
+        b"    also local heads:  %(nb-common-heads-local)9d\n" % data
+    )
+    ui.writenoi18n(
+        b"    also remote heads: %(nb-common-heads-remote)9d\n" % data
+    )
+    ui.writenoi18n(b"    both:              %(nb-common-heads-both)9d\n" % data)
+    ui.writenoi18n(b"  local heads:         %(nb-head-local)9d\n" % data)
+    ui.writenoi18n(
+        b"    common:            %(nb-common-heads-local)9d\n" % data
+    )
+    ui.writenoi18n(
+        b"    missing:           %(nb-head-local-missing)9d\n" % data
+    )
+    ui.writenoi18n(b"  remote heads:        %(nb-head-remote)9d\n" % data)
+    ui.writenoi18n(
+        b"    common:            %(nb-common-heads-remote)9d\n" % data
+    )
+    ui.writenoi18n(
+        b"    unknown:           %(nb-head-remote-unknown)9d\n" % data
+    )
     ui.writenoi18n(b"local changesets:      %(nb-revs)9d\n" % data)
     ui.writenoi18n(b"  common:              %(nb-revs-common)9d\n" % data)
+    ui.writenoi18n(b"    heads:             %(nb-common-heads)9d\n" % data)
+    ui.writenoi18n(b"    roots:             %(nb-common-roots)9d\n" % data)
     ui.writenoi18n(b"  missing:             %(nb-revs-missing)9d\n" % data)
+    ui.writenoi18n(b"    heads:             %(nb-missing-heads)9d\n" % data)
+    ui.writenoi18n(b"    roots:             %(nb-missing-roots)9d\n" % data)
+    ui.writenoi18n(b"  first undecided set: %(nb-ini_und)9d\n" % data)
+    ui.writenoi18n(b"    heads:             %(nb-ini_und-heads)9d\n" % data)
+    ui.writenoi18n(b"    roots:             %(nb-ini_und-roots)9d\n" % data)
+    ui.writenoi18n(b"    common:            %(nb-ini_und-common)9d\n" % data)
+    ui.writenoi18n(b"    missing:           %(nb-ini_und-missing)9d\n" % data)
 
     if ui.verbose:
         ui.writenoi18n(
-            b"common heads: %s\n" % b" ".join(sorted(short(n) for n in common))
+            b"common heads: %s\n"
+            % b" ".join(sorted(short(n) for n in heads_common))
         )
 
 
@@ -1059,11 +1116,14 @@
 
 
 @command(
-    b'debugdownload', [(b'o', b'output', b'', _(b'path')),], optionalrepo=True
+    b'debugdownload',
+    [
+        (b'o', b'output', b'', _(b'path')),
+    ],
+    optionalrepo=True,
 )
 def debugdownload(ui, repo, url, output=None, **opts):
-    """download a resource using Mercurial logic and config
-    """
+    """download a resource using Mercurial logic and config"""
     fh = urlmod.open(ui, url, output)
 
     dest = ui
@@ -1507,10 +1567,10 @@
 
 @command(b'debuginstall', [] + cmdutil.formatteropts, b'', norepo=True)
 def debuginstall(ui, **opts):
-    '''test Mercurial installation
+    """test Mercurial installation
 
     Returns 0 on success.
-    '''
+    """
     opts = pycompat.byteskwargs(opts)
 
     problems = 0
@@ -1829,10 +1889,10 @@
 @command(
     b'debuglocks',
     [
-        (b'L', b'force-lock', None, _(b'free the store lock (DANGEROUS)')),
+        (b'L', b'force-free-lock', None, _(b'free the store lock (DANGEROUS)')),
         (
             b'W',
-            b'force-wlock',
+            b'force-free-wlock',
             None,
             _(b'free the working state lock (DANGEROUS)'),
         ),
@@ -1871,11 +1931,11 @@
 
     """
 
-    if opts.get('force_lock'):
+    if opts.get('force_free_lock'):
         repo.svfs.unlink(b'lock')
-    if opts.get('force_wlock'):
+    if opts.get('force_free_wlock'):
         repo.vfs.unlink(b'wlock')
-    if opts.get('force_lock') or opts.get('force_wlock'):
+    if opts.get('force_free_lock') or opts.get('force_free_wlock'):
         return 0
 
     locks = []
@@ -2170,8 +2230,7 @@
     ],
 )
 def debugnodemap(ui, repo, **opts):
-    """write and inspect on disk nodemap
-    """
+    """write and inspect on disk nodemap"""
     if opts['dump_new']:
         unfi = repo.unfiltered()
         cl = unfi.changelog
@@ -2250,7 +2309,7 @@
                 raise TypeError()
             return n
         except TypeError:
-            raise error.Abort(
+            raise error.InputError(
                 b'changeset references must be full hexadecimal '
                 b'node identifiers'
             )
@@ -2261,7 +2320,7 @@
             try:
                 indices.append(int(v))
             except ValueError:
-                raise error.Abort(
+                raise error.InputError(
                     _(b'invalid index value: %r') % v,
                     hint=_(b'use integers for indices'),
                 )
@@ -2279,7 +2338,9 @@
 
     if precursor is not None:
         if opts[b'rev']:
-            raise error.Abort(b'cannot select revision when creating marker')
+            raise error.InputError(
+                b'cannot select revision when creating marker'
+            )
         metadata = {}
         metadata[b'user'] = encoding.fromlocal(opts[b'user'] or ui.username())
         succs = tuple(parsenodeid(succ) for succ in successors)
@@ -2397,13 +2458,13 @@
     _(b'FILESPEC...'),
 )
 def debugpathcomplete(ui, repo, *specs, **opts):
-    '''complete part or all of a tracked path
+    """complete part or all of a tracked path
 
     This command supports shells that offer path name completion. It
     currently completes only files already known to the dirstate.
 
     Completion extends only to the next path segment unless
-    --full is specified, in which case entire paths are used.'''
+    --full is specified, in which case entire paths are used."""
 
     def complete(path, acceptable):
         dirstate = repo.dirstate
@@ -2582,13 +2643,13 @@
 
 @command(b'debugpushkey', [], _(b'REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
-    '''access the pushkey key/value protocol
+    """access the pushkey key/value protocol
 
     With two args, list the keys in the given namespace.
 
     With five args, set a key to new if it currently is set to old.
     Reports success or failure.
-    '''
+    """
 
     target = hg.peer(ui, {}, repopath)
     if keyinfo:
@@ -3380,12 +3441,22 @@
 
 @command(b'debugsetparents', [], _(b'REV1 [REV2]'))
 def debugsetparents(ui, repo, rev1, rev2=None):
-    """manually set the parents of the current working directory
-
-    This is useful for writing repository conversion tools, but should
-    be used with care. For example, neither the working directory nor the
-    dirstate is updated, so file status may be incorrect after running this
-    command.
+    """manually set the parents of the current working directory (DANGEROUS)
+
+    This command is not what you are looking for and should not be used. Using
+    this command will most certainly result in slight corruption of the
+    file-level histories within your repository. DO NOT USE THIS COMMAND.
+
+    The command updates the p1 and p2 fields in the dirstate, and does not
+    touch anything else. This is useful for writing repository conversion
+    tools, but should be used with extreme care. For example, neither the
+    working directory nor the dirstate is updated, so file status may be
+    incorrect after running this command. Only use it if you are one of the
+    few people who deeply understand both conversion tools and file-level
+    histories. If you are reading this help, you are not one of those people
+    (most of them sailed west from Mithlond anyway).
+
+    So, one last time: DO NOT USE THIS COMMAND.
 
     Returns 0 on success.
     """
@@ -3427,7 +3498,7 @@
 
 @command(b'debugssl', [], b'[SOURCE]', optionalrepo=True)
 def debugssl(ui, repo, source=None, **opts):
-    '''test a secure connection to a server
+    """test a secure connection to a server
 
     This builds the certificate chain for the server on Windows, installing the
     missing intermediates and trusted root via Windows Update if necessary.  It
@@ -3438,7 +3509,7 @@
 
     If the update succeeds, retry the original operation.  Otherwise, the cause
     of the SSL error is likely another issue.
-    '''
+    """
     if not pycompat.iswindows:
         raise error.Abort(
             _(b'certificate chain building is only possible on Windows')
@@ -3780,7 +3851,9 @@
 
 @command(
     b'debuguigetpass',
-    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    [
+        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
+    ],
     _(b'[-p TEXT]'),
     norepo=True,
 )
@@ -3794,7 +3867,9 @@
 
 @command(
     b'debuguiprompt',
-    [(b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),],
+    [
+        (b'p', b'prompt', b'', _(b'prompt text'), _(b'TEXT')),
+    ],
     _(b'[-p TEXT]'),
     norepo=True,
 )
@@ -3825,6 +3900,7 @@
         (b'', b'backup', True, _(b'keep the old repository content around')),
         (b'', b'changelog', None, _(b'select the changelog for upgrade')),
         (b'', b'manifest', None, _(b'select the manifest for upgrade')),
+        (b'', b'filelogs', None, _(b'select all filelogs for upgrade')),
     ],
 )
 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True, **opts):
@@ -3853,9 +3929,11 @@
       * `--no-manifest`: optimize all revlog but the manifest
       * `--changelog`: optimize the changelog only
       * `--no-changelog --no-manifest`: optimize filelogs only
+      * `--filelogs`: optimize the filelogs only
+      * `--no-changelog --no-manifest --no-filelogs`: skip all revlog optimizations
     """
     return upgrade.upgraderepo(
-        ui, repo, run=run, optimize=optimize, backup=backup, **opts
+        ui, repo, run=run, optimize=set(optimize), backup=backup, **opts
     )
 
 
@@ -4307,7 +4385,10 @@
                 {
                     'loggingfh': ui,
                     'loggingname': b's',
-                    'loggingopts': {'logdata': True, 'logdataapis': False,},
+                    'loggingopts': {
+                        'logdata': True,
+                        'logdataapis': False,
+                    },
                 }
             )
 
--- a/mercurial/defaultrc/mergetools.rc	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/defaultrc/mergetools.rc	Tue Jan 19 21:48:43 2021 +0530
@@ -1,82 +1,78 @@
 # Some default global settings for common merge tools
 
 [merge-tools]
-kdiff3.args=--auto --L1 $labelbase --L2 $labellocal --L3 $labelother $base $local $other -o $output
-kdiff3.regkey=Software\KDiff3
-kdiff3.regkeyalt=Software\Wow6432Node\KDiff3
-kdiff3.regappend=\kdiff3.exe
-kdiff3.fixeol=True
-kdiff3.gui=True
-kdiff3.diffargs=--L1 $plabel1 --L2 $clabel $parent $child
 
-gvimdiff.args=--nofork -d -g -O $local $other $base
-gvimdiff.regkey=Software\Vim\GVim
-gvimdiff.regkeyalt=Software\Wow6432Node\Vim\GVim
-gvimdiff.regname=path
-gvimdiff.priority=-9
-gvimdiff.diffargs=--nofork -d -g -O $parent $child
+; The Araxis Merge tool is configured to work with newer versions of Araxis Merge.
+; If you are using Araxis Merge 2008 or earlier, create a [merge-tools] section
+; in your mercurial.ini file, and add one of the two following "araxis.executable"
+; lines, depending on whether you have installed a 32- or a 64-bit version
+; of Araxis Merge and whether you are on a 32- or 64-bit version of Windows.
+;
+;araxis.executable=${ProgramFiles}/Araxis/Araxis Merge/ConsoleCompare.exe
+;araxis.executable=${ProgramFiles(x86)}/Araxis/Araxis Merge/ConsoleCompare.exe
+;
+; Please remember to remove the leading ";" to uncomment the line
+;
+araxis.args=/3 /a2 /wait /merge /title1:"Other" /title2:"Base" /title3:"Local :"$local $other $base $local $output
+araxis.binary=True
+araxis.checkconflicts=True
+araxis.diffargs=/2 /wait /title1:$plabel1 /title2:$clabel $parent $child
+araxis.gui=True
+araxis.priority=-2
+araxis.regappend=\ConsoleCompare.exe
+araxis.regkey=SOFTWARE\Classes\TypeLib\{46799e0a-7bd1-4330-911c-9660bb964ea2}\7.0\HELPDIR
 
-vimdiff.args=$local $other $base -c 'redraw | echomsg "hg merge conflict, type \":cq\" to abort vimdiff"'
-vimdiff.check=changed
-vimdiff.priority=-10
+; Linux version of BeyondCompare 3 and 4 (dirdiff is version 4 only)
+bcompare.args=$local $other $base -mergeoutput=$output -ro -lefttitle=$labellocal -centertitle=$labelbase -righttitle=$labelother -outputtitle=merged -automerge -reviewconflicts -solo
+bcompare.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
+bcompare.gui=True
+bcompare.priority=-1
 
-merge.check=conflicts
-merge.priority=-100
-
-gpyfm.gui=True
+; OS X version of Beyond Compare
+bcomposx.args=$local $other $base -mergeoutput=$output -ro -lefttitle=$labellocal -centertitle=$labelbase -righttitle=$labelother -outputtitle=merged -automerge -reviewconflicts -solo
+bcomposx.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
+bcomposx.executable = /Applications/Beyond Compare.app/Contents/MacOS/bcomp
+bcomposx.gui=True
+bcomposx.priority=-1
 
-meld.gui=True
-meld.args=--label=$labellocal $local --label='merged' $base --label=$labelother $other -o $output --auto-merge
-meld.check=changed
-meld.diffargs=-a --label=$plabel1 $parent --label=$clabel $child
+; Windows version of BeyondCompare 3
+beyondcompare3.args=$local $other $base $output /ro /lefttitle=$labellocal /centertitle=$labelbase /righttitle=$labelother /automerge /reviewconflicts /solo
+beyondcompare3.diffargs=/lro /lefttitle=$plabel1 /righttitle=$clabel /solo /expandall $parent $child
+beyondcompare3.gui=True
+beyondcompare3.priority=-2
+beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3
+beyondcompare3.regkeyalt=Software\Wow6432Node\Scooter Software\Beyond Compare 3
+beyondcompare3.regname=ExePath
 
-tkdiff.args=$local $other -a $base -o $output
-tkdiff.gui=True
-tkdiff.priority=-8
-tkdiff.diffargs=-L $plabel1 $parent -L $clabel $child
+; Windows version of BeyondCompare 4
+beyondcompare4.args=$local $other $base /mergeoutput=$output /ro /lefttitle=$labellocal /centertitle=$labelbase /righttitle=$labelother /outputtitle=merged /automerge /reviewconflicts /solo
+beyondcompare4.diffargs=/lro /lefttitle=$plabel1 /righttitle=$clabel /solo /expandall $parent $child
+beyondcompare4.gui=True
+beyondcompare4.premerge=False
+beyondcompare4.priority=-1
+beyondcompare4.regkey=Software\Scooter Software\Beyond Compare 4
+beyondcompare4.regkeyalt=Software\Wow6432Node\Scooter Software\Beyond Compare 4
+beyondcompare4.regname=ExePath
 
-xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 $labellocal --title2 $labelbase --title3 $labelother --merged-filename $output --merge $local $base $other
-xxdiff.gui=True
-xxdiff.priority=-8
-xxdiff.diffargs=--title1 $plabel1 $parent --title2 $clabel $child
-
+diffmerge.args=-nosplash -merge -title1=$labellocal -title2=merged -title3=$labelother $local $base $other -result=$output
+diffmerge.check=changed
+diffmerge.diffargs=--nosplash --title1=$plabel1 --title2=$clabel $parent $child
+diffmerge.gui=True
+diffmerge.priority=-7
 diffmerge.regkey=Software\SourceGear\SourceGear DiffMerge\
 diffmerge.regkeyalt=Software\Wow6432Node\SourceGear\SourceGear DiffMerge\
 diffmerge.regname=Location
-diffmerge.priority=-7
-diffmerge.args=-nosplash -merge -title1=$labellocal -title2=merged -title3=$labelother $local $base $other -result=$output
-diffmerge.check=changed
-diffmerge.gui=True
-diffmerge.diffargs=--nosplash --title1=$plabel1 --title2=$clabel $parent $child
 
-p4merge.args=$base $local $other $output
-p4merge.regkey=Software\Perforce\Environment
-p4merge.regkeyalt=Software\Wow6432Node\Perforce\Environment
-p4merge.regname=P4INSTROOT
-p4merge.regappend=\p4merge.exe
-p4merge.gui=True
-p4merge.priority=-8
-p4merge.diffargs=$parent $child
-
-p4mergeosx.executable = /Applications/p4merge.app/Contents/MacOS/p4merge
-p4mergeosx.args = $base $local $other $output
-p4mergeosx.gui = True
-p4mergeosx.priority=-8
-p4mergeosx.diffargs=$parent $child
-
-tortoisemerge.args=/base:$base /mine:$local /theirs:$other /merged:$output
-tortoisemerge.regkey=Software\TortoiseSVN
-tortoisemerge.regkeyalt=Software\Wow6432Node\TortoiseSVN
-tortoisemerge.check=changed
-tortoisemerge.gui=True
-tortoisemerge.priority=-8
-tortoisemerge.diffargs=/base:$parent /mine:$child /basename:$plabel1 /minename:$clabel
+diffuse.args=$local $base $other
+diffuse.diffargs=$parent $child
+diffuse.gui=True
+diffuse.priority=-3
 
 ecmerge.args=$base $local $other --mode=merge3 --title0=$labelbase --title1=$labellocal --title2=$labelother --to=$output
+ecmerge.diffargs=$parent $child --mode=diff2 --title1=$plabel1 --title2=$clabel
+ecmerge.gui=True
 ecmerge.regkey=Software\Elli\xc3\xa9 Computing\Merge
 ecmerge.regkeyalt=Software\Wow6432Node\Elli\xc3\xa9 Computing\Merge
-ecmerge.gui=True
-ecmerge.diffargs=$parent $child --mode=diff2 --title1=$plabel1 --title2=$clabel
 
 # editmerge is a small script shipped in contrib.
 # It needs this config otherwise it behaves the same as internal:local
@@ -84,63 +80,89 @@
 editmerge.check=changed
 editmerge.premerge=keep
 
+filemerge.args=-left $other -right $local -ancestor $base -merge $output
 filemerge.executable=/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge
-filemerge.args=-left $other -right $local -ancestor $base -merge $output
 filemerge.gui=True
 
+filemergexcode.args=-left $other -right $local -ancestor $base -merge $output
 filemergexcode.executable=/Applications/Xcode.app/Contents/Applications/FileMerge.app/Contents/MacOS/FileMerge
-filemergexcode.args=-left $other -right $local -ancestor $base -merge $output
 filemergexcode.gui=True
 
-; Windows version of Beyond Compare
-beyondcompare3.args=$local $other $base $output /ro /lefttitle=$labellocal /centertitle=$labelbase /righttitle=$labelother /automerge /reviewconflicts /solo
-beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3
-beyondcompare3.regname=ExePath
-beyondcompare3.gui=True
-beyondcompare3.priority=-2
-beyondcompare3.diffargs=/lro /lefttitle=$plabel1 /righttitle=$clabel /solo /expandall $parent $child
+gpyfm.gui=True
+
+gvimdiff.args=--nofork -d -g -O $local $other $base
+gvimdiff.diffargs=--nofork -d -g -O $parent $child
+gvimdiff.priority=-9
+gvimdiff.regkey=Software\Vim\GVim
+gvimdiff.regkeyalt=Software\Wow6432Node\Vim\GVim
+gvimdiff.regname=path
+
+kdiff3.args=--auto --L1 $labelbase --L2 $labellocal --L3 $labelother $base $local $other -o $output
+kdiff3.diffargs=--L1 $plabel1 --L2 $clabel $parent $child
+kdiff3.fixeol=True
+kdiff3.gui=True
+kdiff3.regappend=\kdiff3.exe
+kdiff3.regkey=Software\KDiff3
+kdiff3.regkeyalt=Software\Wow6432Node\KDiff3
+
+meld.args=--label=$labellocal $local --label='merged' $base --label=$labelother $other -o $output --auto-merge
+meld.check=changed
+meld.diffargs=-a --label=$plabel1 $parent --label=$clabel $child
+meld.gui=True
+
+merge.check=conflicts
+merge.priority=-100
+
+p4merge.args=$base $local $other $output
+p4merge.diffargs=$parent $child
+p4merge.gui=True
+p4merge.priority=-8
+p4merge.regappend=\p4merge.exe
+p4merge.regkey=Software\Perforce\Environment
+p4merge.regkeyalt=Software\Wow6432Node\Perforce\Environment
+p4merge.regname=P4INSTROOT
 
-; Linux version of Beyond Compare
-bcompare.args=$local $other $base -mergeoutput=$output -ro -lefttitle=$labellocal -centertitle=$labelbase -righttitle=$labelother -outputtitle=merged -automerge -reviewconflicts -solo
-bcompare.gui=True
-bcompare.priority=-1
-bcompare.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
+p4mergeosx.args = $base $local $other $output
+p4mergeosx.diffargs=$parent $child
+p4mergeosx.executable = /Applications/p4merge.app/Contents/MacOS/p4merge
+p4mergeosx.gui = True
+p4mergeosx.priority=-8
+
+tkdiff.args=$local $other -a $base -o $output
+tkdiff.diffargs=-L $plabel1 $parent -L $clabel $child
+tkdiff.gui=True
+tkdiff.priority=-8
 
-; OS X version of Beyond Compare
-bcomposx.executable = /Applications/Beyond Compare.app/Contents/MacOS/bcomp
-bcomposx.args=$local $other $base -mergeoutput=$output -ro -lefttitle=$labellocal -centertitle=$labelbase -righttitle=$labelother -outputtitle=merged -automerge -reviewconflicts -solo
-bcomposx.gui=True
-bcomposx.priority=-1
-bcomposx.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child
+tortoisemerge.args=/base:$base /mine:$local /theirs:$other /merged:$output
+tortoisemerge.check=changed
+tortoisemerge.diffargs=/base:$parent /mine:$child /basename:$plabel1 /minename:$clabel
+tortoisemerge.gui=True
+tortoisemerge.priority=-8
+tortoisemerge.regkey=Software\TortoiseSVN
+tortoisemerge.regkeyalt=Software\Wow6432Node\TortoiseSVN
 
+UltraCompare.args = $base $local $other -title1 base -title3 other
+UltraCompare.binary = True
+UltraCompare.check = conflicts,changed
+UltraCompare.diffargs=$child $parent -title1 $clabel -title2 $plabel1
+UltraCompare.gui = True
+UltraCompare.priority = -2
+UltraCompare.regkey=Software\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
+UltraCompare.regkeyalt=Software\Wow6432Node\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
+
+vimdiff.args=$local $other $base -c 'redraw | echomsg "hg merge conflict, type \":cq\" to abort vimdiff"'
+vimdiff.check=changed
+vimdiff.priority=-10
 winmerge.args=/e /x /wl /ub /dl $labelother /dr $labellocal $other $local $output
+winmerge.check=changed
+winmerge.diffargs=/r /e /x /ub /wl /dl $plabel1 /dr $clabel $parent $child
+winmerge.gui=True
+winmerge.priority=-10
 winmerge.regkey=Software\Thingamahoochie\WinMerge
 winmerge.regkeyalt=Software\Wow6432Node\Thingamahoochie\WinMerge\
 winmerge.regname=Executable
-winmerge.check=changed
-winmerge.gui=True
-winmerge.priority=-10
-winmerge.diffargs=/r /e /x /ub /wl /dl $plabel1 /dr $clabel $parent $child
 
-araxis.regkey=SOFTWARE\Classes\TypeLib\{46799e0a-7bd1-4330-911c-9660bb964ea2}\7.0\HELPDIR
-araxis.regappend=\ConsoleCompare.exe
-araxis.priority=-2
-araxis.args=/3 /a2 /wait /merge /title1:"Other" /title2:"Base" /title3:"Local :"$local $other $base $local $output
-araxis.checkconflict=True
-araxis.binary=True
-araxis.gui=True
-araxis.diffargs=/2 /wait /title1:$plabel1 /title2:$clabel $parent $child
-
-diffuse.priority=-3
-diffuse.args=$local $base $other
-diffuse.gui=True
-diffuse.diffargs=$parent $child
-
-UltraCompare.regkey=Software\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
-UltraCompare.regkeyalt=Software\Wow6432Node\Microsoft\Windows\CurrentVersion\App Paths\UC.exe
-UltraCompare.args = $base $local $other -title1 base -title3 other
-UltraCompare.priority = -2
-UltraCompare.gui = True
-UltraCompare.binary = True
-UltraCompare.check = conflicts,changed
-UltraCompare.diffargs=$child $parent -title1 $clabel -title2 $plabel1
+xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 $labellocal --title2 $labelbase --title3 $labelother --merged-filename $output --merge $local $base $other
+xxdiff.diffargs=--title1 $plabel1 $parent --title2 $clabel $child
+xxdiff.gui=True
+xxdiff.priority=-8
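
The hunk above reorders the stock merge-tool definitions alphabetically while keeping
the same per-tool keys (executable, args, diffargs, priority, gui, regkey). For
reference, a third-party tool can be declared with the same keys from a user's hgrc;
the snippet below is an illustrative sketch only, and the tool name "mymerge" and its
path are hypothetical:

    [merge-tools]
    mymerge.executable = /usr/local/bin/mymerge
    mymerge.args = $base $local $other -o $output
    mymerge.diffargs = $parent $child
    mymerge.priority = 1
    mymerge.gui = True
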
--- a/mercurial/destutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/destutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -405,7 +405,7 @@
     elif default:
         revs = scmutil.revrange(repo, [default])
     else:
-        raise error.Abort(
+        raise error.ConfigError(
             _(b"config option histedit.defaultrev can't be empty")
         )
 
--- a/mercurial/diffutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/diffutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -43,14 +43,14 @@
     formatchanging=False,
     configprefix=b'',
 ):
-    '''return diffopts with only opted-in features parsed
+    """return diffopts with only opted-in features parsed
 
     Features:
     - git: git-style diffs
     - whitespace: whitespace options like ignoreblanklines and ignorews
     - formatchanging: options that will likely break or cause correctness issues
       with most diff parsers
-    '''
+    """
 
     def get(key, name=None, getter=ui.configbool, forceplain=None):
         if opts:
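
The docstring requoted here documents diffutil's feature-gated options helper
(presumably difffeatureopts(), given the parameters visible in the hunk): only
feature groups that are explicitly opted in are parsed. A minimal sketch of a
call, assuming a ``ui`` object is already available (for example inside a
command or hook function):

    from mercurial import diffutil

    def _diffopts(ui):
        # git-style and whitespace options are honoured here; the
        # format-changing group stays at its defaults
        return diffutil.difffeatureopts(ui, git=True, whitespace=True)
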
--- a/mercurial/dirstate.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/dirstate.py	Tue Jan 19 21:48:43 2021 +0530
@@ -74,12 +74,12 @@
 @interfaceutil.implementer(intdirstate.idirstate)
 class dirstate(object):
     def __init__(self, opener, ui, root, validate, sparsematchfn):
-        '''Create a new dirstate object.
+        """Create a new dirstate object.
 
         opener is an open()-like callable that can be used to open the
         dirstate file; root is the root of the directory tracked by
         the dirstate.
-        '''
+        """
         self._opener = opener
         self._validate = validate
         self._root = root
@@ -112,12 +112,12 @@
 
     @contextlib.contextmanager
     def parentchange(self):
-        '''Context manager for handling dirstate parents.
+        """Context manager for handling dirstate parents.
 
         If an exception occurs in the scope of the context manager,
         the incoherent dirstate won't be written when wlock is
         released.
-        '''
+        """
         self._parentwriters += 1
         yield
         # Typically we want the "undo" step of a context manager in a
@@ -128,9 +128,9 @@
         self._parentwriters -= 1
 
     def pendingparentchange(self):
-        '''Returns true if the dirstate is in the middle of a set of changes
+        """Returns true if the dirstate is in the middle of a set of changes
         that modify the dirstate parent.
-        '''
+        """
         return self._parentwriters > 0
 
     @propertycache
@@ -247,12 +247,12 @@
         return encoding.getcwd()
 
     def getcwd(self):
-        '''Return the path from which a canonical path is calculated.
+        """Return the path from which a canonical path is calculated.
 
         This path should be used to resolve file patterns or to convert
         canonical paths back to file paths for display. It shouldn't be
         used to get real file paths. Use vfs functions instead.
-        '''
+        """
         cwd = self._cwd
         if cwd == self._root:
             return b''
@@ -275,7 +275,7 @@
         return path
 
     def __getitem__(self, key):
-        '''Return the current state of key (a filename) in the dirstate.
+        """Return the current state of key (a filename) in the dirstate.
 
         States are:
           n  normal
@@ -283,7 +283,7 @@
           r  marked for removal
           a  marked for addition
           ?  not tracked
-        '''
+        """
         return self._map.get(key, (b"?",))[0]
 
     def __contains__(self, key):
@@ -370,11 +370,11 @@
             raise
 
     def invalidate(self):
-        '''Causes the next access to reread the dirstate.
+        """Causes the next access to reread the dirstate.
 
         This is different from localrepo.invalidatedirstate() because it always
         rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
-        check whether the dirstate has changed before rereading it.'''
+        check whether the dirstate has changed before rereading it."""
 
         for a in ("_map", "_branch", "_ignore"):
             if a in self.__dict__:
@@ -426,7 +426,7 @@
         self._map.addfile(f, oldstate, state, mode, size, mtime)
 
     def normal(self, f, parentfiledata=None):
-        '''Mark a file normal and clean.
+        """Mark a file normal and clean.
 
         parentfiledata: (mode, size, mtime) of the clean file
 
@@ -434,7 +434,7 @@
         size), as close as possible to the point where we
         determined the file was clean, to limit the risk of the
         file having been changed by an external process between the
-        moment where the file was determined to be clean and now.'''
+        moment where the file was determined to be clean and now."""
         if parentfiledata:
             (mode, size, mtime) = parentfiledata
         else:
@@ -581,7 +581,7 @@
         return folded
 
     def normalize(self, path, isknown=False, ignoremissing=False):
-        '''
+        """
         normalize the case of a pathname when on a casefolding filesystem
 
         isknown specifies whether the filename came from walking the
@@ -596,7 +596,7 @@
         - version of name already stored in the dirstate
         - version of name stored on disk
         - version provided via command arguments
-        '''
+        """
 
         if self._checkcase:
             return self._normalize(path, isknown, ignoremissing)
@@ -643,11 +643,11 @@
         self._dirty = True
 
     def identity(self):
-        '''Return identity of dirstate itself to detect changing in storage
+        """Return identity of dirstate itself to detect changing in storage
 
         If identity of previous dirstate is equal to this, writing
         changes based on the former dirstate out can keep consistency.
-        '''
+        """
         return self._map.identity
 
     def write(self, tr):
@@ -769,14 +769,14 @@
         return (None, -1, b"")
 
     def _walkexplicit(self, match, subrepos):
-        '''Get stat data about the files explicitly specified by match.
+        """Get stat data about the files explicitly specified by match.
 
         Return a triple (results, dirsfound, dirsnotfound).
         - results is a mapping from filename to stat result. It also contains
           listings mapping subrepos and .hg to None.
         - dirsfound is a list of files found to be directories.
         - dirsnotfound is a list of files that the dirstate thinks are
-          directories and that were not found.'''
+          directories and that were not found."""
 
         def badtype(mode):
             kind = _(b'unknown')
@@ -904,7 +904,7 @@
         return results, dirsfound, dirsnotfound
 
     def walk(self, match, subrepos, unknown, ignored, full=True):
-        '''
+        """
         Walk recursively through the directory tree, finding all files
         matched by match.
 
@@ -913,7 +913,7 @@
         Return a dict mapping filename to stat-like object (either
         mercurial.osutil.stat instance or return value of os.stat()).
 
-        '''
+        """
         # full is a flag that extensions that hook into walk can use -- this
         # implementation doesn't use it at all. This satisfies the contract
         # because we only guarantee a "maybe".
@@ -1168,7 +1168,7 @@
         return (lookup, status)
 
     def status(self, match, subrepos, ignored, clean, unknown):
-        '''Determine the status of the working copy relative to the
+        """Determine the status of the working copy relative to the
         dirstate and return a pair of (unsure, status), where status is of type
         scmutil.status and:
 
@@ -1182,7 +1182,7 @@
           status.clean:
             files that have definitely not been modified since the
             dirstate was written
-        '''
+        """
         listignored, listclean, listunknown = ignored, clean, unknown
         lookup, modified, added, unknown, ignored = [], [], [], [], []
         removed, deleted, clean = [], [], []
@@ -1305,9 +1305,9 @@
         return (lookup, status)
 
     def matches(self, match):
-        '''
+        """
         return files in the dirstate (in whatever state) filtered by match
-        '''
+        """
         dmap = self._map
         if rustmod is not None:
             dmap = self._map._rustmap
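
All of the dirstate changes above are docstring quote normalization. As the
__getitem__ documentation says, the dirstate maps each filename to a one-letter
state. A hedged sketch of reading that state, assuming a local repository in the
current directory and a hypothetical file name:

    from mercurial import hg, ui as uimod

    repo = hg.repository(uimod.ui.load(), b'.')
    state = repo.dirstate[b'README']  # b'n', b'm', b'r', b'a' or b'?'
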
--- a/mercurial/dirstateguard.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/dirstateguard.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,7 @@
 
 
 class dirstateguard(util.transactional):
-    '''Restore dirstate at unexpected failure.
+    """Restore dirstate at unexpected failure.
 
     At the construction, this class does:
 
@@ -28,7 +28,7 @@
     is invoked before ``close()``.
 
     This just removes the backup file at ``close()`` before ``release()``.
-    '''
+    """
 
     def __init__(self, repo, name):
         self._repo = repo
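
dirstateguard is a util.transactional, so it also works as a context manager: the
backup taken at construction is discarded by close() and restored if release()
runs without a prior close(). An illustrative sketch (the operation name is
arbitrary and the body is a placeholder):

    from mercurial import dirstateguard

    def risky_dirstate_change(repo):
        with repo.wlock():
            with dirstateguard.dirstateguard(repo, b'risky-change'):
                pass  # mutate the dirstate here; it is restored on failure
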
--- a/mercurial/discovery.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/discovery.py	Tue Jan 19 21:48:43 2021 +0530
@@ -75,7 +75,7 @@
 
 
 class outgoing(object):
-    '''Represents the result of a findcommonoutgoing() call.
+    """Represents the result of a findcommonoutgoing() call.
 
     Members:
 
@@ -94,7 +94,7 @@
       remotely.
 
     Some members are computed on demand from the heads, unless provided upfront
-    by discovery.'''
+    by discovery."""
 
     def __init__(
         self, repo, commonheads=None, ancestorsof=None, missingroots=None
@@ -157,7 +157,7 @@
 def findcommonoutgoing(
     repo, other, onlyheads=None, force=False, commoninc=None, portable=False
 ):
-    '''Return an outgoing instance to identify the nodes present in repo but
+    """Return an outgoing instance to identify the nodes present in repo but
     not in other.
 
     If onlyheads is given, only nodes ancestral to nodes in onlyheads
@@ -168,7 +168,7 @@
     findcommonincoming(repo, other, force) to avoid recomputing it here.
 
     If portable is given, compute more conservative common and ancestorsof,
-    to make bundles created from the instance more portable.'''
+    to make bundles created from the instance more portable."""
     # declare an empty outgoing object to be filled later
     og = outgoing(repo, None, None)
 
@@ -332,7 +332,10 @@
 
     with remote.commandexecutor() as e:
         remotebookmarks = e.callcommand(
-            b'listkeys', {b'namespace': b'bookmarks',}
+            b'listkeys',
+            {
+                b'namespace': b'bookmarks',
+            },
         ).result()
 
     bookmarkedheads = set()
@@ -359,7 +362,7 @@
 def checkheads(pushop):
     """Check that a push won't add any outgoing head
 
-    raise Abort error and display ui message as needed.
+    raise a StateError and display a ui message as needed.
     """
 
     repo = pushop.repo.unfiltered()
@@ -399,14 +402,14 @@
                 closedbranches.add(tag)
         closedbranches = closedbranches & set(newbranches)
         if closedbranches:
-            errmsg = _(b"push creates new remote branches: %s (%d closed)!") % (
+            errmsg = _(b"push creates new remote branches: %s (%d closed)") % (
                 branchnames,
                 len(closedbranches),
             )
         else:
-            errmsg = _(b"push creates new remote branches: %s!") % branchnames
+            errmsg = _(b"push creates new remote branches: %s") % branchnames
         hint = _(b"use 'hg push --new-branch' to create new remote branches")
-        raise error.Abort(errmsg, hint=hint)
+        raise error.StateError(errmsg, hint=hint)
 
     # 2. Find heads that we need not warn about
     nowarnheads = _nowarnheads(pushop)
@@ -469,15 +472,18 @@
             if errormsg is None:
                 if branch not in (b'default', None):
                     errormsg = _(
-                        b"push creates new remote head %s on branch '%s'!"
-                    ) % (short(dhs[0]), branch,)
+                        b"push creates new remote head %s on branch '%s'"
+                    ) % (
+                        short(dhs[0]),
+                        branch,
+                    )
                 elif repo[dhs[0]].bookmarks():
                     errormsg = _(
                         b"push creates new remote head %s "
-                        b"with bookmark '%s'!"
+                        b"with bookmark '%s'"
                     ) % (short(dhs[0]), repo[dhs[0]].bookmarks()[0])
                 else:
-                    errormsg = _(b"push creates new remote head %s!") % short(
+                    errormsg = _(b"push creates new remote head %s") % short(
                         dhs[0]
                     )
                 if unsyncedheads:
@@ -499,7 +505,7 @@
             for h in dhs:
                 repo.ui.note(b" %s\n" % short(h))
     if errormsg:
-        raise error.Abort(errormsg, hint=hint)
+        raise error.StateError(errormsg, hint=hint)
 
 
 def _postprocessobsolete(pushop, futurecommon, candidate_newhs):
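
The checkheads() changes above only swap error.Abort for error.StateError and drop
the trailing "!" from the messages; the push-head logic itself is unchanged. As the
outgoing docstring describes, findcommonoutgoing() summarizes what a push would
send. A hedged sketch, where the peer URL is a placeholder:

    from mercurial import discovery, hg, ui as uimod

    ui = uimod.ui.load()
    repo = hg.repository(ui, b'.')
    other = hg.peer(ui, {}, b'https://hg.example.org/repo')
    out = discovery.findcommonoutgoing(repo, other)
    ui.write(b'%d changesets would be pushed\n' % len(out.missing))
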
--- a/mercurial/dispatch.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/dispatch.py	Tue Jan 19 21:48:43 2021 +0530
@@ -7,7 +7,6 @@
 
 from __future__ import absolute_import, print_function
 
-import difflib
 import errno
 import getopt
 import io
@@ -36,13 +35,16 @@
     help,
     hg,
     hook,
+    localrepo,
     profiling,
     pycompat,
     rcutil,
     registrar,
+    requirements as requirementsmod,
     scmutil,
     ui as uimod,
     util,
+    vfs,
 )
 
 from .utils import (
@@ -102,43 +104,43 @@
                 raise exc
 
 
+def _flushstdio(ui, err):
+    status = None
+    # In all cases we try to flush stdio streams.
+    if util.safehasattr(ui, b'fout'):
+        assert ui is not None  # help pytype
+        assert ui.fout is not None  # help pytype
+        try:
+            ui.fout.flush()
+        except IOError as e:
+            err = e
+            status = -1
+
+    if util.safehasattr(ui, b'ferr'):
+        assert ui is not None  # help pytype
+        assert ui.ferr is not None  # help pytype
+        try:
+            if err is not None and err.errno != errno.EPIPE:
+                ui.ferr.write(
+                    b'abort: %s\n' % encoding.strtolocal(err.strerror)
+                )
+            ui.ferr.flush()
+        # There's not much we can do about an I/O error here. So (possibly)
+        # change the status code and move on.
+        except IOError:
+            status = -1
+
+    return status
+
+
 def run():
     """run the command in sys.argv"""
     try:
         initstdio()
         with tracing.log('parse args into request'):
             req = request(pycompat.sysargv[1:])
-        err = None
-        try:
-            status = dispatch(req)
-        except error.StdioError as e:
-            err = e
-            status = -1
 
-        # In all cases we try to flush stdio streams.
-        if util.safehasattr(req.ui, b'fout'):
-            assert req.ui is not None  # help pytype
-            assert req.ui.fout is not None  # help pytype
-            try:
-                req.ui.fout.flush()
-            except IOError as e:
-                err = e
-                status = -1
-
-        if util.safehasattr(req.ui, b'ferr'):
-            assert req.ui is not None  # help pytype
-            assert req.ui.ferr is not None  # help pytype
-            try:
-                if err is not None and err.errno != errno.EPIPE:
-                    req.ui.ferr.write(
-                        b'abort: %s\n' % encoding.strtolocal(err.strerror)
-                    )
-                req.ui.ferr.flush()
-            # There's not much we can do about an I/O error here. So (possibly)
-            # change the status code and move on.
-            except IOError:
-                status = -1
-
+        status = dispatch(req)
         _silencestdio()
     except KeyboardInterrupt:
         # Catch early/late KeyboardInterrupt as last ditch. Here nothing will
@@ -164,39 +166,50 @@
         # "just work," here we change the sys.* streams to disable line ending
         # normalization, ensuring compatibility with our ui type.
 
-        # write_through is new in Python 3.7.
-        kwargs = {
-            "newline": "\n",
-            "line_buffering": sys.stdout.line_buffering,
-        }
-        if util.safehasattr(sys.stdout, "write_through"):
-            kwargs["write_through"] = sys.stdout.write_through
-        sys.stdout = io.TextIOWrapper(
-            sys.stdout.buffer, sys.stdout.encoding, sys.stdout.errors, **kwargs
-        )
+        if sys.stdout is not None:
+            # write_through is new in Python 3.7.
+            kwargs = {
+                "newline": "\n",
+                "line_buffering": sys.stdout.line_buffering,
+            }
+            if util.safehasattr(sys.stdout, "write_through"):
+                kwargs["write_through"] = sys.stdout.write_through
+            sys.stdout = io.TextIOWrapper(
+                sys.stdout.buffer,
+                sys.stdout.encoding,
+                sys.stdout.errors,
+                **kwargs
+            )
 
-        kwargs = {
-            "newline": "\n",
-            "line_buffering": sys.stderr.line_buffering,
-        }
-        if util.safehasattr(sys.stderr, "write_through"):
-            kwargs["write_through"] = sys.stderr.write_through
-        sys.stderr = io.TextIOWrapper(
-            sys.stderr.buffer, sys.stderr.encoding, sys.stderr.errors, **kwargs
-        )
+        if sys.stderr is not None:
+            kwargs = {
+                "newline": "\n",
+                "line_buffering": sys.stderr.line_buffering,
+            }
+            if util.safehasattr(sys.stderr, "write_through"):
+                kwargs["write_through"] = sys.stderr.write_through
+            sys.stderr = io.TextIOWrapper(
+                sys.stderr.buffer,
+                sys.stderr.encoding,
+                sys.stderr.errors,
+                **kwargs
+            )
 
-        # No write_through on read-only stream.
-        sys.stdin = io.TextIOWrapper(
-            sys.stdin.buffer,
-            sys.stdin.encoding,
-            sys.stdin.errors,
-            # None is universal newlines mode.
-            newline=None,
-            line_buffering=sys.stdin.line_buffering,
-        )
+        if sys.stdin is not None:
+            # No write_through on read-only stream.
+            sys.stdin = io.TextIOWrapper(
+                sys.stdin.buffer,
+                sys.stdin.encoding,
+                sys.stdin.errors,
+                # None is universal newlines mode.
+                newline=None,
+                line_buffering=sys.stdin.line_buffering,
+            )
 
     def _silencestdio():
         for fp in (sys.stdout, sys.stderr):
+            if fp is None:
+                continue
             # Check if the file is okay
             try:
                 fp.flush()
@@ -204,9 +217,7 @@
             except IOError:
                 pass
             # Otherwise mark it as closed to silence "Exception ignored in"
-            # message emitted by the interpreter finalizer. Be careful to
-            # not close procutil.stdout, which may be a fdopen-ed file object
-            # and its close() actually closes the underlying file descriptor.
+            # message emitted by the interpreter finalizer.
             try:
                 fp.close()
             except IOError:
@@ -223,47 +234,27 @@
         pass
 
 
-def _getsimilar(symbols, value):
-    sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
-    # The cutoff for similarity here is pretty arbitrary. It should
-    # probably be investigated and tweaked.
-    return [s for s in symbols if sim(s) > 0.6]
-
-
-def _reportsimilar(write, similar):
-    if len(similar) == 1:
-        write(_(b"(did you mean %s?)\n") % similar[0])
-    elif similar:
-        ss = b", ".join(sorted(similar))
-        write(_(b"(did you mean one of %s?)\n") % ss)
-
-
-def _formatparse(write, inst):
-    similar = []
-    if isinstance(inst, error.UnknownIdentifier):
-        # make sure to check fileset first, as revset can invoke fileset
-        similar = _getsimilar(inst.symbols, inst.function)
-    if len(inst.args) > 1:
-        write(
-            _(b"hg: parse error at %s: %s\n")
-            % (pycompat.bytestr(inst.args[1]), inst.args[0])
-        )
-        if inst.args[0].startswith(b' '):
-            write(_(b"unexpected leading whitespace\n"))
-    else:
-        write(_(b"hg: parse error: %s\n") % inst.args[0])
-        _reportsimilar(write, similar)
-    if inst.hint:
-        write(_(b"(%s)\n") % inst.hint)
-
-
 def _formatargs(args):
     return b' '.join(procutil.shellquote(a) for a in args)
 
 
 def dispatch(req):
     """run the command specified in req.args; returns an integer status code"""
-    with tracing.log('dispatch.dispatch'):
+    err = None
+    try:
+        status = _rundispatch(req)
+    except error.StdioError as e:
+        err = e
+        status = -1
+
+    ret = _flushstdio(req.ui, err)
+    if ret:
+        status = ret
+    return status
+
+
+def _rundispatch(req):
+    with tracing.log('dispatch._rundispatch'):
         if req.ferr:
             ferr = req.ferr
         elif req.ui:
@@ -288,12 +279,7 @@
             if req.fmsg:
                 req.ui.fmsg = req.fmsg
         except error.Abort as inst:
-            ferr.write(_(b"abort: %s\n") % inst.message)
-            if inst.hint:
-                ferr.write(_(b"(%s)\n") % inst.hint)
-            return -1
-        except error.ParseError as inst:
-            _formatparse(ferr.write, inst)
+            ferr.write(inst.format())
             return -1
 
         msg = _formatargs(req.args)
@@ -484,14 +470,17 @@
     config parsing and commands. besides, use handlecommandexception to handle
     uncaught exceptions.
     """
+    detailed_exit_code = -1
     try:
         return scmutil.callcatch(ui, func)
     except error.AmbiguousCommand as inst:
+        detailed_exit_code = 10
         ui.warn(
             _(b"hg: command '%s' is ambiguous:\n    %s\n")
             % (inst.prefix, b" ".join(inst.matches))
         )
     except error.CommandError as inst:
+        detailed_exit_code = 10
         if inst.command:
             ui.pager(b'help')
             msgbytes = pycompat.bytestr(inst.message)
@@ -500,10 +489,8 @@
         else:
             ui.warn(_(b"hg: %s\n") % inst.message)
             ui.warn(_(b"(use 'hg help -v' for a list of global options)\n"))
-    except error.ParseError as inst:
-        _formatparse(ui.warn, inst)
-        return -1
     except error.UnknownCommand as inst:
+        detailed_exit_code = 10
         nocmdmsg = _(b"hg: unknown command '%s'\n") % inst.command
         try:
             # check if the command is in a disabled extension
@@ -516,10 +503,10 @@
         except (error.UnknownCommand, error.Abort):
             suggested = False
             if inst.all_commands:
-                sim = _getsimilar(inst.all_commands, inst.command)
+                sim = error.getsimilar(inst.all_commands, inst.command)
                 if sim:
                     ui.warn(nocmdmsg)
-                    _reportsimilar(ui.warn, sim)
+                    ui.warn(b"(%s)\n" % error.similarity_hint(sim))
                     suggested = True
             if not suggested:
                 ui.warn(nocmdmsg)
@@ -532,7 +519,10 @@
         if not handlecommandexception(ui):
             raise
 
-    return -1
+    if ui.configbool(b'ui', b'detailed-exit-code'):
+        return detailed_exit_code
+    else:
+        return -1
 
 
 def aliasargs(fn, givenargs):
@@ -550,7 +540,7 @@
             nums.append(num)
             if num < len(givenargs):
                 return givenargs[num]
-            raise error.Abort(_(b'too few arguments for command alias'))
+            raise error.InputError(_(b'too few arguments for command alias'))
 
         cmd = re.sub(br'\$(\d+|\$)', replacer, cmd)
         givenargs = [x for i, x in enumerate(givenargs) if i not in nums]
@@ -559,10 +549,10 @@
 
 
 def aliasinterpolate(name, args, cmd):
-    '''interpolate args into cmd for shell aliases
+    """interpolate args into cmd for shell aliases
 
     This also handles $0, $@ and "$@".
-    '''
+    """
     # util.interpolate can't deal with "$@" (with quotes) because it's only
     # built to match prefix + patterns.
     replacemap = {b'$%d' % (i + 1): arg for i, arg in enumerate(args)}
@@ -670,12 +660,18 @@
         except error.UnknownCommand:
             self.badalias = _(
                 b"alias '%s' resolves to unknown command '%s'"
-            ) % (self.name, cmd,)
+            ) % (
+                self.name,
+                cmd,
+            )
             self.unknowncmd = True
         except error.AmbiguousCommand:
             self.badalias = _(
                 b"alias '%s' resolves to ambiguous command '%s'"
-            ) % (self.name, cmd,)
+            ) % (
+                self.name,
+                cmd,
+            )
 
     def _populatehelp(self, ui, name, cmd, fn, defaulthelp=None):
         # confine strings to be passed to i18n.gettext()
@@ -734,7 +730,7 @@
                     hint = _(b"'%s' is provided by '%s' extension") % (cmd, ext)
                 except error.UnknownCommand:
                     pass
-            raise error.Abort(self.badalias, hint=hint)
+            raise error.ConfigError(self.badalias, hint=hint)
         if self.shadows:
             ui.debug(
                 b"alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)
@@ -868,7 +864,7 @@
             ui.setconfig(section, name, value, b'--config')
             configs.append((section, name, value))
         except (IndexError, ValueError):
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b'malformed --config option: %r '
                     b'(use --config section.name=value)'
@@ -941,6 +937,30 @@
     return ret
 
 
+def _readsharedsourceconfig(ui, path):
+    """if the current repository is shared one, this tries to read
+    .hg/hgrc of shared source if we are in share-safe mode
+
+    Config read is loaded into the ui object passed
+
+    This should be called before reading .hg/hgrc or the main repo
+    as that overrides config set in shared source"""
+    try:
+        with open(os.path.join(path, b".hg", b"requires"), "rb") as fp:
+            requirements = set(fp.read().splitlines())
+            if not (
+                requirementsmod.SHARESAFE_REQUIREMENT in requirements
+                and requirementsmod.SHARED_REQUIREMENT in requirements
+            ):
+                return
+            hgvfs = vfs.vfs(os.path.join(path, b".hg"))
+            sharedvfs = localrepo._getsharedvfs(hgvfs, requirements)
+            root = sharedvfs.base
+            ui.readconfig(sharedvfs.join(b"hgrc"), root)
+    except IOError:
+        pass
+
+
 def _getlocal(ui, rpath, wd=None):
     """Return (path, local ui object) for the given target path.
 
@@ -961,13 +981,17 @@
     else:
         lui = ui.copy()
         if rcutil.use_repo_hgrc():
+            _readsharedsourceconfig(lui, path)
             lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
+            lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
 
     if rpath:
         path = lui.expandpath(rpath)
         lui = ui.copy()
         if rcutil.use_repo_hgrc():
+            _readsharedsourceconfig(lui, path)
             lui.readconfig(os.path.join(path, b".hg", b"hgrc"), path)
+            lui.readconfig(os.path.join(path, b".hg", b"hgrc-not-shared"), path)
 
     return path, lui
 
@@ -1071,18 +1095,20 @@
         req.canonical_command = cmd
 
         if options[b"config"] != req.earlyoptions[b"config"]:
-            raise error.Abort(_(b"option --config may not be abbreviated!"))
+            raise error.InputError(_(b"option --config may not be abbreviated"))
         if options[b"cwd"] != req.earlyoptions[b"cwd"]:
-            raise error.Abort(_(b"option --cwd may not be abbreviated!"))
+            raise error.InputError(_(b"option --cwd may not be abbreviated"))
         if options[b"repository"] != req.earlyoptions[b"repository"]:
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b"option -R has to be separated from other options (e.g. not "
-                    b"-qR) and --repository may only be abbreviated as --repo!"
+                    b"-qR) and --repository may only be abbreviated as --repo"
                 )
             )
         if options[b"debugger"] != req.earlyoptions[b"debugger"]:
-            raise error.Abort(_(b"option --debugger may not be abbreviated!"))
+            raise error.InputError(
+                _(b"option --debugger may not be abbreviated")
+            )
         # don't validate --profile/--traceback, which can be enabled from now
 
         if options[b"encoding"]:
@@ -1187,7 +1213,7 @@
                         intents=func.intents,
                     )
                     if not repo.local():
-                        raise error.Abort(
+                        raise error.InputError(
                             _(b"repository '%s' is not local") % path
                         )
                     repo.ui.setconfig(
@@ -1208,7 +1234,7 @@
                                 req.earlyoptions[b'repository'] = guess
                                 return _dispatch(req)
                         if not path:
-                            raise error.RepoError(
+                            raise error.InputError(
                                 _(
                                     b"no repository found in"
                                     b" '%s' (.hg not found)"
@@ -1257,7 +1283,7 @@
     # of date) will be clueful enough to notice the implausible
     # version number and try updating.
     ct = util.versiontuple(n=2)
-    worst = None, ct, b''
+    worst = None, ct, b'', b''
     if ui.config(b'ui', b'supportcontact') is None:
         for name, mod in extensions.extensions():
             # 'testedwith' should be bytes, but not all extensions are ported
@@ -1265,10 +1291,11 @@
             testedwith = stringutil.forcebytestr(
                 getattr(mod, 'testedwith', b'')
             )
+            version = extensions.moduleversion(mod)
             report = getattr(mod, 'buglink', _(b'the extension author.'))
             if not testedwith.strip():
                 # We found an untested extension. It's likely the culprit.
-                worst = name, b'unknown', report
+                worst = name, b'unknown', report, version
                 break
 
             # Never blame on extensions bundled with Mercurial.
@@ -1282,20 +1309,21 @@
             lower = [t for t in tested if t < ct]
             nearest = max(lower or tested)
             if worst[0] is None or nearest < worst[1]:
-                worst = name, nearest, report
+                worst = name, nearest, report, version
     if worst[0] is not None:
-        name, testedwith, report = worst
+        name, testedwith, report, version = worst
         if not isinstance(testedwith, (bytes, str)):
             testedwith = b'.'.join(
                 [stringutil.forcebytestr(c) for c in testedwith]
             )
+        extver = version or _(b"(version N/A)")
         warning = _(
             b'** Unknown exception encountered with '
-            b'possibly-broken third-party extension %s\n'
+            b'possibly-broken third-party extension "%s" %s\n'
             b'** which supports versions %s of Mercurial.\n'
-            b'** Please disable %s and try your action again.\n'
+            b'** Please disable "%s" and try your action again.\n'
             b'** If that fixes the bug please report it to %s\n'
-        ) % (name, testedwith, name, stringutil.forcebytestr(report))
+        ) % (name, extver, testedwith, name, stringutil.forcebytestr(report))
     else:
         bugtracker = ui.config(b'ui', b'supportcontact')
         if bugtracker is None:
@@ -1309,12 +1337,22 @@
             + b'\n'
         )
     sysversion = pycompat.sysbytes(sys.version).replace(b'\n', b'')
+
+    def ext_with_ver(x):
+        ext = x[0]
+        ver = extensions.moduleversion(x[1])
+        if ver:
+            ext += b' ' + ver
+        return ext
+
     warning += (
         (_(b"** Python %s\n") % sysversion)
         + (_(b"** Mercurial Distributed SCM (version %s)\n") % util.version())
         + (
             _(b"** Extensions loaded: %s\n")
-            % b", ".join([x[0] for x in extensions.extensions()])
+            % b", ".join(
+                [ext_with_ver(x) for x in sorted(extensions.extensions())]
+            )
         )
     )
     return warning
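
The dispatch changes introduce opt-in detailed exit codes: when ui.detailed-exit-code
is set, _callcatch() returns 10 for ambiguous, malformed or unknown commands instead
of the generic -1. A quick way to observe this from the outside (a sketch; it assumes
an hg built from this changeset is on PATH):

    import subprocess

    proc = subprocess.run(
        ['hg', '--config', 'ui.detailed-exit-code=yes', 'no-such-command'],
        capture_output=True,
    )
    # expected to be 10 with this change (typically 255 without it)
    print(proc.returncode)
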
--- a/mercurial/encoding.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/encoding.py	Tue Jan 19 21:48:43 2021 +0530
@@ -113,8 +113,8 @@
 
 
 class localstr(bytes):
-    '''This class allows strings that are unmodified to be
-    round-tripped to the local encoding and back'''
+    """This class allows strings that are unmodified to be
+    round-tripped to the local encoding and back"""
 
     def __new__(cls, u, l):
         s = bytes.__new__(cls, l)
@@ -298,7 +298,12 @@
     if pycompat.iswindows:
         # Python 3 on Windows issues a DeprecationWarning about using the bytes
         # API when os.getcwdb() is called.
-        getcwd = lambda: strtolocal(os.getcwd())  # re-exports
+        #
+        # Additionally, py3.8+ uppercases the drive letter when calling
+        # os.path.realpath(), which is used on ``repo.root``.  Since those
+        # strings are compared in various places as simple strings, also call
+        # realpath here.  See https://bugs.python.org/issue40368
+        getcwd = lambda: strtolocal(os.path.realpath(os.getcwd()))  # re-exports
     else:
         getcwd = os.getcwdb  # re-exports
 else:
@@ -329,8 +334,8 @@
 
 def getcols(s, start, c):
     # type: (bytes, int, int) -> bytes
-    '''Use colwidth to find a c-column substring of s starting at byte
-    index start'''
+    """Use colwidth to find a c-column substring of s starting at byte
+    index start"""
     for x in pycompat.xrange(start + c, len(s)):
         t = s[start:x]
         if colwidth(t) == c:
@@ -487,7 +492,7 @@
 
 
 class normcasespecs(object):
-    '''what a platform's normcase does to ASCII strings
+    """what a platform's normcase does to ASCII strings
 
     This is specified per platform, and should be consistent with what normcase
     on that platform actually does.
@@ -496,7 +501,7 @@
     upper: normcase uppercases ASCII strings
     other: the fallback function should always be called
 
-    This should be kept in sync with normcase_spec in util.h.'''
+    This should be kept in sync with normcase_spec in util.h."""
 
     lower = -1
     upper = 1
@@ -505,7 +510,7 @@
 
 def jsonescape(s, paranoid=False):
     # type: (Any, Any) -> Any
-    '''returns a string suitable for JSON
+    """returns a string suitable for JSON
 
     JSON is problematic for us because it doesn't support non-Unicode
     bytes. To deal with this, we take the following approach:
@@ -547,7 +552,7 @@
     'non-BMP: \\\\ud834\\\\udd1e'
     >>> jsonescape(b'<foo@example.org>', paranoid=True)
     '\\\\u003cfoo@example.org\\\\u003e'
-    '''
+    """
 
     u8chars = toutf8b(s)
     try:
@@ -569,11 +574,11 @@
 
 def getutf8char(s, pos):
     # type: (bytes, int) -> bytes
-    '''get the next full utf-8 character in the given string, starting at pos
+    """get the next full utf-8 character in the given string, starting at pos
 
     Raises a UnicodeError if the given location does not start a valid
     utf-8 character.
-    '''
+    """
 
     # find how many bytes to attempt decoding from first nibble
     l = _utf8len[ord(s[pos : pos + 1]) >> 4]
@@ -588,7 +593,7 @@
 
 def toutf8b(s):
     # type: (bytes) -> bytes
-    '''convert a local, possibly-binary string into UTF-8b
+    """convert a local, possibly-binary string into UTF-8b
 
     This is intended as a generic method to preserve data when working
     with schemes like JSON and XML that have no provision for
@@ -616,7 +621,7 @@
     arbitrary bytes into an internal Unicode format that can be
     re-encoded back into the original. Here we are exposing the
     internal surrogate encoding as a UTF-8 string.)
-    '''
+    """
 
     if isinstance(s, localstr):
         # assume that the original UTF-8 sequence would never contain
@@ -657,7 +662,7 @@
 
 def fromutf8b(s):
     # type: (bytes) -> bytes
-    '''Given a UTF-8b string, return a local, possibly-binary string.
+    """Given a UTF-8b string, return a local, possibly-binary string.
 
     return the original binary string. This
     is a round-trip process for strings like filenames, but metadata
@@ -677,7 +682,7 @@
     True
     >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
     True
-    '''
+    """
 
     if isasciistr(s):
         return s
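
The encoding changes are mostly docstring requoting, plus getcwd() on Windows now
going through os.path.realpath() to normalize the drive-letter case. The
toutf8b()/fromutf8b() pair documented above round-trips arbitrary byte strings; a
small sketch restating the doctests:

    from mercurial import encoding

    raw = b'caf\xc3\xa9 \x80'  # valid UTF-8 followed by a stray non-UTF-8 byte
    assert encoding.fromutf8b(encoding.toutf8b(raw)) == raw
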
--- a/mercurial/error.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/error.py	Tue Jan 19 21:48:43 2021 +0530
@@ -13,6 +13,8 @@
 
 from __future__ import absolute_import
 
+import difflib
+
 # Do not import anything but pycompat here, please
 from . import pycompat
 
@@ -48,7 +50,7 @@
 
 
 class RevlogError(StorageError):
-    __bytes__ = _tobytes
+    pass
 
 
 class SidedataHashError(RevlogError):
@@ -130,6 +132,17 @@
     __bytes__ = _tobytes
 
 
+class WorkerError(Exception):
+    """Exception raised when a worker process dies."""
+
+    def __init__(self, status_code):
+        self.status_code = status_code
+        # Pass status code to superclass just so it becomes part of __bytes__
+        super(WorkerError, self).__init__(status_code)
+
+    __bytes__ = _tobytes
+
+
 class InterventionRequired(Hint, Exception):
     """Exception raised when a command requires human intervention."""
 
@@ -173,6 +186,43 @@
             # may raise another exception.
             return pycompat.sysstr(self.__bytes__())
 
+    def format(self):
+        from .i18n import _
+
+        message = _(b"abort: %s\n") % self.message
+        if self.hint:
+            message += _(b"(%s)\n") % self.hint
+        return message
+
+
+class InputError(Abort):
+    """Indicates that the user made an error in their input.
+
+    Examples: Invalid command, invalid flags, invalid revision.
+    """
+
+
+class StateError(Abort):
+    """Indicates that the operation might work if retried in a different state.
+
+    Examples: Unresolved merge conflicts, unfinished operations.
+    """
+
+
+class CanceledError(Abort):
+    """Indicates that the user canceled the operation.
+
+    Examples: Close commit editor with error status, quit chistedit.
+    """
+
+
+class SecurityError(Abort):
+    """Indicates that some aspect of security failed.
+
+    Examples: Bad server credentials, expired local credentials for network
+    filesystem, mismatched GPG signature, DoS protection.
+    """
+
 
 class HookLoadError(Abort):
     """raised when loading a hook fails, aborting an operation
@@ -189,6 +239,24 @@
 class ConfigError(Abort):
     """Exception raised when parsing config files"""
 
+    def __init__(self, message, location=None, hint=None):
+        super(ConfigError, self).__init__(message, hint=hint)
+        self.location = location
+
+    def format(self):
+        from .i18n import _
+
+        if self.location is not None:
+            message = _(b"config error at %s: %s\n") % (
+                pycompat.bytestr(self.location),
+                self.message,
+            )
+        else:
+            message = _(b"config error: %s\n") % self.message
+        if self.hint:
+            message += _(b"(%s)\n") % self.hint
+        return message
+
 
 class UpdateAbort(Abort):
     """Raised when an update is aborted for destination issue"""
@@ -221,25 +289,63 @@
     __bytes__ = _tobytes
 
 
-class ParseError(Hint, Exception):
+class ParseError(Abort):
     """Raised when parsing config files and {rev,file}sets (msg[, pos])"""
 
-    __bytes__ = _tobytes
+    def __init__(self, message, location=None, hint=None):
+        super(ParseError, self).__init__(message, hint=hint)
+        self.location = location
+
+    def format(self):
+        from .i18n import _
+
+        if self.location is not None:
+            message = _(b"hg: parse error at %s: %s\n") % (
+                pycompat.bytestr(self.location),
+                self.message,
+            )
+        else:
+            message = _(b"hg: parse error: %s\n") % self.message
+        if self.hint:
+            message += _(b"(%s)\n") % self.hint
+        return message
 
 
 class PatchError(Exception):
     __bytes__ = _tobytes
 
 
+def getsimilar(symbols, value):
+    sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
+    # The cutoff for similarity here is pretty arbitrary. It should
+    # probably be investigated and tweaked.
+    return [s for s in symbols if sim(s) > 0.6]
+
+
+def similarity_hint(similar):
+    from .i18n import _
+
+    if len(similar) == 1:
+        return _(b"did you mean %s?") % similar[0]
+    elif similar:
+        ss = b", ".join(sorted(similar))
+        return _(b"did you mean one of %s?") % ss
+    else:
+        return None
+
+
 class UnknownIdentifier(ParseError):
     """Exception raised when a {rev,file}set references an unknown identifier"""
 
     def __init__(self, function, symbols):
         from .i18n import _
 
-        ParseError.__init__(self, _(b"unknown identifier: %s") % function)
-        self.function = function
-        self.symbols = symbols
+        similar = getsimilar(symbols, function)
+        hint = similarity_hint(similar)
+
+        ParseError.__init__(
+            self, _(b"unknown identifier: %s") % function, hint=hint
+        )
 
 
 class RepoError(Hint, Exception):
@@ -288,8 +394,7 @@
 
 
 class UnknownVersion(Abort):
-    """generic exception for aborting from an encounter with an unknown version
-    """
+    """generic exception for aborting from an encounter with an unknown version"""
 
     def __init__(self, msg, hint=None, version=None):
         self.version = version
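
The new Abort subclasses (InputError, StateError, CanceledError, SecurityError) let
callers classify failures without changing how the messages read, and format()
centralizes the "abort: ...\n(hint)\n" rendering that dispatch now relies on. A
sketch of how an extension command might use them (the message and hint text are
made up for illustration):

    from mercurial import error
    from mercurial.i18n import _

    def check_args(args):
        if not args:
            raise error.InputError(
                _(b'at least one argument is required'),
                hint=_(b'see "hg help" for usage'),
            )
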
--- a/mercurial/exchange.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/exchange.py	Tue Jan 19 21:48:43 2021 +0530
@@ -16,10 +16,10 @@
     nullid,
     nullrev,
 )
-from .thirdparty import attr
 from . import (
     bookmarks as bookmod,
     bundle2,
+    bundlecaches,
     changegroup,
     discovery,
     error,
@@ -34,7 +34,6 @@
     pycompat,
     requirements,
     scmutil,
-    sslutil,
     streamclone,
     url as urlmod,
     util,
@@ -50,202 +49,6 @@
 
 _NARROWACL_SECTION = b'narrowacl'
 
-# Maps bundle version human names to changegroup versions.
-_bundlespeccgversions = {
-    b'v1': b'01',
-    b'v2': b'02',
-    b'packed1': b's1',
-    b'bundle2': b'02',  # legacy
-}
-
-# Maps bundle version with content opts to choose which part to bundle
-_bundlespeccontentopts = {
-    b'v1': {
-        b'changegroup': True,
-        b'cg.version': b'01',
-        b'obsolescence': False,
-        b'phases': False,
-        b'tagsfnodescache': False,
-        b'revbranchcache': False,
-    },
-    b'v2': {
-        b'changegroup': True,
-        b'cg.version': b'02',
-        b'obsolescence': False,
-        b'phases': False,
-        b'tagsfnodescache': True,
-        b'revbranchcache': True,
-    },
-    b'packed1': {b'cg.version': b's1'},
-}
-_bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
-
-_bundlespecvariants = {
-    b"streamv2": {
-        b"changegroup": False,
-        b"streamv2": True,
-        b"tagsfnodescache": False,
-        b"revbranchcache": False,
-    }
-}
-
-# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
-_bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
-
-
-@attr.s
-class bundlespec(object):
-    compression = attr.ib()
-    wirecompression = attr.ib()
-    version = attr.ib()
-    wireversion = attr.ib()
-    params = attr.ib()
-    contentopts = attr.ib()
-
-
-def parsebundlespec(repo, spec, strict=True):
-    """Parse a bundle string specification into parts.
-
-    Bundle specifications denote a well-defined bundle/exchange format.
-    The content of a given specification should not change over time in
-    order to ensure that bundles produced by a newer version of Mercurial are
-    readable from an older version.
-
-    The string currently has the form:
-
-       <compression>-<type>[;<parameter0>[;<parameter1>]]
-
-    Where <compression> is one of the supported compression formats
-    and <type> is (currently) a version string. A ";" can follow the type and
-    all text afterwards is interpreted as URI encoded, ";" delimited key=value
-    pairs.
-
-    If ``strict`` is True (the default) <compression> is required. Otherwise,
-    it is optional.
-
-    Returns a bundlespec object of (compression, version, parameters).
-    Compression will be ``None`` if not in strict mode and a compression isn't
-    defined.
-
-    An ``InvalidBundleSpecification`` is raised when the specification is
-    not syntactically well formed.
-
-    An ``UnsupportedBundleSpecification`` is raised when the compression or
-    bundle type/version is not recognized.
-
-    Note: this function will likely eventually return a more complex data
-    structure, including bundle2 part information.
-    """
-
-    def parseparams(s):
-        if b';' not in s:
-            return s, {}
-
-        params = {}
-        version, paramstr = s.split(b';', 1)
-
-        for p in paramstr.split(b';'):
-            if b'=' not in p:
-                raise error.InvalidBundleSpecification(
-                    _(
-                        b'invalid bundle specification: '
-                        b'missing "=" in parameter: %s'
-                    )
-                    % p
-                )
-
-            key, value = p.split(b'=', 1)
-            key = urlreq.unquote(key)
-            value = urlreq.unquote(value)
-            params[key] = value
-
-        return version, params
-
-    if strict and b'-' not in spec:
-        raise error.InvalidBundleSpecification(
-            _(
-                b'invalid bundle specification; '
-                b'must be prefixed with compression: %s'
-            )
-            % spec
-        )
-
-    if b'-' in spec:
-        compression, version = spec.split(b'-', 1)
-
-        if compression not in util.compengines.supportedbundlenames:
-            raise error.UnsupportedBundleSpecification(
-                _(b'%s compression is not supported') % compression
-            )
-
-        version, params = parseparams(version)
-
-        if version not in _bundlespeccgversions:
-            raise error.UnsupportedBundleSpecification(
-                _(b'%s is not a recognized bundle version') % version
-            )
-    else:
-        # Value could be just the compression or just the version, in which
-        # case some defaults are assumed (but only when not in strict mode).
-        assert not strict
-
-        spec, params = parseparams(spec)
-
-        if spec in util.compengines.supportedbundlenames:
-            compression = spec
-            version = b'v1'
-            # Generaldelta repos require v2.
-            if b'generaldelta' in repo.requirements:
-                version = b'v2'
-            # Modern compression engines require v2.
-            if compression not in _bundlespecv1compengines:
-                version = b'v2'
-        elif spec in _bundlespeccgversions:
-            if spec == b'packed1':
-                compression = b'none'
-            else:
-                compression = b'bzip2'
-            version = spec
-        else:
-            raise error.UnsupportedBundleSpecification(
-                _(b'%s is not a recognized bundle specification') % spec
-            )
-
-    # Bundle version 1 only supports a known set of compression engines.
-    if version == b'v1' and compression not in _bundlespecv1compengines:
-        raise error.UnsupportedBundleSpecification(
-            _(b'compression engine %s is not supported on v1 bundles')
-            % compression
-        )
-
-    # The specification for packed1 can optionally declare the data formats
-    # required to apply it. If we see this metadata, compare against what the
-    # repo supports and error if the bundle isn't compatible.
-    if version == b'packed1' and b'requirements' in params:
-        requirements = set(params[b'requirements'].split(b','))
-        missingreqs = requirements - repo.supportedformats
-        if missingreqs:
-            raise error.UnsupportedBundleSpecification(
-                _(b'missing support for repository features: %s')
-                % b', '.join(sorted(missingreqs))
-            )
-
-    # Compute contentopts based on the version
-    contentopts = _bundlespeccontentopts.get(version, {}).copy()
-
-    # Process the variants
-    if b"stream" in params and params[b"stream"] == b"v2":
-        variant = _bundlespecvariants[b"streamv2"]
-        contentopts.update(variant)
-
-    engine = util.compengines.forbundlename(compression)
-    compression, wirecompression = engine.bundletype()
-    wireversion = _bundlespeccgversions[version]
-
-    return bundlespec(
-        compression, wirecompression, version, wireversion, params, contentopts
-    )
-
 
 def readbundle(ui, fh, fname, vfs=None):
     header = changegroup.readexactly(fh, 4)
@@ -390,7 +193,7 @@
                 _(b'push and publish %i changesets (yn)?$$ &Yes $$ &No')
                 % len(published)
             ):
-                raise error.Abort(_(b'user quit'))
+                raise error.CanceledError(_(b'user quit'))
         elif behavior == b'abort':
             msg = _(b'push would publish %i changesets') % len(published)
             hint = _(
@@ -552,15 +355,15 @@
 bookmsgmap = {
     b'update': (
         _(b"updating bookmark %s\n"),
-        _(b'updating bookmark %s failed!\n'),
+        _(b'updating bookmark %s failed\n'),
     ),
     b'export': (
         _(b"exporting bookmark %s\n"),
-        _(b'exporting bookmark %s failed!\n'),
+        _(b'exporting bookmark %s failed\n'),
     ),
     b'delete': (
         _(b"deleting remote bookmark %s\n"),
-        _(b'deleting remote bookmark %s failed!\n'),
+        _(b'deleting remote bookmark %s failed\n'),
     ),
 }
 
@@ -575,14 +378,14 @@
     publish=False,
     opargs=None,
 ):
-    '''Push outgoing changesets (limited by revs) from a local
+    """Push outgoing changesets (limited by revs) from a local
     repository to remote. Return an integer:
       - None means nothing to push
       - 0 means HTTP error
       - 1 means we pushed and remote head count is unchanged *or*
         we have outgoing changesets but refused to push
       - other values as described by addchangegroup()
-    '''
+    """
     if opargs is None:
         opargs = {}
     pushop = pushoperation(
@@ -1707,8 +1510,8 @@
 
 
 def add_confirm_callback(repo, pullop):
-    """ adds a finalize callback to transaction which can be used to show stats
-    to user and confirm the pull before committing transaction """
+    """adds a finalize callback to transaction which can be used to show stats
+    to user and confirm the pull before committing transaction"""
 
     tr = pullop.trmanager.transaction()
     scmutil.registersummarycallback(
@@ -2089,7 +1892,11 @@
     elif pullop.heads is None:
         with pullop.remote.commandexecutor() as e:
             cg = e.callcommand(
-                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
+                b'changegroup',
+                {
+                    b'nodes': pullop.fetch,
+                    b'source': b'pull',
+                },
             ).result()
 
     elif not pullop.remote.capable(b'changegroupsubset'):
@@ -2867,7 +2674,7 @@
     # attempt.
     pullop.clonebundleattempted = True
 
-    entries = parseclonebundlesmanifest(repo, res)
+    entries = bundlecaches.parseclonebundlesmanifest(repo, res)
     if not entries:
         repo.ui.note(
             _(
@@ -2877,7 +2684,7 @@
         )
         return
 
-    entries = filterclonebundleentries(
+    entries = bundlecaches.filterclonebundleentries(
         repo, entries, streamclonerequested=pullop.streamclonerequested
     )
 
@@ -2898,7 +2705,7 @@
         )
         return
 
-    entries = sortclonebundleentries(repo.ui, entries)
+    entries = bundlecaches.sortclonebundleentries(repo.ui, entries)
 
     url = entries[0][b'URL']
     repo.ui.status(_(b'applying clone bundle from %s\n') % url)
@@ -2923,214 +2730,6 @@
         )
 
 
-def parseclonebundlesmanifest(repo, s):
-    """Parses the raw text of a clone bundles manifest.
-
-    Returns a list of dicts. The dicts have a ``URL`` key corresponding
-    to the URL and other keys are the attributes for the entry.
-    """
-    m = []
-    for line in s.splitlines():
-        fields = line.split()
-        if not fields:
-            continue
-        attrs = {b'URL': fields[0]}
-        for rawattr in fields[1:]:
-            key, value = rawattr.split(b'=', 1)
-            key = urlreq.unquote(key)
-            value = urlreq.unquote(value)
-            attrs[key] = value
-
-            # Parse BUNDLESPEC into components. This makes client-side
-            # preferences easier to specify since you can prefer a single
-            # component of the BUNDLESPEC.
-            if key == b'BUNDLESPEC':
-                try:
-                    bundlespec = parsebundlespec(repo, value)
-                    attrs[b'COMPRESSION'] = bundlespec.compression
-                    attrs[b'VERSION'] = bundlespec.version
-                except error.InvalidBundleSpecification:
-                    pass
-                except error.UnsupportedBundleSpecification:
-                    pass
-
-        m.append(attrs)
-
-    return m
-
-
-def isstreamclonespec(bundlespec):
-    # Stream clone v1
-    if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
-        return True
-
-    # Stream clone v2
-    if (
-        bundlespec.wirecompression == b'UN'
-        and bundlespec.wireversion == b'02'
-        and bundlespec.contentopts.get(b'streamv2')
-    ):
-        return True
-
-    return False
-
-
-def filterclonebundleentries(repo, entries, streamclonerequested=False):
-    """Remove incompatible clone bundle manifest entries.
-
-    Accepts a list of entries parsed with ``parseclonebundlesmanifest``
-    and returns a new list consisting of only the entries that this client
-    should be able to apply.
-
-    There is no guarantee we'll be able to apply all returned entries because
-    the metadata we use to filter on may be missing or wrong.
-    """
-    newentries = []
-    for entry in entries:
-        spec = entry.get(b'BUNDLESPEC')
-        if spec:
-            try:
-                bundlespec = parsebundlespec(repo, spec, strict=True)
-
-                # If a stream clone was requested, filter out non-streamclone
-                # entries.
-                if streamclonerequested and not isstreamclonespec(bundlespec):
-                    repo.ui.debug(
-                        b'filtering %s because not a stream clone\n'
-                        % entry[b'URL']
-                    )
-                    continue
-
-            except error.InvalidBundleSpecification as e:
-                repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
-                continue
-            except error.UnsupportedBundleSpecification as e:
-                repo.ui.debug(
-                    b'filtering %s because unsupported bundle '
-                    b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
-                )
-                continue
-        # If we don't have a spec and requested a stream clone, we don't know
-        # what the entry is so don't attempt to apply it.
-        elif streamclonerequested:
-            repo.ui.debug(
-                b'filtering %s because cannot determine if a stream '
-                b'clone bundle\n' % entry[b'URL']
-            )
-            continue
-
-        if b'REQUIRESNI' in entry and not sslutil.hassni:
-            repo.ui.debug(
-                b'filtering %s because SNI not supported\n' % entry[b'URL']
-            )
-            continue
-
-        if b'REQUIREDRAM' in entry:
-            try:
-                requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
-            except error.ParseError:
-                repo.ui.debug(
-                    b'filtering %s due to a bad REQUIREDRAM attribute\n'
-                    % entry[b'URL']
-                )
-                continue
-            actualram = repo.ui.estimatememory()
-            if actualram is not None and actualram * 0.66 < requiredram:
-                repo.ui.debug(
-                    b'filtering %s as it needs more than 2/3 of system memory\n'
-                    % entry[b'URL']
-                )
-                continue
-
-        newentries.append(entry)
-
-    return newentries
-
-
-class clonebundleentry(object):
-    """Represents an item in a clone bundles manifest.
-
-    This rich class is needed to support sorting since sorted() in Python 3
-    doesn't support ``cmp`` and our comparison is complex enough that ``key=``
-    won't work.
-    """
-
-    def __init__(self, value, prefers):
-        self.value = value
-        self.prefers = prefers
-
-    def _cmp(self, other):
-        for prefkey, prefvalue in self.prefers:
-            avalue = self.value.get(prefkey)
-            bvalue = other.value.get(prefkey)
-
-            # Special case for b missing attribute and a matches exactly.
-            if avalue is not None and bvalue is None and avalue == prefvalue:
-                return -1
-
-            # Special case for a missing attribute and b matches exactly.
-            if bvalue is not None and avalue is None and bvalue == prefvalue:
-                return 1
-
-            # We can't compare unless attribute present on both.
-            if avalue is None or bvalue is None:
-                continue
-
-            # Same values should fall back to next attribute.
-            if avalue == bvalue:
-                continue
-
-            # Exact matches come first.
-            if avalue == prefvalue:
-                return -1
-            if bvalue == prefvalue:
-                return 1
-
-            # Fall back to next attribute.
-            continue
-
-        # If we got here we couldn't sort by attributes and prefers. Fall
-        # back to index order.
-        return 0
-
-    def __lt__(self, other):
-        return self._cmp(other) < 0
-
-    def __gt__(self, other):
-        return self._cmp(other) > 0
-
-    def __eq__(self, other):
-        return self._cmp(other) == 0
-
-    def __le__(self, other):
-        return self._cmp(other) <= 0
-
-    def __ge__(self, other):
-        return self._cmp(other) >= 0
-
-    def __ne__(self, other):
-        return self._cmp(other) != 0
-
-
-def sortclonebundleentries(ui, entries):
-    prefers = ui.configlist(b'ui', b'clonebundleprefers')
-    if not prefers:
-        return list(entries)
-
-    def _split(p):
-        if b'=' not in p:
-            hint = _(b"each comma separated item should be key=value pairs")
-            raise error.Abort(
-                _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
-            )
-        return p.split(b'=', 1)
-
-    prefers = [_split(p) for p in prefers]
-
-    items = sorted(clonebundleentry(v, prefers) for v in entries)
-    return [i.value for i in items]
-
-
 def trypullbundlefromurl(ui, repo, url):
     """Attempt to apply a bundle from a URL."""
     with repo.lock(), repo.transaction(b'bundleurl') as tr:
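
The clone bundle helpers removed above (``parseclonebundlesmanifest``,
``filterclonebundleentries``, ``clonebundleentry``, ``sortclonebundleentries``)
are now referenced through the ``bundlecaches`` module, as the updated call
sites show. For readers unfamiliar with the manifest format they handle, here
is a minimal standalone sketch (not the Mercurial API; the URLs and attribute
values are made up) of how a one-URL-per-line manifest with percent-encoded
``KEY=VALUE`` attributes maps to the entry dicts described in the removed
docstrings::

    from urllib.parse import unquote_to_bytes

    def parse_manifest(data):
        # One entry per non-empty line: a URL followed by optional
        # percent-encoded KEY=VALUE attributes, mirroring the logic of the
        # removed parseclonebundlesmanifest().
        entries = []
        for line in data.splitlines():
            fields = line.split()
            if not fields:
                continue
            attrs = {b'URL': fields[0]}
            for rawattr in fields[1:]:
                key, value = rawattr.split(b'=', 1)
                attrs[unquote_to_bytes(key)] = unquote_to_bytes(value)
            entries.append(attrs)
        return entries

    manifest = (
        b'https://example.com/full.hg BUNDLESPEC=gzip-v2 REQUIREDRAM=1GB\n'
        b'https://example.com/stream.hg BUNDLESPEC=none-packed1\n'
    )
    for entry in parse_manifest(manifest):
        print(entry[b'URL'], entry.get(b'BUNDLESPEC'))
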
--- a/mercurial/exchangev2.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/exchangev2.py	Tue Jan 19 21:48:43 2021 +0530
@@ -79,7 +79,9 @@
     # Ensure all new changesets are draft by default. If the repo is
     # publishing, the phase will be adjusted by the loop below.
     if csetres[b'added']:
-        phases.registernew(repo, tr, phases.draft, csetres[b'added'])
+        phases.registernew(
+            repo, tr, phases.draft, [repo[n].rev() for n in csetres[b'added']]
+        )
 
     # And adjust the phase of all changesets accordingly.
     for phasenumber, phase in phases.phasenames.items():
@@ -87,7 +89,10 @@
             continue
 
         phases.advanceboundary(
-            repo, tr, phasenumber, csetres[b'nodesbyphase'][phase],
+            repo,
+            tr,
+            phasenumber,
+            csetres[b'nodesbyphase'][phase],
         )
 
     # Write bookmark updates.
@@ -187,7 +192,10 @@
 def _fetchrawstorefiles(repo, remote):
     with remote.commandexecutor() as e:
         objs = e.callcommand(
-            b'rawstorefiledata', {b'files': [b'changelog', b'manifestlog'],}
+            b'rawstorefiledata',
+            {
+                b'files': [b'changelog', b'manifestlog'],
+            },
         ).result()
 
         # First object is a summary of files data that follows.
@@ -343,16 +351,21 @@
     )
 
     manifestnodes = {}
+    added = []
 
     def linkrev(node):
         repo.ui.debug(b'add changeset %s\n' % short(node))
         # Linkrev for changelog is always self.
         return len(cl)
 
+    def ondupchangeset(cl, node):
+        added.append(node)
+
     def onchangeset(cl, node):
         progress.increment()
 
         revision = cl.changelogrevision(node)
+        added.append(node)
 
         # We need to preserve the mapping of changelog revision to node
         # so we can set the linkrev accordingly when manifests are added.
@@ -403,8 +416,12 @@
                 0,
             )
 
-    added = cl.addgroup(
-        iterrevisions(), linkrev, weakref.proxy(tr), addrevisioncb=onchangeset
+    cl.addgroup(
+        iterrevisions(),
+        linkrev,
+        weakref.proxy(tr),
+        addrevisioncb=onchangeset,
+        duplicaterevisioncb=ondupchangeset,
     )
 
     progress.complete()
@@ -516,12 +533,15 @@
             # Chomp off header object.
             next(objs)
 
-            added.extend(
-                rootmanifest.addgroup(
-                    iterrevisions(objs, progress),
-                    linkrevs.__getitem__,
-                    weakref.proxy(tr),
-                )
+            def onchangeset(cl, node):
+                added.append(node)
+
+            rootmanifest.addgroup(
+                iterrevisions(objs, progress),
+                linkrevs.__getitem__,
+                weakref.proxy(tr),
+                addrevisioncb=onchangeset,
+                duplicaterevisioncb=onchangeset,
             )
 
     progress.complete()
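
The two hunks above switch ``exchangev2`` from relying on ``addgroup()``'s
return value to collecting the added nodes through the ``addrevisioncb`` and
``duplicaterevisioncb`` callbacks. A minimal standalone sketch of that pattern
(the ``addgroup`` below is a hypothetical stand-in, not the revlog API)::

    def addgroup(incoming, known, addrevisioncb=None, duplicaterevisioncb=None):
        # Stand-in for revlog.addgroup(): instead of returning the nodes it
        # stored, it reports each incoming node through a callback.
        for node in incoming:
            if node in known:
                if duplicaterevisioncb:
                    duplicaterevisioncb(node)
            else:
                known.add(node)
                if addrevisioncb:
                    addrevisioncb(node)

    added = []
    addgroup(
        [b'n1', b'n2', b'n1'],
        known={b'n1'},
        addrevisioncb=added.append,
        duplicaterevisioncb=added.append,  # duplicates still count as "seen"
    )
    print(added)  # [b'n1', b'n2', b'n1']
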
@@ -732,7 +752,10 @@
         with remote.commandexecutor() as e:
             args = {
                 b'revisions': [
-                    {b'type': b'changesetexplicit', b'nodes': batch,}
+                    {
+                        b'type': b'changesetexplicit',
+                        b'nodes': batch,
+                    }
                 ],
                 b'fields': fields,
                 b'haveparents': haveparents,
--- a/mercurial/extensions.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/extensions.py	Tue Jan 19 21:48:43 2021 +0530
@@ -457,7 +457,7 @@
 
 
 def afterloaded(extension, callback):
-    '''Run the specified function after a named extension is loaded.
+    """Run the specified function after a named extension is loaded.
 
     If the named extension is already loaded, the callback will be called
     immediately.
@@ -467,7 +467,7 @@
 
     The callback receives the named argument ``loaded``, which is a boolean
     indicating whether the dependent extension actually loaded.
-    '''
+    """
 
     if extension in _extensions:
         # Report loaded as False if the extension is disabled
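
As a small illustration of the ``afterloaded()`` contract documented above
(the extension name and callback are hypothetical), an extension could react
to whether ``mq`` ended up loading like this::

    from mercurial import extensions

    def _mqloaded(loaded):
        # ``loaded`` is True only if the 'mq' extension actually loaded.
        if loaded:
            pass  # adjust behaviour that depends on mq here

    def extsetup(ui):
        extensions.afterloaded(b'mq', _mqloaded)
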
@@ -500,12 +500,12 @@
 
 
 def bind(func, *args):
-    '''Partial function application
+    """Partial function application
 
-      Returns a new function that is the partial application of args and kwargs
-      to func.  For example,
+    Returns a new function that is the partial application of args and kwargs
+    to func.  For example,
 
-          f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
+        f(1, 2, bar=3) === bind(f, 1)(2, bar=3)"""
     assert callable(func)
 
     def closure(*a, **kw):
@@ -618,7 +618,7 @@
 
 
 def wrapfunction(container, funcname, wrapper):
-    '''Wrap the function named funcname in container
+    """Wrap the function named funcname in container
 
     Replace the funcname member in the given container with the specified
     wrapper. The container is typically a module, class, or instance.
@@ -649,7 +649,7 @@
     work. Since you cannot control what other extensions are loaded by
     your end users, you should play nicely with others by using the
     subclass trick.
-    '''
+    """
     assert callable(wrapper)
 
     origfn = getattr(container, funcname)
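
A hedged usage sketch for the ``wrapfunction()`` docstring above, typically
placed in an extension's ``extsetup()``; the wrapped target
(``discovery.checkheads``) and the wrapper name are illustrative only::

    from mercurial import discovery, extensions

    def _checkheads(orig, *args, **kwargs):
        # The original function is always passed in as the first argument;
        # extra behaviour would go here before delegating to it.
        return orig(*args, **kwargs)

    def extsetup(ui):
        extensions.wrapfunction(discovery, 'checkheads', _checkheads)
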
@@ -668,7 +668,7 @@
 
 
 def unwrapfunction(container, funcname, wrapper=None):
-    '''undo wrapfunction
+    """undo wrapfunction
 
     If wrappers is None, undo the last wrap. Otherwise removes the wrapper
     from the chain of wrappers.
@@ -676,7 +676,7 @@
     Return the removed wrapper.
     Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
     wrapper is not None but is not found in the wrapper chain.
-    '''
+    """
     chain = getwrapperchain(container, funcname)
     origfn = chain.pop()
     if wrapper is None:
@@ -689,13 +689,13 @@
 
 
 def getwrapperchain(container, funcname):
-    '''get a chain of wrappers of a function
+    """get a chain of wrappers of a function
 
     Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]
 
     The wrapper functions are the ones passed to wrapfunction, whose first
     argument is origfunc.
-    '''
+    """
     result = []
     fn = getattr(container, funcname)
     while fn:
@@ -744,11 +744,11 @@
 
 
 def _moduledoc(file):
-    '''return the top-level python documentation for the given file
+    """return the top-level python documentation for the given file
 
     Loosely inspired by pydoc.source_synopsis(), but rewritten to
     handle triple quotes and to return the whole text instead of just
-    the synopsis'''
+    the synopsis"""
     result = []
 
     line = file.readline()
@@ -810,7 +810,7 @@
     exts = {}
     for name, path in pycompat.iteritems(paths):
         doc = _disabledhelp(path)
-        if doc:
+        if doc and name != b'__index__':
             exts[name] = doc.splitlines()[0]
 
     return exts
@@ -883,8 +883,8 @@
 
 
 def disabledcmd(ui, cmd, strict=False):
-    '''find cmd from disabled extensions without importing.
-    returns (cmdname, extname, doc)'''
+    """find cmd from disabled extensions without importing.
+    returns (cmdname, extname, doc)"""
 
     paths = _disabledpaths()
     if not paths:
--- a/mercurial/exthelper.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/exthelper.py	Tue Jan 19 21:48:43 2021 +0530
@@ -53,29 +53,29 @@
         revsetpredicate = eh.revsetpredicate
         templatekeyword = eh.templatekeyword
 
-        @eh.command('mynewcommand',
-            [('r', 'rev', [], _('operate on these revisions'))],
-            _('-r REV...'),
+        @eh.command(b'mynewcommand',
+            [(b'r', b'rev', [], _(b'operate on these revisions'))],
+            _(b'-r REV...'),
             helpcategory=command.CATEGORY_XXX)
         def newcommand(ui, repo, *revs, **opts):
             # implementation goes here
 
-        eh.configitem('experimental', 'foo',
+        eh.configitem(b'experimental', b'foo',
             default=False,
         )
 
-        @eh.filesetpredicate('lfs()')
+        @eh.filesetpredicate(b'lfs()')
         def filesetbabar(mctx, x):
             return mctx.predicate(...)
 
-        @eh.revsetpredicate('hidden')
+        @eh.revsetpredicate(b'hidden')
         def revsetbabar(repo, subset, x):
-            args = revset.getargs(x, 0, 0, 'babar accept no argument')
-            return [r for r in subset if 'babar' in repo[r].description()]
+            args = revset.getargs(x, 0, 0, b'babar accept no argument')
+            return [r for r in subset if b'babar' in repo[r].description()]
 
-        @eh.templatekeyword('babar')
+        @eh.templatekeyword(b'babar')
         def kwbabar(ctx):
-            return 'babar'
+            return b'babar'
     """
 
     def __init__(self):
@@ -160,7 +160,7 @@
         The following operations belong here:
 
         - Changes depending on the status of other extensions. (if
-          extensions.find('mq'))
+          extensions.find(b'mq'))
         - Add a global option to all commands
         """
         knownexts = {}
@@ -203,7 +203,7 @@
 
             @eh.uisetup
             def setupbabar(ui):
-                print 'this is uisetup!'
+                print('this is uisetup!')
         """
         self._uicallables.append(call)
         return call
@@ -215,7 +215,7 @@
 
             @eh.uipopulate
             def setupfoo(ui):
-                print 'this is uipopulate!'
+                print('this is uipopulate!')
         """
         self._uipopulatecallables.append(call)
         return call
@@ -227,7 +227,7 @@
 
             @eh.extsetup
             def setupcelestine(ui):
-                print 'this is extsetup!'
+                print('this is extsetup!')
         """
         self._extcallables.append(call)
         return call
@@ -239,7 +239,7 @@
 
             @eh.reposetup
             def setupzephir(ui, repo):
-                print 'this is reposetup!'
+                print('this is reposetup!')
         """
         self._repocallables.append(call)
         return call
@@ -258,9 +258,9 @@
 
         example::
 
-            @eh.wrapcommand('summary')
+            @eh.wrapcommand(b'summary')
             def wrapsummary(orig, ui, repo, *args, **kwargs):
-                ui.note('Barry!')
+                ui.note(b'Barry!')
                 return orig(ui, repo, *args, **kwargs)
 
         The `opts` argument allows specifying a list of tuples for additional
@@ -298,9 +298,9 @@
 
         example::
 
-            @eh.function(discovery, 'checkheads')
+            @eh.function(discovery, b'checkheads')
             def wrapfunction(orig, *args, **kwargs):
-                ui.note('His head smashed in and his heart cut out')
+                ui.note(b'His head smashed in and his heart cut out')
                 return orig(*args, **kwargs)
         """
 
--- a/mercurial/fancyopts.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/fancyopts.py	Tue Jan 19 21:48:43 2021 +0530
@@ -380,7 +380,7 @@
         else:
 
             def abort(s):
-                raise error.Abort(
+                raise error.InputError(
                     _(b'invalid value %r for option %s, %s')
                     % (pycompat.maybebytestr(val), opt, s)
                 )
--- a/mercurial/filelog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/filelog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -139,6 +139,7 @@
         linkmapper,
         transaction,
         addrevisioncb=None,
+        duplicaterevisioncb=None,
         maybemissingparents=False,
     ):
         if maybemissingparents:
@@ -150,7 +151,11 @@
             )
 
         return self._revlog.addgroup(
-            deltas, linkmapper, transaction, addrevisioncb=addrevisioncb
+            deltas,
+            linkmapper,
+            transaction,
+            addrevisioncb=addrevisioncb,
+            duplicaterevisioncb=duplicaterevisioncb,
         )
 
     def getstrippoint(self, minlink):
--- a/mercurial/filemerge.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/filemerge.py	Tue Jan 19 21:48:43 2021 +0530
@@ -408,7 +408,7 @@
 
     ui = repo.ui
 
-    validkeep = [b'keep', b'keep-merge3']
+    validkeep = [b'keep', b'keep-merge3', b'keep-mergediff']
 
     # do we attempt to simplemerge first?
     try:
@@ -423,12 +423,17 @@
             )
 
     if premerge:
-        if premerge == b'keep-merge3':
+        mode = b'merge'
+        if premerge in {b'keep-merge3', b'keep-mergediff'}:
             if not labels:
                 labels = _defaultconflictlabels
             if len(labels) < 3:
                 labels.append(b'base')
-        r = simplemerge.simplemerge(ui, fcd, fca, fco, quiet=True, label=labels)
+            if premerge == b'keep-mergediff':
+                mode = b'mergediff'
+        r = simplemerge.simplemerge(
+            ui, fcd, fca, fco, quiet=True, label=labels, mode=mode
+        )
         if not r:
             ui.debug(b" premerge successful\n")
             return 0
@@ -532,6 +537,33 @@
     return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels)
 
 
+@internaltool(
+    b'mergediff',
+    fullmerge,
+    _(
+        b"warning: conflicts while merging %s! "
+        b"(edit, then use 'hg resolve --mark')\n"
+    ),
+    precheck=_mergecheck,
+)
+def _imerge_diff(
+    repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None
+):
+    """
+    Uses the internal non-interactive simple merge algorithm for merging
+    files. It will fail if there are any conflicts and leave markers in
+    the partially merged file. The marker will have two sections, one with the
+    content from one side of the merge, and one with a diff from the base
+    content to the content on the other side. (experimental)"""
+    if not labels:
+        labels = _defaultconflictlabels
+    if len(labels) < 3:
+        labels.append(b'base')
+    return _merge(
+        repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, b'mergediff'
+    )
+
+
 def _imergeauto(
     repo,
     mynode,
@@ -643,7 +675,7 @@
 
 
 def _describemerge(ui, repo, mynode, fcl, fcb, fco, env, toolpath, args):
-    tmpl = ui.config(b'ui', b'pre-merge-tool-output-template')
+    tmpl = ui.config(b'command-templates', b'pre-merge-tool-output')
     if not tmpl:
         return
 
@@ -831,7 +863,7 @@
     ca = fca.changectx()
 
     ui = repo.ui
-    template = ui.config(b'ui', b'mergemarkertemplate')
+    template = ui.config(b'command-templates', b'mergemarker')
     if tool is not None:
         template = _toolstr(ui, tool, b'mergemarkertemplate', template)
     template = templater.unquotestring(template)
@@ -1100,7 +1132,7 @@
             labeltool = None
             if markerstyle != b'basic':
                 # respect 'tool's mergemarkertemplate (which defaults to
-                # ui.mergemarkertemplate)
+                # command-templates.mergemarker)
                 labeltool = tool
             if internalmarkerstyle != b'basic' or markerstyle != b'basic':
                 premergelabels = _formatlabels(
@@ -1232,8 +1264,7 @@
 
 
 def loadinternalmerge(ui, extname, registrarobj):
-    """Load internal merge tool from specified registrarobj
-    """
+    """Load internal merge tool from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         fullname = b':' + name
         internals[fullname] = func
--- a/mercurial/fileset.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/fileset.py	Tue Jan 19 21:48:43 2021 +0530
@@ -122,8 +122,7 @@
 
 @predicate(b'modified()', callstatus=True, weight=_WEIGHT_STATUS)
 def modified(mctx, x):
-    """File that is modified according to :hg:`status`.
-    """
+    """File that is modified according to :hg:`status`."""
     # i18n: "modified" is a keyword
     getargs(x, 0, 0, _(b"modified takes no arguments"))
     s = set(mctx.status().modified)
@@ -132,8 +131,7 @@
 
 @predicate(b'added()', callstatus=True, weight=_WEIGHT_STATUS)
 def added(mctx, x):
-    """File that is added according to :hg:`status`.
-    """
+    """File that is added according to :hg:`status`."""
     # i18n: "added" is a keyword
     getargs(x, 0, 0, _(b"added takes no arguments"))
     s = set(mctx.status().added)
@@ -142,8 +140,7 @@
 
 @predicate(b'removed()', callstatus=True, weight=_WEIGHT_STATUS)
 def removed(mctx, x):
-    """File that is removed according to :hg:`status`.
-    """
+    """File that is removed according to :hg:`status`."""
     # i18n: "removed" is a keyword
     getargs(x, 0, 0, _(b"removed takes no arguments"))
     s = set(mctx.status().removed)
@@ -152,8 +149,7 @@
 
 @predicate(b'deleted()', callstatus=True, weight=_WEIGHT_STATUS)
 def deleted(mctx, x):
-    """Alias for ``missing()``.
-    """
+    """Alias for ``missing()``."""
     # i18n: "deleted" is a keyword
     getargs(x, 0, 0, _(b"deleted takes no arguments"))
     s = set(mctx.status().deleted)
@@ -162,8 +158,7 @@
 
 @predicate(b'missing()', callstatus=True, weight=_WEIGHT_STATUS)
 def missing(mctx, x):
-    """File that is missing according to :hg:`status`.
-    """
+    """File that is missing according to :hg:`status`."""
     # i18n: "missing" is a keyword
     getargs(x, 0, 0, _(b"missing takes no arguments"))
     s = set(mctx.status().deleted)
@@ -190,8 +185,7 @@
 
 @predicate(b'clean()', callstatus=True, weight=_WEIGHT_STATUS)
 def clean(mctx, x):
-    """File that is clean according to :hg:`status`.
-    """
+    """File that is clean according to :hg:`status`."""
     # i18n: "clean" is a keyword
     getargs(x, 0, 0, _(b"clean takes no arguments"))
     s = set(mctx.status().clean)
@@ -208,8 +202,7 @@
 
 @predicate(b'binary()', weight=_WEIGHT_READ_CONTENTS)
 def binary(mctx, x):
-    """File that appears to be binary (contains NUL bytes).
-    """
+    """File that appears to be binary (contains NUL bytes)."""
     # i18n: "binary" is a keyword
     getargs(x, 0, 0, _(b"binary takes no arguments"))
     return mctx.fpredicate(
@@ -219,8 +212,7 @@
 
 @predicate(b'exec()')
 def exec_(mctx, x):
-    """File that is marked as executable.
-    """
+    """File that is marked as executable."""
     # i18n: "exec" is a keyword
     getargs(x, 0, 0, _(b"exec takes no arguments"))
     ctx = mctx.ctx
@@ -229,8 +221,7 @@
 
 @predicate(b'symlink()')
 def symlink(mctx, x):
-    """File that is marked as a symlink.
-    """
+    """File that is marked as a symlink."""
     # i18n: "symlink" is a keyword
     getargs(x, 0, 0, _(b"symlink takes no arguments"))
     ctx = mctx.ctx
@@ -239,8 +230,7 @@
 
 @predicate(b'resolved()', weight=_WEIGHT_STATUS)
 def resolved(mctx, x):
-    """File that is marked resolved according to :hg:`resolve -l`.
-    """
+    """File that is marked resolved according to :hg:`resolve -l`."""
     # i18n: "resolved" is a keyword
     getargs(x, 0, 0, _(b"resolved takes no arguments"))
     if mctx.ctx.rev() is not None:
@@ -253,8 +243,7 @@
 
 @predicate(b'unresolved()', weight=_WEIGHT_STATUS)
 def unresolved(mctx, x):
-    """File that is marked unresolved according to :hg:`resolve -l`.
-    """
+    """File that is marked unresolved according to :hg:`resolve -l`."""
     # i18n: "unresolved" is a keyword
     getargs(x, 0, 0, _(b"unresolved takes no arguments"))
     if mctx.ctx.rev() is not None:
@@ -267,8 +256,7 @@
 
 @predicate(b'hgignore()', weight=_WEIGHT_STATUS)
 def hgignore(mctx, x):
-    """File that matches the active .hgignore pattern.
-    """
+    """File that matches the active .hgignore pattern."""
     # i18n: "hgignore" is a keyword
     getargs(x, 0, 0, _(b"hgignore takes no arguments"))
     return mctx.ctx.repo().dirstate._ignore
@@ -288,8 +276,7 @@
 
 @predicate(b'grep(regex)', weight=_WEIGHT_READ_CONTENTS)
 def grep(mctx, x):
-    """File contains the given regular expression.
-    """
+    """File contains the given regular expression."""
     try:
         # i18n: "grep" is a keyword
         r = re.compile(getstring(x, _(b"grep requires a pattern")))
@@ -414,8 +401,7 @@
 
 @predicate(b'copied()')
 def copied(mctx, x):
-    """File that is recorded as being copied.
-    """
+    """File that is recorded as being copied."""
     # i18n: "copied" is a keyword
     getargs(x, 0, 0, _(b"copied takes no arguments"))
 
@@ -476,8 +462,7 @@
 
 @predicate(b'subrepo([pattern])')
 def subrepo(mctx, x):
-    """Subrepositories whose paths match the given pattern.
-    """
+    """Subrepositories whose paths match the given pattern."""
     # i18n: "subrepo" is a keyword
     getargs(x, 0, 1, _(b"subrepo takes at most one argument"))
     ctx = mctx.ctx
@@ -628,8 +613,7 @@
 
 
 def loadpredicate(ui, extname, registrarobj):
-    """Load fileset predicates from specified registrarobj
-    """
+    """Load fileset predicates from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         symbols[name] = func
 
--- a/mercurial/hbisect.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hbisect.py	Tue Jan 19 21:48:43 2021 +0530
@@ -172,13 +172,13 @@
 def checkstate(state):
     """check we have both 'good' and 'bad' to define a range
 
-    Raise Abort exception otherwise."""
+    Raise StateError exception otherwise."""
     if state[b'good'] and state[b'bad']:
         return True
     if not state[b'good']:
-        raise error.Abort(_(b'cannot bisect (no known good revisions)'))
+        raise error.StateError(_(b'cannot bisect (no known good revisions)'))
     else:
-        raise error.Abort(_(b'cannot bisect (no known bad revisions)'))
+        raise error.StateError(_(b'cannot bisect (no known bad revisions)'))
 
 
 @contextlib.contextmanager
--- a/mercurial/help.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/help.py	Tue Jan 19 21:48:43 2021 +0530
@@ -698,10 +698,10 @@
     fullname=None,
     **opts
 ):
-    '''
+    """
     Generate the help for 'name' as unformatted restructured text. If
     'name' is None, describe the commands available.
-    '''
+    """
 
     opts = pycompat.byteskwargs(opts)
 
@@ -1064,7 +1064,7 @@
         if not rst:
             msg = _(b'no matches')
             hint = _(b"try 'hg help' for a list of topics")
-            raise error.Abort(msg, hint=hint)
+            raise error.InputError(msg, hint=hint)
     elif name and name != b'shortlist':
         queries = []
         if unknowncmd:
@@ -1095,7 +1095,7 @@
                     hintname = name
                 msg = _(b'no such help topic: %s') % formatname
                 hint = _(b"try 'hg help --keyword %s'") % hintname
-                raise error.Abort(msg, hint=hint)
+                raise error.InputError(msg, hint=hint)
     else:
         # program name
         if not ui.quiet:
@@ -1155,6 +1155,6 @@
     # to look for, or we could have simply failed to found "foo.bar"
     # because bar isn't a section of foo
     if section and not (blocks and name):
-        raise error.Abort(_(b"help section not found: %s") % fullname)
+        raise error.InputError(_(b"help section not found: %s") % fullname)
 
     return minirst.formatplain(blocks, textwidth)
--- a/mercurial/helptext/config.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/helptext/config.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -54,6 +54,7 @@
 
   On Unix, the following files are consulted:
 
+  - ``<repo>/.hg/hgrc-not-shared`` (per-repository)
   - ``<repo>/.hg/hgrc`` (per-repository)
   - ``$HOME/.hgrc`` (per-user)
   - ``${XDG_CONFIG_HOME:-$HOME/.config}/hg/hgrc`` (per-user)
@@ -67,6 +68,7 @@
 
   On Windows, the following files are consulted:
 
+  - ``<repo>/.hg/hgrc-not-shared`` (per-repository)
   - ``<repo>/.hg/hgrc`` (per-repository)
   - ``%USERPROFILE%\.hgrc`` (per-user)
   - ``%USERPROFILE%\Mercurial.ini`` (per-user)
@@ -89,6 +91,7 @@
 
   On Plan9, the following files are consulted:
 
+  - ``<repo>/.hg/hgrc-not-shared`` (per-repository)
   - ``<repo>/.hg/hgrc`` (per-repository)
   - ``$home/lib/hgrc`` (per-user)
   - ``<install-root>/lib/mercurial/hgrc`` (per-installation)
@@ -144,6 +147,15 @@
 merge tool configuration but packagers can also put other default configuration
 there.
 
+.. container:: verbose
+
+    On version 5.7 and later, if the share-safe functionality is enabled,
+    shares will also read the config file of their share source.
+    `<share-source/.hg/hgrc>` is read before reading `<repo/.hg/hgrc>`.
+
+    Configuration that should not be shared belongs in
+    `<repo/.hg/hgrc-not-shared>` instead.
+
 Syntax
 ======
 
@@ -882,6 +894,32 @@
 
     Enabled by default.
 
+``use-persistent-nodemap``
+    Enable or disable the "persistent-nodemap" feature, which improves
+    performance if the Rust extensions are available.
+
+    The "persistent-nodemap" persists the "node -> rev" mapping on disk,
+    removing the need to dynamically rebuild that mapping for each Mercurial
+    invocation. This significantly reduces the startup cost of various local
+    and server-side operations for larger repositories.
+
+    The performance-improving version of this feature is currently only
+    implemented in Rust, so installations of Mercurial compiled without the
+    Rust parts might actually suffer some slowdown. For this reason, such
+    versions will by default refuse to access such repositories. That
+    behavior can be controlled by configuration. Check
+    :hg:`help config.storage.revlog.persistent-nodemap.slow-path` for details.
+
+    Repositories with this on-disk format require Mercurial version 5.4 or
+    above.
+
+    Disabled by default.
+
+``use-share-safe``
+    Enable or disable the "share-safe" functionality, which enables shares
+    to read the requirements and configuration of their source repository.
+
+    Disabled by default.
+
 ``usestore``
     Enable or disable the "store" repository format which improves
     compatibility with systems that fold case or otherwise mangle
@@ -1509,12 +1547,13 @@
 
 ``premerge``
   Attempt to run internal non-interactive 3-way merge tool before
-  launching external tool.  Options are ``true``, ``false``, ``keep`` or
-  ``keep-merge3``. The ``keep`` option will leave markers in the file if the
-  premerge fails. The ``keep-merge3`` will do the same but include information
-  about the base of the merge in the marker (see internal :merge3 in
-  :hg:`help merge-tools`).
-  (default: True)
+  launching external tool.  Options are ``true``, ``false``, ``keep``,
+  ``keep-merge3``, or ``keep-mergediff`` (experimental). The ``keep`` option
+  will leave markers in the file if the premerge fails. The ``keep-merge3``
+  will do the same but include information about the base of the merge in the
+  marker (see internal :merge3 in :hg:`help merge-tools`). The
+  ``keep-mergediff`` option is similar but uses a different marker style
+  (see internal :mergediff in :hg:`help merge-tools`). (default: True)
 
 ``binary``
   This tool can merge binary files. (default: False, unless tool
@@ -1549,11 +1588,11 @@
   (default: ``basic``)
 
 ``mergemarkertemplate``
-  This setting can be used to override ``mergemarkertemplate`` from the ``[ui]``
-  section on a per-tool basis; this applies to the ``$label``-prefixed variables
-  and to the conflict markers that are generated if ``premerge`` is ``keep` or
-  ``keep-merge3``. See the corresponding variable in ``[ui]`` for more
-  information.
+  This setting can be used to override ``mergemarker`` from the
+  ``[command-templates]`` section on a per-tool basis; this applies to the
+  ``$label``-prefixed variables and to the conflict markers that are generated
+  if ``premerge`` is ``keep``, ``keep-merge3``, or ``keep-mergediff``. See the
+  corresponding variable in ``[command-templates]`` for more information.
 
 .. container:: windows
 
@@ -1899,6 +1938,56 @@
     Currently, only the rebase and absorb commands consider this configuration.
     (EXPERIMENTAL)
 
+``share``
+---------
+
+``safe-mismatch.source-safe``
+
+    Controls what happens when the shared repository does not use the
+    share-safe mechanism but its source repository does.
+
+    Possible values are `abort` (default), `allow`, `upgrade-abort` and
+    `upgrade-allow`.
+
+    ``abort``
+        Disallows running any command and aborts.
+    ``allow``
+        Respects the feature presence in the share source.
+    ``upgrade-abort``
+        Tries to upgrade the share to use share-safe; if it fails, aborts.
+    ``upgrade-allow``
+        Tries to upgrade the share to use share-safe; if it fails, continues
+        by respecting the share source setting.
+
+``safe-mismatch.source-not-safe``
+
+    Controls what happens when the shared repository uses the share-safe
+    mechanism but its source does not.
+
+    Possible values are `abort` (default), `allow`, `downgrade-abort` and
+    `downgrade-allow`.
+
+    ``abort``
+        Disallows running any command and aborts.
+    ``allow``
+        Respects the feature presence in the share source.
+    ``downgrade-abort``
+        Tries to downgrade the share to not use share-safe; if it fails,
+        aborts.
+    ``downgrade-allow``
+        Tries to downgrade the share to not use share-safe; if it fails,
+        continues by respecting the share source setting.
+
+
+``safe-mismatch.source-safe.warn``
+    Shows a warning on operations if the shared repository does not use
+    share-safe, but the source repository does.
+    (default: True)
+
+``safe-mismatch.source-not-safe.warn``
+    Shows a warning on operations if the shared repository uses share-safe,
+    but the source repository does not.
+    (default: True)
+
 ``storage``
 -----------
 
@@ -1913,6 +2002,28 @@
     Turning this option off can result in large increase of repository size for
     repository with many merges.
 
+``revlog.persistent-nodemap.mmap``
+    Whether to use the operating system's "memory mapping" feature (when
+    possible) to access the persistent nodemap data. This improves
+    performance and reduces memory pressure.
+
+    Defaults to True.
+
+    For details on the "persistent-nodemap" feature, see:
+    :hg:`help config format.use-persistent-nodemap`.
+
+``revlog.persistent-nodemap.slow-path``
+    Control the behavior of Mercurial when using a repository with a
+    "persistent" nodemap with an installation of Mercurial that lacks a fast
+    implementation of the feature:
+
+    ``allow``: Silently use the slower implementation to access the repository.
+    ``warn``: Warn, but use the slower implementation to access the repository.
+    ``abort``: Prevent access to such repositories. (This is the default)
+
+    For details on the "persistent-nodemap" feature, see:
+    :hg:`help config format.use-persistent-nodemap`.
+
 ``revlog.reuse-external-delta-parent``
     Control the order in which delta parents are considered when adding new
     revisions from an external source.
@@ -2334,8 +2445,7 @@
     UTF-8. (default: ISO-8859-1)
 
 ``graphnodetemplate``
-    The template used to print changeset nodes in an ASCII revision graph.
-    (default: ``{graphnode}``)
+    (DEPRECATED) Use ``command-templates.graphnode`` instead.
 
 ``ignore``
     A file to read per-user ignore patterns from. This file should be
@@ -2363,7 +2473,7 @@
     (default: 10000000)
 
 ``logtemplate``
-    Template string for commands that print changesets.
+    (DEPRECATED) Use ``command-templates.log`` instead.
 
 ``merge``
     The conflict resolution program to use during a manual merge.
@@ -2371,29 +2481,14 @@
     For configuring merge tools see the ``[merge-tools]`` section.
 
 ``mergemarkers``
-    Sets the merge conflict marker label styling. The ``detailed``
-    style uses the ``mergemarkertemplate`` setting to style the labels.
+    Sets the merge conflict marker label styling. The ``detailed`` style
+    uses the ``command-templates.mergemarker`` setting to style the labels.
     The ``basic`` style just uses 'local' and 'other' as the marker label.
     One of ``basic`` or ``detailed``.
     (default: ``basic``)
 
 ``mergemarkertemplate``
-    The template used to print the commit description next to each conflict
-    marker during merge conflicts. See :hg:`help templates` for the template
-    format.
-
-    Defaults to showing the hash, tags, branches, bookmarks, author, and
-    the first line of the commit description.
-
-    If you use non-ASCII characters in names for tags, branches, bookmarks,
-    authors, and/or commit descriptions, you must pay attention to encodings of
-    managed files. At template expansion, non-ASCII characters use the encoding
-    specified by the ``--encoding`` global option, ``HGENCODING`` or other
-    environment variables that govern your locale. If the encoding of the merge
-    markers is different from the encoding of the merged files,
-    serious problems may occur.
-
-    Can be overridden per-merge-tool, see the ``[merge-tools]`` section.
+    (DEPRECATED) Use ``command-templates.mergemarker`` instead.
 
 ``message-output``
     Where to write status and error messages. (default: ``stdio``)
@@ -2456,14 +2551,7 @@
       On Windows, this configuration option is ignored and the command aborted.
 
 ``pre-merge-tool-output-template``
-    A template that is printed before executing an external merge tool. This can
-    be used to print out additional context that might be useful to have during
-    the conflict resolution, such as the description of the various commits
-    involved or bookmarks/tags.
-
-    Additional information is available in the ``local`, ``base``, and ``other``
-    dicts. For example: ``{local.label}``, ``{base.name}``, or
-    ``{other.islink}``.
+    (DEPRECATED) Use ``command-templates.pre-merge-tool-output`` instead.
 
 ``quiet``
     Reduce the amount of output printed.
@@ -2561,6 +2649,55 @@
     Increase the amount of output printed. (default: False)
 
 
+``command-templates``
+---------------------
+
+Templates used for customizing the output of commands.
+
+``graphnode``
+    The template used to print changeset nodes in an ASCII revision graph.
+    (default: ``{graphnode}``)
+
+``log``
+    Template string for commands that print changesets.
+
+``mergemarker``
+    The template used to print the commit description next to each conflict
+    marker during merge conflicts. See :hg:`help templates` for the template
+    format.
+
+    Defaults to showing the hash, tags, branches, bookmarks, author, and
+    the first line of the commit description.
+
+    If you use non-ASCII characters in names for tags, branches, bookmarks,
+    authors, and/or commit descriptions, you must pay attention to encodings of
+    managed files. At template expansion, non-ASCII characters use the encoding
+    specified by the ``--encoding`` global option, ``HGENCODING`` or other
+    environment variables that govern your locale. If the encoding of the merge
+    markers is different from the encoding of the merged files,
+    serious problems may occur.
+
+    Can be overridden per-merge-tool, see the ``[merge-tools]`` section.
+
+``oneline-summary``
+    A template used by `hg rebase` and other commands for showing a one-line
+    summary of a commit. If the template configured here is longer than one
+    line, then only the first line is used.
+
+    The template can be overridden per command by defining a template in
+    `oneline-summary.<command>`, where `<command>` can be e.g. "rebase".
+
+``pre-merge-tool-output``
+    A template that is printed before executing an external merge tool. This can
+    be used to print out additional context that might be useful to have during
+    the conflict resolution, such as the description of the various commits
+    involved or bookmarks/tags.
+
+    Additional information is available in the ``local``, ``base``, and ``other``
+    dicts. For example: ``{local.label}``, ``{base.name}``, or
+    ``{other.islink}``.
+
+
 ``web``
 -------
 
--- a/mercurial/helptext/dates.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/helptext/dates.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -36,4 +36,4 @@
 - ``<DATE`` - at or before a given date/time
 - ``>DATE`` - on or after a given date/time
 - ``DATE to DATE`` - a date range, inclusive
-- ``-DAYS`` - within a given number of days of today
+- ``-DAYS`` - within a given number of days from today
--- a/mercurial/helptext/internals/extensions.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/helptext/internals/extensions.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -24,7 +24,8 @@
 
 To write your own extension, your python module can provide an optional dict
 named ``cmdtable`` with entries describing each command. A command should be
-registered to the ``cmdtable`` by ``@command`` decorator.
+registered to the ``cmdtable`` by the ``@command`` decorator. All string-like
+values must be of the ``bytes`` type, and are thus prefixed with ``b``.
 
 Example using ``@command`` decorator (requires Mercurial 1.9)::
 
@@ -39,10 +40,10 @@
         from mercurial import cmdutil
         command = cmdutil.command(cmdtable)
 
-    @command('print-parents',
-        [('s', 'short', None, _('print short form')),
-         ('l', 'long', None, _('print long form'))],
-        _('[options] node'))
+    @command(b'print-parents',
+        [(b's', b'short', None, _(b'print short form')),
+         (b'l', b'long', None, _(b'print long form'))],
+        _(b'[options] node'))
     def printparents(ui, repo, node, **opts):
         ...
 
@@ -84,7 +85,7 @@
 ``repo`` passed, then ``norepo=True`` should be passed to the ``@command``
 decorator::
 
-    @command('mycommand', [], norepo=True)
+    @command(b'mycommand', [], norepo=True)
     def mycommand(ui, **opts):
         ...
 
@@ -140,7 +141,7 @@
 ===========================
 
 Besides the ``ui`` methods, like ``ui.write(*msg)`` or
-``ui.prompt(msg, default="y")``, an extension can add help text for each
+``ui.prompt(msg, default=b"y")``, an extension can add help text for each
 of its commands and the extension itself.
 
 The module docstring will be used as help string when ``hg help extensionname``
@@ -175,7 +176,7 @@
 
 Be aware that ``uisetup`` is NOT the function to configure a ``ui`` instance.
 It's called only once per process, not per ``ui`` instance. Also, any changes
-to the ``ui`` may be discarded because the ``ui`` here temporarily loaded
+to the ``ui`` may be discarded because the ``ui`` here is a temporarily loaded
 local configuration. So, it's generally wrong to do `ui.setconfig()` in
 these callbacks. Notable exception is setting ``pre/post-<command>`` hooks
 and extending ``ui.__class__``.
@@ -248,7 +249,7 @@
         class echologui(ui.__class__):
             def log(self, service, *msg, **opts):
                 if msg:
-                    self.write('%s: %s\n' % (service, msg[0] % msg[1:]))
+                    self.write(b'%s: %s\n' % (service, msg[0] % msg[1:]))
                 super(echologui, self).log(service, *msg, **opts)
 
         ui.__class__ = echologui
@@ -259,7 +260,7 @@
 Some extensions must use hooks to do their work. These required hooks can
 be configured manually by the user by modifying the ``[hook]`` section of
 their hgrc, but they can also be configured automatically by calling the
-``ui.setconfig('hooks', ...)`` function in one of the setup functions
+``ui.setconfig(b'hooks', ...)`` function in one of the setup functions
 described above.
 
 The main difference between manually modifying the hooks section in the hgrc
@@ -273,21 +274,21 @@
 
     # Define hooks -- note that the actual function name is irrelevant.
     def preupdatehook(ui, repo, **kwargs):
-        ui.write("Pre-update hook triggered\n")
+        ui.write(b"Pre-update hook triggered\n")
 
     def updatehook(ui, repo, **kwargs):
-        ui.write("Update hook triggered\n")
+        ui.write(b"Update hook triggered\n")
 
     def uisetup(ui):
         # When pre-<cmd> and post-<cmd> hooks are configured by means of
         # the ui.setconfig() function, you must use the ui object passed
         # to uisetup or extsetup.
-        ui.setconfig("hooks", "pre-update.myextension", preupdatehook)
+        ui.setconfig(b"hooks", b"pre-update.myextension", preupdatehook)
 
     def reposetup(ui, repo):
         # Repository-specific hooks can be configured here. These include
         # the update hook.
-        ui.setconfig("hooks", "update.myextension", updatehook)
+        ui.setconfig(b"hooks", b"update.myextension", updatehook)
 
 Note how different hooks may need to be configured in different setup
 functions. In the example you can see that the ``update`` hook must be
@@ -301,7 +302,7 @@
 releases it's known to be compatible with. This helps us and users diagnose
 where problems are coming from::
 
-    testedwith = '2.0 2.0.1 2.1 2.1.1 2.1.2'
+    testedwith = b'2.0 2.0.1 2.1 2.1.1 2.1.2'
 
 Do not use the ``internal`` marker in third-party extensions; we will
 immediately drop all bug reports mentioning your extension if we catch you
@@ -311,16 +312,25 @@
 should report issues with the extension.  This link will be included in the
 error message if the extension produces errors::
 
-    buglink = 'https://bitbucket.org/USER/REPO/issues'
+    buglink = b'https://bitbucket.org/USER/REPO/issues'
 
 If an extension requires a minimum version of Mercurial, it can be declared
 with the ``minimumhgversion`` variable::
 
-    minimumhgversion = '4.6'
+    minimumhgversion = b'4.6'
 
 Older clients will print a warning that the extension requires a new version,
 instead of attempting to load it.
 
+The extension itself can be assigned a version value through one of two module
+attributes, and will be displayed in crash reports and :hg:`version -v`::
+
+  * ``__version__`` is a plain value
+  * ``getversion`` is a no-argument ``Callable`` that returns a value
+
+In both cases, the value must be either a byte string, or a list or tuple of
+numeric values which will be joined with ``.``.
+
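
For illustration, a top-level extension module could declare its version in
either of the two ways described above (a hypothetical sketch, not part of
this changeset)::

    # Option 1: a plain module attribute.
    __version__ = b'1.0.2'

    # Option 2: a callable, useful when the version is computed at runtime.
    def getversion():
        return (1, 0, 2)
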
 Wrap up: what belongs where?
 ============================
 
@@ -347,7 +357,7 @@
 extsetup
 --------
 
-* Changes depending on the status of other extensions. (``if extensions.find('mq')``)
+* Changes depending on the status of other extensions. (``if extensions.find(b'mq')``)
 * Add a global option to all commands
 * Extend revsets
 
--- a/mercurial/helptext/internals/requirements.txt	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/helptext/internals/requirements.txt	Tue Jan 19 21:48:43 2021 +0530
@@ -159,10 +159,6 @@
 exp-sharesafe
 =============
 
-NOTE: This requirement is for internal development only. The semantics are not
-frozed yet, the feature is experimental. It's not advised to use it for any
-production repository yet.
-
 Represents that the repository can be shared safely. Requirements and config of
 the source repository will be shared.
 Requirements are stored in ``.hg/store`` instead of directly in ``.hg/`` where
@@ -170,7 +166,7 @@
 in ``.hg/``.
 Shares read the ``.hg/hgrc`` of the source repository.
 
-Support for this requirement was added in Mercurial 5.6 (released
-November 2020). The requirement will only be present on repositories that have
-opted in to this format (by having ``format.exp-share-safe=true`` set when
+Support for this requirement was added in Mercurial 5.7 (released
+February 2021). The requirement will only be present on repositories that have
+opted in to this format (by having ``format.use-share-safe=true`` set when
 they were created).
--- a/mercurial/hg.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hg.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,7 +14,12 @@
 import stat
 
 from .i18n import _
-from .node import nullid
+from .node import (
+    hex,
+    nullhex,
+    nullid,
+    short,
+)
 from .pycompat import getattr
 
 from . import (
@@ -35,7 +40,6 @@
     merge as mergemod,
     mergestate as mergestatemod,
     narrowspec,
-    node,
     phases,
     pycompat,
     requirements,
@@ -108,7 +112,7 @@
                 raise error.Abort(_(b"dirstate branch not accessible"))
             branch = lrepo.dirstate.branch()
         if branch in branchmap:
-            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
+            revs.extend(hex(r) for r in reversed(branchmap[branch]))
             return True
         else:
             return False
@@ -243,7 +247,7 @@
 
 
 def defaultdest(source):
-    '''return default destination of clone if none is given
+    """return default destination of clone if none is given
 
     >>> defaultdest(b'foo')
     'foo'
@@ -257,7 +261,7 @@
     ''
     >>> defaultdest(b'http://example.org/foo/')
     'foo'
-    '''
+    """
     path = util.url(source).path
     if not path:
         return b''
@@ -333,7 +337,7 @@
 
 
 def _prependsourcehgrc(repo):
-    """ copies the source repo config and prepend it in current repo .hg/hgrc
+    """copies the source repo config and prepends it to current repo .hg/hgrc
     on unshare. This is only done if the share was performed using share safe
     method where we share config of source in shares"""
     srcvfs = vfsmod.vfs(repo.sharedpath)
@@ -443,10 +447,10 @@
 
 
 def copystore(ui, srcrepo, destpath):
-    '''copy files from store of srcrepo in destpath
+    """copy files from store of srcrepo in destpath
 
     returns destlock
-    '''
+    """
     destlock = None
     try:
         hardlink = None
@@ -517,7 +521,12 @@
         for r in rev:
             with srcpeer.commandexecutor() as e:
                 remoterevs.append(
-                    e.callcommand(b'lookup', {b'key': r,}).result()
+                    e.callcommand(
+                        b'lookup',
+                        {
+                            b'key': r,
+                        },
+                    ).result()
                 )
         revs = remoterevs
 
@@ -585,16 +594,15 @@
     return srcpeer, peer(ui, peeropts, dest)
 
 
-# Recomputing branch cache might be slow on big repos,
-# so just copy it
+# Recomputing caches is often slow on big repos, so copy them.
 def _copycache(srcrepo, dstcachedir, fname):
     """copy a cache from srcrepo to destcachedir (if it exists)"""
-    srcbranchcache = srcrepo.vfs.join(b'cache/%s' % fname)
-    dstbranchcache = os.path.join(dstcachedir, fname)
-    if os.path.exists(srcbranchcache):
+    srcfname = srcrepo.cachevfs.join(fname)
+    dstfname = os.path.join(dstcachedir, fname)
+    if os.path.exists(srcfname):
         if not os.path.exists(dstcachedir):
             os.mkdir(dstcachedir)
-        util.copyfile(srcbranchcache, dstbranchcache)
+        util.copyfile(srcfname, dstfname)
 
 
 def clone(
@@ -683,14 +691,14 @@
     source = util.urllocalpath(source)
 
     if not dest:
-        raise error.Abort(_(b"empty destination path is not valid"))
+        raise error.InputError(_(b"empty destination path is not valid"))
 
     destvfs = vfsmod.vfs(dest, expandpath=True)
     if destvfs.lexists():
         if not destvfs.isdir():
-            raise error.Abort(_(b"destination '%s' already exists") % dest)
+            raise error.InputError(_(b"destination '%s' already exists") % dest)
         elif destvfs.listdir():
-            raise error.Abort(_(b"destination '%s' is not empty") % dest)
+            raise error.InputError(_(b"destination '%s' is not empty") % dest)
 
     createopts = {}
     narrow = False
@@ -751,11 +759,14 @@
             try:
                 with srcpeer.commandexecutor() as e:
                     rootnode = e.callcommand(
-                        b'lookup', {b'key': b'0',}
+                        b'lookup',
+                        {
+                            b'key': b'0',
+                        },
                     ).result()
 
-                if rootnode != node.nullid:
-                    sharepath = os.path.join(sharepool, node.hex(rootnode))
+                if rootnode != nullid:
+                    sharepath = os.path.join(sharepool, hex(rootnode))
                 else:
                     ui.status(
                         _(
@@ -772,7 +783,7 @@
                 )
         elif sharenamemode == b'remote':
             sharepath = os.path.join(
-                sharepool, node.hex(hashutil.sha1(source).digest())
+                sharepool, hex(hashutil.sha1(source).digest())
             )
         else:
             raise error.Abort(
@@ -864,9 +875,7 @@
             # we need to re-init the repo after manually copying the data
             # into it
             destpeer = peer(srcrepo, peeropts, dest)
-            srcrepo.hook(
-                b'outgoing', source=b'clone', node=node.hex(node.nullid)
-            )
+            srcrepo.hook(b'outgoing', source=b'clone', node=nullhex)
         else:
             try:
                 # only pass ui when no srcrepo
@@ -900,7 +909,12 @@
                 for rev in revs:
                     with srcpeer.commandexecutor() as e:
                         remoterevs.append(
-                            e.callcommand(b'lookup', {b'key': rev,}).result()
+                            e.callcommand(
+                                b'lookup',
+                                {
+                                    b'key': rev,
+                                },
+                            ).result()
                         )
                 revs = remoterevs
 
@@ -974,7 +988,10 @@
                 if update is not True:
                     with srcpeer.commandexecutor() as e:
                         checkout = e.callcommand(
-                            b'lookup', {b'key': update,}
+                            b'lookup',
+                            {
+                                b'key': update,
+                            },
                         ).result()
 
                 uprev = None
@@ -996,15 +1013,19 @@
                                 pass
                 if uprev is None:
                     try:
-                        uprev = destrepo._bookmarks[b'@']
-                        update = b'@'
+                        if destrepo._activebookmark:
+                            uprev = destrepo.lookup(destrepo._activebookmark)
+                            update = destrepo._activebookmark
+                        else:
+                            uprev = destrepo._bookmarks[b'@']
+                            update = b'@'
                         bn = destrepo[uprev].branch()
                         if bn == b'default':
-                            status = _(b"updating to bookmark @\n")
+                            status = _(b"updating to bookmark %s\n") % update
                         else:
                             status = (
-                                _(b"updating to bookmark @ on branch %s\n") % bn
-                            )
+                                _(b"updating to bookmark %s on branch %s\n")
+                            ) % (update, bn)
                     except KeyError:
                         try:
                             uprev = destrepo.branchtip(b'default')
@@ -1017,6 +1038,14 @@
                 _update(destrepo, uprev)
                 if update in destrepo._bookmarks:
                     bookmarks.activate(destrepo, update)
+            if destlock is not None:
+                release(destlock)
+            # here is a tiny window where someone could end up writing to the
+            # repository before the caches are sure to be warm. This is "fine",
+            # as the only "bad" outcome would be some slowness. That potential
+            # slowness already affects readers.
+            with destrepo.lock():
+                destrepo.updatecaches(full=True)
     finally:
         release(srclock, destlock)
         if cleandir is not None:
@@ -1176,7 +1205,10 @@
 
 
 def merge(
-    ctx, force=False, remind=True, labels=None,
+    ctx,
+    force=False,
+    remind=True,
+    labels=None,
 ):
     """Branch merge with node, resolving changes. Return true if any
     unresolved conflicts."""
@@ -1360,7 +1392,7 @@
             except Exception:
                 repo.ui.warn(
                     _(b'.hgsubstate is corrupt in revision %s\n')
-                    % node.short(ctx.node())
+                    % short(ctx.node())
                 )
 
     return ret
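
The clone hunks earlier in this file change two behaviours: the post-clone
checkout now prefers an already-active bookmark over the '@' bookmark (with
the status message naming whichever bookmark was chosen), and the destination's
caches are warmed under a fresh lock once the clone itself is finished. A
condensed sketch of the new checkout-target resolution; the helper name is
mine, and the real code falls back further when branch 'default' is missing:

# Hypothetical helper mirroring the resolution order above.
def pick_update_target(destrepo):
    if destrepo._activebookmark:
        name = destrepo._activebookmark
        return destrepo.lookup(name), name             # active bookmark wins
    if b'@' in destrepo._bookmarks:
        return destrepo._bookmarks[b'@'], b'@'         # then the '@' bookmark
    return destrepo.branchtip(b'default'), b'default'  # then branch 'default'
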
--- a/mercurial/hgweb/__init__.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hgweb/__init__.py	Tue Jan 19 21:48:43 2021 +0530
@@ -27,7 +27,7 @@
 
 
 def hgweb(config, name=None, baseui=None):
-    '''create an hgweb wsgi object
+    """create an hgweb wsgi object
 
     config can be one of:
     - repo object (single repo view)
@@ -35,7 +35,7 @@
     - path to config file (multi-repo view)
     - dict of virtual:real pairs (multi-repo view)
     - list of virtual:real tuples (multi-repo view)
-    '''
+    """
 
     if isinstance(config, pycompat.unicode):
         raise error.ProgrammingError(
--- a/mercurial/hgweb/common.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hgweb/common.py	Tue Jan 19 21:48:43 2021 +0530
@@ -51,9 +51,9 @@
 
 
 def checkauthz(hgweb, req, op):
-    '''Check permission for operation based on request data (including
+    """Check permission for operation based on request data (including
     authentication info). Return if op allowed, else raise an ErrorResponse
-    exception.'''
+    exception."""
 
     user = req.remoteuser
 
--- a/mercurial/hgweb/hgweb_mod.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hgweb/hgweb_mod.py	Tue Jan 19 21:48:43 2021 +0530
@@ -86,12 +86,12 @@
 
 
 def makebreadcrumb(url, prefix=b''):
-    '''Return a 'URL breadcrumb' list
+    """Return a 'URL breadcrumb' list
 
     A 'URL breadcrumb' is a list of URL-name pairs,
     corresponding to each of the path items on a URL.
     This can be used to create path navigation entries.
-    '''
+    """
     if url.endswith(b'/'):
         url = url[:-1]
     if prefix:
--- a/mercurial/hgweb/request.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hgweb/request.py	Tue Jan 19 21:48:43 2021 +0530
@@ -622,8 +622,8 @@
 
 
 def wsgiapplication(app_maker):
-    '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
-    can and should now be used as a WSGI application.'''
+    """For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
+    can and should now be used as a WSGI application."""
     application = app_maker()
 
     def run_wsgi(env, respond):
--- a/mercurial/hgweb/webutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hgweb/webutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -491,11 +491,11 @@
 
 
 def changelistentry(web, ctx):
-    '''Obtain a dictionary to be used for entries in a changelist.
+    """Obtain a dictionary to be used for entries in a changelist.
 
     This function is called when producing items for the "entries" list passed
     to the "shortlog" and "changelog" templates.
-    '''
+    """
     repo = web.repo
     rev = ctx.rev()
     n = scmutil.binnode(ctx)
--- a/mercurial/hook.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/hook.py	Tue Jan 19 21:48:43 2021 +0530
@@ -30,14 +30,14 @@
 
 
 def pythonhook(ui, repo, htype, hname, funcname, args, throw):
-    '''call python hook. hook is callable object, looked up as
+    """call python hook. hook is callable object, looked up as
     name in python module. if callable returns "true", hook
     fails, else passes. if hook raises exception, treated as
     hook failure. exception propagates if throw is "true".
 
     reason for "true" meaning "hook failed" is so that
     unmodified commands (e.g. mercurial.commands.update) can
-    be run as hooks without wrappers to convert return values.'''
+    be run as hooks without wrappers to convert return values."""
 
     if callable(funcname):
         obj = funcname
@@ -157,6 +157,7 @@
             env[b'HG_PENDING'] = repo.root
     env[b'HG_HOOKTYPE'] = htype
     env[b'HG_HOOKNAME'] = name
+    env[b'HGPLAIN'] = b'1'
 
     for k, v in pycompat.iteritems(args):
         # transaction changes can accumulate MBs of data, so skip it
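
Exporting HGPLAIN=1 to external hooks means that any 'hg' commands a hook runs
see plain output: user aliases, custom templates, colour and localization are
disabled, so the hook can parse the output reliably. An illustrative external
changegroup hook (a standalone script, not Mercurial code; HG_NODE is the
first new changeset, as documented for changegroup hooks):

#!/usr/bin/env python3
# Illustrative external changegroup hook relying on the HGPLAIN export above.
import os
import subprocess
import sys

def main():
    assert os.environ.get("HGPLAIN") == "1"   # exported by mercurial/hook.py
    first = os.environ["HG_NODE"]             # first changeset of the group
    out = subprocess.check_output(
        ["hg", "log", "-r", first, "-T", "{node}\n"]
    )
    return 0 if out.strip() else 1

if __name__ == "__main__":
    sys.exit(main())
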
--- a/mercurial/httppeer.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/httppeer.py	Tue Jan 19 21:48:43 2021 +0530
@@ -766,7 +766,10 @@
                 % _(b', ').join(sorted(permissions))
             )
 
-        permission = {b'push': b'rw', b'pull': b'ro',}[permissions.pop()]
+        permission = {
+            b'push': b'rw',
+            b'pull': b'ro',
+        }[permissions.pop()]
 
         handler, resp = sendv2request(
             self._ui,
@@ -892,7 +895,7 @@
             return True
 
         # Other concepts.
-        if name in b'bundle2':
+        if name in (b'bundle2',):
             return True
 
         # Alias command-* to presence of command of that name.
@@ -942,7 +945,10 @@
 #    Integer priority for the service. If we could choose from multiple
 #    services, we choose the one with the highest priority.
 API_PEERS = {
-    wireprototypes.HTTP_WIREPROTO_V2: {b'init': httpv2peer, b'priority': 50,},
+    wireprototypes.HTTP_WIREPROTO_V2: {
+        b'init': httpv2peer,
+        b'priority': 50,
+    },
 }
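
The `name in (b'bundle2',)` change above is a small but real fix: `in` applied
to a bytes object is a substring test, so almost any short capability name
would previously have matched. A self-contained illustration:

# Substring containment (old form) versus tuple membership (new form).
assert b'le2' in b'bundle2'           # substring test: far too permissive
assert b'le2' not in (b'bundle2',)    # tuple membership: exact match only
assert b'bundle2' in (b'bundle2',)
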
 
 
--- a/mercurial/interfaces/dirstate.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/interfaces/dirstate.py	Tue Jan 19 21:48:43 2021 +0530
@@ -9,12 +9,12 @@
 
 class idirstate(interfaceutil.Interface):
     def __init__(opener, ui, root, validate, sparsematchfn):
-        '''Create a new dirstate object.
+        """Create a new dirstate object.
 
         opener is an open()-like callable that can be used to open the
         dirstate file; root is the root of the directory tracked by
         the dirstate.
-        '''
+        """
 
     # TODO: all these private methods and attributes should be made
     # public or removed from the interface.
@@ -31,17 +31,17 @@
 
     @contextlib.contextmanager
     def parentchange():
-        '''Context manager for handling dirstate parents.
+        """Context manager for handling dirstate parents.
 
         If an exception occurs in the scope of the context manager,
         the incoherent dirstate won't be written when wlock is
         released.
-        '''
+        """
 
     def pendingparentchange():
-        '''Returns true if the dirstate is in the middle of a set of changes
+        """Returns true if the dirstate is in the middle of a set of changes
         that modify the dirstate parent.
-        '''
+        """
 
     def hasdir(d):
         pass
@@ -50,18 +50,18 @@
         pass
 
     def getcwd():
-        '''Return the path from which a canonical path is calculated.
+        """Return the path from which a canonical path is calculated.
 
         This path should be used to resolve file patterns or to convert
         canonical paths back to file paths for display. It shouldn't be
         used to get real file paths. Use vfs functions instead.
-        '''
+        """
 
     def pathto(f, cwd=None):
         pass
 
     def __getitem__(key):
-        '''Return the current state of key (a filename) in the dirstate.
+        """Return the current state of key (a filename) in the dirstate.
 
         States are:
           n  normal
@@ -69,7 +69,7 @@
           r  marked for removal
           a  marked for addition
           ?  not tracked
-        '''
+        """
 
     def __contains__(key):
         """Check if bytestring `key` is known to the dirstate."""
@@ -111,11 +111,11 @@
         pass
 
     def invalidate():
-        '''Causes the next access to reread the dirstate.
+        """Causes the next access to reread the dirstate.
 
         This is different from localrepo.invalidatedirstate() because it always
         rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
-        check whether the dirstate has changed before rereading it.'''
+        check whether the dirstate has changed before rereading it."""
 
     def copy(source, dest):
         """Mark dest as a copy of source. Unmark dest if source is None."""
@@ -127,7 +127,7 @@
         pass
 
     def normal(f, parentfiledata=None):
-        '''Mark a file normal and clean.
+        """Mark a file normal and clean.
 
         parentfiledata: (mode, size, mtime) of the clean file
 
@@ -135,7 +135,7 @@
         size), as or close as possible from the point where we
         determined the file was clean, to limit the risk of the
         file having been changed by an external process between the
-        moment where the file was determined to be clean and now.'''
+        moment where the file was determined to be clean and now."""
         pass
 
     def normallookup(f):
@@ -157,7 +157,7 @@
         '''Drop a file from the dirstate'''
 
     def normalize(path, isknown=False, ignoremissing=False):
-        '''
+        """
         normalize the case of a pathname when on a casefolding filesystem
 
         isknown specifies whether the filename came from walking the
@@ -172,7 +172,7 @@
         - version of name already stored in the dirstate
         - version of name stored on disk
         - version provided via command arguments
-        '''
+        """
 
     def clear():
         pass
@@ -181,11 +181,11 @@
         pass
 
     def identity():
-        '''Return identity of dirstate it to detect changing in storage
+        """Return identity of dirstate it to detect changing in storage
 
         If identity of previous dirstate is equal to this, writing
         changes based on the former dirstate out can keep consistency.
-        '''
+        """
 
     def write(tr):
         pass
@@ -201,7 +201,7 @@
         """
 
     def walk(match, subrepos, unknown, ignored, full=True):
-        '''
+        """
         Walk recursively through the directory tree, finding all files
         matched by match.
 
@@ -210,10 +210,10 @@
         Return a dict mapping filename to stat-like object (either
         mercurial.osutil.stat instance or return value of os.stat()).
 
-        '''
+        """
 
     def status(match, subrepos, ignored, clean, unknown):
-        '''Determine the status of the working copy relative to the
+        """Determine the status of the working copy relative to the
         dirstate and return a pair of (unsure, status), where status is of type
         scmutil.status and:
 
@@ -227,12 +227,12 @@
           status.clean:
             files that have definitely not been modified since the
             dirstate was written
-        '''
+        """
 
     def matches(match):
-        '''
+        """
         return files in the dirstate (in whatever state) filtered by match
-        '''
+        """
 
     def savebackup(tr, backupname):
         '''Save current dirstate into backup file'''
--- a/mercurial/interfaces/repository.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/interfaces/repository.py	Tue Jan 19 21:48:43 2021 +0530
@@ -617,7 +617,7 @@
         """
 
     def revision(node, raw=False):
-        """"Obtain fulltext data for a node.
+        """Obtain fulltext data for a node.
 
         By default, any storage transformations are applied before the data
         is returned. If ``raw`` is True, non-raw storage transformations
@@ -628,8 +628,7 @@
         """
 
     def rawdata(node):
-        """Obtain raw data for a node.
-        """
+        """Obtain raw data for a node."""
 
     def read(node):
         """Resolve file fulltext data.
@@ -756,6 +755,7 @@
         linkmapper,
         transaction,
         addrevisioncb=None,
+        duplicaterevisioncb=None,
         maybemissingparents=False,
     ):
         """Process a series of deltas for storage.
@@ -1247,7 +1247,13 @@
         See the documentation for ``ifiledata`` for more.
         """
 
-    def addgroup(deltas, linkmapper, transaction, addrevisioncb=None):
+    def addgroup(
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        duplicaterevisioncb=None,
+    ):
         """Process a series of deltas for storage.
 
         See the documentation in ``ifilemutation`` for more.
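
addgroup() now also accepts a duplicaterevisioncb, symmetrical to
addrevisioncb but invoked for revisions the store already contains. A hedged
usage sketch; the wrapper name is mine and the callback arguments depend on
the concrete storage implementation, hence the generic *args:

# Hypothetical helper: ingest a changegroup and report new vs. known revisions.
def addgroup_with_counts(store, deltas, linkmapper, tr):
    added, duplicated = [], []
    store.addgroup(
        deltas,
        linkmapper,
        tr,
        addrevisioncb=lambda *args: added.append(args),
        duplicaterevisioncb=lambda *args: duplicated.append(args),
    )
    return len(added), len(duplicated)
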
--- a/mercurial/keepalive.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/keepalive.py	Tue Jan 19 21:48:43 2021 +0530
@@ -93,8 +93,8 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import hex
 from . import (
-    node,
     pycompat,
     urllibcompat,
     util,
@@ -112,7 +112,7 @@
     """
     The connection manager must be able to:
       * keep track of all existing
-      """
+    """
 
     def __init__(self):
         self._lock = threading.Lock()
@@ -675,8 +675,7 @@
 
 
 def wrapgetresponse(cls):
-    """Wraps getresponse in cls with a broken-pipe sane version.
-    """
+    """Wraps getresponse in cls with a broken-pipe sane version."""
 
     def safegetresponse(self):
         # In safesend() we might set the _broken_pipe_resp
@@ -724,7 +723,7 @@
     foo = fo.read()
     fo.close()
     m = md5(foo)
-    print(format % (b'normal urllib', node.hex(m.digest())))
+    print(format % (b'normal urllib', hex(m.digest())))
 
     # now install the keepalive handler and try again
     opener = urlreq.buildopener(HTTPHandler())
@@ -734,7 +733,7 @@
     foo = fo.read()
     fo.close()
     m = md5(foo)
-    print(format % (b'keepalive read', node.hex(m.digest())))
+    print(format % (b'keepalive read', hex(m.digest())))
 
     fo = urlreq.urlopen(url)
     foo = b''
@@ -746,7 +745,7 @@
             break
     fo.close()
     m = md5(foo)
-    print(format % (b'keepalive readline', node.hex(m.digest())))
+    print(format % (b'keepalive readline', hex(m.digest())))
 
 
 def comp(N, url):
--- a/mercurial/localrepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/localrepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -31,6 +31,7 @@
     bookmarks,
     branchmap,
     bundle2,
+    bundlecaches,
     changegroup,
     color,
     commit,
@@ -58,6 +59,7 @@
     rcutil,
     repoview,
     requirements as requirementsmod,
+    revlog,
     revset,
     revsetlang,
     scmutil,
@@ -95,8 +97,7 @@
 
 
 class _basefilecache(scmutil.filecache):
-    """All filecache usage on repo are done for logic that should be unfiltered
-    """
+    """All filecache usage on repo are done for logic that should be unfiltered"""
 
     def __get__(self, repo, type=None):
         if repo is None:
@@ -299,7 +300,7 @@
         return self._caps
 
     def clonebundles(self):
-        return self._repo.tryread(b'clonebundles.manifest')
+        return self._repo.tryread(bundlecaches.CB_MANIFEST_FILE)
 
     def debugwireargs(self, one, two, three=None, four=None, five=None):
         """Used to test argument passing over the wire"""
@@ -399,8 +400,8 @@
 
 @interfaceutil.implementer(repository.ipeerlegacycommands)
 class locallegacypeer(localpeer):
-    '''peer extension which implements legacy methods too; used for tests with
-    restricted capabilities'''
+    """peer extension which implements legacy methods too; used for tests with
+    restricted capabilities"""
 
     def __init__(self, repo):
         super(locallegacypeer, self).__init__(repo, caps=legacycaps)
@@ -439,7 +440,7 @@
 
 
 def _getsharedvfs(hgvfs, requirements):
-    """ returns the vfs object pointing to root of shared source
+    """returns the vfs object pointing to root of shared source
     repo for a shared repository
 
     hgvfs is vfs pointing at .hg/ of current repo (shared one)
@@ -464,7 +465,7 @@
 
 
 def _readrequires(vfs, allowmissing):
-    """ reads the require file present at root of this vfs
+    """reads the require file present at root of this vfs
     and return a set of requirements
 
     If allowmissing is True, we suppress ENOENT if raised"""
@@ -549,22 +550,114 @@
         requirementsmod.SHARED_REQUIREMENT in requirements
         or requirementsmod.RELATIVE_SHARED_REQUIREMENT in requirements
     )
+    storevfs = None
     if shared:
+        # This is a shared repo
         sharedvfs = _getsharedvfs(hgvfs, requirements)
+        storevfs = vfsmod.vfs(sharedvfs.join(b'store'))
+    else:
+        storevfs = vfsmod.vfs(hgvfs.join(b'store'))
 
     # if .hg/requires contains the sharesafe requirement, it means
     # there exists a `.hg/store/requires` too and we should read it
     # NOTE: presence of SHARESAFE_REQUIREMENT imply that store requirement
     # is present. We never write SHARESAFE_REQUIREMENT for a repo if store
     # is not present, refer checkrequirementscompat() for that
+    #
+    # However, if SHARESAFE_REQUIREMENT is not present, it means that the
+    # repository was shared the old way. We check the share source .hg/requires
+    # for SHARESAFE_REQUIREMENT to detect whether the current repository needs
+    # to be reshared
     if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
-        if shared:
-            # This is a shared repo
-            storevfs = vfsmod.vfs(sharedvfs.join(b'store'))
+
+        if (
+            shared
+            and requirementsmod.SHARESAFE_REQUIREMENT
+            not in _readrequires(sharedvfs, True)
+        ):
+            mismatch_warn = ui.configbool(
+                b'share', b'safe-mismatch.source-not-safe.warn'
+            )
+            mismatch_config = ui.config(
+                b'share', b'safe-mismatch.source-not-safe'
+            )
+            if mismatch_config in (
+                b'downgrade-allow',
+                b'allow',
+                b'downgrade-abort',
+            ):
+                # prevent cyclic import localrepo -> upgrade -> localrepo
+                from . import upgrade
+
+                upgrade.downgrade_share_to_non_safe(
+                    ui,
+                    hgvfs,
+                    sharedvfs,
+                    requirements,
+                    mismatch_config,
+                    mismatch_warn,
+                )
+            elif mismatch_config == b'abort':
+                raise error.Abort(
+                    _(
+                        b"share source does not support exp-sharesafe requirement"
+                    )
+                )
+            else:
+                hint = _(
+                    b"run `hg help config.share.safe-mismatch.source-not-safe`"
+                )
+                raise error.Abort(
+                    _(
+                        b"share-safe mismatch with source.\nUnrecognized"
+                        b" value '%s' of `share.safe-mismatch.source-not-safe`"
+                        b" set."
+                    )
+                    % mismatch_config,
+                    hint=hint,
+                )
         else:
-            storevfs = vfsmod.vfs(hgvfs.join(b'store'))
-
-        requirements |= _readrequires(storevfs, False)
+            requirements |= _readrequires(storevfs, False)
+    elif shared:
+        sourcerequires = _readrequires(sharedvfs, False)
+        if requirementsmod.SHARESAFE_REQUIREMENT in sourcerequires:
+            mismatch_config = ui.config(b'share', b'safe-mismatch.source-safe')
+            mismatch_warn = ui.configbool(
+                b'share', b'safe-mismatch.source-safe.warn'
+            )
+            if mismatch_config in (
+                b'upgrade-allow',
+                b'allow',
+                b'upgrade-abort',
+            ):
+                # prevent cyclic import localrepo -> upgrade -> localrepo
+                from . import upgrade
+
+                upgrade.upgrade_share_to_safe(
+                    ui,
+                    hgvfs,
+                    storevfs,
+                    requirements,
+                    mismatch_config,
+                    mismatch_warn,
+                )
+            elif mismatch_config == b'abort':
+                raise error.Abort(
+                    _(
+                        b'version mismatch: source uses share-safe'
+                        b' functionality while the current share does not'
+                    )
+                )
+            else:
+                hint = _(b"run `hg help config.share.safe-mismatch.source-safe`")
+                raise error.Abort(
+                    _(
+                        b"share-safe mismatch with source.\nUnrecognized"
+                        b" value '%s' of `share.safe-mismatch.source-safe` set."
+                    )
+                    % mismatch_config,
+                    hint=hint,
+                )
 
     # The .hg/hgrc file may load extensions or contain config options
     # that influence repository construction. Attempt to load it and
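
The hunk above introduces four '[share]' settings, 'safe-mismatch.source-not-safe',
'safe-mismatch.source-safe' and their '.warn' companions, which decide what
happens when a share and its source disagree about share-safe. A condensed
sketch of how the source-not-safe values are interpreted; the helper is
hypothetical (the real code calls into mercurial.upgrade and raises
error.Abort), and the source-safe side is symmetrical with 'upgrade-*' values:

# Hypothetical decision helper for share.safe-mismatch.source-not-safe.
def source_not_safe_action(value):
    if value in (b'downgrade-allow', b'allow', b'downgrade-abort'):
        return b'downgrade-share'    # upgrade.downgrade_share_to_non_safe(...)
    if value == b'abort':
        return b'abort'              # refuse to work on this share
    return b'abort-unknown-value'    # unrecognized value: abort with a help hint
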
@@ -715,18 +808,28 @@
     if not rcutil.use_repo_hgrc():
         return False
 
+    ret = False
     # first load config from shared source if we has to
     if requirementsmod.SHARESAFE_REQUIREMENT in requirements and sharedvfs:
         try:
             ui.readconfig(sharedvfs.join(b'hgrc'), root=sharedvfs.base)
+            ret = True
         except IOError:
             pass
 
     try:
         ui.readconfig(hgvfs.join(b'hgrc'), root=wdirvfs.base)
-        return True
+        ret = True
     except IOError:
-        return False
+        pass
+
+    try:
+        ui.readconfig(hgvfs.join(b'hgrc-not-shared'), root=wdirvfs.base)
+        ret = True
+    except IOError:
+        pass
+
+    return ret
 
 
 def afterhgrcload(ui, wdirvfs, hgvfs, requirements):
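
_loadhgrc() now accumulates configuration from up to three files and reports
success if any of them could be read; the new '.hg/hgrc-not-shared' carries
per-share settings that are never taken from the share source. A small sketch
of the lookup order, using a hypothetical helper and argument names:

# Hypothetical helper listing the files read above, in order.
def hgrc_candidates(sharedvfs, hgvfs, share_safe):
    paths = []
    if share_safe and sharedvfs is not None:
        paths.append(sharedvfs.join(b'hgrc'))      # config of the share source
    paths.append(hgvfs.join(b'hgrc'))              # regular per-repository config
    paths.append(hgvfs.join(b'hgrc-not-shared'))   # local-only, never shared
    return paths
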
@@ -983,11 +1086,42 @@
     if ui.configbool(b'experimental', b'rust.index'):
         options[b'rust.index'] = True
     if requirementsmod.NODEMAP_REQUIREMENT in requirements:
+        slow_path = ui.config(
+            b'storage', b'revlog.persistent-nodemap.slow-path'
+        )
+        if slow_path not in (b'allow', b'warn', b'abort'):
+            default = ui.config_default(
+                b'storage', b'revlog.persistent-nodemap.slow-path'
+            )
+            msg = _(
+                b'unknown value for config '
+                b'"storage.revlog.persistent-nodemap.slow-path": "%s"\n'
+            )
+            ui.warn(msg % slow_path)
+            if not ui.quiet:
+                ui.warn(_(b'falling back to default value: %s\n') % default)
+            slow_path = default
+
+        msg = _(
+            b"accessing `persistent-nodemap` repository without associated "
+            b"fast implementation."
+        )
+        hint = _(
+            b"check `hg help config.format.use-persistent-nodemap` "
+            b"for details"
+        )
+        if not revlog.HAS_FAST_PERSISTENT_NODEMAP:
+            if slow_path == b'warn':
+                msg = b"warning: " + msg + b'\n'
+                ui.warn(msg)
+                if not ui.quiet:
+                    hint = b'(' + hint + b')\n'
+                    ui.warn(hint)
+            if slow_path == b'abort':
+                raise error.Abort(msg, hint=hint)
         options[b'persistent-nodemap'] = True
-    if ui.configbool(b'storage', b'revlog.nodemap.mmap'):
+    if ui.configbool(b'storage', b'revlog.persistent-nodemap.mmap'):
         options[b'persistent-nodemap.mmap'] = True
-    epnm = ui.config(b'storage', b'revlog.nodemap.mode')
-    options[b'persistent-nodemap.mode'] = epnm
     if ui.configbool(b'devel', b'persistent-nodemap'):
         options[b'devel-force-nodemap'] = True
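
'storage.revlog.persistent-nodemap.slow-path' (introduced above, together with
the rename of 'revlog.nodemap.mmap' to 'revlog.persistent-nodemap.mmap')
controls what happens when a persistent-nodemap repository is opened without
the fast implementation. A minimal sketch of the validation and dispatch,
using a hypothetical helper and a plain RuntimeError in place of error.Abort:

# Hypothetical helper condensing the hunk above.
def check_slow_path(value, default, has_fast_impl):
    if value not in (b'allow', b'warn', b'abort'):
        value = default   # the real code also warns about the unknown value
    if has_fast_impl or value == b'allow':
        return
    if value == b'warn':
        print('warning: no fast persistent-nodemap implementation available')
    elif value == b'abort':
        raise RuntimeError('no fast persistent-nodemap implementation available')
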
 
@@ -1720,11 +1854,7 @@
             return context.workingctx(self)
 
     def __contains__(self, changeid):
-        """True if the given changeid exists
-
-        error.AmbiguousPrefixLookupError is raised if an ambiguous node
-        specified.
-        """
+        """True if the given changeid exists"""
         try:
             self[changeid]
             return True
@@ -1745,7 +1875,7 @@
         return iter(self.changelog)
 
     def revs(self, expr, *args):
-        '''Find revisions matching a revset.
+        """Find revisions matching a revset.
 
         The revset is specified as a string ``expr`` that may contain
         %-formatting to escape certain types. See ``revsetlang.formatspec``.
@@ -1756,30 +1886,30 @@
 
         Returns a smartset.abstractsmartset, which is a list-like interface
         that contains integer revisions.
-        '''
+        """
         tree = revsetlang.spectree(expr, *args)
         return revset.makematcher(tree)(self)
 
     def set(self, expr, *args):
-        '''Find revisions matching a revset and emit changectx instances.
+        """Find revisions matching a revset and emit changectx instances.
 
         This is a convenience wrapper around ``revs()`` that iterates the
         result and is a generator of changectx instances.
 
         Revset aliases from the configuration are not expanded. To expand
         user aliases, consider calling ``scmutil.revrange()``.
-        '''
+        """
         for r in self.revs(expr, *args):
             yield self[r]
 
     def anyrevs(self, specs, user=False, localalias=None):
-        '''Find revisions matching one of the given revsets.
+        """Find revisions matching one of the given revsets.
 
         Revset aliases from the configuration are not expanded by default. To
         expand user aliases, specify ``user=True``. To provide some local
         definitions overriding user aliases, set ``localalias`` to
         ``{name: definitionstring}``.
-        '''
+        """
         if specs == [b'null']:
             return revset.baseset([nullrev])
         if specs == [b'.']:
@@ -1811,8 +1941,8 @@
 
     @filteredpropertycache
     def _tagscache(self):
-        '''Returns a tagscache object that contains various tags related
-        caches.'''
+        """Returns a tagscache object that contains various tags related
+        caches."""
 
         # This simplifies its cache management by having one decorated
         # function (this one) and the rest simply fetch things from it.
@@ -1850,12 +1980,12 @@
         return t
 
     def _findtags(self):
-        '''Do the hard work of finding tags.  Return a pair of dicts
+        """Do the hard work of finding tags.  Return a pair of dicts
         (tags, tagtypes) where tags maps tag name to node, and tagtypes
         maps tag name to a string like \'global\' or \'local\'.
         Subclasses or extensions are free to add their own tags, but
         should be aware that the returned dicts will be retained for the
-        duration of the localrepo object.'''
+        duration of the localrepo object."""
 
         # XXX what tagtype should subclasses/extensions use?  Currently
         # mq and bookmarks add tags, but do not set the tagtype at all.
@@ -1886,13 +2016,13 @@
         return (tags, tagtypes)
 
     def tagtype(self, tagname):
-        '''
+        """
         return the type of the given tag. result can be:
 
         'local'  : a local tag
         'global' : a global tag
         None     : tag does not exist
-        '''
+        """
 
         return self._tagscache.tagtypes.get(tagname)
 
@@ -1922,8 +2052,8 @@
         return self._bookmarks.names(node)
 
     def branchmap(self):
-        '''returns a dictionary {branch: [branchheads]} with branchheads
-        ordered by increasing revision number'''
+        """returns a dictionary {branch: [branchheads]} with branchheads
+        ordered by increasing revision number"""
         return self._branchcaches[self]
 
     @unfilteredmethod
@@ -1933,13 +2063,13 @@
         return self._revbranchcache
 
     def branchtip(self, branch, ignoremissing=False):
-        '''return the tip node for a given branch
+        """return the tip node for a given branch
 
         If ignoremissing is True, then this method will not raise an error.
         This is helpful for callers that only expect None for a missing branch
         (e.g. namespace).
 
-        '''
+        """
         try:
             return self.branchmap().branchtip(branch)
         except KeyError:
@@ -2003,7 +2133,7 @@
 
     def filectx(self, path, changeid=None, fileid=None, changectx=None):
         """changeid must be a changeset revision, if specified.
-           fileid can be a file revision or node."""
+        fileid can be a file revision or node."""
         return context.filectx(
             self, path, changeid, fileid, changectx=changectx
         )
@@ -2220,7 +2350,13 @@
                 accountclosed = singleheadsub.get(
                     b"account-closed-heads", False
                 )
-                scmutil.enforcesinglehead(repo, tr2, desc, accountclosed)
+                if singleheadsub.get(b"public-changes-only", False):
+                    filtername = b"immutable"
+                else:
+                    filtername = b"visible"
+                scmutil.enforcesinglehead(
+                    repo, tr2, desc, accountclosed, filtername
+                )
             if hook.hashook(repo.ui, b'pretxnclose-bookmark'):
                 for name, (old, new) in sorted(
                     tr.changes[b'bookmarks'].items()
@@ -2300,8 +2436,7 @@
         tr.addfinalize(b'flush-fncache', self.store.write)
 
         def txnclosehook(tr2):
-            """To be run if transaction is successful, will schedule a hook run
-            """
+            """To be run if transaction is successful, will schedule a hook run"""
             # Don't reference tr2 in hook() so we don't hold a reference.
             # This reduces memory consumption when there are multiple
             # transactions per lock. This can likely go away if issue5045
@@ -2351,8 +2486,7 @@
         tr.addpostclose(b'-warm-cache', self._buildcacheupdater(tr))
 
         def txnaborthook(tr2):
-            """To be run if transaction is aborted
-            """
+            """To be run if transaction is aborted"""
             reporef().hook(
                 b'txnabort', throw=False, **pycompat.strkwargs(tr2.hookargs)
             )
@@ -2557,7 +2691,7 @@
             return
 
         if tr is None or tr.changes[b'origrepolen'] < len(self):
-            # accessing the 'ser ved' branchmap should refresh all the others,
+            # accessing the 'served' branchmap should refresh all the others,
             self.ui.debug(b'updating the branch cache\n')
             self.filtered(b'served').branchmap()
             self.filtered(b'served.hidden').branchmap()
@@ -2609,14 +2743,14 @@
         self._quick_access_changeid_invalidate()
 
     def invalidatedirstate(self):
-        '''Invalidates the dirstate, causing the next call to dirstate
+        """Invalidates the dirstate, causing the next call to dirstate
         to check if it was modified since the last time it was read,
         rereading it if it has.
 
         This is different to dirstate.invalidate() that it doesn't always
         rereads the dirstate. Use dirstate.invalidate() if you want to
         explicitly read the dirstate again (i.e. restoring it to a previous
-        known good state).'''
+        known good state)."""
         if hasunfilteredcache(self, 'dirstate'):
             for k in self.dirstate._filecache:
                 try:
@@ -2626,13 +2760,13 @@
             delattr(self.unfiltered(), 'dirstate')
 
     def invalidate(self, clearfilecache=False):
-        '''Invalidates both store and non-store parts other than dirstate
+        """Invalidates both store and non-store parts other than dirstate
 
         If a transaction is running, invalidation of store is omitted,
         because discarding in-memory changes might cause inconsistency
         (e.g. incomplete fncache causes unintentional failure, but
         redundant one doesn't).
-        '''
+        """
         unfiltered = self.unfiltered()  # all file caches are stored unfiltered
         for k in list(self._filecache.keys()):
             # dirstate is invalidated separately in invalidatedirstate()
@@ -2662,8 +2796,8 @@
             self.store.invalidatecaches()
 
     def invalidateall(self):
-        '''Fully invalidates both store and non-store parts, causing the
-        subsequent operation to reread any outside changes.'''
+        """Fully invalidates both store and non-store parts, causing the
+        subsequent operation to reread any outside changes."""
         # extension should hook this to invalidate its caches
         self.invalidate()
         self.invalidatedirstate()
@@ -2678,7 +2812,13 @@
             ce.refresh()
 
     def _lock(
-        self, vfs, lockname, wait, releasefn, acquirefn, desc,
+        self,
+        vfs,
+        lockname,
+        wait,
+        releasefn,
+        acquirefn,
+        desc,
     ):
         timeout = 0
         warntimeout = 0
@@ -2715,12 +2855,12 @@
             callback(True)
 
     def lock(self, wait=True):
-        '''Lock the repository store (.hg/store) and return a weak reference
+        """Lock the repository store (.hg/store) and return a weak reference
         to the lock. Use this before modifying the store (e.g. committing or
         stripping). If you are opening a transaction, get a lock as well.)
 
         If both 'lock' and 'wlock' must be acquired, ensure you always acquires
-        'wlock' first to avoid a dead-lock hazard.'''
+        'wlock' first to avoid a dead-lock hazard."""
         l = self._currentlock(self._lockref)
         if l is not None:
             l.lock()
@@ -2738,13 +2878,13 @@
         return l
 
     def wlock(self, wait=True):
-        '''Lock the non-store parts of the repository (everything under
+        """Lock the non-store parts of the repository (everything under
         .hg except .hg/store) and return a weak reference to the lock.
 
         Use this before modifying files in .hg.
 
         If both 'lock' and 'wlock' must be acquired, ensure you always acquires
-        'wlock' first to avoid a dead-lock hazard.'''
+        'wlock' first to avoid a dead-lock hazard."""
         l = self._wlockref and self._wlockref()
         if l is not None and l.held:
             l.lock()
@@ -2834,7 +2974,7 @@
             extra = {}
 
         def fail(f, msg):
-            raise error.Abort(b'%s: %s' % (f, msg))
+            raise error.InputError(b'%s: %s' % (f, msg))
 
         if not match:
             match = matchmod.always()
@@ -2952,7 +3092,7 @@
 
     @unfilteredmethod
     def destroying(self):
-        '''Inform the repository that nodes are about to be destroyed.
+        """Inform the repository that nodes are about to be destroyed.
         Intended for use by strip and rollback, so there's a common
         place for anything that has to be done before destroying history.
 
@@ -2961,7 +3101,7 @@
         destroyed is imminent, the repo will be invalidated causing those
         changes to stay in memory (waiting for the next unlock), or vanish
         completely.
-        '''
+        """
         # When using the same lock to commit and strip, the phasecache is left
         # dirty after committing. Then when we strip, the repo is invalidated,
         # causing those changes to disappear.
@@ -2970,10 +3110,10 @@
 
     @unfilteredmethod
     def destroyed(self):
-        '''Inform the repository that nodes have been destroyed.
+        """Inform the repository that nodes have been destroyed.
         Intended for use by strip and rollback, so there's a common
         place for anything that has to be done after destroying history.
-        '''
+        """
         # When one tries to:
         # 1) destroy nodes thus calling this method (e.g. strip)
         # 2) use phasecache somewhere (e.g. commit)
@@ -3056,13 +3196,13 @@
         return sorted(heads, key=self.changelog.rev, reverse=True)
 
     def branchheads(self, branch=None, start=None, closed=False):
-        '''return a (possibly filtered) list of heads for the given branch
+        """return a (possibly filtered) list of heads for the given branch
 
         Heads are returned in topological order, from newest to oldest.
         If branch is None, use the dirstate branch.
         If start is not None, return only heads reachable from start.
         If closed is True, return heads that are marked as closed as well.
-        '''
+        """
         if branch is None:
             branch = self[None].branch()
         branches = self.branchmap()
@@ -3334,17 +3474,17 @@
 
     # if share-safe is enabled, let's create the new repository with the new
     # requirement
-    if ui.configbool(b'format', b'exp-share-safe'):
+    if ui.configbool(b'format', b'use-share-safe'):
         requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
 
     return requirements
 
 
 def checkrequirementscompat(ui, requirements):
-    """ Checks compatibility of repository requirements enabled and disabled.
+    """Checks compatibility of repository requirements enabled and disabled.
 
     Returns a set of requirements which needs to be dropped because dependend
-    requirements are not enabled. Also warns users about it """
+    requirements are not enabled. Also warns users about it"""
 
     dropped = set()
 
@@ -3373,7 +3513,7 @@
         if requirementsmod.SHARESAFE_REQUIREMENT in requirements:
             ui.warn(
                 _(
-                    b"ignoring enabled 'format.exp-share-safe' config because "
+                    b"ignoring enabled 'format.use-share-safe' config because "
                     b"it is incompatible with disabled 'format.usestore'"
                     b" config\n"
                 )
--- a/mercurial/lock.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/lock.py	Tue Jan 19 21:48:43 2021 +0530
@@ -175,14 +175,14 @@
 
 
 class lock(object):
-    '''An advisory lock held by one process to control access to a set
+    """An advisory lock held by one process to control access to a set
     of files.  Non-cooperating processes or incorrectly written scripts
     can ignore Mercurial's locking scheme and stomp all over the
     repository, so don't do that.
 
     Typically used via localrepository.lock() to lock the repository
     store (.hg/store/) or localrepository.wlock() to lock everything
-    else under .hg/.'''
+    else under .hg/."""
 
     # lock is symlink on platforms that support it, file on others.
 
--- a/mercurial/logcmdutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/logcmdutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -417,8 +417,7 @@
                 )
 
     def _exthook(self, ctx):
-        '''empty method used by extension as a hook point
-        '''
+        """empty method used by extension as a hook point"""
 
     def _showpatch(self, ctx, graphwidth=0):
         if self._includestat:
@@ -509,13 +508,13 @@
 
 
 class changesettemplater(changesetprinter):
-    '''format changeset information.
+    """format changeset information.
 
     Note: there are a variety of convenience functions to build a
     changesettemplater for common cases. See functions such as:
     maketemplater, changesetdisplayer, buildcommittemplate, or other
     functions that use changesest_templater.
-    '''
+    """
 
     # Arguments before "buffered" used to be positional. Consider not
     # adding/removing arguments before "buffered" to not break callers.
@@ -623,7 +622,7 @@
 
     # ui settings
     if not tmpl and not style:  # template are stronger than style
-        tmpl = ui.config(b'ui', b'logtemplate')
+        tmpl = ui.config(b'command-templates', b'log')
         if tmpl:
             return formatter.literal_templatespec(templater.unquotestring(tmpl))
         else:
@@ -656,7 +655,7 @@
     Display format will be the first non-empty hit of:
     1. option 'template'
     2. option 'style'
-    3. [ui] setting 'logtemplate'
+    3. [command-templates] setting 'log'
     4. [ui] setting 'style'
     If all of these values are either the unset or the empty string,
     regular display via changesetprinter() is done.
@@ -692,6 +691,7 @@
     revspec = attr.ib()  # type: List[bytes]
 
     # miscellaneous queries to filter revisions (see "hg help log" for details)
+    bookmarks = attr.ib(default=attr.Factory(list))  # type: List[bytes]
     branches = attr.ib(default=attr.Factory(list))  # type: List[bytes]
     date = attr.ib(default=None)  # type: Optional[bytes]
     keywords = attr.ib(default=attr.Factory(list))  # type: List[bytes]
@@ -747,6 +747,7 @@
         pats=pats,
         opts=opts,
         revspec=opts.get(b'rev', []),
+        bookmarks=opts.get(b'bookmark', []),
         # branch and only_branch are really aliases and must be handled at
         # the same time
         branches=opts.get(b'branch', []) + opts.get(b'only_branch', []),
@@ -897,13 +898,13 @@
 def _makerevset(repo, wopts, slowpath):
     """Return a revset string built from log options and file patterns"""
     opts = {
-        b'branch': [repo.lookupbranch(b) for b in wopts.branches],
+        b'branch': [b'literal:' + repo.lookupbranch(b) for b in wopts.branches],
         b'date': wopts.date,
         b'keyword': wopts.keywords,
         b'no_merges': wopts.no_merges,
         b'only_merges': wopts.only_merges,
         b'prune': wopts.prune_ancestors,
-        b'user': wopts.users,
+        b'user': [b'literal:' + v for v in wopts.users],
     }
 
     if wopts.filter_revisions_by_pats and slowpath:
@@ -938,6 +939,14 @@
                 val = [revsetlang.formatspec(revop, v) for v in val]
             expr.append(revsetlang.formatspec(listop, val))
 
+    if wopts.bookmarks:
+        expr.append(
+            revsetlang.formatspec(
+                b'%lr',
+                [scmutil.format_bookmark_revspec(v) for v in wopts.bookmarks],
+            )
+        )
+
     if expr:
         expr = b'(' + b' and '.join(expr) + b')'
     else:
@@ -1111,7 +1120,7 @@
 
 
 def _graphnodeformatter(ui, displayer):
-    spec = ui.config(b'ui', b'graphnodetemplate')
+    spec = ui.config(b'command-templates', b'graphnode')
     if not spec:
         return templatekw.getgraphnode  # fast path for "{graphnode}"
 
--- a/mercurial/logexchange.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/logexchange.py	Tue Jan 19 21:48:43 2021 +0530
@@ -141,7 +141,10 @@
 
     with remoterepo.commandexecutor() as e:
         bookmarks = e.callcommand(
-            b'listkeys', {b'namespace': b'bookmarks',}
+            b'listkeys',
+            {
+                b'namespace': b'bookmarks',
+            },
         ).result()
 
     # on a push, we don't want to keep obsolete heads since
--- a/mercurial/mail.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/mail.py	Tue Jan 19 21:48:43 2021 +0530
@@ -44,10 +44,10 @@
 
 
 class STARTTLS(smtplib.SMTP):
-    '''Derived class to verify the peer certificate for STARTTLS.
+    """Derived class to verify the peer certificate for STARTTLS.
 
     This class allows to pass any keyword arguments to SSL socket creation.
-    '''
+    """
 
     def __init__(self, ui, host=None, **kwargs):
         smtplib.SMTP.__init__(self, **kwargs)
@@ -76,10 +76,10 @@
 
 
 class SMTPS(smtplib.SMTP):
-    '''Derived class to verify the peer certificate for SMTPS.
+    """Derived class to verify the peer certificate for SMTPS.
 
     This class allows to pass any keyword arguments to SSL socket creation.
-    '''
+    """
 
     def __init__(self, ui, keyfile=None, certfile=None, host=None, **kwargs):
         self.keyfile = keyfile
@@ -221,8 +221,8 @@
 
 
 def connect(ui, mbox=None):
-    '''make a mail connection. return a function to send mail.
-    call as sendmail(sender, list-of-recipients, msg).'''
+    """make a mail connection. return a function to send mail.
+    call as sendmail(sender, list-of-recipients, msg)."""
     if mbox:
         open(mbox, b'wb').close()
         return lambda s, r, m: _mbox(mbox, s, r, m)
@@ -267,11 +267,11 @@
 
 def mimetextpatch(s, subtype='plain', display=False):
     # type: (bytes, str, bool) -> email.message.Message
-    '''Return MIME message suitable for a patch.
+    """Return MIME message suitable for a patch.
     Charset will be detected by first trying to decode as us-ascii, then utf-8,
     and finally the global encodings. If all those fail, fall back to
     ISO-8859-1, an encoding with that allows all byte sequences.
-    Transfer encodings will be used if necessary.'''
+    Transfer encodings will be used if necessary."""
 
     cs = [
         'us-ascii',
@@ -293,9 +293,9 @@
 
 def mimetextqp(body, subtype, charset):
     # type: (bytes, str, str) -> email.message.Message
-    '''Return MIME message.
+    """Return MIME message.
     Quoted-printable transfer encoding will be used if necessary.
-    '''
+    """
     cs = email.charset.Charset(charset)
     msg = email.message.Message()
     msg.set_type('text/' + subtype)
@@ -337,11 +337,11 @@
 
 def _encode(ui, s, charsets):
     # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
-    '''Returns (converted) string, charset tuple.
+    """Returns (converted) string, charset tuple.
     Finds out best charset by cycling through sendcharsets in descending
     order. Tries both encoding and fallbackencoding for input. Only as
     last resort send as is in fake ascii.
-    Caveat: Do not use for mail parts containing patches!'''
+    Caveat: Do not use for mail parts containing patches!"""
     sendcharsets = charsets or _charsets(ui)
     if not isinstance(s, bytes):
         # We have unicode data, which we need to try and encode to
@@ -427,9 +427,9 @@
 
 def addrlistencode(ui, addrs, charsets=None, display=False):
     # type: (Any, List[bytes], List[str], bool) -> List[str]
-    '''Turns a list of addresses into a list of RFC-2047 compliant headers.
+    """Turns a list of addresses into a list of RFC-2047 compliant headers.
     A single element of input list may contain multiple addresses, but output
-    always has one address per item'''
+    always has one address per item"""
     straddrs = []
     for a in addrs:
         assert isinstance(a, bytes), '%r unexpectedly not a bytestr' % a
@@ -447,8 +447,8 @@
 
 def mimeencode(ui, s, charsets=None, display=False):
     # type: (Any, bytes, List[str], bool) -> email.message.Message
-    '''creates mime text object, encodes it if needed, and sets
-    charset and transfer-encoding accordingly.'''
+    """creates mime text object, encodes it if needed, and sets
+    charset and transfer-encoding accordingly."""
     cs = 'us-ascii'
     if not display:
         s, cs = _encode(ui, s, charsets)
--- a/mercurial/manifest.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/manifest.py	Tue Jan 19 21:48:43 2021 +0530
@@ -528,8 +528,8 @@
         return dir in self._dirs
 
     def _filesfastpath(self, match):
-        '''Checks whether we can correctly and quickly iterate over matcher
-        files instead of over manifest files.'''
+        """Checks whether we can correctly and quickly iterate over matcher
+        files instead of over manifest files."""
         files = match.files()
         return len(files) < 100 and (
             match.isexact()
@@ -537,13 +537,13 @@
         )
 
     def walk(self, match):
-        '''Generates matching file names.
+        """Generates matching file names.
 
         Equivalent to manifest.matches(match).iterkeys(), but without creating
         an entirely new manifest.
 
         It also reports nonexistent files by marking them bad with match.bad().
-        '''
+        """
         if match.always():
             for f in iter(self):
                 yield f
@@ -591,7 +591,7 @@
         return m
 
     def diff(self, m2, match=None, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         Args:
           m2: the manifest to which this manifest should be compared.
@@ -604,7 +604,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         if match:
             m1 = self._matches(match)
             m2 = m2._matches(match)
@@ -703,14 +703,14 @@
 
 
 def _msearch(m, s, lo=0, hi=None):
-    '''return a tuple (start, end) that says where to find s within m.
+    """return a tuple (start, end) that says where to find s within m.
 
     If the string is found m[start:end] are the line containing
     that string.  If start == end the string was not found and
     they indicate the proper sorted insertion point.
 
     m should be a buffer, a memoryview or a byte string.
-    s is a byte string'''
+    s is a byte string"""
 
     def advance(i, c):
         while i < lenm and m[i : i + 1] != c:
@@ -818,23 +818,24 @@
 
     def _loadalllazy(self):
         selfdirs = self._dirs
-        for d, (path, node, readsubtree, docopy) in pycompat.iteritems(
+        subpath = self._subpath
+        for d, (node, readsubtree, docopy) in pycompat.iteritems(
             self._lazydirs
         ):
             if docopy:
-                selfdirs[d] = readsubtree(path, node).copy()
+                selfdirs[d] = readsubtree(subpath(d), node).copy()
             else:
-                selfdirs[d] = readsubtree(path, node)
+                selfdirs[d] = readsubtree(subpath(d), node)
         self._lazydirs = {}
 
     def _loadlazy(self, d):
         v = self._lazydirs.get(d)
         if v:
-            path, node, readsubtree, docopy = v
+            node, readsubtree, docopy = v
             if docopy:
-                self._dirs[d] = readsubtree(path, node).copy()
+                self._dirs[d] = readsubtree(self._subpath(d), node).copy()
             else:
-                self._dirs[d] = readsubtree(path, node)
+                self._dirs[d] = readsubtree(self._subpath(d), node)
             del self._lazydirs[d]
 
     def _loadchildrensetlazy(self, visit):
@@ -861,7 +862,7 @@
         toloadlazy = []
         for d, v1 in pycompat.iteritems(t1._lazydirs):
             v2 = t2._lazydirs.get(d)
-            if not v2 or v2[1] != v1[1]:
+            if not v2 or v2[0] != v1[0]:
                 toloadlazy.append(d)
         for d, v1 in pycompat.iteritems(t2._lazydirs):
             if d not in t1._lazydirs:
@@ -909,14 +910,14 @@
         )
 
     def dir(self):
-        '''The directory that this tree manifest represents, including a
-        trailing '/'. Empty string for the repo root directory.'''
+        """The directory that this tree manifest represents, including a
+        trailing '/'. Empty string for the repo root directory."""
         return self._dir
 
     def node(self):
-        '''This node of this instance. nullid for unsaved instances. Should
+        """This node of this instance. nullid for unsaved instances. Should
         be updated when the instance is read or written from a revlog.
-        '''
+        """
         assert not self._dirty
         return self._node
 
@@ -1092,8 +1093,8 @@
             def _copyfunc(s):
                 self._load()
                 s._lazydirs = {
-                    d: (p, n, r, True)
-                    for d, (p, n, r, c) in pycompat.iteritems(self._lazydirs)
+                    d: (n, r, True)
+                    for d, (n, r, c) in pycompat.iteritems(self._lazydirs)
                 }
                 sdirs = s._dirs
                 for d, v in pycompat.iteritems(self._dirs):
@@ -1157,10 +1158,10 @@
         return dirslash in self._dirs or dirslash in self._lazydirs
 
     def walk(self, match):
-        '''Generates matching file names.
+        """Generates matching file names.
 
         It also reports nonexistent files by marking them bad with match.bad().
-        '''
+        """
         if match.always():
             for f in iter(self):
                 yield f
@@ -1202,8 +1203,7 @@
                         yield f
 
     def _matches(self, match):
-        '''recursively generate a new manifest filtered by the match argument.
-        '''
+        """recursively generate a new manifest filtered by the match argument."""
         if match.always():
             return self.copy()
         return self._matches_inner(match)
@@ -1253,7 +1253,7 @@
         raise FastdeltaUnavailable()
 
     def diff(self, m2, match=None, clean=False):
-        '''Finds changes between the current manifest and m2.
+        """Finds changes between the current manifest and m2.
 
         Args:
           m2: the manifest to which this manifest should be compared.
@@ -1266,7 +1266,7 @@
         in the current/other manifest. Where the file does not exist,
         the nodeid will be None and the flags will be the empty
         string.
-        '''
+        """
         if match and not match.always():
             m1 = self._matches(match)
             m2 = m2._matches(match)
@@ -1318,13 +1318,12 @@
 
     def parse(self, text, readsubtree):
         selflazy = self._lazydirs
-        subpath = self._subpath
         for f, n, fl in _parse(text):
             if fl == b't':
                 f = f + b'/'
                 # False below means "doesn't need to be copied" and can use the
                 # cached value from readsubtree directly.
-                selflazy[f] = (subpath(f), n, readsubtree, False)
+                selflazy[f] = (n, readsubtree, False)
             elif b'/' in f:
                 # This is a flat manifest, so use __setitem__ and setflag rather
                 # than assigning directly to _files and _flags, so we can
@@ -1352,7 +1351,7 @@
         self._load()
         flags = self.flags
         lazydirs = [
-            (d[:-1], v[1], b't') for d, v in pycompat.iteritems(self._lazydirs)
+            (d[:-1], v[0], b't') for d, v in pycompat.iteritems(self._lazydirs)
         ]
         dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs]
         files = [(f, self._files[f], flags(f)) for f in self._files]
@@ -1374,7 +1373,7 @@
         def getnode(m, d):
             ld = m._lazydirs.get(d)
             if ld:
-                return ld[1]
+                return ld[0]
             return m._dirs.get(d, emptytree)._node
 
         # let's skip investigating things that `match` says we do not need.
@@ -1546,9 +1545,9 @@
 
 @interfaceutil.implementer(repository.imanifeststorage)
 class manifestrevlog(object):
-    '''A revlog that stores manifest texts. This is responsible for caching the
+    """A revlog that stores manifest texts. This is responsible for caching the
     full-text manifest contents.
-    '''
+    """
 
     def __init__(
         self,
@@ -1832,9 +1831,20 @@
             deltamode=deltamode,
         )
 
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        duplicaterevisioncb=None,
+    ):
         return self._revlog.addgroup(
-            deltas, linkmapper, transaction, addrevisioncb=addrevisioncb
+            deltas,
+            linkmapper,
+            transaction,
+            addrevisioncb=addrevisioncb,
+            duplicaterevisioncb=duplicaterevisioncb,
         )
 
     def rawsize(self, rev):
@@ -2066,12 +2076,12 @@
         return self._data
 
     def readfast(self, shallow=False):
-        '''Calls either readdelta or read, based on which would be less work.
+        """Calls either readdelta or read, based on which would be less work.
         readdelta is called if the delta is against the p1, and therefore can be
         read quickly.
 
         If `shallow` is True, nothing changes since this is a flat manifest.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         deltaparent = store.deltaparent(r)
@@ -2080,12 +2090,12 @@
         return self.read()
 
     def readdelta(self, shallow=False):
-        '''Returns a manifest containing just the entries that are present
+        """Returns a manifest containing just the entries that are present
         in this manifest, but not in its p1 manifest. This is efficient to read
         if the revlog delta is already p1.
 
         Changing the value of `shallow` has no effect on flat manifests.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
@@ -2197,7 +2207,7 @@
         return self._storage().parents(self._node)
 
     def readdelta(self, shallow=False):
-        '''Returns a manifest containing just the entries that are present
+        """Returns a manifest containing just the entries that are present
         in this manifest, but not in its p1 manifest. This is efficient to read
         if the revlog delta is already p1.
 
@@ -2206,7 +2216,7 @@
         subdirectory entry will be reported as it appears in the manifest, i.e.
         the subdirectory will be reported among files and distinguished only by
         its 't' flag.
-        '''
+        """
         store = self._storage()
         if shallow:
             r = store.rev(self._node)
@@ -2226,13 +2236,13 @@
             return md
 
     def readfast(self, shallow=False):
-        '''Calls either readdelta or read, based on which would be less work.
+        """Calls either readdelta or read, based on which would be less work.
         readdelta is called if the delta is against the p1, and therefore can be
         read quickly.
 
         If `shallow` is True, it only returns the entries from this manifest,
         and not any submanifests.
-        '''
+        """
         store = self._storage()
         r = store.rev(self._node)
         deltaparent = store.deltaparent(r)
@@ -2287,6 +2297,10 @@
     def read(self):
         return excludeddir(self._dir, self._node)
 
+    def readfast(self, shallow=False):
+        # special version of readfast since we don't have underlying storage
+        return self.read()
+
     def write(self, *args):
         raise error.ProgrammingError(
             b'attempt to write manifest from excluded dir %s' % self._dir
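
Two functional changes sit among the reformatting in the manifest.py hunks above:
_lazydirs entries shrink from (subpath, node, readsubtree, needs-copy) to
(node, readsubtree, needs-copy), which shifts the indexes readers use, and
manifestrevlog.addgroup now forwards the new duplicaterevisioncb callback to the
underlying revlog. The following is a small illustrative sketch of the new entry
layout only, with made-up values rather than real manifest data:

    # Hypothetical _lazydirs-style mapping using the new three-element entries.
    def readsubtree(directory, node):
        # stand-in loader; the real one reads a sub-treemanifest from storage
        return b'<subtree %s@%s>' % (directory, node[:4])

    lazydirs = {b'dir/': (b'\x11' * 20, readsubtree, False)}

    for d, (node, read, needs_copy) in lazydirs.items():
        # the node is now at index 0 and the loader at index 1 (was 1 and 2)
        assert read(d, node).startswith(b'<subtree')
        assert needs_copy is False
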
--- a/mercurial/match.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/match.py	Tue Jan 19 21:48:43 2021 +0530
@@ -47,8 +47,8 @@
 
 
 def _rematcher(regex):
-    '''compile the regexp with the best available regexp engine and return a
-    matcher function'''
+    """compile the regexp with the best available regexp engine and return a
+    matcher function"""
     m = util.re.compile(regex)
     try:
         # slightly faster, provided by facebook's re2 bindings
@@ -82,8 +82,8 @@
 
 
 def _expandsubinclude(kindpats, root):
-    '''Returns the list of subinclude matcher args and the kindpats without the
-    subincludes in it.'''
+    """Returns the list of subinclude matcher args and the kindpats without the
+    subincludes in it."""
     relmatchers = []
     other = []
 
@@ -107,7 +107,7 @@
 
 
 def _kindpatsalwaysmatch(kindpats):
-    """"Checks whether the kindspats match everything, as e.g.
+    """Checks whether the kindpats match everything, as e.g.
     'relpath:.' does.
     """
     for kind, pat, source in kindpats:
@@ -117,11 +117,21 @@
 
 
 def _buildkindpatsmatcher(
-    matchercls, root, cwd, kindpats, ctx=None, listsubrepos=False, badfn=None,
+    matchercls,
+    root,
+    cwd,
+    kindpats,
+    ctx=None,
+    listsubrepos=False,
+    badfn=None,
 ):
     matchers = []
     fms, kindpats = _expandsets(
-        cwd, kindpats, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn,
+        cwd,
+        kindpats,
+        ctx=ctx,
+        listsubrepos=listsubrepos,
+        badfn=badfn,
     )
     if kindpats:
         m = matchercls(root, kindpats, badfn=badfn)
@@ -321,8 +331,8 @@
 
 
 def _donormalize(patterns, default, root, cwd, auditor=None, warn=None):
-    '''Convert 'kind:pat' from the patterns list to tuples with kind and
-    normalized and rooted patterns and with listfiles expanded.'''
+    """Convert 'kind:pat' from the patterns list to tuples with kind and
+    normalized and rooted patterns and with listfiles expanded."""
     kindpats = []
     for kind, pat in [_patsplit(p, default) for p in patterns]:
         if kind in cwdrelativepatternkinds:
@@ -383,8 +393,8 @@
     # Callbacks related to how the matcher is used by dirstate.walk.
     # Subscribers to these events must monkeypatch the matcher object.
     def bad(self, f, msg):
-        '''Callback from dirstate.walk for each explicit file that can't be
-        found/accessed, with an error message.'''
+        """Callback from dirstate.walk for each explicit file that can't be
+        found/accessed, with an error message."""
 
     # If a traversedir is set, it will be called when a directory discovered
     # by recursive traversal is visited.
@@ -395,11 +405,11 @@
         return []
 
     def files(self):
-        '''Explicitly listed files or patterns or roots:
+        """Explicitly listed files or patterns or roots:
         if no patterns or .always(): empty list,
         if exact: list exact files,
         if not .anypats(): list all files and dirs,
-        else: optimal roots'''
+        else: optimal roots"""
         return self._files
 
     @propertycache
@@ -414,18 +424,18 @@
         return False
 
     def visitdir(self, dir):
-        '''Decides whether a directory should be visited based on whether it
+        """Decides whether a directory should be visited based on whether it
         has potential matches in it or one of its subdirectories. This is
         based on the match's primary, included, and excluded patterns.
 
         Returns the string 'all' if the given directory and all subdirectories
         should be visited. Otherwise returns True or False indicating whether
         the given directory should be visited.
-        '''
+        """
         return True
 
     def visitchildrenset(self, dir):
-        '''Decides whether a directory should be visited based on whether it
+        """Decides whether a directory should be visited based on whether it
         has potential matches in it or one of its subdirectories, and
         potentially lists which subdirectories of that directory should be
         visited. This is based on the match's primary, included, and excluded
@@ -464,27 +474,27 @@
           indicating that there are no files in this dir to investigate (or
           equivalently that if there are files to investigate in 'dir' that it
           will always return 'this').
-        '''
+        """
         return b'this'
 
     def always(self):
-        '''Matcher will match everything and .files() will be empty --
-        optimization might be possible.'''
+        """Matcher will match everything and .files() will be empty --
+        optimization might be possible."""
         return False
 
     def isexact(self):
-        '''Matcher will match exactly the list of files in .files() --
-        optimization might be possible.'''
+        """Matcher will match exactly the list of files in .files() --
+        optimization might be possible."""
         return False
 
     def prefix(self):
-        '''Matcher will match the paths in .files() recursively --
-        optimization might be possible.'''
+        """Matcher will match the paths in .files() recursively --
+        optimization might be possible."""
         return False
 
     def anypats(self):
-        '''None of .always(), .isexact(), and .prefix() is true --
-        optimizations will be difficult.'''
+        """None of .always(), .isexact(), and .prefix() is true --
+        optimizations will be difficult."""
         return not self.always() and not self.isexact() and not self.prefix()
 
 
@@ -553,6 +563,36 @@
         return b'<predicatenmatcher pred=%s>' % s
 
 
+def path_or_parents_in_set(path, prefix_set):
+    """Returns True if `path` (or any parent of `path`) is in `prefix_set`."""
+    l = len(prefix_set)
+    if l == 0:
+        return False
+    if path in prefix_set:
+        return True
+    # If there's more than 5 paths in prefix_set, it's *probably* quicker to
+    # "walk up" the directory hierarchy instead, with the assumption that most
+    # directory hierarchies are relatively shallow and hash lookup is cheap.
+    if l > 5:
+        return any(
+            parentdir in prefix_set for parentdir in pathutil.finddirs(path)
+        )
+
+    # FIXME: Ideally we'd never get to this point if this is the case - we'd
+    # recognize ourselves as an 'always' matcher and skip this.
+    if b'' in prefix_set:
+        return True
+
+    if pycompat.ispy3:
+        sl = ord(b'/')
+    else:
+        sl = '/'
+
+    # We already checked that path isn't in prefix_set exactly, so
+    # `path[len(pf)]` should never raise IndexError.
+    return any(path.startswith(pf) and path[len(pf)] == sl for pf in prefix_set)
+
+
 class patternmatcher(basematcher):
     r"""Matches a set of (kind, pat, source) against a 'root' directory.
 
@@ -600,14 +640,7 @@
     def visitdir(self, dir):
         if self._prefix and dir in self._fileset:
             return b'all'
-        return (
-            dir in self._fileset
-            or dir in self._dirs
-            or any(
-                parentdir in self._fileset
-                for parentdir in pathutil.finddirs(dir)
-            )
-        )
+        return dir in self._dirs or path_or_parents_in_set(dir, self._fileset)
 
     def visitchildrenset(self, dir):
         ret = self.visitdir(dir)
@@ -688,12 +721,9 @@
         if self._prefix and dir in self._roots:
             return b'all'
         return (
-            dir in self._roots
-            or dir in self._dirs
+            dir in self._dirs
             or dir in self._parents
-            or any(
-                parentdir in self._roots for parentdir in pathutil.finddirs(dir)
-            )
+            or path_or_parents_in_set(dir, self._roots)
         )
 
     @propertycache
@@ -716,11 +746,8 @@
         # visitdir, that's handled below.
         if (
             b'' in self._roots
-            or dir in self._roots
             or dir in self._dirs
-            or any(
-                parentdir in self._roots for parentdir in pathutil.finddirs(dir)
-            )
+            or path_or_parents_in_set(dir, self._roots)
         ):
             return b'this'
 
@@ -734,7 +761,7 @@
 
 
 class exactmatcher(basematcher):
-    r'''Matches the input files exactly. They are interpreted as paths, not
+    r"""Matches the input files exactly. They are interpreted as paths, not
     patterns (so no kind-prefixes).
 
     >>> m = exactmatcher([b'a.txt', br're:.*\.c$'])
@@ -752,7 +779,7 @@
     False
     >>> m(br're:.*\.c$')
     True
-    '''
+    """
 
     def __init__(self, files, badfn=None):
         super(exactmatcher, self).__init__(badfn)
@@ -799,11 +826,11 @@
 
 
 class differencematcher(basematcher):
-    '''Composes two matchers by matching if the first matches and the second
+    """Composes two matchers by matching if the first matches and the second
     does not.
 
     The second matcher's non-matching-attributes (bad, traversedir) are ignored.
-    '''
+    """
 
     def __init__(self, m1, m2):
         super(differencematcher, self).__init__()
@@ -868,10 +895,10 @@
 
 
 def intersectmatchers(m1, m2):
-    '''Composes two matchers by matching if both of them match.
+    """Composes two matchers by matching if both of them match.
 
     The second matcher's non-matching-attributes (bad, traversedir) are ignored.
-    '''
+    """
     if m1 is None or m2 is None:
         return m1 or m2
     if m1.always():
@@ -1166,7 +1193,7 @@
 
 
 def patkind(pattern, default=None):
-    r'''If pattern is 'kind:pat' with a known kind, return kind.
+    r"""If pattern is 'kind:pat' with a known kind, return kind.
 
     >>> patkind(br're:.*\.c$')
     're'
@@ -1177,7 +1204,7 @@
     >>> patkind(b'main.py')
     >>> patkind(b'main.py', default=b're')
     're'
-    '''
+    """
     return _patsplit(pattern, default)[0]
 
 
@@ -1192,7 +1219,7 @@
 
 
 def _globre(pat):
-    r'''Convert an extended glob string to a regexp string.
+    r"""Convert an extended glob string to a regexp string.
 
     >>> from . import pycompat
     >>> def bprint(s):
@@ -1213,7 +1240,7 @@
     (?:a|b)
     >>> bprint(_globre(br'.\*\?'))
     \.\*\?
-    '''
+    """
     i, n = 0, len(pat)
     res = b''
     group = 0
@@ -1276,9 +1303,9 @@
 
 
 def _regex(kind, pat, globsuffix):
-    '''Convert a (normalized) pattern of any kind into a
+    """Convert a (normalized) pattern of any kind into a
     regular expression.
-    globsuffix is appended to the regexp of globs.'''
+    globsuffix is appended to the regexp of globs."""
     if not pat and kind in (b'glob', b'relpath'):
         return b''
     if kind == b're':
@@ -1312,8 +1339,8 @@
 
 
 def _buildmatch(kindpats, globsuffix, root):
-    '''Return regexp string and a matcher function for kindpats.
-    globsuffix is appended to the regexp of globs.'''
+    """Return regexp string and a matcher function for kindpats.
+    globsuffix is appended to the regexp of globs."""
     matchfuncs = []
 
     subincludes, kindpats = _expandsubinclude(kindpats, root)
@@ -1422,13 +1449,13 @@
 
 
 def _patternrootsanddirs(kindpats):
-    '''Returns roots and directories corresponding to each pattern.
+    """Returns roots and directories corresponding to each pattern.
 
     This calculates the roots and directories exactly matching the patterns and
     returns a tuple of (roots, dirs) for each. It does not return other
     directories which may also need to be considered, like the parent
     directories.
-    '''
+    """
     r = []
     d = []
     for kind, pat, source in kindpats:
@@ -1459,7 +1486,7 @@
 
 
 def _rootsdirsandparents(kindpats):
-    '''Returns roots and exact directories from patterns.
+    """Returns roots and exact directories from patterns.
 
     `roots` are directories to match recursively, `dirs` should
     be matched non-recursively, and `parents` are the implicitly required
@@ -1486,7 +1513,7 @@
     ...      (b'relre', b'rr', b'')])
     >>> print(r[0:2], sorted(r[2])) # the set has an unstable output
     (['', '', ''], []) ['']
-    '''
+    """
     r, d = _patternrootsanddirs(kindpats)
 
     p = set()
@@ -1503,13 +1530,13 @@
 
 
 def _explicitfiles(kindpats):
-    '''Returns the potential explicit filenames from the patterns.
+    """Returns the potential explicit filenames from the patterns.
 
     >>> _explicitfiles([(b'path', b'foo/bar', b'')])
     ['foo/bar']
     >>> _explicitfiles([(b'rootfilesin', b'foo/bar', b'')])
     []
-    '''
+    """
     # Keep only the pattern kinds where one can specify filenames (vs only
     # directory names).
     filable = [kp for kp in kindpats if kp[0] not in (b'rootfilesin',)]
@@ -1528,7 +1555,7 @@
 
 
 def readpatternfile(filepath, warn, sourceinfo=False):
-    '''parse a pattern file, returning a list of
+    """parse a pattern file, returning a list of
     patterns. These patterns should be given to compile()
     to be validated and converted into a match function.
 
@@ -1549,7 +1576,7 @@
     if sourceinfo is set, returns a list of tuples:
     (pattern, lineno, originalline).
     This is useful to debug ignore patterns.
-    '''
+    """
 
     syntaxes = {
         b're': b'relre:',
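
The substantive change in the match.py hunks above is the new
path_or_parents_in_set() helper, which the visitdir()/visitchildrenset()
implementations now call instead of open-coding the parent-directory probing.
Below is a standalone, simplified sketch of the same strategy; finddirs here is
a stand-in for pathutil.finddirs and the Python 2 branch is omitted:

    def finddirs(path):  # simplified stand-in for pathutil.finddirs
        pos = path.rfind(b'/')
        while pos != -1:
            yield path[:pos]
            pos = path.rfind(b'/', 0, pos)
        yield b''

    def path_or_parents_in_set(path, prefix_set):
        # Small sets: test each prefix with startswith plus a '/' boundary
        # check.  Larger sets: walk up the path's parents and probe the set,
        # on the assumption that hierarchies are shallow and lookups are cheap.
        if not prefix_set:
            return False
        if path in prefix_set:
            return True
        if len(prefix_set) > 5:
            return any(p in prefix_set for p in finddirs(path))
        if b'' in prefix_set:
            return True
        sl = ord(b'/')
        return any(
            path.startswith(pf) and path[len(pf)] == sl for pf in prefix_set
        )

    assert path_or_parents_in_set(b'a/b/c', {b'a'})
    assert not path_or_parents_in_set(b'ab/c', {b'a'})
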
--- a/mercurial/mdiff.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/mdiff.py	Tue Jan 19 21:48:43 2021 +0530
@@ -39,7 +39,7 @@
 
 # TODO: this looks like it could be an attrs, which might help pytype
 class diffopts(object):
-    '''context is the number of context lines
+    """context is the number of context lines
     text treats all files as text
     showfunc enables diff -p output
     git enables the git extended patch format
@@ -50,7 +50,7 @@
     ignorewsamount ignores changes in the amount of whitespace
     ignoreblanklines ignores changes whose lines are all blank
     upgrade generates git diffs to avoid data loss
-    '''
+    """
 
     _HAS_DYNAMIC_ATTRIBUTES = True
 
@@ -199,7 +199,7 @@
         if hunkinrange((b1, (b2 - b1)), rangeb):
             filteredblocks.append(block)
     if lba is None or uba is None or uba < lba:
-        raise error.Abort(_(b'line range exceeds file size'))
+        raise error.InputError(_(b'line range exceeds file size'))
     return filteredblocks, (lba, uba)
 
 
--- a/mercurial/merge.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/merge.py	Tue Jan 19 21:48:43 2021 +0530
@@ -217,7 +217,10 @@
                 if config == b'warn':
                     warnconflicts.add(f)
                 mresult.addfile(
-                    f, mergestatemod.ACTION_GET, (fl2, True), b'remote created',
+                    f,
+                    mergestatemod.ACTION_GET,
+                    (fl2, True),
+                    b'remote created',
                 )
 
     for f in sorted(abortconflicts):
@@ -281,7 +284,10 @@
         for f in wctx.removed():
             if f not in mctx:
                 mresult.addfile(
-                    f, mergestatemod.ACTION_FORGET, None, b"forget removed",
+                    f,
+                    mergestatemod.ACTION_FORGET,
+                    None,
+                    b"forget removed",
                 )
 
 
@@ -544,10 +550,10 @@
 
 
 class mergeresult(object):
-    ''''An object representing result of merging manifests.
+    """An object representing result of merging manifests.
 
     It has information about what actions need to be performed on dirstate,
-    mapping of divergent renames and other such cases. '''
+    mapping of divergent renames and other such cases."""
 
     def __init__(self):
         """
@@ -572,7 +578,7 @@
         self._renamedelete = renamedelete
 
     def addfile(self, filename, action, data, message):
-        """ adds a new file to the mergeresult object
+        """adds a new file to the mergeresult object
 
         filename: file which we are adding
         action: one of mergestatemod.ACTION_*
@@ -589,15 +595,15 @@
         self._actionmapping[action][filename] = (data, message)
 
     def getfile(self, filename, default_return=None):
-        """ returns (action, args, msg) about this file
+        """returns (action, args, msg) about this file
 
-        returns default_return if the file is not present """
+        returns default_return if the file is not present"""
         if filename in self._filemapping:
             return self._filemapping[filename]
         return default_return
 
     def files(self, actions=None):
-        """ returns files on which provided action needs to perfromed
+        """returns files on which the provided action needs to be performed
 
         If actions is None, all files are returned
         """
@@ -613,14 +619,14 @@
                     yield f
 
     def removefile(self, filename):
-        """ removes a file from the mergeresult object as the file might
-        not merging anymore """
+        """removes a file from the mergeresult object as the file might
+        not need merging anymore"""
         action, data, message = self._filemapping[filename]
         del self._filemapping[filename]
         del self._actionmapping[action][filename]
 
     def getactions(self, actions, sort=False):
-        """ get list of files which are marked with these actions
+        """get list of files which are marked with these actions
         if sort is true, files for each action are sorted and then added
 
         Returns a list of tuple of form (filename, data, message)
@@ -637,10 +643,10 @@
                     yield f, args, msg
 
     def len(self, actions=None):
-        """ returns number of files which needs actions
+        """returns number of files which need actions
 
         if actions is passed, the total number of files in that action
-        only is returned """
+        only is returned"""
 
         if actions is None:
             return len(self._filemapping)
@@ -656,8 +662,8 @@
                 yield key, val
 
     def addcommitinfo(self, filename, key, value):
-        """ adds key-value information about filename which will be required
-        while committing this merge """
+        """adds key-value information about filename which will be required
+        while committing this merge"""
         self._commitinfo[filename][key] = value
 
     @property
@@ -674,8 +680,8 @@
 
     @property
     def actionsdict(self):
-        """ returns a dictionary of actions to be perfomed with action as key
-        and a list of files and related arguments as values """
+        """returns a dictionary of actions to be performed with action as key
+        and a list of files and related arguments as values"""
         res = collections.defaultdict(list)
         for a, d in pycompat.iteritems(self._actionmapping):
             for f, (args, msg) in pycompat.iteritems(d):
@@ -689,8 +695,8 @@
             self._actionmapping[act][f] = data, msg
 
     def hasconflicts(self):
-        """ tells whether this merge resulted in some actions which can
-        result in conflicts or not """
+        """tells whether this merge resulted in some actions which can
+        result in conflicts or not"""
         for a in self._actionmapping.keys():
             if (
                 a
@@ -817,13 +823,32 @@
                     args = (f, f, None, False, pa.node())
                     msg = b'both created'
                 mresult.addfile(f, mergestatemod.ACTION_MERGE, args, msg)
+            elif f in branch_copies1.copy:
+                fa = branch_copies1.copy[f]
+                mresult.addfile(
+                    f,
+                    mergestatemod.ACTION_MERGE,
+                    (f, fa, fa, False, pa.node()),
+                    b'local replaced from %s' % fa,
+                )
+            elif f in branch_copies2.copy:
+                fa = branch_copies2.copy[f]
+                mresult.addfile(
+                    f,
+                    mergestatemod.ACTION_MERGE,
+                    (fa, f, fa, False, pa.node()),
+                    b'other replaced from %s' % fa,
+                )
             else:
                 a = ma[f]
                 fla = ma.flags(f)
                 nol = b'l' not in fl1 + fl2 + fla
                 if n2 == a and fl2 == fla:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_KEEP, (), b'remote unchanged',
+                        f,
+                        mergestatemod.ACTION_KEEP,
+                        (),
+                        b'remote unchanged',
                     )
                 elif n1 == a and fl1 == fla:  # local unchanged - use remote
                     if n1 == n2:  # optimization: keep local content
@@ -920,11 +945,17 @@
                     # This file was locally added. We should forget it instead of
                     # deleting it.
                     mresult.addfile(
-                        f, mergestatemod.ACTION_FORGET, None, b'remote deleted',
+                        f,
+                        mergestatemod.ACTION_FORGET,
+                        None,
+                        b'remote deleted',
                     )
                 else:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_REMOVE, None, b'other deleted',
+                        f,
+                        mergestatemod.ACTION_REMOVE,
+                        None,
+                        b'other deleted',
                     )
                     if branchmerge:
                         # the file must be absent after merging,
@@ -1070,7 +1101,7 @@
 
 def _resolvetrivial(repo, wctx, mctx, ancestor, mresult):
     """Resolves false conflicts where the nodeid changed but the content
-       remained the same."""
+    remained the same."""
     # We force a copy of actions.items() because we're going to mutate
     # actions as we resolve trivial conflicts.
     for f in list(mresult.files((mergestatemod.ACTION_CHANGED_DELETED,))):
@@ -1407,7 +1438,13 @@
     prefetch = scmutil.prefetchfiles
     matchfiles = scmutil.matchfiles
     prefetch(
-        repo, [(ctx.rev(), matchfiles(repo, files),)],
+        repo,
+        [
+            (
+                ctx.rev(),
+                matchfiles(repo, files),
+            )
+        ],
     )
 
 
@@ -1428,7 +1465,13 @@
 
 
 def applyupdates(
-    repo, mresult, wctx, mctx, overwrite, wantfiledata, labels=None,
+    repo,
+    mresult,
+    wctx,
+    mctx,
+    overwrite,
+    wantfiledata,
+    labels=None,
 ):
     """apply the merge action list to the working directory
 
@@ -1718,7 +1761,8 @@
     if dirstate.rustmod is not None:
         # When using rust status, fsmonitor becomes necessary at higher sizes
         fsmonitorthreshold = repo.ui.configint(
-            b'fsmonitor', b'warn_update_file_count_rust',
+            b'fsmonitor',
+            b'warn_update_file_count_rust',
         )
 
     try:
@@ -1896,7 +1940,7 @@
                         hint=_(b"use 'hg update' or check 'hg heads'"),
                     )
             if not force and (wc.files() or wc.deleted()):
-                raise error.Abort(
+                raise error.StateError(
                     _(b"uncommitted changes"),
                     hint=_(b"use 'hg status' to list changes"),
                 )
@@ -1985,7 +2029,10 @@
                     0,
                 ):
                     mresult.addfile(
-                        f, mergestatemod.ACTION_REMOVE, None, b'prompt delete',
+                        f,
+                        mergestatemod.ACTION_REMOVE,
+                        None,
+                        b'prompt delete',
                     )
                 elif f in p1:
                     mresult.addfile(
@@ -1996,7 +2043,10 @@
                     )
                 else:
                     mresult.addfile(
-                        f, mergestatemod.ACTION_ADD, None, b'prompt keep',
+                        f,
+                        mergestatemod.ACTION_ADD,
+                        None,
+                        b'prompt keep',
                     )
             elif m == mergestatemod.ACTION_DELETED_CHANGED:
                 f1, f2, fa, move, anc = args
@@ -2073,7 +2123,13 @@
 
         wantfiledata = updatedirstate and not branchmerge
         stats, getfiledata = applyupdates(
-            repo, mresult, wc, p2, overwrite, wantfiledata, labels=labels,
+            repo,
+            mresult,
+            wc,
+            p2,
+            overwrite,
+            wantfiledata,
+            labels=labels,
         )
 
         if updatedirstate:
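
Most of the merge.py hunks are argument re-wrapping, but the one that adds the
two branch_copies branches schedules ACTION_MERGE entries tagged 'local
replaced from' / 'other replaced from', and all of these go through
mergeresult.addfile(), whose bookkeeping (visible in the docstring hunks above)
keeps a per-file mapping and a per-action mapping in sync. The toy model below
sketches that invariant; the action constants are stand-ins, not the real
mergestatemod.ACTION_* values, and the class is deliberately reduced:

    import collections

    ACTION_GET, ACTION_REMOVE = b'g', b'r'  # hypothetical stand-ins

    class minimergeresult(object):
        """Toy model of the two mappings mergeresult keeps in sync."""

        def __init__(self):
            self._filemapping = {}  # file -> (action, data, msg)
            self._actionmapping = collections.defaultdict(dict)

        def addfile(self, filename, action, data, message):
            self._filemapping[filename] = (action, data, message)
            self._actionmapping[action][filename] = (data, message)

        def removefile(self, filename):
            action, data, message = self._filemapping.pop(filename)
            del self._actionmapping[action][filename]

        def files(self, actions=None):
            if actions is None:
                return list(self._filemapping)
            return [f for a in actions for f in self._actionmapping[a]]

    mr = minimergeresult()
    mr.addfile(b'a.txt', ACTION_GET, (b'', True), b'remote created')
    mr.addfile(b'b.txt', ACTION_REMOVE, None, b'other deleted')
    assert mr.files((ACTION_GET,)) == [b'a.txt']
    mr.removefile(b'b.txt')
    assert mr.files() == [b'a.txt']
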
--- a/mercurial/mergestate.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/mergestate.py	Tue Jan 19 21:48:43 2021 +0530
@@ -132,7 +132,7 @@
 
 
 class _mergestate_base(object):
-    '''track 3-way merge state of individual files
+    """track 3-way merge state of individual files
 
     The merge state is stored on disk when needed. Two files are used: one with
     an old format (version 1), and one with a new format (version 2). Version 2
@@ -164,7 +164,7 @@
 
     The resolve command transitions between 'u' and 'r' for conflicts and
     'pu' and 'pr' for path conflicts.
-    '''
+    """
 
     def __init__(self, repo):
         """Initialize the merge state.
@@ -275,8 +275,8 @@
         self._dirty = True
 
     def addcommitinfo(self, path, data):
-        """ stores information which is required at commit
-        into _stateextras """
+        """stores information which is required at commit
+        into _stateextras"""
         self._stateextras[path].update(data)
         self._dirty = True
 
--- a/mercurial/mergeutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/mergeutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,6 +14,6 @@
 
 def checkunresolved(ms):
     if list(ms.unresolved()):
-        raise error.Abort(
+        raise error.StateError(
             _(b"unresolved merge conflicts (see 'hg help resolve')")
         )
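
This hunk, the error.InputError switch in mdiff.py above, and the
error.StateError / error.CanceledError switches in merge.py and (further down)
patch.py all replace a bare error.Abort with a more specific subclass, so the
kind of failure is visible to callers and to the exit-code machinery. A toy
sketch of the pattern follows; the class names and exit codes are made up for
the example and are not the real mercurial.error definitions:

    class AbortLike(Exception):        # stand-in for error.Abort
        exit_code = 255

    class InputErrorLike(AbortLike):   # bad arguments or user input
        exit_code = 10                 # hypothetical value

    class StateErrorLike(AbortLike):   # repository in the wrong state
        exit_code = 20                 # hypothetical value

    def run(fn):
        try:
            fn()
        except AbortLike as err:       # existing callers still catch the base
            return err.exit_code
        return 0

    def needs_clean_wdir():
        raise StateErrorLike(b'uncommitted changes')

    assert run(needs_clean_wdir) == 20
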
--- a/mercurial/metadata.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/metadata.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,9 +11,12 @@
 import multiprocessing
 import struct
 
+from .node import (
+    nullid,
+    nullrev,
+)
 from . import (
     error,
-    node,
     pycompat,
     util,
 )
@@ -239,11 +242,11 @@
     """compute the files changed by a revision"""
     p1 = ctx.p1()
     p2 = ctx.p2()
-    if p1.rev() == node.nullrev and p2.rev() == node.nullrev:
+    if p1.rev() == nullrev and p2.rev() == nullrev:
         return _process_root(ctx)
-    elif p1.rev() != node.nullrev and p2.rev() == node.nullrev:
+    elif p1.rev() != nullrev and p2.rev() == nullrev:
         return _process_linear(p1, ctx)
-    elif p1.rev() == node.nullrev and p2.rev() != node.nullrev:
+    elif p1.rev() == nullrev and p2.rev() != nullrev:
         # In the wild, one can encounter changeset where p1 is null but p2 is not
         return _process_linear(p1, ctx, parent=2)
     elif p1.rev() == p2.rev():
@@ -254,8 +257,7 @@
 
 
 def _process_root(ctx):
-    """compute the appropriate changed files for a changeset with no parents
-    """
+    """compute the appropriate changed files for a changeset with no parents"""
     # Simple, there was nothing before it, so everything is added.
     md = ChangingFiles()
     manifest = ctx.manifest()
@@ -265,8 +267,7 @@
 
 
 def _process_linear(parent_ctx, children_ctx, parent=1):
-    """compute the appropriate changed files for a changeset with a single parent
-    """
+    """compute the appropriate changed files for a changeset with a single parent"""
     md = ChangingFiles()
     parent_manifest = parent_ctx.manifest()
     children_manifest = children_ctx.manifest()
@@ -425,7 +426,7 @@
         p1_ctx.node(), p2_ctx.node()
     )
     if not cahs:
-        cahs = [node.nullrev]
+        cahs = [nullrev]
     mas = [ctx.repo()[r].manifest() for r in cahs]
 
     copy_candidates = []
@@ -515,8 +516,7 @@
 
 
 def computechangesetfilesadded(ctx):
-    """return the list of files added in a changeset
-    """
+    """return the list of files added in a changeset"""
     added = []
     for f in ctx.files():
         if not any(f in p for p in ctx.parents()):
@@ -563,7 +563,7 @@
         p2n = p2.node()
         cahs = ctx.repo().changelog.commonancestorsheads(p1n, p2n)
         if not cahs:
-            cahs = [node.nullrev]
+            cahs = [nullrev]
         return [ctx.repo()[r].manifest() for r in cahs]
 
     def deletionfromparent(f):
@@ -580,8 +580,7 @@
 
 
 def computechangesetfilesremoved(ctx):
-    """return the list of files removed in a changeset
-    """
+    """return the list of files removed in a changeset"""
     removed = []
     for f in ctx.files():
         if f not in ctx:
@@ -593,8 +592,7 @@
 
 
 def computechangesetfilesmerged(ctx):
-    """return the list of files merged in a changeset
-    """
+    """return the list of files merged in a changeset"""
     merged = []
     if len(ctx.parents()) < 2:
         return merged
@@ -602,7 +600,7 @@
         if f in ctx:
             fctx = ctx[f]
             parents = fctx._filelog.parents(fctx._filenode)
-            if parents[1] != node.nullid:
+            if parents[1] != nullid:
                 merged.append(f)
     return merged
 
--- a/mercurial/minirst.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/minirst.py	Tue Jan 19 21:48:43 2021 +0530
@@ -52,7 +52,7 @@
 
 
 def replace(text, substs):
-    '''
+    """
     Apply a list of (find, replace) pairs to a text.
 
     >>> replace(b"foo bar", [(b'f', b'F'), (b'b', b'B')])
@@ -63,7 +63,7 @@
     >>> encoding.encoding = b'shiftjis'
     >>> replace(b'\\x81\\\\', [(b'\\\\', b'/')])
     '\\x81\\\\'
-    '''
+    """
 
     # some character encodings (cp932 for Japanese, at least) use
     # ASCII characters other than control/alphabet/digit as a part of
@@ -322,10 +322,10 @@
 
 
 def findtables(blocks):
-    '''Find simple tables
+    """Find simple tables
 
-       Only simple one-line table elements are supported
-    '''
+    Only simple one-line table elements are supported
+    """
 
     for block in blocks:
         # Searching for a block that looks like this:
@@ -432,7 +432,11 @@
     while i < len(blocks):
         if blocks[i][b'type'] == blocks[i - 1][b'type'] and blocks[i][
             b'type'
-        ] in (b'bullet', b'option', b'field',):
+        ] in (
+            b'bullet',
+            b'option',
+            b'field',
+        ):
             i += 1
         elif not blocks[i - 1][b'lines']:
             # no lines in previous block, do not separate
--- a/mercurial/narrowspec.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/narrowspec.py	Tue Jan 19 21:48:43 2021 +0530
@@ -226,7 +226,7 @@
 
 
 def restrictpatterns(req_includes, req_excludes, repo_includes, repo_excludes):
-    r""" Restricts the patterns according to repo settings,
+    r"""Restricts the patterns according to repo settings,
     results in a logical AND operation
 
     :param req_includes: requested includes
--- a/mercurial/obsolete.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/obsolete.py	Tue Jan 19 21:48:43 2021 +0530
@@ -74,10 +74,14 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import (
+    bin,
+    hex,
+    nullid,
+)
 from . import (
     encoding,
     error,
-    node,
     obsutil,
     phases,
     policy,
@@ -235,7 +239,7 @@
             parents = ()
         if parents is not None:
             try:
-                parents = tuple(node.bin(p) for p in parents)
+                parents = tuple(bin(p) for p in parents)
                 # if parent content is not a nodeid, drop the data
                 for p in parents:
                     if len(p) != 20:
@@ -262,7 +266,7 @@
             # mark that we explicitly recorded no parents
             metadata[b'p0'] = b''
         for i, p in enumerate(parents, 1):
-            metadata[b'p%i' % i] = node.hex(p)
+            metadata[b'p%i' % i] = hex(p)
     metadata = _fm0encodemeta(metadata)
     numsuc = len(sucs)
     format = _fm0fixed + (_fm0node * numsuc)
@@ -529,7 +533,7 @@
     subtle handling.
     """
     for mark in markers:
-        if node.nullid in mark[1]:
+        if nullid in mark[1]:
             raise error.Abort(
                 _(
                     b'bad obsolescence marker detected: '
@@ -639,7 +643,7 @@
                     raise ValueError(succ)
         if prec in succs:
             raise ValueError(
-                'in-marker cycle with %s' % pycompat.sysstr(node.hex(prec))
+                'in-marker cycle with %s' % pycompat.sysstr(hex(prec))
             )
 
         metadata = tuple(sorted(pycompat.iteritems(metadata)))
@@ -998,8 +1002,7 @@
 
 @cachefor(b'contentdivergent')
 def _computecontentdivergentset(repo):
-    """the set of rev that compete to be the final successors of some revision.
-    """
+    """the set of revs that compete to be the final successors of some revision."""
     divergent = set()
     obsstore = repo.obsstore
     newermap = {}
@@ -1032,7 +1035,7 @@
         folddigest.update(p.node())
     # Since fold only has to compete against fold for the same successors, it
     # seems fine to use a small ID. Smaller ID save space.
-    return node.hex(folddigest.digest())[:8]
+    return hex(folddigest.digest())[:8]
 
 
 def createmarkers(
--- a/mercurial/obsutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/obsutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -10,11 +10,14 @@
 import re
 
 from .i18n import _
+from .node import (
+    hex,
+    short,
+)
 from . import (
     diffutil,
     encoding,
     error,
-    node as nodemod,
     phases,
     pycompat,
     util,
@@ -381,7 +384,7 @@
 
 
 def metanotblacklisted(metaitem):
-    """ Check that the key of a meta item (extrakey, extravalue) does not
+    """Check that the key of a meta item (extrakey, extravalue) does not
     match at least one of the blacklist pattern
     """
     metakey = metaitem[0]
@@ -439,7 +442,7 @@
 
 
 def geteffectflag(source, successors):
-    """ From an obs-marker relation, compute what changed between the
+    """From an obs-marker relation, compute what changed between the
     predecessor and the successor.
     """
     effects = 0
@@ -816,7 +819,7 @@
 
 
 def _getobsfate(successorssets):
-    """ Compute a changeset obsolescence fate based on its successorssets.
+    """Compute a changeset obsolescence fate based on its successorssets.
     Successors can be the tipmost ones or the immediate ones. This function's
     return values are not meant to be shown directly to users; they are meant to
     be used by internal functions only.
@@ -843,7 +846,7 @@
 
 
 def obsfateverb(successorset, markers):
-    """ Return the verb summarizing the successorset and potentially using
+    """Return the verb summarizing the successorset and potentially using
     information from the markers
     """
     if not successorset:
@@ -856,14 +859,12 @@
 
 
 def markersdates(markers):
-    """returns the list of dates for a list of markers
-    """
+    """returns the list of dates for a list of markers"""
     return [m[4] for m in markers]
 
 
 def markersusers(markers):
-    """ Returns a sorted list of markers users without duplicates
-    """
+    """Returns a sorted list of markers users without duplicates"""
     markersmeta = [dict(m[3]) for m in markers]
     users = {
         encoding.tolocal(meta[b'user'])
@@ -875,8 +876,7 @@
 
 
 def markersoperations(markers):
-    """ Returns a sorted list of markers operations without duplicates
-    """
+    """Returns a sorted list of markers operations without duplicates"""
     markersmeta = [dict(m[3]) for m in markers]
     operations = {
         meta.get(b'operation') for meta in markersmeta if meta.get(b'operation')
@@ -886,7 +886,7 @@
 
 
 def obsfateprinter(ui, repo, successors, markers, formatctx):
-    """ Build a obsfate string for a single successorset using all obsfate
+    """Build an obsfate string for a single successorset using all obsfate
     related functions defined in obsutil
     """
     quiet = ui.quiet
@@ -950,8 +950,7 @@
 
 
 def _getfilteredreason(repo, changeid, ctx):
-    """return a human-friendly string on why a obsolete changeset is hidden
-    """
+    """return a human-friendly string on why an obsolete changeset is hidden"""
     successors = successorssets(repo, ctx.node())
     fate = _getobsfate(successors)
 
@@ -961,13 +960,13 @@
     elif fate == b'diverged':
         return filteredmsgtable[b'diverged'] % changeid
     elif fate == b'superseded':
-        single_successor = nodemod.short(successors[0][0])
+        single_successor = short(successors[0][0])
         return filteredmsgtable[b'superseded'] % (changeid, single_successor)
     elif fate == b'superseded_split':
 
         succs = []
         for node_id in successors[0]:
-            succs.append(nodemod.short(node_id))
+            succs.append(short(node_id))
 
         if len(succs) <= 2:
             fmtsuccs = b', '.join(succs)
@@ -1044,7 +1043,7 @@
                     b'instability': b'content-divergent',
                     b'divergentnodes': divnodes,
                     b'reason': b'predecessor',
-                    b'node': nodemod.hex(dset[b'commonpredecessor']),
+                    b'node': hex(dset[b'commonpredecessor']),
                 }
             )
     return result
--- a/mercurial/parser.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/parser.py	Tue Jan 19 21:48:43 2021 +0530
@@ -406,12 +406,11 @@
 
 
 def parseerrordetail(inst):
-    """Compose error message from specified ParseError object
-    """
-    if len(inst.args) > 1:
-        return _(b'at %d: %s') % (inst.args[1], inst.args[0])
+    """Compose error message from specified ParseError object"""
+    if inst.location is not None:
+        return _(b'at %d: %s') % (inst.location, inst.message)
     else:
-        return inst.args[0]
+        return inst.message
 
 
 class alias(object):
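
parseerrordetail() above now reads the structured location/message attributes
instead of indexing into ParseError.args. A self-contained sketch of the same
accessor pattern, using a stand-in exception class rather than the real
error.ParseError:

    class FakeParseError(Exception):  # stand-in for error.ParseError
        def __init__(self, message, location=None):
            super(FakeParseError, self).__init__(message)
            self.message = message
            self.location = location

    def parseerrordetail(inst):
        """Compose an error message from a ParseError-like object."""
        if inst.location is not None:
            return b'at %d: %s' % (inst.location, inst.message)
        return inst.message

    err = FakeParseError(b'unexpected token', 5)
    assert parseerrordetail(err) == b'at 5: unexpected token'
    assert parseerrordetail(FakeParseError(b'bad syntax')) == b'bad syntax'
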
--- a/mercurial/patch.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/patch.py	Tue Jan 19 21:48:43 2021 +0530
@@ -200,7 +200,7 @@
 
 @contextlib.contextmanager
 def extract(ui, fileobj):
-    '''extract patch from data read from fileobj.
+    """extract patch from data read from fileobj.
 
     patch can be a normal patch or contained in an email message.
 
@@ -214,7 +214,7 @@
       - p1,
       - p2.
     Any item can be missing from the dictionary. If filename is missing,
-    fileobj did not contain a patch. Caller must unlink filename when done.'''
+    fileobj did not contain a patch. Caller must unlink filename when done."""
 
     fd, tmpname = pycompat.mkstemp(prefix=b'hg-patch-')
     tmpfp = os.fdopen(fd, 'wb')
@@ -393,7 +393,7 @@
     gp = None
     gitpatches = []
     for line in lr:
-        line = line.rstrip(b' \r\n')
+        line = line.rstrip(b'\r\n')
         if line.startswith(b'diff --git a/'):
             m = gitre.match(line)
             if m:
@@ -905,8 +905,7 @@
 
 
 class header(object):
-    """patch header
-    """
+    """patch header"""
 
     diffgit_re = re.compile(b'diff --git a/(.*) b/(.*)$')
     diff_re = re.compile(b'diff -r .* (.*)$')
@@ -1272,7 +1271,7 @@
             elif r == 6:  # all
                 ret = skipall = True
             elif r == 7:  # quit
-                raise error.Abort(_(b'user quit'))
+                raise error.CanceledError(_(b'user quit'))
             return ret, skipfile, skipall, newpatches
 
     seen = set()
@@ -1854,7 +1853,7 @@
 
 
 def pathtransform(path, strip, prefix):
-    '''turn a path from a patch into a path suitable for the repository
+    """turn a path from a patch into a path suitable for the repository
 
     prefix, if not empty, is expected to be normalized with a / at the end.
 
@@ -1873,7 +1872,7 @@
     >>> pathtransform(b'a/b/c', 3, b'')
     Traceback (most recent call last):
     PatchError: unable to strip away 1 of 3 dirs from a/b/c
-    '''
+    """
     pathlen = len(path)
     i = 0
     if strip == 0:
@@ -2074,7 +2073,7 @@
                 yield b'file', (afile, bfile, h, gp and gp.copy() or None)
             yield b'hunk', h
         elif x.startswith(b'diff --git a/'):
-            m = gitre.match(x.rstrip(b' \r\n'))
+            m = gitre.match(x.rstrip(b'\r\n'))
             if not m:
                 continue
             if gitpatches is None:
@@ -2503,7 +2502,7 @@
     copysourcematch=None,
     hunksfilterfn=None,
 ):
-    '''yields diff of changes to files between two nodes, or node and
+    """yields diff of changes to files between two nodes, or node and
     working directory.
 
     if node1 is None, use first dirstate parent instead.
@@ -2531,7 +2530,7 @@
 
     hunksfilterfn, if not None, should be a function taking a filectx and
     hunks generator that may yield filtered hunks.
-    '''
+    """
     if not node1 and not node2:
         node1 = repo.dirstate.p1()
 
@@ -2886,10 +2885,10 @@
 
 
 def _filepairs(modified, added, removed, copy, opts):
-    '''generates tuples (f1, f2, copyop), where f1 is the name of the file
+    """generates tuples (f1, f2, copyop), where f1 is the name of the file
     before and f2 is the name after. For added files, f1 will be None,
     and for removed files, f2 will be None. copyop may be set to None, 'copy'
-    or 'rename' (the latter two only if opts.git is set).'''
+    or 'rename' (the latter two only if opts.git is set)."""
     gone = set()
 
     copyto = {v: k for k, v in copy.items()}
@@ -2948,13 +2947,13 @@
     losedatafn,
     pathfn,
 ):
-    '''given input data, generate a diff and yield it in blocks
+    """given input data, generate a diff and yield it in blocks
 
     If generating a diff would lose data like flags or binary data and
     losedatafn is not None, it will be called.
 
     pathfn is applied to every path in the diff output.
-    '''
+    """
 
     if opts.noprefix:
         aprefix = bprefix = b''
@@ -3079,7 +3078,7 @@
 
 
 def diffcontent(data1, data2, header, binary, opts):
-    """ diffs two versions of a file.
+    """diffs two versions of a file.
 
     data1 and data2 are tuples containing:
 
@@ -3241,9 +3240,9 @@
 
 
 def diffstatui(*args, **kw):
-    '''like diffstat(), but yields 2-tuples of (output, label) for
+    """like diffstat(), but yields 2-tuples of (output, label) for
     ui.write()
-    '''
+    """
 
     for line in diffstat(*args, **kw).splitlines():
         if line and line[-1] in b'+-':
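
Besides the docstring changes and the error.CanceledError switch, the subtle
fix in the patch.py hunks is changing line.rstrip(b' \r\n') to
line.rstrip(b'\r\n') in the two places that parse "diff --git" headers:
stripping spaces there would silently drop a trailing space that is part of the
filename. A quick standalone illustration (the filename is hypothetical):

    # Header for a file literally named "foo " (with a trailing space).
    line = b'diff --git a/foo  b/foo \r\n'

    # Old behaviour: the significant trailing space is lost with the newline.
    assert line.rstrip(b' \r\n') == b'diff --git a/foo  b/foo'

    # New behaviour: only the line ending is stripped; the filename survives.
    assert line.rstrip(b'\r\n') == b'diff --git a/foo  b/foo '
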
--- a/mercurial/pathutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pathutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -24,7 +24,7 @@
 
 
 class pathauditor(object):
-    '''ensure that a filesystem path contains no banned components.
+    """ensure that a filesystem path contains no banned components.
     the following properties of a path are checked:
 
     - ends with a directory separator
@@ -44,7 +44,7 @@
     If 'cached' is set to True, audited paths and sub-directories are cached.
     Be careful to not keep the cache of unmanaged directories for long because
     audited paths may be replaced with symlinks.
-    '''
+    """
 
     def __init__(self, root, callback=None, realfs=True, cached=False):
         self.audited = set()
@@ -59,8 +59,8 @@
             self.normcase = lambda x: x
 
     def __call__(self, path, mode=None):
-        '''Check the relative path.
-        path may contain a pattern (e.g. foodir/**.txt)'''
+        """Check the relative path.
+        path may contain a pattern (e.g. foodir/**.txt)"""
 
         path = util.localpath(path)
         normpath = self.normcase(path)
@@ -164,7 +164,7 @@
 
 
 def canonpath(root, cwd, myname, auditor=None):
-    '''return the canonical path of myname, given cwd and root
+    """return the canonical path of myname, given cwd and root
 
     >>> def check(root, cwd, myname):
     ...     a = pathauditor(root, realfs=False)
@@ -204,7 +204,7 @@
     'filename'
     >>> unixonly(b'/repo', b'/repo/subdir', b'filename', b'subdir/filename')
     'subdir/filename'
-    '''
+    """
     if util.endswithsep(root):
         rootsep = root
     else:
@@ -266,7 +266,7 @@
 
 
 def normasprefix(path):
-    '''normalize the specified path as path prefix
+    """normalize the specified path as path prefix
 
     Returned value can be used safely for "p.startswith(prefix)",
     "p[len(prefix):]", and so on.
@@ -280,7 +280,7 @@
     '/foo/bar/'
     >>> normasprefix(b'/').replace(pycompat.ossep, b'/')
     '/'
-    '''
+    """
     d, p = os.path.splitdrive(path)
     if len(p) != len(pycompat.ossep):
         return path + pycompat.ossep
@@ -300,9 +300,9 @@
     '''a multiset of directory names from a set of file paths'''
 
     def __init__(self, map, skip=None):
-        '''
+        """
         a dict map indicates a dirstate while a list indicates a manifest
-        '''
+        """
         self._dirs = {}
         addpath = self.addpath
         if isinstance(map, dict) and skip is not None:
--- a/mercurial/phases.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/phases.py	Tue Jan 19 21:48:43 2021 +0530
@@ -510,21 +510,20 @@
         tr.addfilegenerator(b'phase', (b'phaseroots',), self._write)
         tr.hookargs[b'phases_moved'] = b'1'
 
-    def registernew(self, repo, tr, targetphase, nodes):
+    def registernew(self, repo, tr, targetphase, revs):
         repo = repo.unfiltered()
-        self._retractboundary(repo, tr, targetphase, nodes)
+        self._retractboundary(repo, tr, targetphase, [], revs=revs)
         if tr is not None and b'phases' in tr.changes:
             phasetracking = tr.changes[b'phases']
-            torev = repo.changelog.rev
             phase = self.phase
-            revs = [torev(node) for node in nodes]
-            revs.sort()
-            for rev in revs:
+            for rev in sorted(revs):
                 revphase = phase(repo, rev)
                 _trackphasechange(phasetracking, rev, None, revphase)
         repo.invalidatevolatilesets()
 
-    def advanceboundary(self, repo, tr, targetphase, nodes, dryrun=None):
+    def advanceboundary(
+        self, repo, tr, targetphase, nodes, revs=None, dryrun=None
+    ):
         """Set all 'nodes' to phase 'targetphase'
 
         Nodes with a phase lower than 'targetphase' are not affected.
@@ -535,26 +534,27 @@
         """
         # Be careful to preserve shallow-copied values: do not update
         # phaseroots values, replace them.
+        if revs is None:
+            revs = []
         if tr is None:
             phasetracking = None
         else:
             phasetracking = tr.changes.get(b'phases')
 
         repo = repo.unfiltered()
+        revs = [repo[n].rev() for n in nodes] + [r for r in revs]
 
         changes = set()  # set of revisions to be changed
         delroots = []  # set of root deleted by this path
         for phase in (phase for phase in allphases if phase > targetphase):
             # filter nodes that are not in a compatible phase already
-            nodes = [
-                n for n in nodes if self.phase(repo, repo[n].rev()) >= phase
-            ]
-            if not nodes:
+            revs = [rev for rev in revs if self.phase(repo, rev) >= phase]
+            if not revs:
                 break  # no roots to move anymore
 
             olds = self.phaseroots[phase]
 
-            affected = repo.revs(b'%ln::%ln', olds, nodes)
+            affected = repo.revs(b'%ln::%ld', olds, revs)
             changes.update(affected)
             if dryrun:
                 continue
@@ -611,9 +611,11 @@
                     _trackphasechange(phasetracking, r, phase, targetphase)
         repo.invalidatevolatilesets()
 
-    def _retractboundary(self, repo, tr, targetphase, nodes):
+    def _retractboundary(self, repo, tr, targetphase, nodes, revs=None):
         # Be careful to preserve shallow-copied values: do not update
         # phaseroots values, replace them.
+        if revs is None:
+            revs = []
         if targetphase in (archived, internal) and not supportinternal(repo):
             name = phasenames[targetphase]
             msg = b'this repository does not support the %s phase' % name
@@ -624,7 +626,7 @@
         tonode = repo.changelog.node
         currentroots = {torev(node) for node in self.phaseroots[targetphase]}
         finalroots = oldroots = set(currentroots)
-        newroots = [torev(node) for node in nodes]
+        newroots = [torev(node) for node in nodes] + [r for r in revs]
         newroots = [
             rev for rev in newroots if self.phase(repo, rev) < targetphase
         ]
@@ -679,7 +681,7 @@
         self.invalidate()
 
 
-def advanceboundary(repo, tr, targetphase, nodes, dryrun=None):
+def advanceboundary(repo, tr, targetphase, nodes, revs=None, dryrun=None):
     """Add nodes to a phase changing other nodes phases if necessary.
 
     This function move boundary *forward* this means that all nodes
@@ -691,9 +693,11 @@
 
     Returns a set of revs whose phase is changed or should be changed
     """
+    if revs is None:
+        revs = []
     phcache = repo._phasecache.copy()
     changes = phcache.advanceboundary(
-        repo, tr, targetphase, nodes, dryrun=dryrun
+        repo, tr, targetphase, nodes, revs=revs, dryrun=dryrun
     )
     if not dryrun:
         repo._phasecache.replace(phcache)
@@ -713,14 +717,14 @@
     repo._phasecache.replace(phcache)
 
 
-def registernew(repo, tr, targetphase, nodes):
+def registernew(repo, tr, targetphase, revs):
     """register a new revision and its phase
 
     Code adding revisions to the repository should use this function to
     set new changeset in their target phase (or higher).
     """
     phcache = repo._phasecache.copy()
-    phcache.registernew(repo, tr, targetphase, nodes)
+    phcache.registernew(repo, tr, targetphase, revs)
     repo._phasecache.replace(phcache)
 
 
--- a/mercurial/posix.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/posix.py	Tue Jan 19 21:48:43 2021 +0530
@@ -76,7 +76,7 @@
 
 
 def split(p):
-    '''Same as posixpath.split, but faster
+    """Same as posixpath.split, but faster
 
     >>> import posixpath
     >>> for f in [b'/absolute/path/to/file',
@@ -88,7 +88,7 @@
     ...           b'///multiple_leading_separators_at_root',
     ...           b'']:
     ...     assert split(f) == posixpath.split(f), f
-    '''
+    """
     ht = p.rsplit(b'/', 1)
     if len(ht) == 1:
         return b'', p
@@ -183,9 +183,9 @@
 
 
 def copymode(src, dst, mode=None, enforcewritable=False):
-    '''Copy the file mode from the file at path src to dst.
+    """Copy the file mode from the file at path src to dst.
     If src doesn't exist, we're using mode instead. If mode is None, we're
-    using umask.'''
+    using umask."""
     try:
         st_mode = os.lstat(src).st_mode & 0o777
     except OSError as inst:
@@ -359,24 +359,24 @@
 
 
 def checkosfilename(path):
-    '''Check that the base-relative path is a valid filename on this platform.
-    Returns None if the path is ok, or a UI string describing the problem.'''
+    """Check that the base-relative path is a valid filename on this platform.
+    Returns None if the path is ok, or a UI string describing the problem."""
     return None  # on posix platforms, every path is ok
 
 
 def getfsmountpoint(dirpath):
-    '''Get the filesystem mount point from a directory (best-effort)
+    """Get the filesystem mount point from a directory (best-effort)
 
     Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
-    '''
+    """
     return getattr(osutil, 'getfsmountpoint', lambda x: None)(dirpath)
 
 
 def getfstype(dirpath):
-    '''Get the filesystem type name from a directory (best-effort)
+    """Get the filesystem type name from a directory (best-effort)
 
     Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
-    '''
+    """
     return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
 
 
@@ -419,7 +419,7 @@
 if pycompat.isdarwin:
 
     def normcase(path):
-        '''
+        """
         Normalize a filename for OS X-compatible comparison:
         - escape-encode invalid characters
         - decompose to NFD
@@ -434,7 +434,7 @@
         'e\\xcc\\x81'
         >>> normcase(b'\\xb8\\xca\\xc3\\xca\\xbe\\xc8.JPG') # issue3918
         '%b8%ca%c3\\xca\\xbe%c8.jpg'
-        '''
+        """
 
         try:
             return encoding.asciilower(path)  # exception for non-ASCII
@@ -475,7 +475,12 @@
 
     # default mount points
     cygwinmountpoints = sorted(
-        [b"/usr/bin", b"/usr/lib", b"/cygdrive",], reverse=True
+        [
+            b"/usr/bin",
+            b"/usr/lib",
+            b"/cygdrive",
+        ],
+        reverse=True,
     )
 
     # use upper-ing as normcase as same as NTFS workaround
@@ -553,10 +558,10 @@
 
 
 def findexe(command):
-    '''Find executable for command searching like which does.
+    """Find executable for command searching like which does.
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
-    If command isn't found None is returned.'''
+    If command isn't found None is returned."""
     if pycompat.sysplatform == b'OpenVMS':
         return command
 
@@ -587,8 +592,8 @@
 
 
 def statfiles(files):
-    '''Stat each file in files. Yield each stat, or None if a file does not
-    exist or has a type we don't care about.'''
+    """Stat each file in files. Yield each stat, or None if a file does not
+    exist or has a type we don't care about."""
     lstat = os.lstat
     getkind = stat.S_IFMT
     for nf in files:
--- a/mercurial/progress.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/progress.py	Tue Jan 19 21:48:43 2021 +0530
@@ -251,7 +251,7 @@
             return False
 
     def _calibrateestimate(self, topic, now, pos):
-        '''Adjust starttimes and startvals for topic so ETA works better
+        """Adjust starttimes and startvals for topic so ETA works better
 
         If progress is non-linear (ex. get much slower in the last minute),
         it's more friendly to only use a recent time span for ETA and speed
@@ -260,7 +260,7 @@
             [======================================>       ]
                                              ^^^^^^^
                            estimateinterval, only use this for estimation
-        '''
+        """
         interval = self.estimateinterval
         if interval <= 0:
             return
--- a/mercurial/pure/bdiff.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pure/bdiff.py	Tue Jan 19 21:48:43 2021 +0530
@@ -51,7 +51,10 @@
                 shift += 1
         r.append((a1, b1, l1 + shift))
         prev = a2 + shift, b2 + shift, l2 - shift
-    r.append(prev)
+
+    if prev is not None:
+        r.append(prev)
+
     return r
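
The pure-Python bdiff hunk guards the trailing r.append(prev): prev is seeded
as None before the loop (not shown in the hunk) and only becomes a real block
inside it, so with an empty block list the old unconditional append produced a
bogus trailing None entry. A reduced sketch of the shape of the fix, not the
real helper in pure/bdiff.py, which also does the shift arithmetic shown above:

    def normalize_tail(blocks):
        prev = None
        r = []
        for curr in blocks:
            if prev is not None:
                r.append(prev)
            prev = curr
        if prev is not None:  # the guard added by this changeset
            r.append(prev)
        return r

    assert normalize_tail([]) == []  # the old code would have produced [None]
    assert normalize_tail([(0, 0, 1), (2, 2, 1)]) == [(0, 0, 1), (2, 2, 1)]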
 
 
--- a/mercurial/pure/charencode.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pure/charencode.py	Tue Jan 19 21:48:43 2021 +0530
@@ -21,17 +21,17 @@
 
 
 def asciilower(s):
-    '''convert a string to lowercase if ASCII
+    """convert a string to lowercase if ASCII
 
-    Raises UnicodeDecodeError if non-ASCII characters are found.'''
+    Raises UnicodeDecodeError if non-ASCII characters are found."""
     s.decode('ascii')
     return s.lower()
 
 
 def asciiupper(s):
-    '''convert a string to uppercase if ASCII
+    """convert a string to uppercase if ASCII
 
-    Raises UnicodeDecodeError if non-ASCII characters are found.'''
+    Raises UnicodeDecodeError if non-ASCII characters are found."""
     s.decode('ascii')
     return s.upper()
 
--- a/mercurial/pure/mpatch.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pure/mpatch.py	Tue Jan 19 21:48:43 2021 +0530
@@ -15,8 +15,7 @@
 
 
 class mpatchError(Exception):
-    """error raised when a delta cannot be decoded
-    """
+    """error raised when a delta cannot be decoded"""
 
 
 # This attempts to apply a series of patches in time proportional to
--- a/mercurial/pure/osutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pure/osutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -39,7 +39,7 @@
 
 
 def listdir(path, stat=False, skip=None):
-    '''listdir(path, stat=False) -> list_of_tuples
+    """listdir(path, stat=False) -> list_of_tuples
 
     Return a sorted list containing information about the entries
     in the directory.
@@ -51,7 +51,7 @@
     Otherwise, each element is a 2-tuple:
 
       (name, type)
-    '''
+    """
     result = []
     prefix = path
     if not prefix.endswith(pycompat.ossep):
@@ -222,7 +222,7 @@
         )
 
     class posixfile(object):
-        '''a file object aiming for POSIX-like semantics
+        """a file object aiming for POSIX-like semantics
 
         CPython's open() returns a file that was opened *without* setting the
         _FILE_SHARE_DELETE flag, which causes rename and unlink to abort.
@@ -231,7 +231,7 @@
         renamed and deleted while they are held open.
         Note that if a file opened with posixfile is unlinked, the file
         remains but cannot be opened again or be recreated under the same name,
-        until all reading processes have closed the file.'''
+        until all reading processes have closed the file."""
 
         def __init__(self, name, mode=b'r', bufsize=-1):
             if b'b' in mode:
@@ -290,10 +290,11 @@
             return getattr(self._file, name)
 
         def __setattr__(self, name, value):
-            '''mimics the read-only attributes of Python file objects
+            """mimics the read-only attributes of Python file objects
             by raising 'TypeError: readonly attribute' if someone tries:
               f = posixfile('foo.txt')
-              f.name = 'bla'  '''
+              f.name = 'bla'
+            """
             return self._file.__setattr__(name, value)
 
         def __enter__(self):
--- a/mercurial/pure/parsers.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pure/parsers.py	Tue Jan 19 21:48:43 2021 +0530
@@ -94,7 +94,8 @@
     def append(self, tup):
         if '_nodemap' in vars(self):
             self._nodemap[tup[7]] = len(self)
-        self._extra.append(tup)
+        data = _pack(indexformatng, *tup)
+        self._extra.append(data)
 
     def _check_index(self, i):
         if not isinstance(i, int):
@@ -107,14 +108,13 @@
             return nullitem
         self._check_index(i)
         if i >= self._lgt:
-            return self._extra[i - self._lgt]
-        index = self._calculate_index(i)
-        r = struct.unpack(indexformatng, self._data[index : index + indexsize])
-        if i == 0:
-            e = list(r)
-            type = gettype(e[0])
-            e[0] = offset_type(0, type)
-            return tuple(e)
+            data = self._extra[i - self._lgt]
+        else:
+            index = self._calculate_index(i)
+            data = self._data[index : index + indexsize]
+        r = _unpack(indexformatng, data)
+        if self._lgt and i == 0:
+            r = (offset_type(0, gettype(r[0])),) + r[1:]
         return r
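With the change above, _extra holds struct-packed bytes instead of tuples, so entries read from disk and entries appended in memory share one unpack path. A minimal sketch of that approach, using a hypothetical fixed-width format rather than Mercurial's real indexformatng::

    import struct

    ENTRY = struct.Struct('>QiiQ')   # hypothetical entry layout, 4 fields

    class PackedList(object):
        def __init__(self):
            self._extra = []                      # packed bytes per entry

        def append(self, tup):
            self._extra.append(ENTRY.pack(*tup))  # normalize to bytes on insert

        def __getitem__(self, i):
            return ENTRY.unpack(self._extra[i])   # unpack lazily on access

    lst = PackedList()
    lst.append((1, 2, 3, 4))
    assert lst[0] == (1, 2, 3, 4)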
 
 
@@ -234,8 +234,7 @@
 
 
 def parse_index_devel_nodemap(data, inline):
-    """like parse_index2, but alway return a PersistentNodeMapIndexObject
-    """
+    """like parse_index2, but always return a PersistentNodeMapIndexObject"""
     return PersistentNodeMapIndexObject(data), None
 
 
--- a/mercurial/pycompat.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/pycompat.py	Tue Jan 19 21:48:43 2021 +0530
@@ -335,7 +335,7 @@
     def strkwargs(dic):
         """
         Converts the keys of a python dictionary to str i.e. unicodes so that
-        they can be passed as keyword arguments as dictonaries with bytes keys
+        they can be passed as keyword arguments as dictionaries with bytes keys
         can't be passed as keyword arguments to functions on Python 3.
         """
         dic = {k.decode('latin-1'): v for k, v in dic.items()}
@@ -343,7 +343,7 @@
 
     def byteskwargs(dic):
         """
-        Converts keys of python dictonaries to bytes as they were converted to
+        Converts keys of python dictionaries to bytes as they were converted to
         str to pass that dictionary as a keyword argument on Python 3.
         """
         dic = {k.encode('latin-1'): v for k, v in dic.items()}
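Beyond the spelling fixes, it may help to see how the two helpers pair up. A stand-alone re-implementation for illustration only (the real versions live in pycompat and are defined for Python 3)::

    def strkwargs(dic):
        return {k.decode('latin-1'): v for k, v in dic.items()}

    def byteskwargs(dic):
        return {k.encode('latin-1'): v for k, v in dic.items()}

    def greet(**kwargs):                # keyword names must be str on Python 3
        return kwargs['name']

    opts = {b'name': b'alice'}          # Mercurial-style dict with bytes keys
    assert greet(**strkwargs(opts)) == b'alice'
    assert byteskwargs(strkwargs(opts)) == opts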
--- a/mercurial/rcutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/rcutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -39,13 +39,13 @@
 
 
 def envrcitems(env=None):
-    '''Return [(section, name, value, source)] config items.
+    """Return [(section, name, value, source)] config items.
 
     The config items are extracted from environment variables specified by env,
     used to override systemrc, but not userrc.
 
     If env is not provided, encoding.environ will be used.
-    '''
+    """
     if env is None:
         env = encoding.environ
     checklist = [
@@ -73,7 +73,7 @@
 
 
 def rccomponents():
-    '''return an ordered [(type, obj)] about where to load configs.
+    """return an ordered list [(type, obj)] describing where to load configs.
 
     respect $HGRCPATH. if $HGRCPATH is empty, only .hg/hgrc of current repo is
     used. if $HGRCPATH is not set, the platform default will be used.
@@ -84,7 +84,7 @@
     obj is a string, and is the config file path. if type is 'items', obj is a
     list of (section, name, value, source) that should fill the config directly.
     If type is 'resource', obj is a tuple of (package name, resource name).
-    '''
+    """
     envrc = (b'items', envrcitems())
 
     if b'HGRCPATH' in encoding.environ:
@@ -108,9 +108,9 @@
 
 
 def defaultpagerenv():
-    '''return a dict of default environment variables and their values,
+    """return a dict of default environment variables and their values,
     intended to be set before starting a pager.
-    '''
+    """
     return {b'LESS': b'FRX', b'LV': b'-c'}
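The rccomponents() docstring earlier in this file describes an ordered list of (type, obj) pairs. A hedged sketch of how a consumer might dispatch on them; the callback names are illustrative, not Mercurial APIs::

    def load_configs(components, read_file, read_resource, add_items):
        for kind, obj in components:
            if kind == b'path':
                read_file(obj)          # obj is a config file path
            elif kind == b'items':
                add_items(obj)          # obj is [(section, name, value, source)]
            elif kind == b'resource':
                read_resource(*obj)     # obj is (package name, resource name)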
 
 
--- a/mercurial/registrar.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/registrar.py	Tue Jan 19 21:48:43 2021 +0530
@@ -37,7 +37,7 @@
 
         keyword = registrar.keyword()
 
-        @keyword('bar')
+        @keyword(b'bar')
         def barfunc(*args, **kwargs):
             '''Explanation of bar keyword ....
             '''
@@ -95,8 +95,7 @@
         self._table.update(registrarbase._table)
 
     def _parsefuncdecl(self, decl):
-        """Parse function declaration and return the name of function in it
-        """
+        """Parse function declaration and return the name of function in it"""
         i = decl.find(b'(')
         if i >= 0:
             return decl[:i]
@@ -121,8 +120,7 @@
         return self._docformat % (decl, doc)
 
     def _extrasetup(self, name, func):
-        """Execute extra setup for registered function, if needed
-        """
+        """Execute extra setup for registered function, if needed"""
 
 
 class command(_funcregistrarbase):
@@ -234,6 +232,12 @@
             self._table[name] = func, list(options)
         return func
 
+    def rename(self, old, new):
+        """rename a command. Used to add aliases, debugstrip ->
+        debugstrip|strip
+        """
+        self._table[new] = self._table.pop(old)
+
 
 INTENT_READONLY = b'readonly'
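A small, self-contained illustration of what the new rename() does to the command table; a plain dict stands in for the real table, and the alias mirrors the debugstrip -> debugstrip|strip example from the docstring::

    table = {b'debugstrip': (lambda ui, repo: None, [])}

    def rename(table, old, new):
        table[new] = table.pop(old)     # same move the new method performs

    rename(table, b'debugstrip', b'debugstrip|strip')
    assert list(table) == [b'debugstrip|strip']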
 
@@ -245,7 +249,7 @@
 
         revsetpredicate = registrar.revsetpredicate()
 
-        @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
+        @revsetpredicate(b'mypredicate(arg1, arg2[, arg3])')
         def mypredicatefunc(repo, subset, x):
             '''Explanation of this revset predicate ....
             '''
@@ -295,7 +299,7 @@
 
         filesetpredicate = registrar.filesetpredicate()
 
-        @filesetpredicate('mypredicate()')
+        @filesetpredicate(b'mypredicate()')
         def mypredicatefunc(mctx, x):
             '''Explanation of this fileset predicate ....
             '''
@@ -339,8 +343,7 @@
 
 
 class _templateregistrarbase(_funcregistrarbase):
-    """Base of decorator to register functions as template specific one
-    """
+    """Base of decorator to register functions as template specific one"""
 
     _docformat = b":%s: %s"
 
@@ -353,7 +356,7 @@
         templatekeyword = registrar.templatekeyword()
 
         # new API (since Mercurial 4.6)
-        @templatekeyword('mykeyword', requires={'repo', 'ctx'})
+        @templatekeyword(b'mykeyword', requires={b'repo', b'ctx'})
         def mykeywordfunc(context, mapping):
             '''Explanation of this template keyword ....
             '''
@@ -385,7 +388,7 @@
 
         templatefilter = registrar.templatefilter()
 
-        @templatefilter('myfilter', intype=bytes)
+        @templatefilter(b'myfilter', intype=bytes)
         def myfilterfunc(text):
             '''Explanation of this template filter ....
             '''
@@ -417,8 +420,8 @@
 
         templatefunc = registrar.templatefunc()
 
-        @templatefunc('myfunc(arg1, arg2[, arg3])', argspec='arg1 arg2 arg3',
-                      requires={'ctx'})
+        @templatefunc(b'myfunc(arg1, arg2[, arg3])', argspec=b'arg1 arg2 arg3',
+                      requires={b'ctx'})
         def myfuncfunc(context, mapping, args):
             '''Explanation of this template function ....
             '''
@@ -457,7 +460,7 @@
 
         internalmerge = registrar.internalmerge()
 
-        @internalmerge('mymerge', internalmerge.mergeonly,
+        @internalmerge(b'mymerge', internalmerge.mergeonly,
                        onfailure=None, precheck=None,
                        binary=False, symlink=False):
         def mymergefunc(repo, mynode, orig, fcd, fco, fca,
--- a/mercurial/repair.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/repair.py	Tue Jan 19 21:48:43 2021 +0530
@@ -209,7 +209,8 @@
                 # transaction and makes assumptions that file storage is
                 # using append-only files. We'll need some kind of storage
                 # API to handle stripping for us.
-                offset = len(tr._entries)
+                oldfiles = set(tr._offsetmap.keys())
+                oldfiles.update(tr._newfiles)
 
                 tr.startgroup()
                 cl.strip(striprev, tr)
@@ -219,8 +220,11 @@
                     repo.file(fn).strip(striprev, tr)
                 tr.endgroup()
 
-                for i in pycompat.xrange(offset, len(tr._entries)):
-                    file, troffset, ignore = tr._entries[i]
+                entries = tr.readjournal()
+
+                for file, troffset in entries:
+                    if file in oldfiles:
+                        continue
                     with repo.svfs(file, b'a', checkambig=True) as fp:
                         fp.truncate(troffset)
                     if troffset == 0:
--- a/mercurial/repoview.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/repoview.py	Tue Jan 19 21:48:43 2021 +0530
@@ -48,8 +48,7 @@
 
 
 def pinnedrevs(repo):
-    """revisions blocking hidden changesets from being filtered
-    """
+    """revisions blocking hidden changesets from being filtered"""
 
     cl = repo.changelog
     pinned = set()
@@ -160,7 +159,7 @@
     This filters out any mutable changeset and any public changeset that may be
     impacted by something happening to a mutable revision.
 
-    This is achieved by filtered everything with a revision number egal or
+    This is achieved by filtering out everything with a revision number equal
     to or higher than the first mutable changeset."""
     assert not repo.changelog.filteredrevs
     cl = repo.changelog
--- a/mercurial/requirements.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/requirements.py	Tue Jan 19 21:48:43 2021 +0530
@@ -55,7 +55,7 @@
 # A repository with share implemented safely. The repository has different
 # store and working copy requirements i.e. both `.hg/requires` and
 # `.hg/store/requires` are present.
-SHARESAFE_REQUIREMENT = b'exp-sharesafe'
+SHARESAFE_REQUIREMENT = b'share-safe'
 
 # List of requirements which are working directory specific
 # These requirements cannot be shared between repositories if they
--- a/mercurial/revlog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/revlog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -161,6 +161,16 @@
         rl.revision(node)
 
 
+# True if a fast implementation for persistent-nodemap is available
+#
+# We also consider that we have a "fast" implementation in "pure" python because
+# people using pure don't really have performance considerations (and a
+# wheelbarrow of other sources of slowness)
+HAS_FAST_PERSISTENT_NODEMAP = rustrevlog is not None or util.safehasattr(
+    parsers, 'BaseIndexObject'
+)
+
+
 @attr.s(slots=True, frozen=True)
 class _revisioninfo(object):
     """Information about a revision that allows building its fulltext
@@ -668,7 +678,7 @@
         if not self._chunkcache:
             self._chunkclear()
         # revnum -> (chain-length, sum-delta-length)
-        self._chaininfocache = {}
+        self._chaininfocache = util.lrucachedict(500)
         # revlog header -> revlog compressor
         self._decompressors = {}
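The unbounded dict is replaced by util.lrucachedict(500) so the chain-info cache cannot grow without limit in long-lived processes. A minimal standard-library stand-in, shown only to illustrate the bounded-LRU behaviour relied on here (it is not Mercurial's implementation)::

    import collections

    class SmallLRU(object):
        def __init__(self, maxsize=500):
            self._d = collections.OrderedDict()
            self._maxsize = maxsize

        def __contains__(self, key):
            return key in self._d

        def __getitem__(self, key):
            self._d.move_to_end(key)          # mark as most recently used
            return self._d[key]

        def __setitem__(self, key, value):
            self._d[key] = value
            self._d.move_to_end(key)
            if len(self._d) > self._maxsize:
                self._d.popitem(last=False)   # evict least recently used

    cache = SmallLRU(maxsize=2)
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3                            # evicts 'a'
    assert 'a' not in cache and 'c' in cache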
 
@@ -1491,8 +1501,8 @@
 
     def lookup(self, id):
         """locate a node based on:
-            - revision number or str(revision number)
-            - nodeid or subset of hex nodeid
+        - revision number or str(revision number)
+        - nodeid or subset of hex nodeid
         """
         n = self._match(id)
         if n is not None:
@@ -1771,8 +1781,7 @@
             return rev - 1
 
     def issnapshot(self, rev):
-        """tells whether rev is a snapshot
-        """
+        """tells whether rev is a snapshot"""
         if not self._sparserevlog:
             return self.deltaparent(rev) == nullrev
         elif util.safehasattr(self.index, b'issnapshot'):
@@ -1819,7 +1828,7 @@
         elif operation == b'read':
             return flagutil.processflagsread(self, text, flags)
         else:  # write operation
-            return flagutil.processflagswrite(self, text, flags)
+            return flagutil.processflagswrite(self, text, flags, None)
 
     def revision(self, nodeorrev, _df=None, raw=False):
         """return an uncompressed revision of a given node or revision
@@ -2000,21 +2009,13 @@
         ):
             return
 
-        trinfo = tr.find(self.indexfile)
-        if trinfo is None:
+        troffset = tr.findoffset(self.indexfile)
+        if troffset is None:
             raise error.RevlogError(
                 _(b"%s not found in the transaction") % self.indexfile
             )
-
-        trindex = trinfo[2]
-        if trindex is not None:
-            dataoff = self.start(trindex)
-        else:
-            # revlog was stripped at start of transaction, use all leftover data
-            trindex = len(self) - 1
-            dataoff = self.end(tiprev)
-
-        tr.add(self.datafile, dataoff)
+        trindex = 0
+        tr.add(self.datafile, 0)
 
         if fp:
             fp.flush()
@@ -2026,6 +2027,8 @@
         with self._indexfp(b'r') as ifh, self._datafp(b'w') as dfh:
             for r in self:
                 dfh.write(self._getsegmentforrevs(r, r, df=ifh)[1])
+                if troffset <= self.start(r):
+                    trindex = r
 
         with self._indexfp(b'w') as fp:
             self.version &= ~FLAG_INLINE_DATA
@@ -2043,8 +2046,7 @@
         self._chunkclear()
 
     def _nodeduplicatecallback(self, transaction, node):
-        """called when trying to add a node already stored.
-        """
+        """called when trying to add a node already stored."""
 
     def addrevision(
         self,
@@ -2361,14 +2363,21 @@
             ifh.write(entry)
         else:
             offset += curr * self._io.size
-            transaction.add(self.indexfile, offset, curr)
+            transaction.add(self.indexfile, offset)
             ifh.write(entry)
             ifh.write(data[0])
             ifh.write(data[1])
             self._enforceinlinesize(transaction, ifh)
         nodemaputil.setup_persistent_nodemap(transaction, self)
 
-    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
+    def addgroup(
+        self,
+        deltas,
+        linkmapper,
+        transaction,
+        addrevisioncb=None,
+        duplicaterevisioncb=None,
+    ):
         """
         add a delta group
 
@@ -2383,8 +2392,6 @@
         if self._writinghandles:
             raise error.ProgrammingError(b'cannot nest addgroup() calls')
 
-        nodes = []
-
         r = len(self)
         end = 0
         if r:
@@ -2392,10 +2399,10 @@
         ifh = self._indexfp(b"a+")
         isize = r * self._io.size
         if self._inline:
-            transaction.add(self.indexfile, end + isize, r)
+            transaction.add(self.indexfile, end + isize)
             dfh = None
         else:
-            transaction.add(self.indexfile, isize, r)
+            transaction.add(self.indexfile, isize)
             transaction.add(self.datafile, end)
             dfh = self._datafp(b"a+")
 
@@ -2405,6 +2412,7 @@
             ifh.flush()
 
         self._writinghandles = (ifh, dfh)
+        empty = True
 
         try:
             deltacomputer = deltautil.deltacomputer(self)
@@ -2414,11 +2422,12 @@
                 link = linkmapper(linknode)
                 flags = flags or REVIDX_DEFAULT_FLAGS
 
-                nodes.append(node)
-
                 if self.index.has_node(node):
+                    # this can happen if two branches make the same change
                     self._nodeduplicatecallback(transaction, node)
-                    # this can happen if two branches make the same change
+                    if duplicaterevisioncb:
+                        duplicaterevisioncb(self, node)
+                    empty = False
                     continue
 
                 for p in (p1, p2):
@@ -2472,6 +2481,7 @@
 
                 if addrevisioncb:
                     addrevisioncb(self, node)
+                empty = False
 
                 if not dfh and not self._inline:
                     # addrevision switched from inline to conventional
@@ -2486,8 +2496,7 @@
             if dfh:
                 dfh.close()
             ifh.close()
-
-        return nodes
+        return not empty
 
     def iscensored(self, rev):
         """Check if a file revision is censored."""
@@ -2550,7 +2559,7 @@
 
         # then reset internal state in memory to forget those revisions
         self._revisioncache = None
-        self._chaininfocache = {}
+        self._chaininfocache = util.lrucachedict(500)
         self._chunkclear()
 
         del self.index[rev:-1]
--- a/mercurial/revlogutils/nodemap.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/revlogutils/nodemap.py	Tue Jan 19 21:48:43 2021 +0530
@@ -13,11 +13,10 @@
 import re
 import struct
 
-from ..i18n import _
+from ..node import hex
 
 from .. import (
     error,
-    node as nodemod,
     util,
 )
 
@@ -57,8 +56,10 @@
                 data = util.buffer(util.mmapread(fd, data_length))
             else:
                 data = fd.read(data_length)
-    except OSError as e:
-        if e.errno != errno.ENOENT:
+    except (IOError, OSError) as e:
+        if e.errno == errno.ENOENT:
+            return None
+        else:
             raise
     if len(data) < data_length:
         return None
@@ -86,8 +87,7 @@
 
 
 class _NoTransaction(object):
-    """transaction like object to update the nodemap outside a transaction
-    """
+    """transaction like object to update the nodemap outside a transaction"""
 
     def __init__(self):
         self._postclose = {}
@@ -129,8 +129,7 @@
 
 
 def _persist_nodemap(tr, revlog, pending=False):
-    """Write nodemap data on disk for a given revlog
-    """
+    """Write nodemap data on disk for a given revlog"""
     if getattr(revlog, 'filteredrevs', ()):
         raise error.ProgrammingError(
             "cannot persist nodemap of a filtered changelog"
@@ -143,13 +142,6 @@
     ondisk_docket = revlog._nodemap_docket
     feed_data = util.safehasattr(revlog.index, "update_nodemap_data")
     use_mmap = revlog.opener.options.get(b"persistent-nodemap.mmap")
-    mode = revlog.opener.options.get(b"persistent-nodemap.mode")
-    if not can_incremental:
-        msg = _(b"persistent nodemap in strict mode without efficient method")
-        if mode == b'warn':
-            tr._report(b"%s\n" % msg)
-        elif mode == b'strict':
-            raise error.Abort(msg)
 
     data = None
     # first attemp an incremental update of the data
@@ -278,7 +270,7 @@
     """return a new unique identifier.
 
     The identifier is random and composed of ascii characters."""
-    return nodemod.hex(os.urandom(ID_SIZE))
+    return hex(os.urandom(ID_SIZE))
 
 
 class NodeMapDocket(object):
@@ -391,7 +383,9 @@
 #
 #  * value >=  0 -> index of sub-block
 #  * value == -1 -> no value
-#  * value <  -1 -> a revision value: rev = -(value+10)
+#  * value <  -1 -> encoded revision: rev = -(value+2)
+#
+# See REV_OFFSET and _transform_rev below.
 #
 # The implementation focuses on simplicity, not on performance. A Rust
 # implementation should provide an efficient version of the same binary
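Worked example of the encoding described above. REV_OFFSET and _transform_rev are the names the new comment points at; the helpers below are reconstructed from the documented relation rev = -(value+2), not copied from the module::

    REV_OFFSET = 2

    def _transform_rev(rev):
        # encode a revision as a negative block value; the function is its
        # own inverse, so it also decodes: rev = -(value + REV_OFFSET)
        return -(rev + REV_OFFSET)

    def decode_value(value):
        if value >= 0:
            return ('block', value)              # index of a sub-block
        if value == -1:
            return ('none', None)                # no value stored
        return ('rev', _transform_rev(value))    # an encoded revision

    assert _transform_rev(0) == -2
    assert decode_value(-2) == ('rev', 0)
    assert decode_value(-1) == ('none', None)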
@@ -400,15 +394,13 @@
 
 
 def persistent_data(index):
-    """return the persistent binary form for a nodemap for a given index
-    """
+    """return the persistent binary form for a nodemap for a given index"""
     trie = _build_trie(index)
     return _persist_trie(trie)
 
 
 def update_persistent_data(index, root, max_idx, last_rev):
-    """return the incremental update for persistent nodemap from a given index
-    """
+    """return the incremental update for persistent nodemap from a given index"""
     changed_block, trie = _update_trie(index, root, last_rev)
     return (
         changed_block * S_BLOCK.size,
@@ -463,8 +455,8 @@
     """
     root = Block()
     for rev in range(len(index)):
-        hex = nodemod.hex(index[rev][7])
-        _insert_into_block(index, 0, root, rev, hex)
+        current_hex = hex(index[rev][7])
+        _insert_into_block(index, 0, root, rev, current_hex)
     return root
 
 
@@ -472,8 +464,8 @@
     """consume"""
     changed = 0
     for rev in range(last_rev + 1, len(index)):
-        hex = nodemod.hex(index[rev][7])
-        changed += _insert_into_block(index, 0, root, rev, hex)
+        current_hex = hex(index[rev][7])
+        changed += _insert_into_block(index, 0, root, rev, current_hex)
     return changed, root
 
 
@@ -502,7 +494,7 @@
     else:
         # collision with a previously unique prefix, inserting new
         # vertices to fit both entry.
-        other_hex = nodemod.hex(index[entry][7])
+        other_hex = hex(index[entry][7])
         other_rev = entry
         new = Block()
         block[hex_digit] = new
@@ -606,7 +598,7 @@
             ret = 1
         else:
             all_revs.remove(r)
-        nm_rev = _find_node(root, nodemod.hex(index[r][7]))
+        nm_rev = _find_node(root, hex(index[r][7]))
         if nm_rev is None:
             msg = b"  revision node does not match any entries: %d\n" % r
             ui.write_err(msg)
--- a/mercurial/revlogutils/sidedata.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/revlogutils/sidedata.py	Tue Jan 19 21:48:43 2021 +0530
@@ -13,8 +13,8 @@
 The current implementation is experimental and subject to changes. Do not rely
 on it in production.
 
-Sidedata are stored in the revlog itself, withing the revision rawtext. They
-are inserted, removed from it using the flagprocessors mechanism. The following
+Sidedata are stored in the revlog itself, within the revision rawtext. They
+are inserted and removed from it using the flagprocessors mechanism. The following
 format is currently used::
 
     initial header:
@@ -27,7 +27,7 @@
     normal raw text:
         <all bytes remaining in the rawtext>
 
-This is a simple and effective format. It should be enought to experiment with
+This is a simple and effective format. It should be enough to experiment with
 the concept.
 """
 
--- a/mercurial/revset.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/revset.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,6 +11,11 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import (
+    bin,
+    nullrev,
+    wdirrev,
+)
 from . import (
     dagop,
     destutil,
@@ -20,7 +25,6 @@
     grep as grepmod,
     hbisect,
     match as matchmod,
-    node,
     obsolete as obsmod,
     obsutil,
     pathutil,
@@ -55,7 +59,7 @@
 fullreposet = smartset.fullreposet
 
 # revisions not included in all(), but populated if specified
-_virtualrevs = (node.nullrev, node.wdirrev)
+_virtualrevs = (nullrev, wdirrev)
 
 # Constants for ordering requirement, used in getset():
 #
@@ -177,9 +181,9 @@
 def _makerangeset(repo, subset, m, n, order):
     if m == n:
         r = baseset([m])
-    elif n == node.wdirrev:
+    elif n == wdirrev:
         r = spanset(repo, m, len(repo)) + baseset([n])
-    elif m == node.wdirrev:
+    elif m == wdirrev:
         r = baseset([m]) + spanset(repo, repo.changelog.tiprev(), n - 1)
     elif m < n:
         r = spanset(repo, m, n + 1)
@@ -529,8 +533,7 @@
 
 @predicate(b'author(string)', safe=True, weight=10)
 def author(repo, subset, x):
-    """Alias for ``user(string)``.
-    """
+    """Alias for ``user(string)``."""
     # i18n: "author" is a keyword
     n = getstring(x, _(b"author requires a string"))
     kind, pattern, matcher = _substringmatcher(n, casesensitive=False)
@@ -598,7 +601,7 @@
                 bms.add(repo[bmrev].rev())
     else:
         bms = {repo[r].rev() for r in repo._bookmarks.values()}
-    bms -= {node.nullrev}
+    bms -= {nullrev}
     return subset & bms
 
 
@@ -723,7 +726,6 @@
     cs = set()
     pr = repo.changelog.parentrevs
     minrev = parentset.min()
-    nullrev = node.nullrev
     for r in subset:
         if r <= minrev:
             continue
@@ -737,8 +739,7 @@
 
 @predicate(b'children(set)', safe=True)
 def children(repo, subset, x):
-    """Child changesets of changesets in set.
-    """
+    """Child changesets of changesets in set."""
     s = getset(repo, fullreposet(repo), x)
     cs = _children(repo, subset, s)
     return subset & cs
@@ -746,8 +747,7 @@
 
 @predicate(b'closed()', safe=True, weight=10)
 def closed(repo, subset, x):
-    """Changeset is closed.
-    """
+    """Changeset is closed."""
     # i18n: "closed" is a keyword
     getargs(x, 0, 0, _(b"closed takes no arguments"))
     return subset.filter(
@@ -771,8 +771,7 @@
 
 @predicate(b'commonancestors(set)', safe=True)
 def commonancestors(repo, subset, x):
-    """Changesets that are ancestors of every changeset in set.
-    """
+    """Changesets that are ancestors of every changeset in set."""
     startrevs = getset(repo, fullreposet(repo), x, order=anyorder)
     if not startrevs:
         return baseset()
@@ -868,8 +867,7 @@
 
 @predicate(b'date(interval)', safe=True, weight=10)
 def date(repo, subset, x):
-    """Changesets within the interval, see :hg:`help dates`.
-    """
+    """Changesets within the interval, see :hg:`help dates`."""
     # i18n: "date" is a keyword
     ds = getstring(x, _(b"date requires a string"))
     dm = dateutil.matchdate(ds)
@@ -1108,8 +1106,7 @@
 
 @predicate(b'extinct()', safe=True)
 def extinct(repo, subset, x):
-    """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)
-    """
+    """Obsolete changesets with obsolete descendants only. (EXPERIMENTAL)"""
     # i18n: "extinct" is a keyword
     getargs(x, 0, 0, _(b"extinct takes no arguments"))
     extincts = obsmod.getrevs(repo, b'extinct')
@@ -1216,8 +1213,7 @@
 
 @predicate(b'first(set, [n])', safe=True, takeorder=True, weight=0)
 def first(repo, subset, x, order):
-    """An alias for limit().
-    """
+    """An alias for limit()."""
     return limit(repo, subset, x, order)
 
 
@@ -1341,8 +1337,7 @@
 
 @predicate(b'all()', safe=True)
 def getall(repo, subset, x):
-    """All changesets, the same as ``0:tip``.
-    """
+    """All changesets, the same as ``0:tip``."""
     # i18n: "all" is a keyword
     getargs(x, 0, 0, _(b"all takes no arguments"))
     return subset & spanset(repo)  # drop "null" if any
@@ -1404,7 +1399,7 @@
                     b'_matchfiles expected at most one revision'
                 )
             if value == b'':  # empty means working directory
-                rev = node.wdirrev
+                rev = wdirrev
             else:
                 rev = value
         elif prefix == b'd:':
@@ -1424,7 +1419,6 @@
     # This directly read the changelog data as creating changectx for all
     # revisions is quite expensive.
     getfiles = repo.changelog.readfiles
-    wdirrev = node.wdirrev
 
     def matches(x):
         if x == wdirrev:
@@ -1480,8 +1474,7 @@
 
 @predicate(b'head()', safe=True)
 def head(repo, subset, x):
-    """Changeset is a named branch head.
-    """
+    """Changeset is a named branch head."""
     # i18n: "head" is a keyword
     getargs(x, 0, 0, _(b"head takes no arguments"))
     hs = set()
@@ -1493,30 +1486,28 @@
 
 @predicate(b'heads(set)', safe=True, takeorder=True)
 def heads(repo, subset, x, order):
-    """Members of set with no children in set.
-    """
+    """Members of set with no children in set."""
     # argument set should never define order
     if order == defineorder:
         order = followorder
     inputset = getset(repo, fullreposet(repo), x, order=order)
     wdirparents = None
-    if node.wdirrev in inputset:
+    if wdirrev in inputset:
         # a bit slower, but not common so good enough for now
         wdirparents = [p.rev() for p in repo[None].parents()]
         inputset = set(inputset)
-        inputset.discard(node.wdirrev)
+        inputset.discard(wdirrev)
     heads = repo.changelog.headrevs(inputset)
     if wdirparents is not None:
         heads.difference_update(wdirparents)
-        heads.add(node.wdirrev)
+        heads.add(wdirrev)
     heads = baseset(heads)
     return subset & heads
 
 
 @predicate(b'hidden()', safe=True)
 def hidden(repo, subset, x):
-    """Hidden changesets.
-    """
+    """Hidden changesets."""
     # i18n: "hidden" is a keyword
     getargs(x, 0, 0, _(b"hidden takes no arguments"))
     hiddenrevs = repoview.filterrevs(repo, b'visible')
@@ -1546,8 +1537,7 @@
 
 @predicate(b'limit(set[, n[, offset]])', safe=True, takeorder=True, weight=0)
 def limit(repo, subset, x, order):
-    """First n members of set, defaulting to 1, starting from offset.
-    """
+    """First n members of set, defaulting to 1, starting from offset."""
     args = getargsdict(x, b'limit', b'set n offset')
     if b'set' not in args:
         # i18n: "limit" is a keyword
@@ -1571,8 +1561,7 @@
 
 @predicate(b'last(set, [n])', safe=True, takeorder=True)
 def last(repo, subset, x, order):
-    """Last n members of set, defaulting to 1.
-    """
+    """Last n members of set, defaulting to 1."""
     # i18n: "last" is a keyword
     l = getargs(x, 1, 2, _(b"last requires one or two arguments"))
     lim = 1
@@ -1592,8 +1581,7 @@
 
 @predicate(b'max(set)', safe=True)
 def maxrev(repo, subset, x):
-    """Changeset with highest revision number in set.
-    """
+    """Changeset with highest revision number in set."""
     os = getset(repo, fullreposet(repo), x)
     try:
         m = os.max()
@@ -1608,12 +1596,10 @@
 
 @predicate(b'merge()', safe=True)
 def merge(repo, subset, x):
-    """Changeset is a merge changeset.
-    """
+    """Changeset is a merge changeset."""
     # i18n: "merge" is a keyword
     getargs(x, 0, 0, _(b"merge takes no arguments"))
     cl = repo.changelog
-    nullrev = node.nullrev
 
     def ismerge(r):
         try:
@@ -1626,8 +1612,7 @@
 
 @predicate(b'branchpoint()', safe=True)
 def branchpoint(repo, subset, x):
-    """Changesets with more than one child.
-    """
+    """Changesets with more than one child."""
     # i18n: "branchpoint" is a keyword
     getargs(x, 0, 0, _(b"branchpoint takes no arguments"))
     cl = repo.changelog
@@ -1648,8 +1633,7 @@
 
 @predicate(b'min(set)', safe=True)
 def minrev(repo, subset, x):
-    """Changeset with lowest revision number in set.
-    """
+    """Changeset with lowest revision number in set."""
     os = getset(repo, fullreposet(repo), x)
     try:
         m = os.min()
@@ -1709,23 +1693,22 @@
             if name not in ns.deprecated:
                 names.update(repo[n].rev() for n in ns.nodes(repo, name))
 
-    names -= {node.nullrev}
+    names -= {nullrev}
     return subset & names
 
 
 @predicate(b'id(string)', safe=True)
 def node_(repo, subset, x):
-    """Revision non-ambiguously specified by the given hex string prefix.
-    """
+    """Revision non-ambiguously specified by the given hex string prefix."""
     # i18n: "id" is a keyword
     l = getargs(x, 1, 1, _(b"id requires one argument"))
     # i18n: "id" is a keyword
     n = getstring(l[0], _(b"id requires a string"))
     if len(n) == 40:
         try:
-            rn = repo.changelog.rev(node.bin(n))
+            rn = repo.changelog.rev(bin(n))
         except error.WdirUnsupported:
-            rn = node.wdirrev
+            rn = wdirrev
         except (LookupError, TypeError):
             rn = None
     else:
@@ -1737,7 +1720,7 @@
         except LookupError:
             pass
         except error.WdirUnsupported:
-            rn = node.wdirrev
+            rn = wdirrev
 
     if rn is None:
         return baseset()
@@ -1747,8 +1730,7 @@
 
 @predicate(b'none()', safe=True)
 def none(repo, subset, x):
-    """No changesets.
-    """
+    """No changesets."""
     # i18n: "none" is a keyword
     getargs(x, 0, 0, _(b"none takes no arguments"))
     return baseset()
@@ -1869,8 +1851,7 @@
 
 @predicate(b'p1([set])', safe=True)
 def p1(repo, subset, x):
-    """First parent of changesets in set, or the working directory.
-    """
+    """First parent of changesets in set, or the working directory."""
     if x is None:
         p = repo[x].p1().rev()
         if p >= 0:
@@ -1884,7 +1865,7 @@
             ps.add(cl.parentrevs(r)[0])
         except error.WdirUnsupported:
             ps.add(repo[r].p1().rev())
-    ps -= {node.nullrev}
+    ps -= {nullrev}
     # XXX we should turn this into a baseset instead of a set, smartset may do
     # some optimizations from the fact this is a baseset.
     return subset & ps
@@ -1892,8 +1873,7 @@
 
 @predicate(b'p2([set])', safe=True)
 def p2(repo, subset, x):
-    """Second parent of changesets in set, or the working directory.
-    """
+    """Second parent of changesets in set, or the working directory."""
     if x is None:
         ps = repo[x].parents()
         try:
@@ -1913,7 +1893,7 @@
             parents = repo[r].parents()
             if len(parents) == 2:
                 ps.add(parents[1])
-    ps -= {node.nullrev}
+    ps -= {nullrev}
     # XXX we should turn this into a baseset instead of a set, smartset may do
     # some optimizations from the fact this is a baseset.
     return subset & ps
@@ -1940,7 +1920,7 @@
                 up(parentrevs(r))
             except error.WdirUnsupported:
                 up(p.rev() for p in repo[r].parents())
-    ps -= {node.nullrev}
+    ps -= {nullrev}
     return subset & ps
 
 
@@ -2015,7 +1995,7 @@
         else:
             try:
                 parents = cl.parentrevs(r)
-                if parents[1] != node.nullrev:
+                if parents[1] != nullrev:
                     ps.add(parents[1])
             except error.WdirUnsupported:
                 parents = repo[r].parents()
@@ -2305,8 +2285,7 @@
 
 @predicate(b'reverse(set)', safe=True, takeorder=True, weight=0)
 def reverse(repo, subset, x, order):
-    """Reverse order of set.
-    """
+    """Reverse order of set."""
     l = getset(repo, subset, x, order)
     if order == defineorder:
         l.reverse()
@@ -2315,8 +2294,7 @@
 
 @predicate(b'roots(set)', safe=True)
 def roots(repo, subset, x):
-    """Changesets in set with no parent changeset in set.
-    """
+    """Changesets in set with no parent changeset in set."""
     s = getset(repo, fullreposet(repo), x)
     parents = repo.changelog.parentrevs
 
@@ -2556,8 +2534,7 @@
 
 @predicate(b'orphan()', safe=True)
 def orphan(repo, subset, x):
-    """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)
-    """
+    """Non-obsolete changesets with obsolete ancestors. (EXPERIMENTAL)"""
     # i18n: "orphan" is a keyword
     getargs(x, 0, 0, _(b"orphan takes no arguments"))
     orphan = obsmod.getrevs(repo, b'orphan')
@@ -2566,8 +2543,7 @@
 
 @predicate(b'unstable()', safe=True)
 def unstable(repo, subset, x):
-    """Changesets with instabilities. (EXPERIMENTAL)
-    """
+    """Changesets with instabilities. (EXPERIMENTAL)"""
     # i18n: "unstable" is a keyword
     getargs(x, 0, 0, b'unstable takes no arguments')
     _unstable = set()
@@ -2592,8 +2568,8 @@
     """Working directory. (EXPERIMENTAL)"""
     # i18n: "wdir" is a keyword
     getargs(x, 0, 0, _(b"wdir takes no arguments"))
-    if node.wdirrev in subset or isinstance(subset, fullreposet):
-        return baseset([node.wdirrev])
+    if wdirrev in subset or isinstance(subset, fullreposet):
+        return baseset([wdirrev])
     return baseset()
 
 
@@ -2663,7 +2639,7 @@
     if not s:
         return baseset()
     cl = repo.changelog
-    ls = [cl.rev(node.bin(r)) for r in s.split(b'\0')]
+    ls = [cl.rev(bin(r)) for r in s.split(b'\0')]
     s = subset
     return baseset([r for r in ls if r in s])
 
@@ -2715,7 +2691,15 @@
 
 
 def lookupfn(repo):
-    return lambda symbol: scmutil.isrevsymbol(repo, symbol)
+    def fn(symbol):
+        try:
+            return scmutil.isrevsymbol(repo, symbol)
+        except error.AmbiguousPrefixLookupError:
+            raise error.InputError(
+                b'ambiguous revision identifier: %s' % symbol
+            )
+
+    return fn
 
 
 def match(ui, spec, lookup=None):
@@ -2781,8 +2765,7 @@
 
 
 def loadpredicate(ui, extname, registrarobj):
-    """Load revset predicates from specified registrarobj
-    """
+    """Load revset predicates from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         symbols[name] = func
         if func._safe:
--- a/mercurial/revsetlang.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/revsetlang.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,9 +11,9 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import hex
 from . import (
     error,
-    node,
     parser,
     pycompat,
     smartset,
@@ -83,7 +83,7 @@
 
 
 def tokenize(program, lookup=None, syminitletters=None, symletters=None):
-    '''
+    """
     Parse a revset statement into a stream of tokens
 
     ``syminitletters`` is the set of valid characters for the initial
@@ -102,7 +102,7 @@
     >>> list(tokenize(b"@::"))
     [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)]
 
-    '''
+    """
     if not isinstance(program, bytes):
         raise error.ProgrammingError(
             b'revset statement must be bytes, got %r' % program
@@ -558,14 +558,22 @@
 
     >>> _parsewith(b'foo($1)', syminitletters=_aliassyminitletters)
     ('func', ('symbol', 'foo'), ('symbol', '$1'))
-    >>> _parsewith(b'$1')
-    Traceback (most recent call last):
-      ...
-    ParseError: ("syntax error in revset '$1'", 0)
-    >>> _parsewith(b'foo bar')
-    Traceback (most recent call last):
-      ...
-    ParseError: ('invalid token', 4)
+    >>> from . import error
+    >>> from . import pycompat
+    >>> try:
+    ...   _parsewith(b'$1')
+    ... except error.ParseError as e:
+    ...   pycompat.sysstr(e.message)
+    ...   e.location
+    "syntax error in revset '$1'"
+    0
+    >>> try:
+    ...   _parsewith(b'foo bar')
+    ... except error.ParseError as e:
+    ...   pycompat.sysstr(e.message)
+    ...   e.location
+    'invalid token'
+    4
     """
     if lookup and spec.startswith(b'revset(') and spec.endswith(b')'):
         lookup = None
@@ -613,8 +621,7 @@
 
 
 def foldconcat(tree):
-    """Fold elements to be concatenated by `##`
-    """
+    """Fold elements to be concatenated by `##`"""
     if not isinstance(tree, tuple) or tree[0] in (
         b'string',
         b'symbol',
@@ -642,8 +649,8 @@
     try:
         return _parsewith(spec, lookup=lookup)
     except error.ParseError as inst:
-        if len(inst.args) > 1:  # has location
-            loc = inst.args[1]
+        if inst.location is not None:
+            loc = inst.location
             # Remove newlines -- spaces are equivalent whitespace.
             spec = spec.replace(b'\n', b' ')
             # We want the caret to point to the place in the template that
@@ -680,7 +687,7 @@
         parse(arg)  # make sure syntax errors are confined
         return b'(%s)' % arg
     elif c == b'n':
-        return _quote(node.hex(arg))
+        return _quote(hex(arg))
     elif c == b'b':
         try:
             return _quote(arg.branch())
@@ -700,7 +707,7 @@
     elif t == b's':
         return b"_list(%s)" % _quote(b"\0".join(s))
     elif t == b'n':
-        return b"_hexlist('%s')" % b"\0".join(node.hex(a) for a in s)
+        return b"_hexlist('%s')" % b"\0".join(hex(a) for a in s)
     elif t == b'b':
         try:
             return b"_list('%s')" % b"\0".join(a.branch() for a in s)
@@ -734,7 +741,7 @@
 
 
 def formatspec(expr, *args):
-    '''
+    """
     This is a convenience function for using revsets internally, and
     escapes arguments appropriately. Aliases are intentionally ignored
     so that intended expression behavior isn't accidentally subverted.
@@ -769,7 +776,7 @@
     "sort((:), 'desc', 'user')"
     >>> formatspec(b'%ls', [b'a', b"'"])
     "_list('a\\\\x00\\\\'')"
-    '''
+    """
     parsed = _parseargs(expr, args)
     ret = []
     for t, arg in parsed:
--- a/mercurial/rewriteutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/rewriteutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -10,10 +10,13 @@
 import re
 
 from .i18n import _
+from .node import (
+    hex,
+    nullrev,
+)
 
 from . import (
     error,
-    node,
     obsolete,
     obsutil,
     revset,
@@ -30,23 +33,23 @@
 
     Make sure this function is called after taking the lock.
     """
-    if node.nullrev in revs:
+    if nullrev in revs:
         msg = _(b"cannot %s null changeset") % action
         hint = _(b"no changeset checked out")
-        raise error.Abort(msg, hint=hint)
+        raise error.InputError(msg, hint=hint)
 
     if len(repo[None].parents()) > 1:
-        raise error.Abort(_(b"cannot %s while merging") % action)
+        raise error.StateError(_(b"cannot %s while merging") % action)
 
     publicrevs = repo.revs(b'%ld and public()', revs)
     if publicrevs:
         msg = _(b"cannot %s public changesets") % action
         hint = _(b"see 'hg help phases' for details")
-        raise error.Abort(msg, hint=hint)
+        raise error.InputError(msg, hint=hint)
 
     newunstable = disallowednewunstable(repo, revs)
     if newunstable:
-        raise error.Abort(_(b"cannot %s changeset with children") % action)
+        raise error.InputError(_(b"cannot %s changeset with children") % action)
 
 
 def disallowednewunstable(repo, revs):
@@ -113,7 +116,7 @@
         if len(successors) == 1 and len(successors[0]) == 1:
             successor = successors[0][0]
             if successor is not None:
-                newhash = node.hex(successor)
+                newhash = hex(successor)
                 commitmsg = commitmsg.replace(h, newhash[: len(h)])
             else:
                 repo.ui.note(
--- a/mercurial/scmposix.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/scmposix.py	Tue Jan 19 21:48:43 2021 +0530
@@ -27,7 +27,7 @@
         rcs.extend(
             [
                 os.path.join(rcdir, f)
-                for f, kind in util.listdir(rcdir)
+                for f, kind in sorted(util.listdir(rcdir))
                 if f.endswith(b".rc")
             ]
         )
--- a/mercurial/scmutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/scmutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -66,11 +66,11 @@
 
 @attr.s(slots=True, repr=False)
 class status(object):
-    '''Struct with a list of files per status.
+    """Struct with a list of files per status.
 
     The 'deleted', 'unknown' and 'ignored' properties are only
     relevant to the working copy.
-    '''
+    """
 
     modified = attr.ib(default=attr.Factory(list))
     added = attr.ib(default=attr.Factory(list))
@@ -123,9 +123,9 @@
 
 
 def nochangesfound(ui, repo, excluded=None):
-    '''Report no changes for push/pull, excluded is None or a list of
+    """Report no changes for push/pull, excluded is None or a list of
     nodes excluded from the push/pull.
-    '''
+    """
     secretlist = []
     if excluded:
         for n in excluded:
@@ -148,6 +148,8 @@
     return func() if no exception happens. otherwise do some error handling
     and return an exit code accordingly. does not handle all exceptions.
     """
+    coarse_exit_code = -1
+    detailed_exit_code = -1
     try:
         try:
             return func()
@@ -157,6 +159,7 @@
     # Global exception handling, alphabetically
     # Mercurial-specific first, followed by built-in and library exceptions
     except error.LockHeld as inst:
+        detailed_exit_code = 20
         if inst.errno == errno.ETIMEDOUT:
             reason = _(b'timed out waiting for lock held by %r') % (
                 pycompat.bytestr(inst.locker)
@@ -170,6 +173,7 @@
         if not inst.locker:
             ui.error(_(b"(lock might be very busy)\n"))
     except error.LockUnavailable as inst:
+        detailed_exit_code = 20
         ui.error(
             _(b"abort: could not lock %s: %s\n")
             % (
@@ -178,6 +182,7 @@
             )
         )
     except error.OutOfBandError as inst:
+        detailed_exit_code = 100
         if inst.args:
             msg = _(b"abort: remote error:\n")
         else:
@@ -188,7 +193,7 @@
         if inst.hint:
             ui.error(b'(%s)\n' % inst.hint)
     except error.RepoError as inst:
-        ui.error(_(b"abort: %s!\n") % inst)
+        ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
     except error.ResponseError as inst:
@@ -203,43 +208,58 @@
         else:
             ui.error(b"\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
     except error.CensoredNodeError as inst:
-        ui.error(_(b"abort: file censored %s!\n") % inst)
+        ui.error(_(b"abort: file censored %s\n") % inst)
     except error.StorageError as inst:
-        ui.error(_(b"abort: %s!\n") % inst)
+        ui.error(_(b"abort: %s\n") % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
+        detailed_exit_code = 50
     except error.InterventionRequired as inst:
         ui.error(b"%s\n" % inst)
         if inst.hint:
             ui.error(_(b"(%s)\n") % inst.hint)
-        return 1
+        detailed_exit_code = 240
+        coarse_exit_code = 1
     except error.WdirUnsupported:
         ui.error(_(b"abort: working directory revision cannot be specified\n"))
     except error.Abort as inst:
-        ui.error(_(b"abort: %s\n") % inst.message)
-        if inst.hint:
-            ui.error(_(b"(%s)\n") % inst.hint)
+        if isinstance(inst, (error.InputError, error.ParseError)):
+            detailed_exit_code = 10
+        elif isinstance(inst, error.StateError):
+            detailed_exit_code = 20
+        elif isinstance(inst, error.ConfigError):
+            detailed_exit_code = 30
+        elif isinstance(inst, error.SecurityError):
+            detailed_exit_code = 150
+        elif isinstance(inst, error.CanceledError):
+            detailed_exit_code = 250
+        ui.error(inst.format())
+    except error.WorkerError as inst:
+        # Don't print a message -- the worker already should have
+        return inst.status_code
     except ImportError as inst:
-        ui.error(_(b"abort: %s!\n") % stringutil.forcebytestr(inst))
+        ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
         m = stringutil.forcebytestr(inst).split()[-1]
         if m in b"mpatch bdiff".split():
             ui.error(_(b"(did you forget to compile extensions?)\n"))
         elif m in b"zlib".split():
             ui.error(_(b"(is your Python install correct?)\n"))
+    except util.urlerr.httperror as inst:
+        detailed_exit_code = 100
+        ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
+    except util.urlerr.urlerror as inst:
+        detailed_exit_code = 100
+        try:  # usually it is in the form (errno, strerror)
+            reason = inst.reason.args[1]
+        except (AttributeError, IndexError):
+            # it might be anything, for example a string
+            reason = inst.reason
+        if isinstance(reason, pycompat.unicode):
+            # SSLError of Python 2.7.9 contains a unicode
+            reason = encoding.unitolocal(reason)
+        ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
     except (IOError, OSError) as inst:
-        if util.safehasattr(inst, b"code"):  # HTTPError
-            ui.error(_(b"abort: %s\n") % stringutil.forcebytestr(inst))
-        elif util.safehasattr(inst, b"reason"):  # URLError or SSLError
-            try:  # usually it is in the form (errno, strerror)
-                reason = inst.reason.args[1]
-            except (AttributeError, IndexError):
-                # it might be anything, for example a string
-                reason = inst.reason
-            if isinstance(reason, pycompat.unicode):
-                # SSLError of Python 2.7.9 contains a unicode
-                reason = encoding.unitolocal(reason)
-            ui.error(_(b"abort: error: %s\n") % stringutil.forcebytestr(reason))
-        elif (
+        if (
             util.safehasattr(inst, b"args")
             and inst.args
             and inst.args[0] == errno.EPIPE
@@ -263,34 +283,40 @@
     except SystemExit as inst:
         # Commands shouldn't sys.exit directly, but give a return code.
         # Just in case catch this and and pass exit code to caller.
-        return inst.code
+        detailed_exit_code = 254
+        coarse_exit_code = inst.code
 
-    return -1
+    if ui.configbool(b'ui', b'detailed-exit-code'):
+        return detailed_exit_code
+    else:
+        return coarse_exit_code
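A hedged sketch of the selection step added at the end of callcatch(): a detailed code is accumulated next to the traditional coarse one, and the ui.detailed-exit-code configuration decides which of the two is returned. The numeric values in the comment are the ones assigned in the hunk above::

    def pick_exit_code(detailed_exit_code, coarse_exit_code, detailed_enabled):
        # detailed codes assigned above: 10 input/parse, 20 state or lock
        # trouble, 30 config, 50 storage, 100 remote/network, 150 security,
        # 240 intervention required, 250 canceled, 254 uncaught SystemExit;
        # -1 means no more specific code was assigned
        return detailed_exit_code if detailed_enabled else coarse_exit_code

    assert pick_exit_code(10, -1, True) == 10    # e.g. InputError, config on
    assert pick_exit_code(10, -1, False) == -1   # same error, config off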
 
 
 def checknewlabel(repo, lbl, kind):
     # Do not use the "kind" parameter in ui output.
     # It makes strings difficult to translate.
     if lbl in [b'tip', b'.', b'null']:
-        raise error.Abort(_(b"the name '%s' is reserved") % lbl)
+        raise error.InputError(_(b"the name '%s' is reserved") % lbl)
     for c in (b':', b'\0', b'\n', b'\r'):
         if c in lbl:
-            raise error.Abort(
+            raise error.InputError(
                 _(b"%r cannot be used in a name") % pycompat.bytestr(c)
             )
     try:
         int(lbl)
-        raise error.Abort(_(b"cannot use an integer as a name"))
+        raise error.InputError(_(b"cannot use an integer as a name"))
     except ValueError:
         pass
     if lbl.strip() != lbl:
-        raise error.Abort(_(b"leading or trailing whitespace in name %r") % lbl)
+        raise error.InputError(
+            _(b"leading or trailing whitespace in name %r") % lbl
+        )
 
 
 def checkfilename(f):
     '''Check that the filename f is an acceptable filename for a tracked file'''
     if b'\r' in f or b'\n' in f:
-        raise error.Abort(
+        raise error.InputError(
             _(b"'\\n' and '\\r' disallowed in filenames: %r")
             % pycompat.bytestr(f)
         )
@@ -305,13 +331,13 @@
         if msg:
             msg = b"%s: %s" % (msg, procutil.shellquote(f))
             if abort:
-                raise error.Abort(msg)
+                raise error.InputError(msg)
             ui.warn(_(b"warning: %s\n") % msg)
 
 
 def checkportabilityalert(ui):
-    '''check if the user's config requests nothing, a warning, or abort for
-    non-portable filenames'''
+    """check if the user's config requests nothing, a warning, or abort for
+    non-portable filenames"""
     val = ui.config(b'ui', b'portablefilenames')
     lval = val.lower()
     bval = stringutil.parsebool(val)
@@ -377,8 +403,8 @@
 
 
 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
-    '''yield every hg repository under path, always recursively.
-    The recurse flag will only control recursion into repo working dirs'''
+    """yield every hg repository under path, always recursively.
+    The recurse flag will only control recursion into repo working dirs"""
 
     def errhandler(err):
         if err.filename == path:
@@ -768,7 +794,7 @@
 
 
 def walkchangerevs(repo, revs, makefilematcher, prepare):
-    '''Iterate over files and the revs in a "windowed" way.
+    """Iterate over files and the revs in a "windowed" way.
 
     Callers most commonly need to iterate backwards over the history
     in which they are interested. Doing so has awful (quadratic-looking)
@@ -780,7 +806,7 @@
 
     This function returns an iterator yielding contexts. Before
     yielding each context, the iterator will first call the prepare
-    function on each context in the window in forward order.'''
+    function on each context in the window in forward order."""
 
     if not revs:
         return []
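The docstring above explains why the iteration is "windowed". A simplified sketch of the growing-window idea (the start and cap sizes here are assumptions for illustration; the real function walks revisions backwards and calls prepare on each window in forward order)::

    def windows(revs, start=8, cap=512):
        # yield successive slices of `revs`, doubling the window size so deep
        # history walks avoid per-revision overhead
        i, size = 0, start
        while i < len(revs):
            yield revs[i:i + size]
            i += size
            size = min(size * 2, cap)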
@@ -872,17 +898,17 @@
 
 
 def anypats(pats, opts):
-    '''Checks if any patterns, including --include and --exclude were given.
+    """Checks if any patterns, including --include and --exclude were given.
 
     Some commands (e.g. addremove) use this condition for deciding whether to
     print absolute or relative paths.
-    '''
+    """
     return bool(pats or opts.get(b'include') or opts.get(b'exclude'))
 
 
 def expandpats(pats):
-    '''Expand bare globs when running on windows.
-    On posix we assume it already has already been done by sh.'''
+    """Expand bare globs when running on windows.
+    On posix we assume it has already been done by sh."""
     if not util.expandglobs:
         return list(pats)
     ret = []
@@ -903,9 +929,9 @@
 def matchandpats(
     ctx, pats=(), opts=None, globbed=False, default=b'relpath', badfn=None
 ):
-    '''Return a matcher and the patterns that were used.
+    """Return a matcher and the patterns that were used.
     The matcher will warn about bad matches, unless an alternate badfn callback
-    is provided.'''
+    is provided."""
     if opts is None:
         opts = {}
     if not globbed and default == b'relpath':
@@ -976,7 +1002,7 @@
 
 
 def backuppath(ui, repo, filepath):
-    '''customize where working copy backup files (.orig files) are created
+    """customize where working copy backup files (.orig files) are created
 
     Fetch user defined path from config file: [ui] origbackuppath = <path>
     Fall back to default (filepath with .orig suffix) if not specified
@@ -984,7 +1010,7 @@
     filepath is repo-relative
 
     Returns an absolute path
-    '''
+    """
     origvfs = getorigvfs(ui, repo)
     if origvfs is None:
         return repo.wjoin(filepath + b".orig")
@@ -1275,8 +1301,8 @@
 
 
 def marktouched(repo, files, similarity=0.0):
-    '''Assert that files have somehow been operated upon. files are relative to
-    the repo root.'''
+    """Assert that files have somehow been operated upon. files are relative to
+    the repo root."""
     m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
     rejected = []
 
@@ -1310,11 +1336,11 @@
 
 
 def _interestingfiles(repo, matcher):
-    '''Walk dirstate with matcher, looking for files that addremove would care
+    """Walk dirstate with matcher, looking for files that addremove would care
     about.
 
     This is different from dirstate.status because it doesn't care about
-    whether files are modified or clean.'''
+    whether files are modified or clean."""
     added, unknown, deleted, removed, forgotten = [], [], [], [], []
     audit_path = pathutil.pathauditor(repo.root, cached=True)
 
@@ -1369,8 +1395,8 @@
 
 
 def _markchanges(repo, unknown, deleted, renames):
-    '''Marks the files in unknown as added, the files in deleted as removed,
-    and the files in renames as copied.'''
+    """Marks the files in unknown as added, the files in deleted as removed,
+    and the files in renames as copied."""
     wctx = repo[None]
     with repo.wlock():
         wctx.forget(deleted)
@@ -1399,10 +1425,10 @@
         endrev = len(repo)
 
     def getrenamed(fn, rev):
-        '''looks up all renames for a file (up to endrev) the first
+        """looks up all renames for a file (up to endrev) the first
         time the file is given. It indexes on the changerev and only
         parses the manifest if linkrev != changerev.
-        Returns rename info for fn at changerev rev.'''
+        Returns rename info for fn at changerev rev."""
         if fn not in rcache:
             rcache[fn] = {}
             fl = repo.file(fn)
@@ -1523,7 +1549,7 @@
 
 
 def filterrequirements(requirements):
-    """ filters the requirements into two sets:
+    """filters the requirements into two sets:
 
     wcreq: requirements which should be written in .hg/requires
     storereq: which should be written in .hg/store/requires
@@ -1547,7 +1573,12 @@
 
 
 def writereporequirements(repo, requirements=None):
-    """ writes requirements for the repo to .hg/requires """
+    """writes requirements for the repo
+
+    Requirements are written to .hg/requires and .hg/store/requires based
+    on whether share-safe mode is enabled and which requirements are wdir
+    requirements and which are store requirements
+    """
     if requirements:
         repo.requirements = requirements
     wcreq, storereq = filterrequirements(repo.requirements)
@@ -1555,6 +1586,9 @@
         writerequires(repo.vfs, wcreq)
     if storereq is not None:
         writerequires(repo.svfs, storereq)
+    elif repo.ui.configbool(b'format', b'usestore'):
+        # only remove store requires if we are using store
+        repo.svfs.tryunlink(b'requires')
 
 
 def writerequires(opener, requirements):
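
To make the wcreq/storereq split above concrete, here is a hedged sketch of how a caller might persist updated requirements. Only filterrequirements() and writereporequirements() come from the hunk; the repo object and the requirement name are assumptions for illustration.

# Minimal sketch, assuming `repo` is an open localrepository object and that
# b'example-new-requirement' is a hypothetical requirement name.
from mercurial import scmutil

reqs = set(repo.requirements)
reqs.add(b'example-new-requirement')  # hypothetical, for illustration only
# filterrequirements() decides which entries belong in .hg/requires and which
# in .hg/store/requires; writereporequirements() writes the matching file(s).
scmutil.writereporequirements(repo, reqs)
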
@@ -1770,7 +1804,7 @@
             k = encoding.tolocal(k)
             try:
                 data[revsingle(repo, k).rev()] = encoding.tolocal(v)
-            except (error.LookupError, error.RepoLookupError):
+            except (error.LookupError, error.RepoLookupError, error.InputError):
                 pass  # we ignore data for nodes that don't exist locally
     finally:
         if proc:
@@ -1843,8 +1877,7 @@
 
 
 def gdinitconfig(ui):
-    """helper function to know if a repo should be created as general delta
-    """
+    """helper function to know if a repo should be created as general delta"""
     # experimental config: format.generaldelta
     return ui.configbool(b'format', b'generaldelta') or ui.configbool(
         b'format', b'usegeneraldelta'
@@ -1852,8 +1885,7 @@
 
 
 def gddeltaconfig(ui):
-    """helper function to know if incoming delta should be optimised
-    """
+    """helper function to know if incoming delta should be optimised"""
     # experimental config: format.generaldelta
     return ui.configbool(b'format', b'generaldelta')
 
@@ -2163,12 +2195,12 @@
     return _(b"%s and %d others") % (first, len(nodes) - maxnumnodes)
 
 
-def enforcesinglehead(repo, tr, desc, accountclosed=False):
+def enforcesinglehead(repo, tr, desc, accountclosed, filtername):
     """check that no named branch has multiple heads"""
     if desc in (b'strip', b'repair'):
         # skip the logic during strip
         return
-    visible = repo.filtered(b'visible')
+    visible = repo.filtered(filtername)
     # possible improvement: we could restrict the check to affected branch
     bm = visible.branchmap()
     for name in bm:
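
A hedged sketch of the updated calling convention: callers now pass accountclosed and the repo filter name explicitly instead of relying on a hardcoded 'visible' filter. The repo and tr objects and the b'commit' description are assumptions.

# Illustrative call only; `repo` and `tr` are assumed to exist.
scmutil.enforcesinglehead(repo, tr, b'commit', False, b'visible')
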
@@ -2274,10 +2306,18 @@
 
 
 def bookmarkrevs(repo, mark):
+    """Select revisions reachable by a given bookmark
+
+    If the bookmarked revision isn't a head, an empty set will be returned.
     """
-    Select revisions reachable by a given bookmark
-    """
-    return repo.revs(
+    return repo.revs(format_bookmark_revspec(mark))
+
+
+def format_bookmark_revspec(mark):
+    """Build a revset expression to select revisions reachable by a given
+    bookmark"""
+    mark = b'literal:' + mark
+    return revsetlang.formatspec(
         b"ancestors(bookmark(%s)) - "
         b"ancestors(head() and not bookmark(%s)) - "
         b"ancestors(bookmark() and not bookmark(%s))",
--- a/mercurial/scmwindows.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/scmwindows.py	Tue Jan 19 21:48:43 2021 +0530
@@ -31,7 +31,7 @@
 
     def _processdir(progrcd):
         if os.path.isdir(progrcd):
-            for f, kind in util.listdir(progrcd):
+            for f, kind in sorted(util.listdir(progrcd)):
                 if f.endswith(b'.rc'):
                     rcpath.append(os.path.join(progrcd, f))
 
@@ -68,7 +68,7 @@
 
 def userrcpath():
     '''return os-specific hgrc search path to the user dir'''
-    home = os.path.expanduser(b'~')
+    home = _legacy_expanduser(b'~')
     path = [os.path.join(home, b'mercurial.ini'), os.path.join(home, b'.hgrc')]
     userprofile = encoding.environ.get(b'USERPROFILE')
     if userprofile and userprofile != home:
@@ -77,5 +77,37 @@
     return path
 
 
+def _legacy_expanduser(path):
+    """Expand ~ and ~user constructs in the pre 3.8 style"""
+
+    # Python 3.8+ changed the expansion of '~' from HOME to USERPROFILE.  See
+    # https://bugs.python.org/issue36264.  It also seems to capitalize the drive
+    # letter, as though it was processed through os.path.realpath().
+    if not path.startswith(b'~'):
+        return path
+
+    i, n = 1, len(path)
+    while i < n and path[i] not in b'\\/':
+        i += 1
+
+    if b'HOME' in encoding.environ:
+        userhome = encoding.environ[b'HOME']
+    elif b'USERPROFILE' in encoding.environ:
+        userhome = encoding.environ[b'USERPROFILE']
+    elif b'HOMEPATH' not in encoding.environ:
+        return path
+    else:
+        try:
+            drive = encoding.environ[b'HOMEDRIVE']
+        except KeyError:
+            drive = b''
+        userhome = os.path.join(drive, encoding.environ[b'HOMEPATH'])
+
+    if i != 1:  # ~user
+        userhome = os.path.join(os.path.dirname(userhome), path[1:i])
+
+    return userhome + path[i:]
+
+
 def termsize(ui):
     return win32.termsize()
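
The lookup order implemented by _legacy_expanduser() above (HOME, then USERPROFILE, then HOMEDRIVE+HOMEPATH) can be exercised outside Mercurial with a small standalone mirror of the helper; this sketch duplicates the logic from the hunk instead of importing it, so it runs on any platform.

# Standalone sketch mirroring _legacy_expanduser(); `env` stands in for
# encoding.environ so the fallback order can be tried with arbitrary values.
import ntpath

def legacy_expanduser(path, env):
    if not path.startswith(b'~'):
        return path
    i, n = 1, len(path)
    while i < n and path[i] not in b'\\/':
        i += 1
    if b'HOME' in env:
        userhome = env[b'HOME']
    elif b'USERPROFILE' in env:
        userhome = env[b'USERPROFILE']
    elif b'HOMEPATH' not in env:
        return path
    else:
        userhome = ntpath.join(env.get(b'HOMEDRIVE', b''), env[b'HOMEPATH'])
    if i != 1:  # ~user
        userhome = ntpath.join(ntpath.dirname(userhome), path[1:i])
    return userhome + path[i:]

print(legacy_expanduser(b'~/.hgrc', {b'HOME': b'C:\\Users\\alice'}))
# -> b'C:\\Users\\alice/.hgrc'
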
--- a/mercurial/setdiscovery.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/setdiscovery.py	Tue Jan 19 21:48:43 2021 +0530
@@ -290,11 +290,18 @@
     fullsamplesize=200,
     abortwhenunrelated=True,
     ancestorsof=None,
-    samplegrowth=1.05,
+    audit=None,
 ):
-    '''Return a tuple (common, anyincoming, remoteheads) used to identify
+    """Return a tuple (common, anyincoming, remoteheads) used to identify
     missing nodes from or in remote.
-    '''
+
+    The audit argument is an optional dictionary that a caller can pass. It
+    will be updated with extra data about the discovery, which is useful for
+    debugging.
+    """
+
+    samplegrowth = float(ui.config(b'devel', b'discovery.grow-sample.rate'))
+
     start = util.timer()
 
     roundtrips = 0
@@ -307,9 +314,8 @@
     else:
         ownheads = [rev for rev in cl.headrevs() if rev != nullrev]
 
-    # early exit if we know all the specified remote heads already
-    ui.debug(b"query 1; heads\n")
-    roundtrips += 1
+    initial_head_exchange = ui.configbool(b'devel', b'discovery.exchange-heads')
+
     # We also ask remote about all the local heads. That set can be arbitrarily
     # large, so we used to limit it size to `initialsamplesize`. We no longer
     # do as it proved counter productive. The skipped heads could lead to a
@@ -361,25 +367,39 @@
     #     graph (with many heads) attached to, but very independent of the
     #     "simple" graph on the server. This is a fairly unusual case and has
     #     not been met in the wild so far.
-    if remote.limitedarguments:
-        sample = _limitsample(ownheads, initialsamplesize)
-        # indices between sample and externalized version must match
-        sample = list(sample)
-    else:
-        sample = ownheads
+    if initial_head_exchange:
+        if remote.limitedarguments:
+            sample = _limitsample(ownheads, initialsamplesize)
+            # indices between sample and externalized version must match
+            sample = list(sample)
+        else:
+            sample = ownheads
 
-    with remote.commandexecutor() as e:
-        fheads = e.callcommand(b'heads', {})
-        fknown = e.callcommand(
-            b'known', {b'nodes': [clnode(r) for r in sample],}
-        )
+        ui.debug(b"query 1; heads\n")
+        roundtrips += 1
+        with remote.commandexecutor() as e:
+            fheads = e.callcommand(b'heads', {})
+            fknown = e.callcommand(
+                b'known',
+                {
+                    b'nodes': [clnode(r) for r in sample],
+                },
+            )
+
+        srvheadhashes, yesno = fheads.result(), fknown.result()
 
-    srvheadhashes, yesno = fheads.result(), fknown.result()
+        if audit is not None:
+            audit[b'total-roundtrips'] = 1
 
-    if cl.tip() == nullid:
-        if srvheadhashes != [nullid]:
-            return [nullid], True, srvheadhashes
-        return [nullid], False, []
+        if cl.tip() == nullid:
+            if srvheadhashes != [nullid]:
+                return [nullid], True, srvheadhashes
+            return [nullid], False, []
+    else:
+        # we still need the remote heads for the function return
+        with remote.commandexecutor() as e:
+            fheads = e.callcommand(b'heads', {})
+        srvheadhashes = fheads.result()
 
     # start actual discovery (we note this before the next "if" for
     # compatibility reasons)
@@ -396,27 +416,35 @@
         except error.LookupError:
             continue
 
-    if len(knownsrvheads) == len(srvheadhashes):
-        ui.debug(b"all remote heads known locally\n")
-        return srvheadhashes, False, srvheadhashes
+    if initial_head_exchange:
+        # early exit if we know all the specified remote heads already
+        if len(knownsrvheads) == len(srvheadhashes):
+            ui.debug(b"all remote heads known locally\n")
+            return srvheadhashes, False, srvheadhashes
 
-    if len(sample) == len(ownheads) and all(yesno):
-        ui.note(_(b"all local changesets known remotely\n"))
-        ownheadhashes = [clnode(r) for r in ownheads]
-        return ownheadhashes, True, srvheadhashes
+        if len(sample) == len(ownheads) and all(yesno):
+            ui.note(_(b"all local changesets known remotely\n"))
+            ownheadhashes = [clnode(r) for r in ownheads]
+            return ownheadhashes, True, srvheadhashes
 
     # full blown discovery
 
+    # if the server has a limit on its argument size, we can't grow the sample.
+    hard_limit_sample = remote.limitedarguments
+    grow_sample = local.ui.configbool(b'devel', b'discovery.grow-sample')
+    hard_limit_sample = hard_limit_sample and grow_sample
+
     randomize = ui.configbool(b'devel', b'discovery.randomize')
     disco = partialdiscovery(
-        local, ownheads, remote.limitedarguments, randomize=randomize
+        local, ownheads, hard_limit_sample, randomize=randomize
     )
-    # treat remote heads (and maybe own heads) as a first implicit sample
-    # response
-    disco.addcommons(knownsrvheads)
-    disco.addinfo(zip(sample, yesno))
+    if initial_head_exchange:
+        # treat remote heads (and maybe own heads) as a first implicit sample
+        # response
+        disco.addcommons(knownsrvheads)
+        disco.addinfo(zip(sample, yesno))
 
-    full = False
+    full = not initial_head_exchange
     progress = ui.makeprogress(_(b'searching'), unit=_(b'queries'))
     while not disco.iscomplete():
 
@@ -427,7 +455,7 @@
                 ui.debug(b"taking initial sample\n")
             samplefunc = disco.takefullsample
             targetsize = fullsamplesize
-            if not remote.limitedarguments:
+            if not hard_limit_sample:
                 fullsamplesize = int(fullsamplesize * samplegrowth)
         else:
             # use even cheaper initial sample
@@ -449,7 +477,10 @@
 
         with remote.commandexecutor() as e:
             yesno = e.callcommand(
-                b'known', {b'nodes': [clnode(r) for r in sample],}
+                b'known',
+                {
+                    b'nodes': [clnode(r) for r in sample],
+                },
             ).result()
 
         full = True
@@ -467,6 +498,9 @@
     missing = set(result) - set(knownsrvheads)
     ui.log(b'discovery', msg, len(result), len(missing), roundtrips, elapsed)
 
+    if audit is not None:
+        audit[b'total-roundtrips'] = roundtrips
+
     if not result and srvheadhashes != [nullid]:
         if abortwhenunrelated:
             raise error.Abort(_(b"repository is unrelated"))
--- a/mercurial/shelve.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/shelve.py	Tue Jan 19 21:48:43 2021 +0530
@@ -28,11 +28,15 @@
 import stat
 
 from .i18n import _
-from .pycompat import open
+from .node import (
+    bin,
+    hex,
+    nullid,
+    nullrev,
+)
 from . import (
     bookmarks,
     bundle2,
-    bundlerepo,
     changegroup,
     cmdutil,
     discovery,
@@ -43,7 +47,6 @@
     mdiff,
     merge,
     mergestate as mergestatemod,
-    node as nodemod,
     patch,
     phases,
     pycompat,
@@ -61,97 +64,78 @@
 backupdir = b'shelve-backup'
 shelvedir = b'shelved'
 shelvefileextensions = [b'hg', b'patch', b'shelve']
-# universal extension is present in all types of shelves
-patchextension = b'patch'
 
 # we never need the user, so we use a
 # generic user for all shelve operations
 shelveuser = b'shelve@localhost'
 
 
-class shelvedfile(object):
-    """Helper for the file storing a single shelve
+class ShelfDir(object):
+    def __init__(self, repo, for_backups=False):
+        if for_backups:
+            self.vfs = vfsmod.vfs(repo.vfs.join(backupdir))
+        else:
+            self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
 
-    Handles common functions on shelve files (.hg/.patch) using
-    the vfs layer"""
+    def get(self, name):
+        return Shelf(self.vfs, name)
 
-    def __init__(self, repo, name, filetype=None):
-        self.repo = repo
+    def listshelves(self):
+        """return all shelves in repo as list of (time, name)"""
+        try:
+            names = self.vfs.listdir()
+        except OSError as err:
+            if err.errno != errno.ENOENT:
+                raise
+            return []
+        info = []
+        seen = set()
+        for filename in names:
+            name = filename.rsplit(b'.', 1)[0]
+            if name in seen:
+                continue
+            seen.add(name)
+            shelf = self.get(name)
+            if not shelf.exists():
+                continue
+            mtime = shelf.mtime()
+            info.append((mtime, name))
+        return sorted(info, reverse=True)
+
+
+class Shelf(object):
+    """Represents a shelf, including possibly multiple files storing it.
+
+    Old shelves will have a .patch and a .hg file. Newer shelves will
+    also have a .shelve file. This class abstracts away some of the
+    differences and lets you work with the shelf as a whole.
+    """
+
+    def __init__(self, vfs, name):
+        self.vfs = vfs
         self.name = name
-        self.vfs = vfsmod.vfs(repo.vfs.join(shelvedir))
-        self.backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
-        self.ui = self.repo.ui
-        if filetype:
-            self.fname = name + b'.' + filetype
-        else:
-            self.fname = name
 
     def exists(self):
-        return self.vfs.exists(self.fname)
-
-    def filename(self):
-        return self.vfs.join(self.fname)
-
-    def backupfilename(self):
-        def gennames(base):
-            yield base
-            base, ext = base.rsplit(b'.', 1)
-            for i in itertools.count(1):
-                yield b'%s-%d.%s' % (base, i, ext)
-
-        name = self.backupvfs.join(self.fname)
-        for n in gennames(name):
-            if not self.backupvfs.exists(n):
-                return n
-
-    def movetobackup(self):
-        if not self.backupvfs.isdir():
-            self.backupvfs.makedir()
-        util.rename(self.filename(), self.backupfilename())
-
-    def stat(self):
-        return self.vfs.stat(self.fname)
-
-    def opener(self, mode=b'rb'):
-        try:
-            return self.vfs(self.fname, mode)
-        except IOError as err:
-            if err.errno != errno.ENOENT:
-                raise
-            raise error.Abort(_(b"shelved change '%s' not found") % self.name)
-
-    def applybundle(self, tr):
-        fp = self.opener()
-        try:
-            targetphase = phases.internal
-            if not phases.supportinternal(self.repo):
-                targetphase = phases.secret
-            gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
-            pretip = self.repo[b'tip']
-            bundle2.applybundle(
-                self.repo,
-                gen,
-                tr,
-                source=b'unshelve',
-                url=b'bundle:' + self.vfs.join(self.fname),
-                targetphase=targetphase,
-            )
-            shelvectx = self.repo[b'tip']
-            if pretip == shelvectx:
-                shelverev = tr.changes[b'revduplicates'][-1]
-                shelvectx = self.repo[shelverev]
-            return shelvectx
-        finally:
-            fp.close()
-
-    def bundlerepo(self):
-        path = self.vfs.join(self.fname)
-        return bundlerepo.instance(
-            self.repo.baseui, b'bundle://%s+%s' % (self.repo.root, path), False
+        return self.vfs.exists(self.name + b'.patch') and self.vfs.exists(
+            self.name + b'.hg'
         )
 
-    def writebundle(self, bases, node):
-        cgversion = changegroup.safeversion(self.repo)
+    def mtime(self):
+        return self.vfs.stat(self.name + b'.patch')[stat.ST_MTIME]
+
+    def writeinfo(self, info):
+        scmutil.simplekeyvaluefile(self.vfs, self.name + b'.shelve').write(info)
+
+    def hasinfo(self):
+        return self.vfs.exists(self.name + b'.shelve')
+
+    def readinfo(self):
+        return scmutil.simplekeyvaluefile(
+            self.vfs, self.name + b'.shelve'
+        ).read()
+
+    def writebundle(self, repo, bases, node):
+        cgversion = changegroup.safeversion(repo)
         if cgversion == b'01':
             btype = b'HG10BZ'
             compression = None
@@ -159,22 +143,76 @@
             btype = b'HG20'
             compression = b'BZ'
 
-        repo = self.repo.unfiltered()
+        repo = repo.unfiltered()
 
         outgoing = discovery.outgoing(
             repo, missingroots=bases, ancestorsof=[node]
         )
         cg = changegroup.makechangegroup(repo, outgoing, cgversion, b'shelve')
 
+        bundle_filename = self.vfs.join(self.name + b'.hg')
         bundle2.writebundle(
-            self.ui, cg, self.fname, btype, self.vfs, compression=compression
+            repo.ui,
+            cg,
+            bundle_filename,
+            btype,
+            self.vfs,
+            compression=compression,
         )
 
-    def writeinfo(self, info):
-        scmutil.simplekeyvaluefile(self.vfs, self.fname).write(info)
+    def applybundle(self, repo, tr):
+        filename = self.name + b'.hg'
+        fp = self.vfs(filename)
+        try:
+            targetphase = phases.internal
+            if not phases.supportinternal(repo):
+                targetphase = phases.secret
+            gen = exchange.readbundle(repo.ui, fp, filename, self.vfs)
+            pretip = repo[b'tip']
+            bundle2.applybundle(
+                repo,
+                gen,
+                tr,
+                source=b'unshelve',
+                url=b'bundle:' + self.vfs.join(filename),
+                targetphase=targetphase,
+            )
+            shelvectx = repo[b'tip']
+            if pretip == shelvectx:
+                shelverev = tr.changes[b'revduplicates'][-1]
+                shelvectx = repo[shelverev]
+            return shelvectx
+        finally:
+            fp.close()
 
-    def readinfo(self):
-        return scmutil.simplekeyvaluefile(self.vfs, self.fname).read()
+    def open_patch(self, mode=b'rb'):
+        return self.vfs(self.name + b'.patch', mode)
+
+    def _backupfilename(self, backupvfs, filename):
+        def gennames(base):
+            yield base
+            base, ext = base.rsplit(b'.', 1)
+            for i in itertools.count(1):
+                yield b'%s-%d.%s' % (base, i, ext)
+
+        for n in gennames(filename):
+            if not backupvfs.exists(n):
+                return backupvfs.join(n)
+
+    def movetobackup(self, backupvfs):
+        if not backupvfs.isdir():
+            backupvfs.makedir()
+        for suffix in shelvefileextensions:
+            filename = self.name + b'.' + suffix
+            if self.vfs.exists(filename):
+                util.rename(
+                    self.vfs.join(filename),
+                    self._backupfilename(backupvfs, filename),
+                )
+
+    def delete(self):
+        for ext in shelvefileextensions:
+            self.vfs.tryunlink(self.name + b'.' + ext)
 
 
 class shelvedstate(object):
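
As a hedged sketch of the ShelfDir/Shelf API defined above: list the shelves in a repository and move one to the backup area. Only the class and method names come from the hunk; the repo object and the shelf name are assumptions.

# Illustrative only; `repo` is assumed to be an open localrepository and
# b'my-shelf' is a made-up shelf name.
from mercurial import vfs as vfsmod
from mercurial.shelve import ShelfDir, backupdir

shelf_dir = ShelfDir(repo)
for mtime, name in shelf_dir.listshelves():  # newest first
    print(name, mtime)

shelf = shelf_dir.get(b'my-shelf')
if shelf.exists():
    backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
    shelf.movetobackup(backupvfs)
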
@@ -196,11 +234,11 @@
     def _verifyandtransform(cls, d):
         """Some basic shelvestate syntactic verification and transformation"""
         try:
-            d[b'originalwctx'] = nodemod.bin(d[b'originalwctx'])
-            d[b'pendingctx'] = nodemod.bin(d[b'pendingctx'])
-            d[b'parents'] = [nodemod.bin(h) for h in d[b'parents'].split(b' ')]
+            d[b'originalwctx'] = bin(d[b'originalwctx'])
+            d[b'pendingctx'] = bin(d[b'pendingctx'])
+            d[b'parents'] = [bin(h) for h in d[b'parents'].split(b' ')]
             d[b'nodestoremove'] = [
-                nodemod.bin(h) for h in d[b'nodestoremove'].split(b' ')
+                bin(h) for h in d[b'nodestoremove'].split(b' ')
             ]
         except (ValueError, TypeError, KeyError) as err:
             raise error.CorruptedState(pycompat.bytestr(err))
@@ -296,14 +334,10 @@
     ):
         info = {
             b"name": name,
-            b"originalwctx": nodemod.hex(originalwctx.node()),
-            b"pendingctx": nodemod.hex(pendingctx.node()),
-            b"parents": b' '.join(
-                [nodemod.hex(p) for p in repo.dirstate.parents()]
-            ),
-            b"nodestoremove": b' '.join(
-                [nodemod.hex(n) for n in nodestoremove]
-            ),
+            b"originalwctx": hex(originalwctx.node()),
+            b"pendingctx": hex(pendingctx.node()),
+            b"parents": b' '.join([hex(p) for p in repo.dirstate.parents()]),
+            b"nodestoremove": b' '.join([hex(n) for n in nodestoremove]),
             b"branchtorestore": branchtorestore,
             b"keep": cls._keep if keep else cls._nokeep,
             b"activebook": activebook or cls._noactivebook,
@@ -320,21 +354,18 @@
 
 
 def cleanupoldbackups(repo):
-    vfs = vfsmod.vfs(repo.vfs.join(backupdir))
     maxbackups = repo.ui.configint(b'shelve', b'maxbackups')
-    hgfiles = [f for f in vfs.listdir() if f.endswith(b'.' + patchextension)]
-    hgfiles = sorted([(vfs.stat(f)[stat.ST_MTIME], f) for f in hgfiles])
+    backup_dir = ShelfDir(repo, for_backups=True)
+    hgfiles = backup_dir.listshelves()
     if maxbackups > 0 and maxbackups < len(hgfiles):
-        bordermtime = hgfiles[-maxbackups][0]
+        bordermtime = hgfiles[maxbackups - 1][0]
     else:
         bordermtime = None
-    for mtime, f in hgfiles[: len(hgfiles) - maxbackups]:
+    for mtime, name in hgfiles[maxbackups:]:
         if mtime == bordermtime:
             # keep it, because timestamp can't decide exact order of backups
             continue
-        base = f[: -(1 + len(patchextension))]
-        for ext in shelvefileextensions:
-            vfs.tryunlink(base + b'.' + ext)
+        backup_dir.get(name).delete()
 
 
 def _backupactivebookmark(repo):
@@ -350,8 +381,7 @@
 
 
 def _aborttransaction(repo, tr):
-    '''Abort current transaction for shelve/unshelve, but keep dirstate
-    '''
+    """Abort current transaction for shelve/unshelve, but keep dirstate"""
     dirstatebackupname = b'dirstate.shelve'
     repo.dirstate.savebackup(tr, dirstatebackupname)
     tr.abort()
@@ -376,7 +406,7 @@
         label = label.replace(b'.', b'_', 1)
 
     if name:
-        if shelvedfile(repo, name, patchextension).exists():
+        if ShelfDir(repo).get(name).exists():
             e = _(b"a shelved change named '%s' already exists") % name
             raise error.Abort(e)
 
@@ -389,8 +419,9 @@
             raise error.Abort(_(b"shelved change names can not start with '.'"))
 
     else:
+        shelf_dir = ShelfDir(repo)
         for n in gennames():
-            if not shelvedfile(repo, n, patchextension).exists():
+            if not shelf_dir.get(n).exists():
                 name = n
                 break
 
@@ -401,7 +432,7 @@
     """return all mutable ancestors for ctx (included)
 
     Much faster than the revset ancestors(ctx) & draft()"""
-    seen = {nodemod.nullrev}
+    seen = {nullrev}
     visit = collections.deque()
     visit.append(ctx)
     while visit:
@@ -465,11 +496,12 @@
 
 
 def _shelvecreatedcommit(repo, node, name, match):
-    info = {b'node': nodemod.hex(node)}
-    shelvedfile(repo, name, b'shelve').writeinfo(info)
+    info = {b'node': hex(node)}
+    shelf = ShelfDir(repo).get(name)
+    shelf.writeinfo(info)
     bases = list(mutableancestors(repo[node]))
-    shelvedfile(repo, name, b'hg').writebundle(bases, node)
-    with shelvedfile(repo, name, patchextension).opener(b'wb') as fp:
+    shelf.writebundle(repo, bases, node)
+    with shelf.open_patch(b'wb') as fp:
         cmdutil.exportfile(
             repo, [node], fp, opts=mdiff.diffopts(git=True), match=match
         )
@@ -502,7 +534,7 @@
     parent = parents[0]
     origbranch = wctx.branch()
 
-    if parent.node() != nodemod.nullid:
+    if parent.node() != nullid:
         desc = b"changes to: %s" % parent.description().split(b'\n', 1)[0]
     else:
         desc = b'(changes in empty repository)'
@@ -564,6 +596,10 @@
                 scmutil.movedirstate(repo, parent, match)
         else:
             hg.update(repo, parent.node())
+            ms = mergestatemod.mergestate.read(repo)
+            if not ms.unresolvedcount():
+                ms.reset()
+
         if origbranch != repo[b'.'].branch() and not _isbareshelve(pats, opts):
             repo.dirstate.setbranch(origbranch)
 
@@ -590,54 +626,29 @@
     """subcommand that deletes all shelves"""
 
     with repo.wlock():
-        for (name, _type) in repo.vfs.readdir(shelvedir):
-            suffix = name.rsplit(b'.', 1)[-1]
-            if suffix in shelvefileextensions:
-                shelvedfile(repo, name).movetobackup()
+        shelf_dir = ShelfDir(repo)
+        backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
+        for _mtime, name in shelf_dir.listshelves():
+            shelf_dir.get(name).movetobackup(backupvfs)
             cleanupoldbackups(repo)
 
 
 def deletecmd(ui, repo, pats):
     """subcommand that deletes a specific shelve"""
     if not pats:
-        raise error.Abort(_(b'no shelved changes specified!'))
+        raise error.InputError(_(b'no shelved changes specified!'))
     with repo.wlock():
+        backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
         for name in pats:
-            try:
-                for suffix in shelvefileextensions:
-                    shfile = shelvedfile(repo, name, suffix)
-                    # patch file is necessary, as it should
-                    # be present for any kind of shelve,
-                    # but the .hg file is optional as in future we
-                    # will add obsolete shelve with does not create a
-                    # bundle
-                    if shfile.exists() or suffix == patchextension:
-                        shfile.movetobackup()
-            except OSError as err:
-                if err.errno != errno.ENOENT:
-                    raise
-                raise error.Abort(_(b"shelved change '%s' not found") % name)
+            shelf = ShelfDir(repo).get(name)
+            if not shelf.exists():
+                raise error.InputError(
+                    _(b"shelved change '%s' not found") % name
+                )
+            shelf.movetobackup(backupvfs)
             cleanupoldbackups(repo)
 
 
-def listshelves(repo):
-    """return all shelves in repo as list of (time, filename)"""
-    try:
-        names = repo.vfs.readdir(shelvedir)
-    except OSError as err:
-        if err.errno != errno.ENOENT:
-            raise
-        return []
-    info = []
-    for (name, _type) in names:
-        pfx, sfx = name.rsplit(b'.', 1)
-        if not pfx or sfx != patchextension:
-            continue
-        st = shelvedfile(repo, name).stat()
-        info.append((st[stat.ST_MTIME], shelvedfile(repo, pfx).filename()))
-    return sorted(info, reverse=True)
-
-
 def listcmd(ui, repo, pats, opts):
     """subcommand that displays the list of shelves"""
     pats = set(pats)
@@ -646,23 +657,23 @@
         width = ui.termwidth()
     namelabel = b'shelve.newest'
     ui.pager(b'shelve')
-    for mtime, name in listshelves(repo):
-        sname = util.split(name)[1]
-        if pats and sname not in pats:
+    shelf_dir = ShelfDir(repo)
+    for mtime, name in shelf_dir.listshelves():
+        if pats and name not in pats:
             continue
-        ui.write(sname, label=namelabel)
+        ui.write(name, label=namelabel)
         namelabel = b'shelve.name'
         if ui.quiet:
             ui.write(b'\n')
             continue
-        ui.write(b' ' * (16 - len(sname)))
+        ui.write(b' ' * (16 - len(name)))
         used = 16
         date = dateutil.makedate(mtime)
         age = b'(%s)' % templatefilters.age(date, abbrev=True)
         ui.write(age, label=b'shelve.age')
         ui.write(b' ' * (12 - len(age)))
         used += 12
-        with open(name + b'.' + patchextension, b'rb') as fp:
+        with shelf_dir.get(name).open_patch() as fp:
             while True:
                 line = fp.readline()
                 if not line:
@@ -687,16 +698,16 @@
 
 def patchcmds(ui, repo, pats, opts):
     """subcommand that displays shelves"""
+    shelf_dir = ShelfDir(repo)
     if len(pats) == 0:
-        shelves = listshelves(repo)
+        shelves = shelf_dir.listshelves()
         if not shelves:
             raise error.Abort(_(b"there are no shelves to show"))
         mtime, name = shelves[0]
-        sname = util.split(name)[1]
-        pats = [sname]
+        pats = [name]
 
     for shelfname in pats:
-        if not shelvedfile(repo, shelfname, patchextension).exists():
+        if not shelf_dir.get(shelfname).exists():
             raise error.Abort(_(b"cannot find shelf %s") % shelfname)
 
     listcmd(ui, repo, pats, opts)
@@ -787,10 +798,8 @@
 def unshelvecleanup(ui, repo, name, opts):
     """remove related files after an unshelve"""
     if not opts.get(b'keep'):
-        for filetype in shelvefileextensions:
-            shfile = shelvedfile(repo, name, filetype)
-            if shfile.exists():
-                shfile.movetobackup()
+        backupvfs = vfsmod.vfs(repo.vfs.join(backupdir))
+        ShelfDir(repo).get(name).movetobackup(backupvfs)
         cleanupoldbackups(repo)
 
 
@@ -813,7 +822,7 @@
         pendingctx = state.pendingctx
 
         with repo.dirstate.parentchange():
-            repo.setparents(state.pendingctx.node(), nodemod.nullid)
+            repo.setparents(state.pendingctx.node(), nullid)
             repo.dirstate.write(repo.currenttransaction())
 
         targetphase = phases.internal
@@ -822,7 +831,7 @@
         overrides = {(b'phases', b'new-commit'): targetphase}
         with repo.ui.configoverride(overrides, b'unshelve'):
             with repo.dirstate.parentchange():
-                repo.setparents(state.parents[0], nodemod.nullid)
+                repo.setparents(state.parents[0], nullid)
                 newnode, ispartialunshelve = _createunshelvectx(
                     ui, repo, shelvectx, basename, interactive, opts
                 )
@@ -890,16 +899,17 @@
     """Recreate commit in the repository during the unshelve"""
     repo = repo.unfiltered()
     node = None
-    if shelvedfile(repo, basename, b'shelve').exists():
-        node = shelvedfile(repo, basename, b'shelve').readinfo()[b'node']
+    shelf = ShelfDir(repo).get(basename)
+    if shelf.hasinfo():
+        node = shelf.readinfo()[b'node']
     if node is None or node not in repo:
         with ui.configoverride({(b'ui', b'quiet'): True}):
-            shelvectx = shelvedfile(repo, basename, b'hg').applybundle(tr)
+            shelvectx = shelf.applybundle(repo, tr)
         # We might not strip the unbundled changeset, so we should keep track of
         # the unshelve node in case we need to reuse it (eg: unshelve --keep)
         if node is None:
-            info = {b'node': nodemod.hex(shelvectx.node())}
-            shelvedfile(repo, basename, b'shelve').writeinfo(info)
+            info = {b'node': hex(shelvectx.node())}
+            shelf.writeinfo(info)
     else:
         shelvectx = repo[node]
 
@@ -1017,7 +1027,7 @@
             raise error.ConflictResolutionRequired(b'unshelve')
 
         with repo.dirstate.parentchange():
-            repo.setparents(tmpwctx.node(), nodemod.nullid)
+            repo.setparents(tmpwctx.node(), nullid)
             newnode, ispartialunshelve = _createunshelvectx(
                 ui, repo, shelvectx, basename, interactive, opts
             )
@@ -1082,12 +1092,14 @@
         shelved.append(opts[b"name"])
 
     if interactive and opts.get(b'keep'):
-        raise error.Abort(_(b'--keep on --interactive is not yet supported'))
+        raise error.InputError(
+            _(b'--keep on --interactive is not yet supported')
+        )
     if abortf or continuef:
         if abortf and continuef:
-            raise error.Abort(_(b'cannot use both abort and continue'))
+            raise error.InputError(_(b'cannot use both abort and continue'))
         if shelved:
-            raise error.Abort(
+            raise error.InputError(
                 _(
                     b'cannot combine abort/continue with '
                     b'naming a shelved change'
@@ -1100,22 +1112,24 @@
         if abortf:
             return unshelveabort(ui, repo, state)
         elif continuef and interactive:
-            raise error.Abort(_(b'cannot use both continue and interactive'))
+            raise error.InputError(
+                _(b'cannot use both continue and interactive')
+            )
         elif continuef:
             return unshelvecontinue(ui, repo, state, opts)
     elif len(shelved) > 1:
-        raise error.Abort(_(b'can only unshelve one change at a time'))
+        raise error.InputError(_(b'can only unshelve one change at a time'))
     elif not shelved:
-        shelved = listshelves(repo)
+        shelved = ShelfDir(repo).listshelves()
         if not shelved:
-            raise error.Abort(_(b'no shelved changes to apply!'))
-        basename = util.split(shelved[0][1])[1]
+            raise error.StateError(_(b'no shelved changes to apply!'))
+        basename = shelved[0][1]
         ui.status(_(b"unshelving change '%s'\n") % basename)
     else:
         basename = shelved[0]
 
-    if not shelvedfile(repo, basename, patchextension).exists():
-        raise error.Abort(_(b"shelved change '%s' not found") % basename)
+    if not ShelfDir(repo).get(basename).exists():
+        raise error.InputError(_(b"shelved change '%s' not found") % basename)
 
     return _dounshelve(ui, repo, basename, opts)
 
--- a/mercurial/similar.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/similar.py	Tue Jan 19 21:48:43 2021 +0530
@@ -15,11 +15,11 @@
 
 
 def _findexactmatches(repo, added, removed):
-    '''find renamed files that have no changes
+    """find renamed files that have no changes
 
     Takes a list of new filectxs and a list of removed filectxs, and yields
     (before, after) tuples of exact matches.
-    '''
+    """
     # Build table of removed files: {hash(fctx.data()): [fctx, ...]}.
     # We use hash() to discard fctx.data() from memory.
     hashes = {}
@@ -77,11 +77,11 @@
 
 
 def _findsimilarmatches(repo, added, removed, threshold):
-    '''find potentially renamed files based on similar file content
+    """find potentially renamed files based on similar file content
 
     Takes a list of new filectxs and a list of removed filectxs, and yields
     (before, after, score) tuples of partial matches.
-    '''
+    """
     copies = {}
     progress = repo.ui.makeprogress(
         _(b'searching for similar files'), unit=_(b'files'), total=len(removed)
--- a/mercurial/simplemerge.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/simplemerge.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,10 +19,10 @@
 from __future__ import absolute_import
 
 from .i18n import _
+from .node import nullid
 from . import (
     error,
     mdiff,
-    node as nodemod,
     pycompat,
     util,
 )
@@ -57,8 +57,7 @@
 
 
 def compare_range(a, astart, aend, b, bstart, bend):
-    """Compare a[astart:aend] == b[bstart:bend], without slicing.
-    """
+    """Compare a[astart:aend] == b[bstart:bend], without slicing."""
     if (aend - astart) != (bend - bstart):
         return False
     for ia, ib in zip(
@@ -102,8 +101,7 @@
         localorother=None,
         minimize=False,
     ):
-        """Return merge in cvs-like form.
-        """
+        """Return merge in cvs-like form."""
         self.conflicts = False
         newline = b'\n'
         if len(self.a) > 0:
@@ -435,9 +433,9 @@
     then we just warn)"""
     if stringutil.binary(text):
         msg = _(b"%s looks like a binary file.") % path
-        if not opts.get(b'quiet'):
+        if not opts.get('quiet'):
             ui.warn(_(b'warning: %s\n') % msg)
-        if not opts.get(b'text'):
+        if not opts.get('text'):
             raise error.Abort(msg)
     return text
 
@@ -454,7 +452,69 @@
 def is_not_null(ctx):
     if not util.safehasattr(ctx, "node"):
         return False
-    return ctx.node() != nodemod.nullid
+    return ctx.node() != nullid
+
+
+def _mergediff(m3, name_a, name_b, name_base):
+    lines = []
+    conflicts = False
+    for group in m3.merge_groups():
+        if group[0] == b'conflict':
+            base_lines, a_lines, b_lines = group[1:]
+            base_text = b''.join(base_lines)
+            b_blocks = list(
+                mdiff.allblocks(
+                    base_text,
+                    b''.join(b_lines),
+                    lines1=base_lines,
+                    lines2=b_lines,
+                )
+            )
+            a_blocks = list(
+                mdiff.allblocks(
+                    base_text,
+                    b''.join(a_lines),
+                    lines1=base_lines,
+                    lines2=b_lines,
+                )
+            )
+
+            def matching_lines(blocks):
+                return sum(
+                    block[1] - block[0]
+                    for block, kind in blocks
+                    if kind == b'='
+                )
+
+            def diff_lines(blocks, lines1, lines2):
+                for block, kind in blocks:
+                    if kind == b'=':
+                        for line in lines1[block[0] : block[1]]:
+                            yield b' ' + line
+                    else:
+                        for line in lines1[block[0] : block[1]]:
+                            yield b'-' + line
+                        for line in lines2[block[2] : block[3]]:
+                            yield b'+' + line
+
+            lines.append(b"<<<<<<<\n")
+            if matching_lines(a_blocks) < matching_lines(b_blocks):
+                lines.append(b"======= %s\n" % name_a)
+                lines.extend(a_lines)
+                lines.append(b"------- %s\n" % name_base)
+                lines.append(b"+++++++ %s\n" % name_b)
+                lines.extend(diff_lines(b_blocks, base_lines, b_lines))
+            else:
+                lines.append(b"------- %s\n" % name_base)
+                lines.append(b"+++++++ %s\n" % name_a)
+                lines.extend(diff_lines(a_blocks, base_lines, a_lines))
+                lines.append(b"======= %s\n" % name_b)
+                lines.extend(b_lines)
+            lines.append(b">>>>>>>\n")
+            conflicts = True
+        else:
+            lines.extend(group[1])
+    return lines, conflicts
 
 
 def simplemerge(ui, localctx, basectx, otherctx, **opts):
@@ -462,7 +522,6 @@
 
     The merged result is written into `localctx`.
     """
-    opts = pycompat.byteskwargs(opts)
 
     def readctx(ctx):
         # Merges were always run in the working copy before, which means
@@ -474,11 +533,11 @@
         # repository usually sees) might be more useful.
         return _verifytext(ctx.decodeddata(), ctx.path(), ui, opts)
 
-    mode = opts.get(b'mode', b'merge')
+    mode = opts.get('mode', b'merge')
     name_a, name_b, name_base = None, None, None
     if mode != b'union':
         name_a, name_b, name_base = _picklabels(
-            [localctx.path(), otherctx.path(), None], opts.get(b'label', [])
+            [localctx.path(), otherctx.path(), None], opts.get('label', [])
         )
 
     try:
@@ -490,7 +549,7 @@
 
     m3 = Merge3Text(basetext, localtext, othertext)
     extrakwargs = {
-        b"localorother": opts.get(b"localorother", None),
+        b"localorother": opts.get("localorother", None),
         b'minimize': True,
     }
     if mode == b'union':
@@ -502,14 +561,15 @@
         extrakwargs[b'name_base'] = name_base
         extrakwargs[b'minimize'] = False
 
-    mergedtext = b""
-    for line in m3.merge_lines(
-        name_a=name_a, name_b=name_b, **pycompat.strkwargs(extrakwargs)
-    ):
-        if opts.get(b'print'):
-            ui.fout.write(line)
-        else:
-            mergedtext += line
+    if mode == b'mergediff':
+        lines, conflicts = _mergediff(m3, name_a, name_b, name_base)
+    else:
+        lines = list(
+            m3.merge_lines(
+                name_a=name_a, name_b=name_b, **pycompat.strkwargs(extrakwargs)
+            )
+        )
+        conflicts = m3.conflicts
 
     # merge flags if necessary
     flags = localctx.flags()
@@ -521,8 +581,11 @@
         addedflags = (localflags ^ otherflags) - baseflags
         flags = b''.join(sorted(commonflags | addedflags))
 
-    if not opts.get(b'print'):
+    mergedtext = b''.join(lines)
+    if opts.get('print'):
+        ui.fout.write(mergedtext)
+    else:
         localctx.write(mergedtext, flags)
 
-    if m3.conflicts and not mode == b'union':
+    if conflicts and not mode == b'union':
         return 1
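
A hedged sketch of the new 'mergediff' rendering path: three toy byte strings are pushed through Merge3Text and the _mergediff() helper added above. It assumes it runs where the mercurial package is importable.

# Sketch only; the inputs are chosen to force a single conflict.
from mercurial.simplemerge import Merge3Text, _mergediff

base = b'a\nb\nc\n'
local = b'a\nB\nc\n'
other = b'a\nbb\nc\n'

m3 = Merge3Text(base, local, other)
lines, conflicts = _mergediff(m3, b'local', b'other', b'base')
print(b''.join(lines).decode())  # shows the <<<<<<< / ======= / ------- / +++++++ markers
print('conflict detected:', conflicts)
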
--- a/mercurial/sshpeer.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/sshpeer.py	Tue Jan 19 21:48:43 2021 +0530
@@ -121,8 +121,7 @@
         return self._call(b'readline')
 
     def _call(self, methname, data=None):
-        """call <methname> on "main", forward output of "side" while blocking
-        """
+        """call <methname> on "main", forward output of "side" while blocking"""
         # data can be '' or 0
         if (data is not None and not data) or self._main.closed:
             _forwardoutput(self._ui, self._side)
--- a/mercurial/sslutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/sslutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -16,10 +16,10 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import hex
 from . import (
     encoding,
     error,
-    node,
     pycompat,
     util,
 )
@@ -227,8 +227,7 @@
 
 
 def commonssloptions(minimumprotocol):
-    """Return SSLContext options common to servers and clients.
-    """
+    """Return SSLContext options common to servers and clients."""
     if minimumprotocol not in configprotocols:
         raise ValueError(b'protocol value not supported: %s' % minimumprotocol)
 
@@ -470,7 +469,7 @@
     # closed
     # - see http://bugs.python.org/issue13721
     if not sslsocket.cipher():
-        raise error.Abort(_(b'ssl connection failed'))
+        raise error.SecurityError(_(b'ssl connection failed'))
 
     sslsocket._hgstate = {
         b'caloaded': caloaded,
@@ -617,11 +616,11 @@
 
 
 def _verifycert(cert, hostname):
-    '''Verify that cert (in socket.getpeercert() format) matches hostname.
+    """Verify that cert (in socket.getpeercert() format) matches hostname.
     CRLs are not handled.
 
     Returns error message if any problems are found and None on success.
-    '''
+    """
     if not cert:
         return _(b'no certificate received')
 
@@ -736,10 +735,10 @@
         peercert = sock.getpeercert(True)
         peercert2 = sock.getpeercert()
     except AttributeError:
-        raise error.Abort(_(b'%s ssl connection error') % host)
+        raise error.SecurityError(_(b'%s ssl connection error') % host)
 
     if not peercert:
-        raise error.Abort(
+        raise error.SecurityError(
             _(b'%s certificate error: no certificate received') % host
         )
 
@@ -763,9 +762,9 @@
     # If a certificate fingerprint is pinned, use it and only it to
     # validate the remote cert.
     peerfingerprints = {
-        b'sha1': node.hex(hashutil.sha1(peercert).digest()),
-        b'sha256': node.hex(hashlib.sha256(peercert).digest()),
-        b'sha512': node.hex(hashlib.sha512(peercert).digest()),
+        b'sha1': hex(hashutil.sha1(peercert).digest()),
+        b'sha256': hex(hashlib.sha256(peercert).digest()),
+        b'sha512': hex(hashlib.sha512(peercert).digest()),
     }
 
     def fmtfingerprint(s):
@@ -801,7 +800,7 @@
         else:
             section = b'hostsecurity'
             nice = b'%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
-        raise error.Abort(
+        raise error.SecurityError(
             _(b'certificate for %s has unexpected fingerprint %s')
             % (host, nice),
             hint=_(b'check %s configuration') % section,
@@ -810,7 +809,7 @@
     # Security is enabled but no CAs are loaded. We can't establish trust
     # for the cert so abort.
     if not sock._hgstate[b'caloaded']:
-        raise error.Abort(
+        raise error.SecurityError(
             _(
                 b'unable to verify security of %s (no loaded CA certificates); '
                 b'refusing to connect'
@@ -826,7 +825,7 @@
 
     msg = _verifycert(peercert2, shost)
     if msg:
-        raise error.Abort(
+        raise error.SecurityError(
             _(b'%s certificate error: %s') % (host, msg),
             hint=_(
                 b'set hostsecurity.%s:certfingerprints=%s '
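
For the fingerprint pinning used above, here is a hedged sketch of computing a sha256 fingerprint in the colon-separated form that the [hostsecurity] hint expects; the certificate file name is made up.

# Illustrative only; 'server.pem' is a hypothetical PEM certificate on disk.
import hashlib
import ssl

with open('server.pem') as fh:
    der = ssl.PEM_cert_to_DER_cert(fh.read())

digest = hashlib.sha256(der).hexdigest()
print('sha256:' + ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2)))
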
--- a/mercurial/state.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/state.py	Tue Jan 19 21:48:43 2021 +0530
@@ -55,7 +55,7 @@
     """
 
     def __init__(self, repo, fname):
-        """ repo is the repo object
+        """repo is the repo object
         fname is the file name in which data should be stored in .hg directory
         """
         self._repo = repo
@@ -105,11 +105,11 @@
 
 class _statecheck(object):
     """a utility class that deals with multistep operations like graft,
-       histedit, bisect, update etc and check whether such commands
-       are in an unfinished conditition or not and return appropriate message
-       and hint.
-       It also has the ability to register and determine the states of any new
-       multistep operation or multistep command extension.
+    histedit, bisect, update, etc. and checks whether such commands
+    are in an unfinished condition or not and returns an appropriate message
+    and hint.
+    It also has the ability to register and determine the states of any new
+    multistep operation or multistep command extension.
     """
 
     def __init__(
@@ -173,7 +173,11 @@
                 return _(
                     b"use 'hg %s --continue', 'hg %s --abort', "
                     b"or 'hg %s --stop'"
-                ) % (self._opname, self._opname, self._opname,)
+                ) % (
+                    self._opname,
+                    self._opname,
+                    self._opname,
+                )
 
         return self._cmdhint
 
--- a/mercurial/statichttprepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/statichttprepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -61,7 +61,10 @@
             code = f.code
         except urlerr.httperror as inst:
             num = inst.code == 404 and errno.ENOENT or None
-            raise IOError(num, inst)
+            # Explicitly convert the exception to str as Py3 will try to
+            # convert it to the local encoding, which fails because the
+            # HTTPResponse instance doesn't support encode().
+            raise IOError(num, str(inst))
         except urlerr.urlerror as inst:
             raise IOError(None, inst.reason)
 
--- a/mercurial/statprof.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/statprof.py	Tue Jan 19 21:48:43 2021 +0530
@@ -411,11 +411,11 @@
 
 
 def reset(frequency=None):
-    '''Clear out the state of the profiler.  Do not call while the
+    """Clear out the state of the profiler.  Do not call while the
     profiler is running.
 
     The optional frequency argument specifies the number of samples to
-    collect per second.'''
+    collect per second."""
     assert state.profile_level == 0, b"Can't reset() while statprof is running"
     CodeSite.cache.clear()
     state.reset(frequency)
@@ -525,8 +525,8 @@
 
 
 def display_by_line(data, fp):
-    '''Print the profiler data with each sample line represented
-    as one row in a table.  Sorted by self-time per line.'''
+    """Print the profiler data with each sample line represented
+    as one row in a table.  Sorted by self-time per line."""
     stats = SiteStats.buildstats(data.samples)
     stats.sort(reverse=True, key=lambda x: x.selfseconds())
 
@@ -554,9 +554,9 @@
 
 
 def display_by_method(data, fp):
-    '''Print the profiler data with each sample function represented
+    """Print the profiler data with each sample function represented
     as one row in a table.  Important lines within that function are
-    output as nested rows.  Sorted by self-time per line.'''
+    output as nested rows.  Sorted by self-time per line."""
     fp.write(
         b'%5.5s %10.10s   %7.7s  %-8.8s\n'
         % (b'%  ', b'cumulative', b'self', b'')
@@ -732,6 +732,9 @@
                     i += 1
                 if i < len(stack):
                     child.add(stack[i:], time)
+            else:
+                # Normally this is done by the .add() calls
+                child.count += time
 
     root = HotNode(None)
     lasttime = data.samples[0].time
@@ -749,12 +752,8 @@
         ]
         if site:
             indent = depth * 2 - 1
-            filename = b''
-            function = b''
-            if len(node.children) > 0:
-                childsite = list(pycompat.itervalues(node.children))[0].site
-                filename = (childsite.filename() + b':').ljust(15)
-                function = childsite.function
+            filename = (site.filename() + b':').ljust(15)
+            function = site.function
 
             # lots of string formatting
             listpattern = (
@@ -769,10 +768,18 @@
                 filename,
                 function,
             )
-            codepattern = b'%' + (b'%d' % (55 - len(liststring))) + b's %d:  %s'
+            # 4 to account for the word 'line'
+            spacing_len = max(4, 55 - len(liststring))
+            prefix = b''
+            if spacing_len == 4:
+                prefix = b', '
+
+            codepattern = b'%s%s %d: %s%s'
             codestring = codepattern % (
-                b'line',
+                prefix,
+                b'line'.rjust(spacing_len),
                 site.lineno,
+                b''.ljust(max(0, 4 - len(str(site.lineno)))),
                 site.getsource(30),
             )
 
@@ -835,9 +842,9 @@
 
 
 def simplifypath(path):
-    '''Attempt to make the path to a Python module easier to read by
+    """Attempt to make the path to a Python module easier to read by
     removing whatever part of the Python search path it was found
-    on.'''
+    on."""
 
     if path in _pathcache:
         return _pathcache[path]
--- a/mercurial/store.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/store.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,11 +14,11 @@
 
 from .i18n import _
 from .pycompat import getattr
+from .node import hex
 from . import (
     changelog,
     error,
     manifest,
-    node,
     policy,
     pycompat,
     util,
@@ -52,7 +52,7 @@
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def _encodedir(path):
-    '''
+    """
     >>> _encodedir(b'data/foo.i')
     'data/foo.i'
     >>> _encodedir(b'data/foo.i/bla.i')
@@ -61,7 +61,7 @@
     'data/foo.i.hg.hg/bla.i'
     >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
     'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
-    '''
+    """
     return (
         path.replace(b".hg/", b".hg.hg/")
         .replace(b".i/", b".i.hg/")
@@ -73,14 +73,14 @@
 
 
 def decodedir(path):
-    '''
+    """
     >>> decodedir(b'data/foo.i')
     'data/foo.i'
     >>> decodedir(b'data/foo.i.hg/bla.i')
     'data/foo.i/bla.i'
     >>> decodedir(b'data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
-    '''
+    """
     if b".hg/" not in path:
         return path
     return (
@@ -91,14 +91,14 @@
 
 
 def _reserved():
-    ''' characters that are problematic for filesystems
+    """characters that are problematic for filesystems
 
     * ascii escapes (0..31)
     * ascii hi (126..255)
     * windows specials
 
     these characters will be escaped by encodefunctions
-    '''
+    """
     winreserved = [ord(x) for x in u'\\:*?"<>|']
     for x in range(32):
         yield x
@@ -109,7 +109,7 @@
 
 
 def _buildencodefun():
-    '''
+    """
     >>> enc, dec = _buildencodefun()
 
     >>> enc(b'nothing/special.txt')
@@ -131,7 +131,7 @@
     'the~07quick~adshot'
     >>> dec(b'the~07quick~adshot')
     'the\\x07quick\\xadshot'
-    '''
+    """
     e = b'_'
     xchr = pycompat.bytechr
     asciistr = list(map(xchr, range(127)))
@@ -172,23 +172,23 @@
 
 
 def encodefilename(s):
-    '''
+    """
     >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
     'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
-    '''
+    """
     return _encodefname(encodedir(s))
 
 
 def decodefilename(s):
-    '''
+    """
     >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
     'foo.i/bar.d/bla.hg/hi:world?/HELLO'
-    '''
+    """
     return decodedir(_decodefname(s))
 
 
 def _buildlowerencodefun():
-    '''
+    """
     >>> f = _buildlowerencodefun()
     >>> f(b'nothing/special.txt')
     'nothing/special.txt'
@@ -198,7 +198,7 @@
     'hello~3aworld~3f'
     >>> f(b'the\\x07quick\\xADshot')
     'the~07quick~adshot'
-    '''
+    """
     xchr = pycompat.bytechr
     cmap = {xchr(x): xchr(x) for x in pycompat.xrange(127)}
     for x in _reserved():
@@ -220,7 +220,7 @@
 
 
 def _auxencode(path, dotencode):
-    '''
+    """
     Encodes filenames containing names reserved by Windows or which end in
     period or space. Does not touch other single reserved characters c.
     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
@@ -240,7 +240,7 @@
     ['foo.~20']
     >>> _auxencode([b' .foo'], True)
     ['~20.foo']
-    '''
+    """
     for i, n in enumerate(path):
         if not n:
             continue
@@ -273,7 +273,7 @@
 
 
 def _hashencode(path, dotencode):
-    digest = node.hex(hashutil.sha1(path).digest())
+    digest = hex(hashutil.sha1(path).digest())
     le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
     parts = _auxencode(le, dotencode)
     basename = parts[-1]
@@ -305,7 +305,7 @@
 
 
 def _hybridencode(path, dotencode):
-    '''encodes path with a length limit
+    """encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
 
@@ -334,7 +334,7 @@
 
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
-    '''
+    """
     path = encodedir(path)
     ef = _encodefname(path).split(b'/')
     res = b'/'.join(_auxencode(ef, dotencode))
@@ -389,7 +389,11 @@
 
 
 def isrevlog(f, kind, st):
-    return kind == stat.S_IFREG and f[-2:] in (b'.i', b'.d')
+    if kind != stat.S_IFREG:
+        return False
+    if f[-2:] in (b'.i', b'.d', b'.n'):
+        return True
+    return f[-3:] == b'.nd'
 
 
 class basicstore(object):
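
As a hedged illustration of the widened isrevlog() above: the '.n' and '.nd' suffixes (the persistent-nodemap docket and data files) are now treated as store files too. The file names below are made up, and the snippet assumes the mercurial package is importable.

# Illustrative only.
import stat

from mercurial.store import isrevlog

for f in (b'00changelog.i', b'00manifest.d', b'00changelog.n',
          b'00changelog-1234.nd', b'fncache'):
    print(f, isrevlog(f, stat.S_IFREG, None))
# everything except b'fncache' prints True
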
@@ -444,11 +448,11 @@
         return reversed(self._walk(b'', False))
 
     def walk(self, matcher=None):
-        '''yields (unencoded, encoded, size)
+        """yields (unencoded, encoded, size)
 
         if a matcher is passed, only the storage files of tracked paths
         matching the matcher are yielded
-        '''
+        """
         # yield data files first
         for x in self.datafiles(matcher):
             yield x
@@ -517,10 +521,10 @@
         self.addls = set()
 
     def ensureloaded(self, warn=None):
-        '''read the fncache file if not already read.
+        """read the fncache file if not already read.
 
         If the file on disk is corrupted, raise. If warn is provided,
-        warn and keep going instead.'''
+        warn and keep going instead."""
         if self.entries is None:
             self._load(warn)
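
The isrevlog() change earlier in this file widens the recognized store file
extensions from just '.i'/'.d' to also cover the persistent-nodemap files
('.n' and '.nd'). A minimal standalone sketch of that classification, with an
illustrative function name that is not part of the change itself:

import stat


def looks_like_revlog_file(name, mode):
    """Return True for regular files that belong to a revlog.

    Mirrors the extension test in isrevlog() above: '.i' (index),
    '.d' (data), and '.n'/'.nd' (persistent nodemap) all qualify.
    """
    if not stat.S_ISREG(mode):
        return False
    if name[-2:] in (b'.i', b'.d', b'.n'):
        return True
    return name[-3:] == b'.nd'


if __name__ == '__main__':
    assert looks_like_revlog_file(b'data/foo.i', stat.S_IFREG)
    assert looks_like_revlog_file(b'00changelog.n', stat.S_IFREG)
    assert looks_like_revlog_file(b'00changelog-abc.nd', stat.S_IFREG)
    assert not looks_like_revlog_file(b'fncache', stat.S_IFREG)
    assert not looks_like_revlog_file(b'data/foo.i', stat.S_IFDIR)
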
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/strip.py	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,281 @@
+from __future__ import absolute_import
+
+from .i18n import _
+from .pycompat import getattr
+from .node import nullid
+from . import (
+    bookmarks as bookmarksmod,
+    cmdutil,
+    error,
+    hg,
+    lock as lockmod,
+    mergestate as mergestatemod,
+    pycompat,
+    registrar,
+    repair,
+    scmutil,
+    util,
+)
+
+release = lockmod.release
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+
+def checklocalchanges(repo, force=False):
+    s = repo.status()
+    if not force:
+        cmdutil.checkunfinished(repo)
+        cmdutil.bailifchanged(repo)
+    else:
+        cmdutil.checkunfinished(repo, skipmerge=True)
+    return s
+
+
+def _findupdatetarget(repo, nodes):
+    unode, p2 = repo.changelog.parents(nodes[0])
+    currentbranch = repo[None].branch()
+
+    if (
+        util.safehasattr(repo, b'mq')
+        and p2 != nullid
+        and p2 in [x.node for x in repo.mq.applied]
+    ):
+        unode = p2
+    elif currentbranch != repo[unode].branch():
+        pwdir = b'parents(wdir())'
+        revset = b'max(((parents(%ln::%r) + %r) - %ln::%r) and branch(%s))'
+        branchtarget = repo.revs(
+            revset, nodes, pwdir, pwdir, nodes, pwdir, currentbranch
+        )
+        if branchtarget:
+            cl = repo.changelog
+            unode = cl.node(branchtarget.first())
+
+    return unode
+
+
+def strip(
+    ui,
+    repo,
+    revs,
+    update=True,
+    backup=True,
+    force=None,
+    bookmarks=None,
+    soft=False,
+):
+    with repo.wlock(), repo.lock():
+
+        if update:
+            checklocalchanges(repo, force=force)
+            urev = _findupdatetarget(repo, revs)
+            hg.clean(repo, urev)
+            repo.dirstate.write(repo.currenttransaction())
+
+        if soft:
+            repair.softstrip(ui, repo, revs, backup)
+        else:
+            repair.strip(ui, repo, revs, backup)
+
+        repomarks = repo._bookmarks
+        if bookmarks:
+            with repo.transaction(b'strip') as tr:
+                if repo._activebookmark in bookmarks:
+                    bookmarksmod.deactivate(repo)
+                repomarks.applychanges(repo, tr, [(b, None) for b in bookmarks])
+            for bookmark in sorted(bookmarks):
+                ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
+
+
+@command(
+    b"debugstrip",
+    [
+        (
+            b'r',
+            b'rev',
+            [],
+            _(
+                b'strip specified revision (optional, '
+                b'can specify revisions without this '
+                b'option)'
+            ),
+            _(b'REV'),
+        ),
+        (
+            b'f',
+            b'force',
+            None,
+            _(
+                b'force removal of changesets, discard '
+                b'uncommitted changes (no backup)'
+            ),
+        ),
+        (b'', b'no-backup', None, _(b'do not save backup bundle')),
+        (
+            b'',
+            b'nobackup',
+            None,
+            _(b'do not save backup bundle (DEPRECATED)'),
+        ),
+        (b'n', b'', None, _(b'ignored (DEPRECATED)')),
+        (
+            b'k',
+            b'keep',
+            None,
+            _(b"do not modify working directory during strip"),
+        ),
+        (
+            b'B',
+            b'bookmark',
+            [],
+            _(b"remove revs only reachable from given bookmark"),
+            _(b'BOOKMARK'),
+        ),
+        (
+            b'',
+            b'soft',
+            None,
+            _(b"simply drop changesets from visible history (EXPERIMENTAL)"),
+        ),
+    ],
+    _(b'hg debugstrip [-k] [-f] [-B bookmark] [-r] REV...'),
+    helpcategory=command.CATEGORY_MAINTENANCE,
+)
+def debugstrip(ui, repo, *revs, **opts):
+    """strip changesets and all their descendants from the repository
+
+    The strip command removes the specified changesets and all their
+    descendants. If the working directory has uncommitted changes, the
+    operation is aborted unless the --force flag is supplied, in which
+    case changes will be discarded.
+
+    If a parent of the working directory is stripped, then the working
+    directory will automatically be updated to the most recent
+    available ancestor of the stripped parent after the operation
+    completes.
+
+    Any stripped changesets are stored in ``.hg/strip-backup`` as a
+    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
+    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
+    where BUNDLE is the bundle file created by the strip. Note that
+    the local revision numbers will in general be different after the
+    restore.
+
+    Use the --no-backup option to discard the backup bundle once the
+    operation completes.
+
+    Strip is not a history-rewriting operation and can be used on
+    changesets in the public phase. But if the stripped changesets have
+    been pushed to a remote repository, you will likely pull them back again.
+
+    Return 0 on success.
+    """
+    opts = pycompat.byteskwargs(opts)
+    backup = True
+    if opts.get(b'no_backup') or opts.get(b'nobackup'):
+        backup = False
+
+    cl = repo.changelog
+    revs = list(revs) + opts.get(b'rev')
+    revs = set(scmutil.revrange(repo, revs))
+
+    with repo.wlock():
+        bookmarks = set(opts.get(b'bookmark'))
+        if bookmarks:
+            repomarks = repo._bookmarks
+            if not bookmarks.issubset(repomarks):
+                raise error.Abort(
+                    _(b"bookmark '%s' not found")
+                    % b','.join(sorted(bookmarks - set(repomarks.keys())))
+                )
+
+            # If the requested bookmark is not the only one pointing to a
+            # revision, we have to only delete the bookmark and not strip
+            # anything. revsets cannot detect that case.
+            nodetobookmarks = {}
+            for mark, node in pycompat.iteritems(repomarks):
+                nodetobookmarks.setdefault(node, []).append(mark)
+            for marks in nodetobookmarks.values():
+                if bookmarks.issuperset(marks):
+                    rsrevs = scmutil.bookmarkrevs(repo, marks[0])
+                    revs.update(set(rsrevs))
+            if not revs:
+                with repo.lock(), repo.transaction(b'bookmark') as tr:
+                    bmchanges = [(b, None) for b in bookmarks]
+                    repomarks.applychanges(repo, tr, bmchanges)
+                for bookmark in sorted(bookmarks):
+                    ui.write(_(b"bookmark '%s' deleted\n") % bookmark)
+
+        if not revs:
+            raise error.Abort(_(b'empty revision set'))
+
+        descendants = set(cl.descendants(revs))
+        strippedrevs = revs.union(descendants)
+        roots = revs.difference(descendants)
+
+        # if one of the wdir parents is stripped we'll need
+        # to update away to an earlier revision
+        update = any(
+            p != nullid and cl.rev(p) in strippedrevs
+            for p in repo.dirstate.parents()
+        )
+
+        rootnodes = {cl.node(r) for r in roots}
+
+        q = getattr(repo, 'mq', None)
+        if q is not None and q.applied:
+            # refresh queue state if we're about to strip
+            # applied patches
+            if cl.rev(repo.lookup(b'qtip')) in strippedrevs:
+                q.applieddirty = True
+                start = 0
+                end = len(q.applied)
+                for i, statusentry in enumerate(q.applied):
+                    if statusentry.node in rootnodes:
+                        # if one of the stripped roots is an applied
+                        # patch, only part of the queue is stripped
+                        start = i
+                        break
+                del q.applied[start:end]
+                q.savedirty()
+
+        revs = sorted(rootnodes)
+        if update and opts.get(b'keep'):
+            urev = _findupdatetarget(repo, revs)
+            uctx = repo[urev]
+
+            # only reset the dirstate for files that would actually change
+            # between the working context and uctx
+            descendantrevs = repo.revs(b"only(., %d)", uctx.rev())
+            changedfiles = []
+            for rev in descendantrevs:
+                # blindly reset the files, regardless of what actually changed
+                changedfiles.extend(repo[rev].files())
+
+            # reset files that only changed in the dirstate too
+            dirstate = repo.dirstate
+            dirchanges = [f for f in dirstate if dirstate[f] != b'n']
+            changedfiles.extend(dirchanges)
+
+            repo.dirstate.rebuild(urev, uctx.manifest(), changedfiles)
+            repo.dirstate.write(repo.currenttransaction())
+
+            # clear resolve state
+            mergestatemod.mergestate.clean(repo)
+
+            update = False
+
+        strip(
+            ui,
+            repo,
+            revs,
+            backup=backup,
+            update=update,
+            force=opts.get(b'force'),
+            bookmarks=bookmarks,
+            soft=opts[b'soft'],
+        )
+
+    return 0
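
debugstrip() above only strips revisions reachable from a bookmark when every
bookmark pointing at the same node is being deleted; otherwise it merely drops
the bookmark. A small standalone sketch of that decision on plain dicts
(function and variable names are illustrative, not part of the change):

def bookmarks_safe_to_strip(repomarks, todelete):
    """Return the set of nodes whose revisions may be stripped.

    repomarks: mapping of bookmark name -> node
    todelete:  set of bookmark names the user asked to delete

    A node qualifies only if *all* bookmarks pointing at it are being
    deleted; otherwise stripping would orphan a surviving bookmark.
    """
    node_to_marks = {}
    for mark, node in repomarks.items():
        node_to_marks.setdefault(node, []).append(mark)

    strippable = set()
    for node, marks in node_to_marks.items():
        if todelete.issuperset(marks):
            strippable.add(node)
    return strippable


if __name__ == '__main__':
    marks = {'featureA': 'n1', 'featureB': 'n1', 'release': 'n2'}
    # deleting only featureA must not strip n1: featureB still needs it
    assert bookmarks_safe_to_strip(marks, {'featureA'}) == set()
    # deleting both bookmarks on n1 makes n1 strippable
    assert bookmarks_safe_to_strip(marks, {'featureA', 'featureB'}) == {'n1'}
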
--- a/mercurial/subrepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/subrepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -18,6 +18,12 @@
 import xml.dom.minidom
 
 from .i18n import _
+from .node import (
+    bin,
+    hex,
+    nullid,
+    short,
+)
 from . import (
     cmdutil,
     encoding,
@@ -26,7 +32,6 @@
     logcmdutil,
     match as matchmod,
     merge as merge,
-    node,
     pathutil,
     phases,
     pycompat,
@@ -49,9 +54,9 @@
 
 
 def _expandedabspath(path):
-    '''
+    """
     get a path or url; if it is a path, expand it and return an absolute path
-    '''
+    """
     expandedpath = util.urllocalpath(util.expandpath(path))
     u = util.url(expandedpath)
     if not u.scheme:
@@ -61,7 +66,7 @@
 
 def _getstorehashcachename(remotepath):
     '''get a unique filename for the store hash cache of a remote repository'''
-    return node.hex(hashutil.sha1(_expandedabspath(remotepath)).digest())[0:12]
+    return hex(hashutil.sha1(_expandedabspath(remotepath)).digest())[0:12]
 
 
 class SubrepoAbort(error.Abort):
@@ -268,8 +273,7 @@
             )
 
     def bailifchanged(self, ignoreupdate=False, hint=None):
-        """raise Abort if subrepository is ``dirty()``
-        """
+        """raise Abort if subrepository is ``dirty()``"""
         dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate, missing=True)
         if dirtyreason:
             raise error.Abort(dirtyreason, hint=hint)
@@ -291,8 +295,7 @@
         raise NotImplementedError
 
     def phase(self, state):
-        """returns phase of specified state in the subrepository.
-        """
+        """returns phase of specified state in the subrepository."""
         return phases.public
 
     def remove(self):
@@ -384,10 +387,10 @@
         return total
 
     def walk(self, match):
-        '''
+        """
         walk recursively through the directory tree, finding all files
         matched by the match function
-        '''
+        """
 
     def forget(self, match, prefix, uipathfn, dryrun, interactive):
         return ([], [])
@@ -423,9 +426,9 @@
         return revid
 
     def unshare(self):
-        '''
+        """
         convert this repository from shared to normal storage.
-        '''
+        """
 
     def verify(self, onpush=False):
         """verify the revision of this repository that is held in `_state` is
@@ -437,14 +440,12 @@
 
     @propertycache
     def wvfs(self):
-        """return vfs to access the working directory of this subrepository
-        """
+        """return vfs to access the working directory of this subrepository"""
         return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
 
     @propertycache
     def _relpath(self):
-        """return path to this subrepository as seen from outermost repository
-        """
+        """return path to this subrepository as seen from outermost repository"""
         return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
 
 
@@ -503,16 +504,16 @@
         return clean
 
     def _calcstorehash(self, remotepath):
-        '''calculate a unique "store hash"
+        """calculate a unique "store hash"
 
        This method is used to detect when there are changes that may
-        require a push to a given remote path.'''
+        require a push to a given remote path."""
         # sort the files that will be hashed in increasing (likely) file size
         filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
         yield b'# %s\n' % _expandedabspath(remotepath)
         vfs = self._repo.vfs
         for relname in filelist:
-            filehash = node.hex(hashutil.sha1(vfs.tryread(relname)).digest())
+            filehash = hex(hashutil.sha1(vfs.tryread(relname)).digest())
             yield b'%s = %s\n' % (relname, filehash)
 
     @propertycache
@@ -525,11 +526,11 @@
         return self._cachestorehashvfs.tryreadlines(cachefile, b'r')
 
     def _cachestorehash(self, remotepath):
-        '''cache the current store hash
+        """cache the current store hash
 
         Each remote repo requires its own store hash cache, because a subrepo
         store may be "clean" versus a given remote repo, but not versus another
-        '''
+        """
         cachefile = _getstorehashcachename(remotepath)
         with self._repo.lock():
             storehash = list(self._calcstorehash(remotepath))
@@ -537,8 +538,7 @@
             vfs.writelines(cachefile, storehash, mode=b'wb', notindexed=True)
 
     def _getctx(self):
-        '''fetch the context for this subrepo revision, possibly a workingctx
-        '''
+        """fetch the context for this subrepo revision, possibly a workingctx"""
         if self._ctx.rev() is None:
             return self._repo[None]  # workingctx if parent is workingctx
         else:
@@ -606,11 +606,11 @@
     @annotatesubrepoerror
     def diff(self, ui, diffopts, node2, match, prefix, **opts):
         try:
-            node1 = node.bin(self._state[1])
+            node1 = bin(self._state[1])
             # We currently expect node2 to come from substate and be
             # in hex format
             if node2 is not None:
-                node2 = node.bin(node2)
+                node2 = bin(node2)
             logcmdutil.diffordiffstat(
                 ui,
                 self._repo,
@@ -674,7 +674,7 @@
         n = self._repo.commit(text, user, date)
         if not n:
             return self._repo[b'.'].hex()  # different version checked out
-        return node.hex(n)
+        return hex(n)
 
     @annotatesubrepoerror
     def phase(self, state):
@@ -685,7 +685,7 @@
         # we can't fully delete the repository as it may contain
         # local-only history
         self.ui.note(_(b'removing subrepo %s\n') % subrelpath(self))
-        hg.clean(self._repo, node.nullid, False)
+        hg.clean(self._repo, nullid, False)
 
     def _get(self, state):
         source, revision, kind = state
@@ -1024,7 +1024,7 @@
                 # explicit warning.
                 msg = _(b"subrepo '%s' is hidden in revision %s") % (
                     self._relpath,
-                    node.short(self._ctx.node()),
+                    short(self._ctx.node()),
                 )
 
                 if onpush:
@@ -1037,7 +1037,7 @@
             # don't treat this as an error for `hg verify`.
             msg = _(b"subrepo '%s' not found in revision %s") % (
                 self._relpath,
-                node.short(self._ctx.node()),
+                short(self._ctx.node()),
             )
 
             if onpush:
@@ -1048,14 +1048,12 @@
 
     @propertycache
     def wvfs(self):
-        """return own wvfs for efficiency and consistency
-        """
+        """return own wvfs for efficiency and consistency"""
         return self._repo.wvfs
 
     @propertycache
     def _relpath(self):
-        """return path to this subrepository as seen from outermost repository
-        """
+        """return path to this subrepository as seen from outermost repository"""
         # Keep consistent dir separators by avoiding vfs.join(self._path)
         return reporelpath(self._repo)
 
@@ -1170,12 +1168,16 @@
                 externals.append(path)
             elif item == 'missing':
                 missing.append(path)
-            if item not in (
-                '',
-                'normal',
-                'unversioned',
-                'external',
-            ) or props not in ('', 'none', 'normal'):
+            if (
+                item
+                not in (
+                    '',
+                    'normal',
+                    'unversioned',
+                    'external',
+                )
+                or props not in ('', 'none', 'normal')
+            ):
                 changes.append(path)
         for path in changes:
             for ext in externals:
@@ -1384,7 +1386,7 @@
 
     @staticmethod
     def _checkversion(out):
-        '''ensure git version is new enough
+        """ensure git version is new enough
 
         >>> _checkversion = gitsubrepo._checkversion
         >>> _checkversion(b'git version 1.6.0')
@@ -1405,7 +1407,7 @@
         'unknown'
         >>> _checkversion(b'no')
         'unknown'
-        '''
+        """
         version = gitsubrepo._gitversion(out)
         # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
         # despite the docstring comment.  For now, error on 1.4.0, warn on
@@ -1516,9 +1518,9 @@
         self._gitcommand([b'update-index', b'-q', b'--refresh'])
 
     def _gitbranchmap(self):
-        '''returns 2 things:
+        """returns 2 things:
         a map from git branch to revision
-        a map from revision to branches'''
+        a map from revision to branches"""
         branch2rev = {}
         rev2branch = {}
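
_calcstorehash() above hashes a short, fixed list of store files so that a
later push can cheaply decide whether anything changed versus a given remote,
and _getstorehashcachename() derives a per-remote cache file name. A rough
standalone sketch of the same idea with hashlib; path expansion and the vfs
layer are skipped, and repo_root is assumed to be the repository's .hg
directory (names here are illustrative):

import hashlib
import os


def calc_store_hash(repo_root, remote_path):
    """Yield the lines of a store hash cache entry for one remote path."""
    # same files as _calcstorehash() above, ordered by (likely) size
    filelist = (b'bookmarks', b'store/phaseroots', b'store/00changelog.i')
    yield b'# %s\n' % remote_path
    for relname in filelist:
        try:
            with open(os.path.join(repo_root, relname.decode()), 'rb') as fp:
                data = fp.read()
        except OSError:
            data = b''  # a missing file hashes as empty, like vfs.tryread()
        digest = hashlib.sha1(data).hexdigest().encode('ascii')
        yield b'%s = %s\n' % (relname, digest)


def store_hash_cache_name(remote_path):
    """Short unique cache name per remote (12 hex chars of a sha1)."""
    return hashlib.sha1(remote_path).hexdigest()[:12]
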
 
--- a/mercurial/tagmerge.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/tagmerge.py	Tue Jan 19 21:48:43 2021 +0530
@@ -75,24 +75,21 @@
 
 from .i18n import _
 from .node import (
-    hex,
-    nullid,
+    nullhex,
 )
 from . import (
     tags as tagsmod,
     util,
 )
 
-hexnullid = hex(nullid)
-
 
 def readtagsformerge(ui, repo, lines, fn=b'', keeplinenums=False):
-    '''read the .hgtags file into a structure that is suitable for merging
+    """read the .hgtags file into a structure that is suitable for merging
 
     Unless the keeplinenums flag is set, clear the line numbers associated
     with each tag. This is done because only the line numbers of the first
     parent are useful for merging.
-    '''
+    """
     filetags = tagsmod._readtaghist(
         ui, repo, lines, fn=fn, recode=None, calcnodelines=True
     )[1]
@@ -104,7 +101,7 @@
 
 
 def grouptagnodesbyline(tagnodes):
-    '''
+    """
     Group nearby nodes (i.e. those that must be written next to each other)
 
     The input is a list of [node, position] pairs, corresponding to a given tag
@@ -118,7 +115,7 @@
     position is None).
 
     The result is a list of [position, [consecutive node list]]
-    '''
+    """
     firstlinenum = None
     for hexnode, linenum in tagnodes:
         firstlinenum = linenum
@@ -139,14 +136,14 @@
 
 
 def writemergedtags(fcd, mergedtags):
-    '''
+    """
     write the merged tags while trying to minimize the diff to the first parent
 
     This function uses the ordering info stored on the merged tags dict to
     generate an .hgtags file which is correct (in the sense that its contents
     correspond to the result of the tag merge) while also being as close as
     possible to the first parent's .hgtags file.
-    '''
+    """
     # group the node-tag pairs that must be written next to each other
     for tname, taglist in list(mergedtags.items()):
         mergedtags[tname] = grouptagnodesbyline(taglist)
@@ -175,12 +172,12 @@
 
 
 def singletagmerge(p1nodes, p2nodes):
-    '''
+    """
     merge the nodes corresponding to a single tag
 
     Note that the inputs are lists of node-linenum pairs (i.e. not just lists
     of nodes)
-    '''
+    """
     if not p2nodes:
         return p1nodes
     if not p1nodes:
@@ -221,10 +218,10 @@
 
 
 def merge(repo, fcd, fco, fca):
-    '''
+    """
     Merge the tags of two revisions, taking into account the base tags.
     Try to minimize the diff between the merged tags and the first parent tags
-    '''
+    """
     ui = repo.ui
     # read the p1, p2 and base tags
     # only keep the line numbers for the p1 tags
@@ -246,8 +243,8 @@
         pnlosttagset = basetagset - pntagset
         for t in pnlosttagset:
             pntags[t] = basetags[t]
-            if pntags[t][-1][0] != hexnullid:
-                pntags[t].append([hexnullid, None])
+            if pntags[t][-1][0] != nullhex:
+                pntags[t].append([nullhex, None])
 
     conflictedtags = []  # for reporting purposes
     mergedtags = util.sortdict(p1tags)
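
merge() above re-adds tags that exist in the base but were deleted in one
parent, marking the deletion by appending a nullhex entry, which is how
.hgtags records that a tag currently points nowhere. A standalone sketch of
that step on plain dicts; the 40-zero constant mirrors node.nullhex, the other
names are illustrative:

NULLHEX = b'0' * 40  # hex form of the null node, mirroring node.nullhex


def readd_lost_tags(pntags, basetags):
    """Copy tags deleted in one parent back from the base, marked as deleted.

    pntags/basetags map tag name -> list of [hexnode, linenum] entries.
    A trailing NULLHEX entry records the deletion in .hgtags terms.
    """
    lost = set(basetags) - set(pntags)
    for t in lost:
        pntags[t] = [list(e) for e in basetags[t]]  # copy, don't alias base
        if pntags[t][-1][0] != NULLHEX:
            pntags[t].append([NULLHEX, None])


if __name__ == '__main__':
    base = {b'v1': [[b'a' * 40, 1]]}
    p1 = {}  # v1 was deleted in this parent
    readd_lost_tags(p1, base)
    assert p1[b'v1'][-1] == [NULLHEX, None]
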
--- a/mercurial/tags.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/tags.py	Tue Jan 19 21:48:43 2021 +0530
@@ -177,12 +177,12 @@
 
 
 def findglobaltags(ui, repo):
-    '''Find global tags in a repo: return a tagsmap
+    """Find global tags in a repo: return a tagsmap
 
     tagsmap: tag name to (node, hist) 2-tuples.
 
     The tags cache is read and updated as a side-effect of calling.
-    '''
+    """
     (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
     if cachetags is not None:
         assert not shouldwrite
@@ -267,7 +267,7 @@
 
 
 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
-    '''Read tag definitions from a file (or any source of lines).
+    """Read tag definitions from a file (or any source of lines).
 
     This function returns two sortdicts with similar information:
 
@@ -283,7 +283,7 @@
     When calcnodelines is False the hextaglines dict is not calculated (an
     empty dict is returned). This is done to improve this function's
     performance in cases where the line numbers are not needed.
-    '''
+    """
 
     bintaghist = util.sortdict()
     hextaglines = util.sortdict()
@@ -325,14 +325,14 @@
 
 
 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
-    '''Read tag definitions from a file (or any source of lines).
+    """Read tag definitions from a file (or any source of lines).
 
     Returns a mapping from tag name to (node, hist).
 
     "node" is the node id from the last line read for that name. "hist"
     is the list of node ids previously associated with it (in file order).
     All node ids are binary, not hex.
-    '''
+    """
     filetags, nodelines = _readtaghist(
         ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines
     )
@@ -390,7 +390,7 @@
 
 
 def _readtagcache(ui, repo):
-    '''Read the tag cache.
+    """Read the tag cache.
 
     Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).
 
@@ -406,7 +406,7 @@
 
     If the cache is not up to date, the caller is responsible for reading tag
     info from each returned head. (See findglobaltags().)
-    '''
+    """
     try:
         cachefile = repo.cachevfs(_filename(repo), b'r')
         # force reading the file for static-http
@@ -549,7 +549,7 @@
 
 
 def tag(repo, names, node, message, local, user, date, editor=False):
-    '''tag a revision with one or more symbolic names.
+    """tag a revision with one or more symbolic names.
 
     names is a list of strings or, when adding a single tag, names may be a
     string.
@@ -567,7 +567,7 @@
 
     user: name of user to use if committing
 
-    date: date tuple to use if committing'''
+    date: date tuple to use if committing"""
 
     if not local:
         m = matchmod.exact([b'.hgtags'])
@@ -759,7 +759,7 @@
         properprefix = node[0:4]
 
         # Validate and return existing entry.
-        if record != _fnodesmissingrec:
+        if record != _fnodesmissingrec and len(record) == _fnodesrecsize:
             fileprefix = record[0:4]
 
             if fileprefix == properprefix:
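
The added length check above makes the .hgtags filenode cache tolerate short
or truncated records: an entry is only trusted when it has the full record
size and its leading bytes match the changeset node prefix. A standalone
sketch of that validation; the 24-byte record layout (4-byte changeset prefix
followed by a filenode) and the all-0xff "missing" sentinel are assumptions
mirroring _fnodesrecsize and _fnodesmissingrec:

def valid_fnode_record(record, cs_node, recsize=24):
    """Return the cached filenode if the record is usable, else None.

    record:  raw bytes read from the cache for one changeset slot
    cs_node: binary changeset node this slot is supposed to describe
    recsize: assumed full size of one record (4-byte prefix + filenode)
    """
    missing = b'\xff' * recsize  # assumed "never written" sentinel
    if record == missing or len(record) != recsize:
        return None
    if record[0:4] != cs_node[0:4]:
        return None  # stale slot, belongs to a different changeset
    return record[4:]


if __name__ == '__main__':
    node = b'\x12\x34\x56\x78' + b'\x00' * 16
    rec = node[0:4] + b'\xab' * 20
    assert valid_fnode_record(rec, node) == b'\xab' * 20
    assert valid_fnode_record(rec[:-1], node) is None  # truncated record
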
--- a/mercurial/templatefilters.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/templatefilters.py	Tue Jan 19 21:48:43 2021 +0530
@@ -12,10 +12,10 @@
 import time
 
 from .i18n import _
+from .node import hex
 from . import (
     encoding,
     error,
-    node,
     pycompat,
     registrar,
     smartset,
@@ -280,7 +280,7 @@
     """Any text. Convert a binary Mercurial node identifier into
     its long hexadecimal representation.
     """
-    return node.hex(text)
+    return hex(text)
 
 
 @templatefilter(b'hgdate', intype=templateutil.date)
@@ -548,8 +548,7 @@
 
 
 def loadfilter(ui, extname, registrarobj):
-    """Load template filter from specified registrarobj
-    """
+    """Load template filter from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         filters[name] = func
 
--- a/mercurial/templatefuncs.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/templatefuncs.py	Tue Jan 19 21:48:43 2021 +0530
@@ -912,8 +912,7 @@
 
 
 def loadfunction(ui, extname, registrarobj):
-    """Load template function from specified registrarobj
-    """
+    """Load template function from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         funcs[name] = func
 
--- a/mercurial/templatekw.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/templatekw.py	Tue Jan 19 21:48:43 2021 +0530
@@ -634,6 +634,19 @@
     return b''
 
 
+@templatekeyword(b'onelinesummary', requires={b'ui', b'ctx'})
+def showonelinesummary(context, mapping):
+    """String. A one-line summary for the ctx (not including trailing newline).
+    The default template can be overridden in command-templates.oneline-summary."""
+    # Avoid cycle:
+    # mercurial.cmdutil -> mercurial.templatekw -> mercurial.cmdutil
+    from . import cmdutil
+
+    ui = context.resource(mapping, b'ui')
+    ctx = context.resource(mapping, b'ctx')
+    return cmdutil.format_changeset_summary(ui, ctx)
+
+
 @templatekeyword(b'path', requires={b'fctx'})
 def showpath(context, mapping):
     """String. Repository-absolute path of the current file. (EXPERIMENTAL)"""
@@ -981,8 +994,7 @@
 
 
 def loadkeyword(ui, extname, registrarobj):
-    """Load template keyword from specified registrarobj
-    """
+    """Load template keyword from specified registrarobj"""
     for name, func in pycompat.iteritems(registrarobj._table):
         keywords[name] = func
 
--- a/mercurial/templater.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/templater.py	Tue Jan 19 21:48:43 2021 +0530
@@ -312,9 +312,9 @@
 
 
 def _addparseerrorhint(inst, tmpl):
-    if len(inst.args) <= 1:
-        return  # no location
-    loc = inst.args[1]
+    if inst.location is None:
+        return
+    loc = inst.location
     # Offset the caret location by the number of newlines before the
     # location of the error, since we will replace one-char newlines
     # with the two-char literal r'\n'.
@@ -376,14 +376,22 @@
     ('string', 'foo')
     >>> parseexpr(b'foo(bar)')
     ('func', ('symbol', 'foo'), ('symbol', 'bar'))
-    >>> parseexpr(b'foo(')
-    Traceback (most recent call last):
-      ...
-    ParseError: ('not a prefix: end', 4)
-    >>> parseexpr(b'"foo" "bar"')
-    Traceback (most recent call last):
-      ...
-    ParseError: ('invalid token', 7)
+    >>> from . import error
+    >>> from . import pycompat
+    >>> try:
+    ...   parseexpr(b'foo(')
+    ... except error.ParseError as e:
+    ...   pycompat.sysstr(e.message)
+    ...   e.location
+    'not a prefix: end'
+    4
+    >>> try:
+    ...   parseexpr(b'"foo" "bar"')
+    ... except error.ParseError as e:
+    ...   pycompat.sysstr(e.message)
+    ...   e.location
+    'invalid token'
+    7
     """
     try:
         return _parseexpr(expr)
@@ -443,7 +451,7 @@
 
 
 def _runrecursivesymbol(context, mapping, key):
-    raise error.Abort(_(b"recursive reference '%s' in template") % key)
+    raise error.InputError(_(b"recursive reference '%s' in template") % key)
 
 
 def buildtemplate(exp, context):
@@ -655,7 +663,7 @@
 
 
 class engine(object):
-    '''template expansion engine.
+    """template expansion engine.
 
     template expansion works like this. a map file contains key=value
     pairs. if value is quoted, it is treated as string. otherwise, it
@@ -672,7 +680,7 @@
     {key%format}.
 
     filter uses function to transform value. syntax is
-    {key|filter1|filter2|...}.'''
+    {key|filter1|filter2|...}."""
 
     def __init__(self, loader, filters=None, defaults=None, resources=None):
         self._loader = loader
@@ -773,9 +781,9 @@
             return False
 
     def process(self, t, mapping):
-        '''Perform expansion. t is name of map element to expand.
+        """Perform expansion. t is name of map element to expand.
         mapping contains added elements for use during expansion. Is a
-        generator.'''
+        generator."""
         func, data = self._load(t)
         return self._expand(func, data, mapping)
 
@@ -849,7 +857,11 @@
         if subresource:
             data = subresource.read()
             conf.parse(
-                abs, data, sections=sections, remap=remap, include=include,
+                abs,
+                data,
+                sections=sections,
+                remap=remap,
+                include=include,
             )
 
     data = fp.read()
@@ -1086,12 +1098,12 @@
 
 
 def open_template(name, templatepath=None):
-    '''returns a file-like object for the given template, and its full path
+    """returns a file-like object for the given template, and its full path
 
     If the name is a relative path and we're in a frozen binary, the template
     will be read from the mercurial.templates package instead. The returned path
     will then be the relative path.
-    '''
+    """
     # Does the name point directly to a map file?
     if os.path.isfile(name) or os.path.isabs(name):
         return name, open(name, mode='rb')
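
The parse-error handling above now reads the structured e.message and
e.location attributes instead of poking at args, and _addparseerrorhint()
shifts the caret by the number of newlines before the error because each
one-character newline is displayed as the two-character literal \n. A small
standalone sketch of that caret placement; the exception class here is an
illustrative stand-in, not error.ParseError itself:

class FakeParseError(Exception):
    """Illustrative stand-in carrying the message/location attributes."""

    def __init__(self, message, location=None):
        super(FakeParseError, self).__init__(message)
        self.message = message
        self.location = location


def caret_hint(tmpl, err):
    """Return a two-line hint pointing at err.location inside tmpl."""
    if err.location is None:
        return None  # no location information, nothing to point at
    # newlines are shown as the two-character literal '\n', so shift the
    # caret right by one column per newline before the error location
    shown = tmpl.replace('\n', '\\n')
    shift = tmpl[:err.location].count('\n')
    return shown + '\n' + ' ' * (err.location + shift) + '^ here'


if __name__ == '__main__':
    err = FakeParseError('invalid token', location=7)
    print(caret_hint('{rev}\n{oops', err))
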
--- a/mercurial/testing/storage.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/testing/storage.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1021,7 +1021,12 @@
     def testcensored(self):
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1050,7 +1055,12 @@
 
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo', None, tr, 0, nullid, nullid)
@@ -1117,7 +1127,22 @@
             return 0
 
         with self._maketransactionfn() as tr:
-            nodes = f.addgroup([], None, tr, addrevisioncb=cb)
+            nodes = []
+
+            def onchangeset(cl, node):
+                nodes.append(node)
+                cb(cl, node)
+
+            def ondupchangeset(cl, node):
+                nodes.append(node)
+
+            f.addgroup(
+                [],
+                None,
+                tr,
+                addrevisioncb=onchangeset,
+                duplicaterevisioncb=ondupchangeset,
+            )
 
         self.assertEqual(nodes, [])
         self.assertEqual(callbackargs, [])
@@ -1136,7 +1161,22 @@
         ]
 
         with self._maketransactionfn() as tr:
-            nodes = f.addgroup(deltas, linkmapper, tr, addrevisioncb=cb)
+            nodes = []
+
+            def onchangeset(cl, node):
+                nodes.append(node)
+                cb(cl, node)
+
+            def ondupchangeset(cl, node):
+                nodes.append(node)
+
+            f.addgroup(
+                deltas,
+                linkmapper,
+                tr,
+                addrevisioncb=onchangeset,
+                duplicaterevisioncb=ondupchangeset,
+            )
 
         self.assertEqual(
             nodes,
@@ -1175,7 +1215,19 @@
             deltas.append((nodes[i], nullid, nullid, nullid, nullid, delta, 0))
 
         with self._maketransactionfn() as tr:
-            self.assertEqual(f.addgroup(deltas, lambda x: 0, tr), nodes)
+            newnodes = []
+
+            def onchangeset(cl, node):
+                newnodes.append(node)
+
+            f.addgroup(
+                deltas,
+                lambda x: 0,
+                tr,
+                addrevisioncb=onchangeset,
+                duplicaterevisioncb=onchangeset,
+            )
+            self.assertEqual(newnodes, nodes)
 
         self.assertEqual(len(f), len(deltas))
         self.assertEqual(list(f.revs()), [0, 1, 2])
@@ -1190,7 +1242,12 @@
         # Attempt to apply a delta made against a censored revision.
         f = self._makefilefn()
 
-        stored1 = storageutil.packmeta({b'censored': b'tombstone',}, b'')
+        stored1 = storageutil.packmeta(
+            {
+                b'censored': b'tombstone',
+            },
+            b'',
+        )
 
         with self._maketransactionfn() as tr:
             node0 = f.add(b'foo\n' * 30, None, tr, 0, nullid, nullid)
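
The test updates above reflect an addgroup() API that reports nodes through
callbacks instead of returning a list: addrevisioncb fires for newly added
revisions and duplicaterevisioncb for revisions that already existed. A small
sketch of the collection pattern the tests now use; the fake addgroup in the
demo is illustrative only:

def collect_group_nodes(addgroup, deltas, linkmapper, tr):
    """Apply a delta group and return every node it touched."""
    nodes = []

    def on_added(store, node):
        nodes.append(node)

    def on_duplicate(store, node):
        nodes.append(node)

    addgroup(
        deltas,
        linkmapper,
        tr,
        addrevisioncb=on_added,
        duplicaterevisioncb=on_duplicate,
    )
    return nodes


if __name__ == '__main__':
    def fake_addgroup(deltas, linkmapper, tr,
                      addrevisioncb, duplicaterevisioncb):
        # pretend the first revision is new and the second already existed
        addrevisioncb(None, b'node-1')
        duplicaterevisioncb(None, b'node-2')

    got = collect_group_nodes(fake_addgroup, [], None, None)
    assert got == [b'node-1', b'node-2']
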
--- a/mercurial/transaction.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/transaction.py	Tue Jan 19 21:48:43 2021 +0530
@@ -56,7 +56,7 @@
     unlink=True,
     checkambigfiles=None,
 ):
-    for f, o, _ignore in entries:
+    for f, o in entries:
         if o or not unlink:
             checkambig = checkambigfiles and (f, b'') in checkambigfiles
             try:
@@ -158,8 +158,8 @@
         vfsmap[b''] = opener  # set default value
         self._vfsmap = vfsmap
         self._after = after
-        self._entries = []
-        self._map = {}
+        self._offsetmap = {}
+        self._newfiles = set()
         self._journal = journalname
         self._undoname = undoname
         self._queue = []
@@ -180,7 +180,7 @@
 
         # a dict of arguments to be passed to hooks
         self.hookargs = {}
-        self._file = opener.open(self._journal, b"w")
+        self._file = opener.open(self._journal, b"w+")
 
         # a list of ('location', 'path', 'backuppath', cache) entries.
         # - if 'backuppath' is empty, no file existed at backup time
@@ -243,26 +243,36 @@
        This is used by strip to delay visibility of the strip offsets: the
        transaction sees either none or all of the strip actions to be done."""
         q = self._queue.pop()
-        for f, o, data in q:
-            self._addentry(f, o, data)
+        for f, o in q:
+            self._addentry(f, o)
 
     @active
-    def add(self, file, offset, data=None):
+    def add(self, file, offset):
         """record the state of an append-only file before update"""
-        if file in self._map or file in self._backupmap:
+        if (
+            file in self._newfiles
+            or file in self._offsetmap
+            or file in self._backupmap
+        ):
             return
         if self._queue:
-            self._queue[-1].append((file, offset, data))
+            self._queue[-1].append((file, offset))
             return
 
-        self._addentry(file, offset, data)
+        self._addentry(file, offset)
 
-    def _addentry(self, file, offset, data):
+    def _addentry(self, file, offset):
         """add a append-only entry to memory and on-disk state"""
-        if file in self._map or file in self._backupmap:
+        if (
+            file in self._newfiles
+            or file in self._offsetmap
+            or file in self._backupmap
+        ):
             return
-        self._entries.append((file, offset, data))
-        self._map[file] = len(self._entries) - 1
+        if offset:
+            self._offsetmap[file] = offset
+        else:
+            self._newfiles.add(file)
         # add enough data to the journal to do the truncate
         self._file.write(b"%s\0%d\n" % (file, offset))
         self._file.flush()
@@ -282,7 +292,11 @@
             msg = b'cannot use transaction.addbackup inside "group"'
             raise error.ProgrammingError(msg)
 
-        if file in self._map or file in self._backupmap:
+        if (
+            file in self._newfiles
+            or file in self._offsetmap
+            or file in self._backupmap
+        ):
             return
         vfs = self._vfsmap[location]
         dirname, filename = vfs.split(file)
@@ -395,24 +409,39 @@
         return any
 
     @active
-    def find(self, file):
-        if file in self._map:
-            return self._entries[self._map[file]]
-        if file in self._backupmap:
-            return self._backupentries[self._backupmap[file]]
-        return None
+    def findoffset(self, file):
+        if file in self._newfiles:
+            return 0
+        return self._offsetmap.get(file)
+
+    @active
+    def readjournal(self):
+        self._file.seek(0)
+        entries = []
+        for l in self._file.readlines():
+            file, troffset = l.split(b'\0')
+            entries.append((file, int(troffset)))
+        return entries
 
     @active
-    def replace(self, file, offset, data=None):
-        '''
+    def replace(self, file, offset):
+        """
         replace can only replace already committed entries
         that are not pending in the queue
-        '''
-
-        if file not in self._map:
+        """
+        if file in self._newfiles:
+            if not offset:
+                return
+            self._newfiles.remove(file)
+            self._offsetmap[file] = offset
+        elif file in self._offsetmap:
+            if not offset:
+                del self._offsetmap[file]
+                self._newfiles.add(file)
+            else:
+                self._offsetmap[file] = offset
+        else:
             raise KeyError(file)
-        index = self._map[file]
-        self._entries[index] = (file, offset, data)
         self._file.write(b"%s\0%d\n" % (file, offset))
         self._file.flush()
 
@@ -447,9 +476,9 @@
 
     @active
     def writepending(self):
-        '''write pending file to temporary version
+        """write pending file to temporary version
 
-        This is used to allow hooks to view a transaction before commit'''
+        This is used to allow hooks to view a transaction before commit"""
         categories = sorted(self._pendingcallback)
         for cat in categories:
             # remove callback since the data will have been flushed
@@ -460,8 +489,7 @@
 
     @active
     def hasfinalize(self, category):
-        """check is a callback already exist for a category
-        """
+        """check is a callback already exist for a category"""
         return category in self._finalizecallback
 
     @active
@@ -504,11 +532,11 @@
 
     @active
     def addvalidator(self, category, callback):
-        """ adds a callback to be called when validating the transaction.
+        """adds a callback to be called when validating the transaction.
 
         The transaction will be given as the first argument to the callback.
 
-        callback should raise exception if to abort transaction """
+        callback should raise an exception to abort the transaction"""
         self._validatecallback[category] = callback
 
     @active
@@ -552,7 +580,8 @@
                     self._report(
                         b"couldn't remove %s: %s\n" % (vfs.join(b), inst)
                     )
-        self._entries = []
+        self._offsetmap = {}
+        self._newfiles = set()
         self._writeundo()
         if self._after:
             self._after()
@@ -594,9 +623,9 @@
 
     @active
     def abort(self):
-        '''abort the transaction (generally called on error, or when the
+        """abort the transaction (generally called on error, or when the
         transaction is not explicitly committed before going out of
-        scope)'''
+        scope)"""
         self._abort()
 
     def _writeundo(self):
@@ -629,13 +658,14 @@
         undobackupfile.close()
 
     def _abort(self):
+        entries = self.readjournal()
         self._count = 0
         self._usages = 0
         self._file.close()
         self._backupsfile.close()
 
         try:
-            if not self._entries and not self._backupentries:
+            if not entries and not self._backupentries:
                 if self._backupjournal:
                     self._opener.unlink(self._backupjournal)
                 if self._journal:
@@ -654,7 +684,7 @@
                     self._report,
                     self._opener,
                     self._vfsmap,
-                    self._entries,
+                    entries,
                     self._backupentries,
                     False,
                     checkambigfiles=self._checkambigfiles,
@@ -696,7 +726,7 @@
     for l in lines:
         try:
             f, o = l.split(b'\0')
-            entries.append((f, int(o), None))
+            entries.append((f, int(o)))
         except ValueError:
             report(
                 _(b"couldn't read journal entry %r!\n") % pycompat.bytestr(l)
--- a/mercurial/treediscovery.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/treediscovery.py	Tue Jan 19 21:48:43 2021 +0530
@@ -20,7 +20,7 @@
 )
 
 
-def findcommonincoming(repo, remote, heads=None, force=False):
+def findcommonincoming(repo, remote, heads=None, force=False, audit=None):
     """Return a tuple (common, fetch, heads) used to identify the common
     subset of nodes between repo and remote.
 
@@ -41,6 +41,9 @@
         with remote.commandexecutor() as e:
             heads = e.callcommand(b'heads', {}).result()
 
+    if audit is not None:
+        audit[b'total-roundtrips'] = 1
+
     if repo.changelog.tip() == nullid:
         base.add(nullid)
         if heads != [nullid]:
@@ -117,7 +120,10 @@
             for p in pycompat.xrange(0, len(r), 10):
                 with remote.commandexecutor() as e:
                     branches = e.callcommand(
-                        b'branches', {b'nodes': r[p : p + 10],}
+                        b'branches',
+                        {
+                            b'nodes': r[p : p + 10],
+                        },
                     ).result()
 
                 for b in branches:
@@ -178,5 +184,7 @@
 
     progress.complete()
     repo.ui.debug(b"%d total queries\n" % reqcnt)
+    if audit is not None:
+        audit[b'total-roundtrips'] = reqcnt
 
     return base, list(fetch), heads
--- a/mercurial/txnutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/txnutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -13,20 +13,20 @@
 
 
 def mayhavepending(root):
-    '''return whether 'root' may have pending changes, which are
+    """return whether 'root' may have pending changes, which are
     visible to this process.
-    '''
+    """
     return root == encoding.environ.get(b'HG_PENDING')
 
 
 def trypending(root, vfs, filename, **kwargs):
-    '''Open  file to be read according to HG_PENDING environment variable
+    """Open  file to be read according to HG_PENDING environment variable
 
     This opens '.pending' of specified 'filename' only when HG_PENDING
     is equal to 'root'.
 
     This returns '(fp, is_pending_opened)' tuple.
-    '''
+    """
     if mayhavepending(root):
         try:
             return (vfs(b'%s.pending' % filename, **kwargs), True)
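
trypending() above prefers a '<filename>.pending' sibling, but only when the
HG_PENDING environment variable names this repository root, so that hooks
running inside an uncommitted transaction see the pending data. A minimal
standalone sketch with os/open standing in for the vfs layer (names are
illustrative; the real code also threads open-mode keyword arguments through):

import errno
import os


def try_pending(root, dirname, filename):
    """Open filename from dirname, preferring its '.pending' variant.

    Returns (fileobj, is_pending). The pending file is only considered
    when HG_PENDING names this repository root, i.e. when a hook runs
    inside the transaction that wrote it.
    """
    if os.environ.get('HG_PENDING') == root:
        try:
            pending = os.path.join(dirname, filename + '.pending')
            return open(pending, 'rb'), True
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise  # anything other than "no pending file" is fatal
    return open(os.path.join(dirname, filename), 'rb'), False
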
--- a/mercurial/ui.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/ui.py	Tue Jan 19 21:48:43 2021 +0530
@@ -60,6 +60,8 @@
 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
 tweakrc = b"""
 [ui]
+# Gives detailed exit codes for input/user errors, config errors, etc.
+detailed-exit-code = True
 # The rollback command is dangerous. As a rule, don't use it.
 rollback = False
 # Make `hg status` report copy information
@@ -464,10 +466,12 @@
 
             try:
                 cfg.read(filename, fp, sections=sections, remap=remap)
-            except error.ParseError as inst:
+            except error.ConfigError as inst:
                 if trusted:
                     raise
-                self.warn(_(b'ignored: %s\n') % stringutil.forcebytestr(inst))
+                self.warn(
+                    _(b'ignored %s: %s\n') % (inst.location, inst.message)
+                )
 
         self._applyconfig(cfg, trusted, root)
 
@@ -507,6 +511,8 @@
                 del cfg[b'defaults'][k]
             for k, v in cfg.items(b'commands'):
                 del cfg[b'commands'][k]
+            for k, v in cfg.items(b'command-templates'):
+                del cfg[b'command-templates'][k]
         # Don't remove aliases from the configuration if in the exceptionlist
         if self.plain(b'alias'):
             for k, v in cfg.items(b'alias'):
@@ -667,6 +673,18 @@
                     )
         return value
 
+    def config_default(self, section, name):
+        """return the default value for a config option
+
+        The default is returned "raw", for example if it is a callable, the
+        callable was not called.
+        """
+        item = self._knownconfig.get(section, {}).get(name)
+
+        if item is None:
+            raise KeyError((section, name))
+        return item.default
+
     def configsuboptions(self, section, name, default=_unset, untrusted=False):
         """Get a config option and all sub-options.
 
@@ -919,7 +937,7 @@
                 yield section, name, value
 
     def plain(self, feature=None):
-        '''is plain mode active?
+        """is plain mode active?
 
         Plain mode means that all configuration variables which affect
         the behavior and output of Mercurial should be
@@ -933,7 +951,7 @@
         - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
         - False if feature is disabled by default and not included in HGPLAIN
         - True otherwise
-        '''
+        """
         if (
             b'HGPLAIN' not in encoding.environ
             and b'HGPLAINEXCEPT' not in encoding.environ
@@ -1106,7 +1124,7 @@
         return self._colormode != b'win32'
 
     def write(self, *args, **opts):
-        '''write args to output
+        """write args to output
 
         By default, this method simply writes to the buffer or stdout.
         Color mode can be set on the UI class to have the output decorated
@@ -1127,7 +1145,7 @@
         When labeling output for a specific command, a label of
         "cmdname.type" is recommended. For example, status issues
         a label of "status.modified" for modified files.
-        '''
+        """
         dest = self._fout
 
         # inlined _write() for speed
@@ -1447,9 +1465,9 @@
         return _reqexithandlers
 
     def atexit(self, func, *args, **kwargs):
-        '''register a function to run after dispatching a request
+        """register a function to run after dispatching a request
 
-        Handlers do not stay registered across request boundaries.'''
+        Handlers do not stay registered across request boundaries."""
         self._exithandlers.append((func, args, kwargs))
         return func
 
@@ -1478,8 +1496,14 @@
         alldefaults = frozenset([b"text", b"curses"])
 
         featureinterfaces = {
-            b"chunkselector": [b"text", b"curses",],
-            b"histedit": [b"text", b"curses",],
+            b"chunkselector": [
+                b"text",
+                b"curses",
+            ],
+            b"histedit": [
+                b"text",
+                b"curses",
+            ],
         }
 
         # Feature-specific interface
@@ -1526,7 +1550,7 @@
         return choseninterface
 
     def interactive(self):
-        '''is interactive input allowed?
+        """is interactive input allowed?
 
         An interactive session is a session where input can be reasonably read
         from `sys.stdin'. If this function returns false, any attempt to read
@@ -1538,7 +1562,7 @@
         to a terminal device.
 
         This function refers to input only; for output, see `ui.formatted()'.
-        '''
+        """
         i = self.configbool(b"ui", b"interactive")
         if i is None:
             # some environments replace stdin without implementing isatty
@@ -1548,8 +1572,7 @@
         return i
 
     def termwidth(self):
-        '''how wide is the terminal in columns?
-        '''
+        """how wide is the terminal in columns?"""
         if b'COLUMNS' in encoding.environ:
             try:
                 return int(encoding.environ[b'COLUMNS'])
@@ -1558,7 +1581,7 @@
         return scmutil.termsize(self)[0]
 
     def formatted(self):
-        '''should formatted output be used?
+        """should formatted output be used?
 
        It is often desirable to format the output to suit the output medium.
         Examples of this are truncating long lines or colorizing messages.
@@ -1573,7 +1596,7 @@
 
         This function refers to output only; for input, see `ui.interactive()'.
         This function always returns false when in plain mode, see `ui.plain()'.
-        '''
+        """
         if self.plain():
             return False
 
@@ -1740,40 +1763,40 @@
             raise error.ResponseExpected()
 
     def status(self, *msg, **opts):
-        '''write status message to output (if ui.quiet is False)
+        """write status message to output (if ui.quiet is False)
 
         This adds an output label of "ui.status".
-        '''
+        """
         if not self.quiet:
             self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
 
     def warn(self, *msg, **opts):
-        '''write warning message to output (stderr)
+        """write warning message to output (stderr)
 
         This adds an output label of "ui.warning".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
 
     def error(self, *msg, **opts):
-        '''write error message to output (stderr)
+        """write error message to output (stderr)
 
         This adds an output label of "ui.error".
-        '''
+        """
         self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
 
     def note(self, *msg, **opts):
-        '''write note to output (if ui.verbose is True)
+        """write note to output (if ui.verbose is True)
 
         This adds an output label of "ui.note".
-        '''
+        """
         if self.verbose:
             self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
 
     def debug(self, *msg, **opts):
-        '''write debug message to output (if ui.debugflag is True)
+        """write debug message to output (if ui.debugflag is True)
 
         This adds an output label of "ui.debug".
-        '''
+        """
         if self.debugflag:
             self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
             self.log(b'debug', b'%s', b''.join(msg))
@@ -1848,7 +1871,7 @@
             self.system(
                 b"%s \"%s\"" % (editor, name),
                 environ=environ,
-                onerr=error.Abort,
+                onerr=error.CanceledError,
                 errprefix=_(b"edit failed"),
                 blockedtag=b'editor',
             )
@@ -1869,12 +1892,12 @@
         errprefix=None,
         blockedtag=None,
     ):
-        '''execute shell command with appropriate output stream. command
+        """execute shell command with appropriate output stream. command
         output will be redirected if fout is not stdout.
 
         if command fails and onerr is None, return status, else raise onerr
         object as exception.
-        '''
+        """
         if blockedtag is None:
             # Long cmds tend to be because of an absolute path on cmd. Keep
             # the tail end instead
@@ -1901,9 +1924,9 @@
         return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
 
     def traceback(self, exc=None, force=False):
-        '''print exception traceback if traceback printing enabled or forced.
+        """print exception traceback if traceback printing enabled or forced.
         only to call in exception handler. returns true if traceback
-        printed.'''
+        printed."""
         if self.tracebackflag or force:
             if exc is None:
                 exc = sys.exc_info()
@@ -2005,7 +2028,7 @@
         self._loggers[name] = logger
 
     def log(self, event, msgfmt, *msgargs, **opts):
-        '''hook for logging facility extensions
+        """hook for logging facility extensions
 
         event should be a readily-identifiable subsystem, which will
         allow filtering.
@@ -2014,7 +2037,7 @@
         *msgargs are %-formatted into it.
 
         **opts currently has no defined meanings.
-        '''
+        """
         if not self._loggers:
             return
         activeloggers = [
@@ -2034,7 +2057,7 @@
             self._loggers = registeredloggers
 
     def label(self, msg, label):
-        '''style msg based on supplied label
+        """style msg based on supplied label
 
         If some color mode is enabled, this will add the necessary control
         characters to apply such color. In addition, 'debug' color mode adds
@@ -2042,7 +2065,7 @@
 
         ui.write(s, 'label') is equivalent to
         ui.write(ui.label(s, 'label')).
-        '''
+        """
         if self._colormode is not None:
             return color.colorlabel(self, msg, label)
         return msg
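
plain() above decides whether "plain mode" is active from the HGPLAIN and
HGPLAINEXCEPT environment variables, following the rules listed in its
docstring. A standalone sketch of that decision; the handling of features
that are off by default is omitted, and the function name is illustrative:

import os


def plain_mode(feature=None):
    """Is plain mode active for the given feature?

    Simplified rules, matching the docstring above:
    - False if neither HGPLAIN nor HGPLAINEXCEPT is set
    - False if the feature is listed in HGPLAINEXCEPT
    - True otherwise
    """
    env = os.environ
    if 'HGPLAIN' not in env and 'HGPLAINEXCEPT' not in env:
        return False
    exceptions = env.get('HGPLAINEXCEPT', '').strip().split(',')
    if feature and feature in exceptions:
        return False
    return True
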
--- a/mercurial/unionrepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/unionrepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -71,11 +71,11 @@
             p1node = self.revlog2.node(p1rev)
             p2node = self.revlog2.node(p2rev)
 
-            # TODO: it's probably wrong to set compressed length to None, but
+            # TODO: it's probably wrong to set compressed length to -1, but
             # I have no idea if csize is valid in the base revlog context.
             e = (
                 flags,
-                None,
+                -1,
                 rsize,
                 base,
                 link,
@@ -129,6 +129,7 @@
         linkmapper,
         transaction,
         addrevisioncb=None,
+        duplicaterevisioncb=None,
         maybemissingparents=False,
     ):
         raise NotImplementedError
--- a/mercurial/upgrade.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/upgrade.py	Tue Jan 19 21:48:43 2021 +0530
@@ -7,1135 +7,27 @@
 
 from __future__ import absolute_import
 
-import stat
-
 from .i18n import _
-from .pycompat import getattr
 from . import (
-    changelog,
     error,
-    filelog,
     hg,
     localrepo,
-    manifest,
-    metadata,
+    lock as lockmod,
     pycompat,
-    requirements,
-    revlog,
+    requirements as requirementsmod,
     scmutil,
-    util,
-    vfs as vfsmod,
 )
 
-from .utils import compression
-
-# list of requirements that request a clone of all revlog if added/removed
-RECLONES_REQUIREMENTS = {
-    b'generaldelta',
-    requirements.SPARSEREVLOG_REQUIREMENT,
-}
-
-
-def requiredsourcerequirements(repo):
-    """Obtain requirements required to be present to upgrade a repo.
-
-    An upgrade will not be allowed if the repository doesn't have the
-    requirements returned by this function.
-    """
-    return {
-        # Introduced in Mercurial 0.9.2.
-        b'revlogv1',
-        # Introduced in Mercurial 0.9.2.
-        b'store',
-    }
-
-
-def blocksourcerequirements(repo):
-    """Obtain requirements that will prevent an upgrade from occurring.
-
-    An upgrade cannot be performed if the source repository contains a
-    requirements in the returned set.
-    """
-    return {
-        # The upgrade code does not yet support these experimental features.
-        # This is an artificial limitation.
-        requirements.TREEMANIFEST_REQUIREMENT,
-        # This was a precursor to generaldelta and was never enabled by default.
-        # It should (hopefully) not exist in the wild.
-        b'parentdelta',
-        # Upgrade should operate on the actual store, not the shared link.
-        requirements.SHARED_REQUIREMENT,
-    }
-
-
-def supportremovedrequirements(repo):
-    """Obtain requirements that can be removed during an upgrade.
-
-    If an upgrade were to create a repository that dropped a requirement,
-    the dropped requirement must appear in the returned set for the upgrade
-    to be allowed.
-    """
-    supported = {
-        requirements.SPARSEREVLOG_REQUIREMENT,
-        requirements.SIDEDATA_REQUIREMENT,
-        requirements.COPIESSDC_REQUIREMENT,
-        requirements.NODEMAP_REQUIREMENT,
-    }
-    for name in compression.compengines:
-        engine = compression.compengines[name]
-        if engine.available() and engine.revlogheader():
-            supported.add(b'exp-compression-%s' % name)
-            if engine.name() == b'zstd':
-                supported.add(b'revlog-compression-zstd')
-    return supported
-
-
-def supporteddestrequirements(repo):
-    """Obtain requirements that upgrade supports in the destination.
-
-    If the result of the upgrade would create requirements not in this set,
-    the upgrade is disallowed.
-
-    Extensions should monkeypatch this to add their custom requirements.
-    """
-    supported = {
-        b'dotencode',
-        b'fncache',
-        b'generaldelta',
-        b'revlogv1',
-        b'store',
-        requirements.SPARSEREVLOG_REQUIREMENT,
-        requirements.SIDEDATA_REQUIREMENT,
-        requirements.COPIESSDC_REQUIREMENT,
-        requirements.NODEMAP_REQUIREMENT,
-        requirements.SHARESAFE_REQUIREMENT,
-    }
-    for name in compression.compengines:
-        engine = compression.compengines[name]
-        if engine.available() and engine.revlogheader():
-            supported.add(b'exp-compression-%s' % name)
-            if engine.name() == b'zstd':
-                supported.add(b'revlog-compression-zstd')
-    return supported
-
-
-def allowednewrequirements(repo):
-    """Obtain requirements that can be added to a repository during upgrade.
-
-    This is used to disallow proposed requirements from being added when
-    they weren't present before.
-
-    We use a list of allowed requirement additions instead of a list of known
-    bad additions because the whitelist approach is safer and will prevent
-    future, unknown requirements from accidentally being added.
-    """
-    supported = {
-        b'dotencode',
-        b'fncache',
-        b'generaldelta',
-        requirements.SPARSEREVLOG_REQUIREMENT,
-        requirements.SIDEDATA_REQUIREMENT,
-        requirements.COPIESSDC_REQUIREMENT,
-        requirements.NODEMAP_REQUIREMENT,
-    }
-    for name in compression.compengines:
-        engine = compression.compengines[name]
-        if engine.available() and engine.revlogheader():
-            supported.add(b'exp-compression-%s' % name)
-            if engine.name() == b'zstd':
-                supported.add(b'revlog-compression-zstd')
-    return supported
-
-
-def preservedrequirements(repo):
-    return set()
-
-
-deficiency = b'deficiency'
-optimisation = b'optimization'
-
-
-class improvement(object):
-    """Represents an improvement that can be made as part of an upgrade.
-
-    The following attributes are defined on each instance:
-
-    name
-       Machine-readable string uniquely identifying this improvement. It
-       will be mapped to an action later in the upgrade process.
-
-    type
-       Either ``deficiency`` or ``optimisation``. A deficiency is an obvious
-       problem. An optimization is an action (sometimes optional) that
-       can be taken to further improve the state of the repository.
-
-    description
-       Message intended for humans explaining the improvement in more detail,
-       including the implications of it. For ``deficiency`` types, should be
-       worded in the present tense. For ``optimisation`` types, should be
-       worded in the future tense.
-
-    upgrademessage
-       Message intended for humans explaining what an upgrade addressing this
-       issue will do. Should be worded in the future tense.
-    """
-
-    def __init__(self, name, type, description, upgrademessage):
-        self.name = name
-        self.type = type
-        self.description = description
-        self.upgrademessage = upgrademessage
-
-    def __eq__(self, other):
-        if not isinstance(other, improvement):
-            # This is what python tell use to do
-            return NotImplemented
-        return self.name == other.name
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    def __hash__(self):
-        return hash(self.name)
-
-
-allformatvariant = []
-
-
-def registerformatvariant(cls):
-    allformatvariant.append(cls)
-    return cls
-
-
-class formatvariant(improvement):
-    """an improvement subclass dedicated to repository format"""
-
-    type = deficiency
-    ### The following attributes should be defined for each class:
-
-    # machine-readable string uniquely identifying this improvement. it will be
-    # mapped to an action later in the upgrade process.
-    name = None
-
-    # message intended for humans explaining the improvement in more detail,
-    # including the implications of it ``deficiency`` types, should be worded
-    # in the present tense.
-    description = None
-
-    # message intended for humans explaining what an upgrade addressing this
-    # issue will do. should be worded in the future tense.
-    upgrademessage = None
-
-    # value of current Mercurial default for new repository
-    default = None
-
-    def __init__(self):
-        raise NotImplementedError()
-
-    @staticmethod
-    def fromrepo(repo):
-        """current value of the variant in the repository"""
-        raise NotImplementedError()
-
-    @staticmethod
-    def fromconfig(repo):
-        """current value of the variant in the configuration"""
-        raise NotImplementedError()
-
-
-class requirementformatvariant(formatvariant):
-    """formatvariant based on a 'requirement' name.
-
-    Many format variant are controlled by a 'requirement'. We define a small
-    subclass to factor the code.
-    """
-
-    # the requirement that control this format variant
-    _requirement = None
-
-    @staticmethod
-    def _newreporequirements(ui):
-        return localrepo.newreporequirements(
-            ui, localrepo.defaultcreateopts(ui)
-        )
-
-    @classmethod
-    def fromrepo(cls, repo):
-        assert cls._requirement is not None
-        return cls._requirement in repo.requirements
-
-    @classmethod
-    def fromconfig(cls, repo):
-        assert cls._requirement is not None
-        return cls._requirement in cls._newreporequirements(repo.ui)
-
-
-@registerformatvariant
-class fncache(requirementformatvariant):
-    name = b'fncache'
-
-    _requirement = b'fncache'
-
-    default = True
-
-    description = _(
-        b'long and reserved filenames may not work correctly; '
-        b'repository performance is sub-optimal'
-    )
-
-    upgrademessage = _(
-        b'repository will be more resilient to storing '
-        b'certain paths and performance of certain '
-        b'operations should be improved'
-    )
-
-
-@registerformatvariant
-class dotencode(requirementformatvariant):
-    name = b'dotencode'
-
-    _requirement = b'dotencode'
-
-    default = True
-
-    description = _(
-        b'storage of filenames beginning with a period or '
-        b'space may not work correctly'
-    )
-
-    upgrademessage = _(
-        b'repository will be better able to store files '
-        b'beginning with a space or period'
-    )
-
-
-@registerformatvariant
-class generaldelta(requirementformatvariant):
-    name = b'generaldelta'
-
-    _requirement = b'generaldelta'
-
-    default = True
-
-    description = _(
-        b'deltas within internal storage are unable to '
-        b'choose optimal revisions; repository is larger and '
-        b'slower than it could be; interaction with other '
-        b'repositories may require extra network and CPU '
-        b'resources, making "hg push" and "hg pull" slower'
-    )
-
-    upgrademessage = _(
-        b'repository storage will be able to create '
-        b'optimal deltas; new repository data will be '
-        b'smaller and read times should decrease; '
-        b'interacting with other repositories using this '
-        b'storage model should require less network and '
-        b'CPU resources, making "hg push" and "hg pull" '
-        b'faster'
-    )
-
-
-@registerformatvariant
-class sparserevlog(requirementformatvariant):
-    name = b'sparserevlog'
-
-    _requirement = requirements.SPARSEREVLOG_REQUIREMENT
-
-    default = True
-
-    description = _(
-        b'in order to limit disk reading and memory usage on older '
-        b'version, the span of a delta chain from its root to its '
-        b'end is limited, whatever the relevant data in this span. '
-        b'This can severly limit Mercurial ability to build good '
-        b'chain of delta resulting is much more storage space being '
-        b'taken and limit reusability of on disk delta during '
-        b'exchange.'
-    )
-
-    upgrademessage = _(
-        b'Revlog supports delta chain with more unused data '
-        b'between payload. These gaps will be skipped at read '
-        b'time. This allows for better delta chains, making a '
-        b'better compression and faster exchange with server.'
-    )
-
-
-@registerformatvariant
-class sidedata(requirementformatvariant):
-    name = b'sidedata'
-
-    _requirement = requirements.SIDEDATA_REQUIREMENT
-
-    default = False
-
-    description = _(
-        b'Allows storage of extra data alongside a revision, '
-        b'unlocking various caching options.'
-    )
-
-    upgrademessage = _(b'Allows storage of extra data alongside a revision.')
-
-
-@registerformatvariant
-class persistentnodemap(requirementformatvariant):
-    name = b'persistent-nodemap'
-
-    _requirement = requirements.NODEMAP_REQUIREMENT
-
-    default = False
-
-    description = _(
-        b'persist the node -> rev mapping on disk to speedup lookup'
-    )
-
-    upgrademessage = _(b'Speedup revision lookup by node id.')
-
-
-@registerformatvariant
-class copiessdc(requirementformatvariant):
-    name = b'copies-sdc'
-
-    _requirement = requirements.COPIESSDC_REQUIREMENT
-
-    default = False
-
-    description = _(b'Stores copies information alongside changesets.')
-
-    upgrademessage = _(
-        b'Allows to use more efficient algorithm to deal with ' b'copy tracing.'
-    )
-
-
-@registerformatvariant
-class removecldeltachain(formatvariant):
-    name = b'plain-cl-delta'
-
-    default = True
-
-    description = _(
-        b'changelog storage is using deltas instead of '
-        b'raw entries; changelog reading and any '
-        b'operation relying on changelog data are slower '
-        b'than they could be'
-    )
-
-    upgrademessage = _(
-        b'changelog storage will be reformated to '
-        b'store raw entries; changelog reading will be '
-        b'faster; changelog size may be reduced'
-    )
-
-    @staticmethod
-    def fromrepo(repo):
-        # Mercurial 4.0 changed changelogs to not use delta chains. Search for
-        # changelogs with deltas.
-        cl = repo.changelog
-        chainbase = cl.chainbase
-        return all(rev == chainbase(rev) for rev in cl)
-
-    @staticmethod
-    def fromconfig(repo):
-        return True
-
-
-@registerformatvariant
-class compressionengine(formatvariant):
-    name = b'compression'
-    default = b'zlib'
-
-    description = _(
-        b'Compresion algorithm used to compress data. '
-        b'Some engine are faster than other'
-    )
-
-    upgrademessage = _(
-        b'revlog content will be recompressed with the new algorithm.'
-    )
-
-    @classmethod
-    def fromrepo(cls, repo):
-        # we allow multiple compression engine requirement to co-exist because
-        # strickly speaking, revlog seems to support mixed compression style.
-        #
-        # The compression used for new entries will be "the last one"
-        compression = b'zlib'
-        for req in repo.requirements:
-            prefix = req.startswith
-            if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
-                compression = req.split(b'-', 2)[2]
-        return compression
-
-    @classmethod
-    def fromconfig(cls, repo):
-        compengines = repo.ui.configlist(b'format', b'revlog-compression')
-        # return the first valid value as the selection code would do
-        for comp in compengines:
-            if comp in util.compengines:
-                return comp
-
-        # no valide compression found lets display it all for clarity
-        return b','.join(compengines)
-
-
-@registerformatvariant
-class compressionlevel(formatvariant):
-    name = b'compression-level'
-    default = b'default'
-
-    description = _(b'compression level')
-
-    upgrademessage = _(b'revlog content will be recompressed')
-
-    @classmethod
-    def fromrepo(cls, repo):
-        comp = compressionengine.fromrepo(repo)
-        level = None
-        if comp == b'zlib':
-            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
-        elif comp == b'zstd':
-            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
-        if level is None:
-            return b'default'
-        return bytes(level)
-
-    @classmethod
-    def fromconfig(cls, repo):
-        comp = compressionengine.fromconfig(repo)
-        level = None
-        if comp == b'zlib':
-            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
-        elif comp == b'zstd':
-            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
-        if level is None:
-            return b'default'
-        return bytes(level)
-
-
-def finddeficiencies(repo):
-    """returns a list of deficiencies that the repo suffer from"""
-    deficiencies = []
-
-    # We could detect lack of revlogv1 and store here, but they were added
-    # in 0.9.2 and we don't support upgrading repos without these
-    # requirements, so let's not bother.
-
-    for fv in allformatvariant:
-        if not fv.fromrepo(repo):
-            deficiencies.append(fv)
-
-    return deficiencies
-
-
-# search without '-' to support older form on newer client.
-#
-# We don't enforce backward compatibility for debug command so this
-# might eventually be dropped. However, having to use two different
-# forms in script when comparing result is anoying enough to add
-# backward compatibility for a while.
-legacy_opts_map = {
-    b'redeltaparent': b're-delta-parent',
-    b'redeltamultibase': b're-delta-multibase',
-    b'redeltaall': b're-delta-all',
-    b'redeltafulladd': b're-delta-fulladd',
-}
-
-
-def findoptimizations(repo):
-    """Determine optimisation that could be used during upgrade"""
-    # These are unconditionally added. There is logic later that figures out
-    # which ones to apply.
-    optimizations = []
-
-    optimizations.append(
-        improvement(
-            name=b're-delta-parent',
-            type=optimisation,
-            description=_(
-                b'deltas within internal storage will be recalculated to '
-                b'choose an optimal base revision where this was not '
-                b'already done; the size of the repository may shrink and '
-                b'various operations may become faster; the first time '
-                b'this optimization is performed could slow down upgrade '
-                b'execution considerably; subsequent invocations should '
-                b'not run noticeably slower'
-            ),
-            upgrademessage=_(
-                b'deltas within internal storage will choose a new '
-                b'base revision if needed'
-            ),
-        )
-    )
-
-    optimizations.append(
-        improvement(
-            name=b're-delta-multibase',
-            type=optimisation,
-            description=_(
-                b'deltas within internal storage will be recalculated '
-                b'against multiple base revision and the smallest '
-                b'difference will be used; the size of the repository may '
-                b'shrink significantly when there are many merges; this '
-                b'optimization will slow down execution in proportion to '
-                b'the number of merges in the repository and the amount '
-                b'of files in the repository; this slow down should not '
-                b'be significant unless there are tens of thousands of '
-                b'files and thousands of merges'
-            ),
-            upgrademessage=_(
-                b'deltas within internal storage will choose an '
-                b'optimal delta by computing deltas against multiple '
-                b'parents; may slow down execution time '
-                b'significantly'
-            ),
-        )
-    )
-
-    optimizations.append(
-        improvement(
-            name=b're-delta-all',
-            type=optimisation,
-            description=_(
-                b'deltas within internal storage will always be '
-                b'recalculated without reusing prior deltas; this will '
-                b'likely make execution run several times slower; this '
-                b'optimization is typically not needed'
-            ),
-            upgrademessage=_(
-                b'deltas within internal storage will be fully '
-                b'recomputed; this will likely drastically slow down '
-                b'execution time'
-            ),
-        )
-    )
-
-    optimizations.append(
-        improvement(
-            name=b're-delta-fulladd',
-            type=optimisation,
-            description=_(
-                b'every revision will be re-added as if it was new '
-                b'content. It will go through the full storage '
-                b'mechanism giving extensions a chance to process it '
-                b'(eg. lfs). This is similar to "re-delta-all" but even '
-                b'slower since more logic is involved.'
-            ),
-            upgrademessage=_(
-                b'each revision will be added as new content to the '
-                b'internal storage; this will likely drastically slow '
-                b'down execution time, but some extensions might need '
-                b'it'
-            ),
-        )
-    )
-
-    return optimizations
-
-
-def determineactions(repo, deficiencies, sourcereqs, destreqs):
-    """Determine upgrade actions that will be performed.
-
-    Given a list of improvements as returned by ``finddeficiencies`` and
-    ``findoptimizations``, determine the list of upgrade actions that
-    will be performed.
-
-    The role of this function is to filter improvements if needed, apply
-    recommended optimizations from the improvements list that make sense,
-    etc.
-
-    Returns a list of action names.
-    """
-    newactions = []
-
-    for d in deficiencies:
-        name = d._requirement
-
-        # If the action is a requirement that doesn't show up in the
-        # destination requirements, prune the action.
-        if name is not None and name not in destreqs:
-            continue
-
-        newactions.append(d)
-
-    # FUTURE consider adding some optimizations here for certain transitions.
-    # e.g. adding generaldelta could schedule parent redeltas.
-
-    return newactions
-
-
-def _revlogfrompath(repo, path):
-    """Obtain a revlog from a repo path.
-
-    An instance of the appropriate class is returned.
-    """
-    if path == b'00changelog.i':
-        return changelog.changelog(repo.svfs)
-    elif path.endswith(b'00manifest.i'):
-        mandir = path[: -len(b'00manifest.i')]
-        return manifest.manifestrevlog(repo.svfs, tree=mandir)
-    else:
-        # reverse of "/".join(("data", path + ".i"))
-        return filelog.filelog(repo.svfs, path[5:-2])
-
-
-def _copyrevlog(tr, destrepo, oldrl, unencodedname):
-    """copy all relevant files for `oldrl` into `destrepo` store
-
-    Files are copied "as is" without any transformation. The copy is performed
-    without extra checks. Callers are responsible for making sure the copied
-    content is compatible with format of the destination repository.
-    """
-    oldrl = getattr(oldrl, '_revlog', oldrl)
-    newrl = _revlogfrompath(destrepo, unencodedname)
-    newrl = getattr(newrl, '_revlog', newrl)
-
-    oldvfs = oldrl.opener
-    newvfs = newrl.opener
-    oldindex = oldvfs.join(oldrl.indexfile)
-    newindex = newvfs.join(newrl.indexfile)
-    olddata = oldvfs.join(oldrl.datafile)
-    newdata = newvfs.join(newrl.datafile)
-
-    with newvfs(newrl.indexfile, b'w'):
-        pass  # create all the directories
-
-    util.copyfile(oldindex, newindex)
-    copydata = oldrl.opener.exists(oldrl.datafile)
-    if copydata:
-        util.copyfile(olddata, newdata)
-
-    if not (
-        unencodedname.endswith(b'00changelog.i')
-        or unencodedname.endswith(b'00manifest.i')
-    ):
-        destrepo.svfs.fncache.add(unencodedname)
-        if copydata:
-            destrepo.svfs.fncache.add(unencodedname[:-2] + b'.d')
-
-
-UPGRADE_CHANGELOG = object()
-UPGRADE_MANIFEST = object()
-UPGRADE_FILELOG = object()
-
-UPGRADE_ALL_REVLOGS = frozenset(
-    [UPGRADE_CHANGELOG, UPGRADE_MANIFEST, UPGRADE_FILELOG]
+from .upgrade_utils import (
+    actions as upgrade_actions,
+    engine as upgrade_engine,
 )
 
-
-def getsidedatacompanion(srcrepo, dstrepo):
-    sidedatacompanion = None
-    removedreqs = srcrepo.requirements - dstrepo.requirements
-    addedreqs = dstrepo.requirements - srcrepo.requirements
-    if requirements.SIDEDATA_REQUIREMENT in removedreqs:
-
-        def sidedatacompanion(rl, rev):
-            rl = getattr(rl, '_revlog', rl)
-            if rl.flags(rev) & revlog.REVIDX_SIDEDATA:
-                return True, (), {}, 0, 0
-            return False, (), {}, 0, 0
-
-    elif requirements.COPIESSDC_REQUIREMENT in addedreqs:
-        sidedatacompanion = metadata.getsidedataadder(srcrepo, dstrepo)
-    elif requirements.COPIESSDC_REQUIREMENT in removedreqs:
-        sidedatacompanion = metadata.getsidedataremover(srcrepo, dstrepo)
-    return sidedatacompanion
-
-
-def matchrevlog(revlogfilter, entry):
-    """check if a revlog is selected for cloning.
-
-    In other words, are there any updates which need to be done on revlog
-    or it can be blindly copied.
-
-    The store entry is checked against the passed filter"""
-    if entry.endswith(b'00changelog.i'):
-        return UPGRADE_CHANGELOG in revlogfilter
-    elif entry.endswith(b'00manifest.i'):
-        return UPGRADE_MANIFEST in revlogfilter
-    return UPGRADE_FILELOG in revlogfilter
-
-
-def _clonerevlogs(
-    ui,
-    srcrepo,
-    dstrepo,
-    tr,
-    deltareuse,
-    forcedeltabothparents,
-    revlogs=UPGRADE_ALL_REVLOGS,
-):
-    """Copy revlogs between 2 repos."""
-    revcount = 0
-    srcsize = 0
-    srcrawsize = 0
-    dstsize = 0
-    fcount = 0
-    frevcount = 0
-    fsrcsize = 0
-    frawsize = 0
-    fdstsize = 0
-    mcount = 0
-    mrevcount = 0
-    msrcsize = 0
-    mrawsize = 0
-    mdstsize = 0
-    crevcount = 0
-    csrcsize = 0
-    crawsize = 0
-    cdstsize = 0
-
-    alldatafiles = list(srcrepo.store.walk())
-
-    # Perform a pass to collect metadata. This validates we can open all
-    # source files and allows a unified progress bar to be displayed.
-    for unencoded, encoded, size in alldatafiles:
-        if unencoded.endswith(b'.d'):
-            continue
-
-        rl = _revlogfrompath(srcrepo, unencoded)
-
-        info = rl.storageinfo(
-            exclusivefiles=True,
-            revisionscount=True,
-            trackedsize=True,
-            storedsize=True,
-        )
-
-        revcount += info[b'revisionscount'] or 0
-        datasize = info[b'storedsize'] or 0
-        rawsize = info[b'trackedsize'] or 0
-
-        srcsize += datasize
-        srcrawsize += rawsize
-
-        # This is for the separate progress bars.
-        if isinstance(rl, changelog.changelog):
-            crevcount += len(rl)
-            csrcsize += datasize
-            crawsize += rawsize
-        elif isinstance(rl, manifest.manifestrevlog):
-            mcount += 1
-            mrevcount += len(rl)
-            msrcsize += datasize
-            mrawsize += rawsize
-        elif isinstance(rl, filelog.filelog):
-            fcount += 1
-            frevcount += len(rl)
-            fsrcsize += datasize
-            frawsize += rawsize
-        else:
-            error.ProgrammingError(b'unknown revlog type')
-
-    if not revcount:
-        return
-
-    ui.status(
-        _(
-            b'migrating %d total revisions (%d in filelogs, %d in manifests, '
-            b'%d in changelog)\n'
-        )
-        % (revcount, frevcount, mrevcount, crevcount)
-    )
-    ui.status(
-        _(b'migrating %s in store; %s tracked data\n')
-        % ((util.bytecount(srcsize), util.bytecount(srcrawsize)))
-    )
-
-    # Used to keep track of progress.
-    progress = None
-
-    def oncopiedrevision(rl, rev, node):
-        progress.increment()
-
-    sidedatacompanion = getsidedatacompanion(srcrepo, dstrepo)
-
-    # Do the actual copying.
-    # FUTURE this operation can be farmed off to worker processes.
-    seen = set()
-    for unencoded, encoded, size in alldatafiles:
-        if unencoded.endswith(b'.d'):
-            continue
-
-        oldrl = _revlogfrompath(srcrepo, unencoded)
-
-        if isinstance(oldrl, changelog.changelog) and b'c' not in seen:
-            ui.status(
-                _(
-                    b'finished migrating %d manifest revisions across %d '
-                    b'manifests; change in size: %s\n'
-                )
-                % (mrevcount, mcount, util.bytecount(mdstsize - msrcsize))
-            )
-
-            ui.status(
-                _(
-                    b'migrating changelog containing %d revisions '
-                    b'(%s in store; %s tracked data)\n'
-                )
-                % (
-                    crevcount,
-                    util.bytecount(csrcsize),
-                    util.bytecount(crawsize),
-                )
-            )
-            seen.add(b'c')
-            progress = srcrepo.ui.makeprogress(
-                _(b'changelog revisions'), total=crevcount
-            )
-        elif isinstance(oldrl, manifest.manifestrevlog) and b'm' not in seen:
-            ui.status(
-                _(
-                    b'finished migrating %d filelog revisions across %d '
-                    b'filelogs; change in size: %s\n'
-                )
-                % (frevcount, fcount, util.bytecount(fdstsize - fsrcsize))
-            )
+from .utils import (
+    stringutil,
+)
 
-            ui.status(
-                _(
-                    b'migrating %d manifests containing %d revisions '
-                    b'(%s in store; %s tracked data)\n'
-                )
-                % (
-                    mcount,
-                    mrevcount,
-                    util.bytecount(msrcsize),
-                    util.bytecount(mrawsize),
-                )
-            )
-            seen.add(b'm')
-            if progress:
-                progress.complete()
-            progress = srcrepo.ui.makeprogress(
-                _(b'manifest revisions'), total=mrevcount
-            )
-        elif b'f' not in seen:
-            ui.status(
-                _(
-                    b'migrating %d filelogs containing %d revisions '
-                    b'(%s in store; %s tracked data)\n'
-                )
-                % (
-                    fcount,
-                    frevcount,
-                    util.bytecount(fsrcsize),
-                    util.bytecount(frawsize),
-                )
-            )
-            seen.add(b'f')
-            if progress:
-                progress.complete()
-            progress = srcrepo.ui.makeprogress(
-                _(b'file revisions'), total=frevcount
-            )
-
-        if matchrevlog(revlogs, unencoded):
-            ui.note(
-                _(b'cloning %d revisions from %s\n') % (len(oldrl), unencoded)
-            )
-            newrl = _revlogfrompath(dstrepo, unencoded)
-            oldrl.clone(
-                tr,
-                newrl,
-                addrevisioncb=oncopiedrevision,
-                deltareuse=deltareuse,
-                forcedeltabothparents=forcedeltabothparents,
-                sidedatacompanion=sidedatacompanion,
-            )
-        else:
-            msg = _(b'blindly copying %s containing %i revisions\n')
-            ui.note(msg % (unencoded, len(oldrl)))
-            _copyrevlog(tr, dstrepo, oldrl, unencoded)
-
-            newrl = _revlogfrompath(dstrepo, unencoded)
-
-        info = newrl.storageinfo(storedsize=True)
-        datasize = info[b'storedsize'] or 0
-
-        dstsize += datasize
-
-        if isinstance(newrl, changelog.changelog):
-            cdstsize += datasize
-        elif isinstance(newrl, manifest.manifestrevlog):
-            mdstsize += datasize
-        else:
-            fdstsize += datasize
-
-    progress.complete()
-
-    ui.status(
-        _(
-            b'finished migrating %d changelog revisions; change in size: '
-            b'%s\n'
-        )
-        % (crevcount, util.bytecount(cdstsize - csrcsize))
-    )
-
-    ui.status(
-        _(
-            b'finished migrating %d total revisions; total change in store '
-            b'size: %s\n'
-        )
-        % (revcount, util.bytecount(dstsize - srcsize))
-    )
-
-
-def _filterstorefile(srcrepo, dstrepo, requirements, path, mode, st):
-    """Determine whether to copy a store file during upgrade.
-
-    This function is called when migrating store files from ``srcrepo`` to
-    ``dstrepo`` as part of upgrading a repository.
-
-    Args:
-      srcrepo: repo we are copying from
-      dstrepo: repo we are copying to
-      requirements: set of requirements for ``dstrepo``
-      path: store file being examined
-      mode: the ``ST_MODE`` file type of ``path``
-      st: ``stat`` data structure for ``path``
-
-    Function should return ``True`` if the file is to be copied.
-    """
-    # Skip revlogs.
-    if path.endswith((b'.i', b'.d', b'.n', b'.nd')):
-        return False
-    # Skip transaction related files.
-    if path.startswith(b'undo'):
-        return False
-    # Only copy regular files.
-    if mode != stat.S_IFREG:
-        return False
-    # Skip other skipped files.
-    if path in (b'lock', b'fncache'):
-        return False
-
-    return True
-
-
-def _finishdatamigration(ui, srcrepo, dstrepo, requirements):
-    """Hook point for extensions to perform additional actions during upgrade.
-
-    This function is called after revlogs and store files have been copied but
-    before the new store is swapped into the original location.
-    """
-
-
-def _upgraderepo(
-    ui, srcrepo, dstrepo, requirements, actions, revlogs=UPGRADE_ALL_REVLOGS
-):
-    """Do the low-level work of upgrading a repository.
-
-    The upgrade is effectively performed as a copy between a source
-    repository and a temporary destination repository.
-
-    The source repository is unmodified for as long as possible so the
-    upgrade can abort at any time without causing loss of service for
-    readers and without corrupting the source repository.
-    """
-    assert srcrepo.currentwlock()
-    assert dstrepo.currentwlock()
-
-    ui.status(
-        _(
-            b'(it is safe to interrupt this process any time before '
-            b'data migration completes)\n'
-        )
-    )
-
-    if b're-delta-all' in actions:
-        deltareuse = revlog.revlog.DELTAREUSENEVER
-    elif b're-delta-parent' in actions:
-        deltareuse = revlog.revlog.DELTAREUSESAMEREVS
-    elif b're-delta-multibase' in actions:
-        deltareuse = revlog.revlog.DELTAREUSESAMEREVS
-    elif b're-delta-fulladd' in actions:
-        deltareuse = revlog.revlog.DELTAREUSEFULLADD
-    else:
-        deltareuse = revlog.revlog.DELTAREUSEALWAYS
-
-    with dstrepo.transaction(b'upgrade') as tr:
-        _clonerevlogs(
-            ui,
-            srcrepo,
-            dstrepo,
-            tr,
-            deltareuse,
-            b're-delta-multibase' in actions,
-            revlogs=revlogs,
-        )
-
-    # Now copy other files in the store directory.
-    # The sorted() makes execution deterministic.
-    for p, kind, st in sorted(srcrepo.store.vfs.readdir(b'', stat=True)):
-        if not _filterstorefile(srcrepo, dstrepo, requirements, p, kind, st):
-            continue
-
-        srcrepo.ui.status(_(b'copying %s\n') % p)
-        src = srcrepo.store.rawvfs.join(p)
-        dst = dstrepo.store.rawvfs.join(p)
-        util.copyfile(src, dst, copystat=True)
-
-    _finishdatamigration(ui, srcrepo, dstrepo, requirements)
-
-    ui.status(_(b'data fully migrated to temporary repository\n'))
-
-    backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', dir=srcrepo.path)
-    backupvfs = vfsmod.vfs(backuppath)
-
-    # Make a backup of requires file first, as it is the first to be modified.
-    util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires'))
-
-    # We install an arbitrary requirement that clients must not support
-    # as a mechanism to lock out new clients during the data swap. This is
-    # better than allowing a client to continue while the repository is in
-    # an inconsistent state.
-    ui.status(
-        _(
-            b'marking source repository as being upgraded; clients will be '
-            b'unable to read from repository\n'
-        )
-    )
-    scmutil.writereporequirements(
-        srcrepo, srcrepo.requirements | {b'upgradeinprogress'}
-    )
-
-    ui.status(_(b'starting in-place swap of repository data\n'))
-    ui.status(_(b'replaced files will be backed up at %s\n') % backuppath)
-
-    # Now swap in the new store directory. Doing it as a rename should make
-    # the operation nearly instantaneous and atomic (at least in well-behaved
-    # environments).
-    ui.status(_(b'replacing store...\n'))
-    tstart = util.timer()
-    util.rename(srcrepo.spath, backupvfs.join(b'store'))
-    util.rename(dstrepo.spath, srcrepo.spath)
-    elapsed = util.timer() - tstart
-    ui.status(
-        _(
-            b'store replacement complete; repository was inconsistent for '
-            b'%0.1fs\n'
-        )
-        % elapsed
-    )
-
-    # We first write the requirements file. Any new requirements will lock
-    # out legacy clients.
-    ui.status(
-        _(
-            b'finalizing requirements file and making repository readable '
-            b'again\n'
-        )
-    )
-    scmutil.writereporequirements(srcrepo, requirements)
-
-    # The lock file from the old store won't be removed because nothing has a
-    # reference to its new location. So clean it up manually. Alternatively, we
-    # could update srcrepo.svfs and other variables to point to the new
-    # location. This is simpler.
-    backupvfs.unlink(b'store/lock')
-
-    return backuppath
+allformatvariant = upgrade_actions.allformatvariant
 
 
 def upgraderepo(
@@ -1146,93 +38,43 @@
     backup=True,
     manifest=None,
     changelog=None,
+    filelogs=None,
 ):
     """Upgrade a repository in place."""
     if optimize is None:
-        optimize = []
-    optimize = {legacy_opts_map.get(o, o) for o in optimize}
+        optimize = {}
     repo = repo.unfiltered()
 
-    revlogs = set(UPGRADE_ALL_REVLOGS)
-    specentries = ((b'c', changelog), (b'm', manifest))
+    revlogs = set(upgrade_engine.UPGRADE_ALL_REVLOGS)
+    specentries = (
+        (upgrade_engine.UPGRADE_CHANGELOG, changelog),
+        (upgrade_engine.UPGRADE_MANIFEST, manifest),
+        (upgrade_engine.UPGRADE_FILELOGS, filelogs),
+    )
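+    # keep only the entries that were explicitly specified (i.e. not None)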
     specified = [(y, x) for (y, x) in specentries if x is not None]
     if specified:
         # we have some limitation on revlogs to be recloned
         if any(x for y, x in specified):
             revlogs = set()
-            for r, enabled in specified:
+            for upgrade, enabled in specified:
                 if enabled:
-                    if r == b'c':
-                        revlogs.add(UPGRADE_CHANGELOG)
-                    elif r == b'm':
-                        revlogs.add(UPGRADE_MANIFEST)
+                    revlogs.add(upgrade)
         else:
             # none are enabled
-            for r, __ in specified:
-                if r == b'c':
-                    revlogs.discard(UPGRADE_CHANGELOG)
-                elif r == b'm':
-                    revlogs.discard(UPGRADE_MANIFEST)
+            for upgrade, __ in specified:
+                revlogs.discard(upgrade)
 
     # Ensure the repository can be upgraded.
-    missingreqs = requiredsourcerequirements(repo) - repo.requirements
-    if missingreqs:
-        raise error.Abort(
-            _(b'cannot upgrade repository; requirement missing: %s')
-            % _(b', ').join(sorted(missingreqs))
-        )
-
-    blockedreqs = blocksourcerequirements(repo) & repo.requirements
-    if blockedreqs:
-        raise error.Abort(
-            _(
-                b'cannot upgrade repository; unsupported source '
-                b'requirement: %s'
-            )
-            % _(b', ').join(sorted(blockedreqs))
-        )
-
-    # FUTURE there is potentially a need to control the wanted requirements via
-    # command arguments or via an extension hook point.
-    newreqs = localrepo.newreporequirements(
-        repo.ui, localrepo.defaultcreateopts(repo.ui)
-    )
-    newreqs.update(preservedrequirements(repo))
+    upgrade_actions.check_source_requirements(repo)
 
-    noremovereqs = (
-        repo.requirements - newreqs - supportremovedrequirements(repo)
-    )
-    if noremovereqs:
-        raise error.Abort(
-            _(
-                b'cannot upgrade repository; requirement would be '
-                b'removed: %s'
-            )
-            % _(b', ').join(sorted(noremovereqs))
-        )
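+    # compute the requirements a brand new repository would get with the
+    # current config; these are the requirements the upgrade targets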
+    default_options = localrepo.defaultcreateopts(repo.ui)
+    newreqs = localrepo.newreporequirements(repo.ui, default_options)
+    newreqs.update(upgrade_actions.preservedrequirements(repo))
 
-    noaddreqs = newreqs - repo.requirements - allowednewrequirements(repo)
-    if noaddreqs:
-        raise error.Abort(
-            _(
-                b'cannot upgrade repository; do not support adding '
-                b'requirement: %s'
-            )
-            % _(b', ').join(sorted(noaddreqs))
-        )
-
-    unsupportedreqs = newreqs - supporteddestrequirements(repo)
-    if unsupportedreqs:
-        raise error.Abort(
-            _(
-                b'cannot upgrade repository; do not support '
-                b'destination requirement: %s'
-            )
-            % _(b', ').join(sorted(unsupportedreqs))
-        )
+    upgrade_actions.check_requirements_changes(repo, newreqs)
 
     # Find and validate all improvements that can be made.
-    alloptimizations = findoptimizations(repo)
+    alloptimizations = upgrade_actions.findoptimizations(repo)
 
     # Apply and Validate arguments.
     optimizations = []
@@ -1248,75 +90,41 @@
             hint=_(b'run without arguments to see valid optimizations'),
         )
 
-    deficiencies = finddeficiencies(repo)
-    actions = determineactions(repo, deficiencies, repo.requirements, newreqs)
-    actions.extend(
-        o
-        for o in sorted(optimizations)
-        # determineactions could have added optimisation
-        if o not in actions
+    format_upgrades = upgrade_actions.find_format_upgrades(repo)
+    up_actions = upgrade_actions.determine_upgrade_actions(
+        repo, format_upgrades, optimizations, repo.requirements, newreqs
     )
+    removed_actions = upgrade_actions.find_format_downgrades(repo)
 
     removedreqs = repo.requirements - newreqs
     addedreqs = newreqs - repo.requirements
 
-    if revlogs != UPGRADE_ALL_REVLOGS:
-        incompatible = RECLONES_REQUIREMENTS & (removedreqs | addedreqs)
+    if revlogs != upgrade_engine.UPGRADE_ALL_REVLOGS:
+        incompatible = upgrade_actions.RECLONES_REQUIREMENTS & (
+            removedreqs | addedreqs
+        )
         if incompatible:
             msg = _(
                 b'ignoring revlogs selection flags, format requirements '
                 b'change: %s\n'
             )
             ui.warn(msg % b', '.join(sorted(incompatible)))
-            revlogs = UPGRADE_ALL_REVLOGS
-
-    def write_labeled(l, label):
-        first = True
-        for r in sorted(l):
-            if not first:
-                ui.write(b', ')
-            ui.write(r, label=label)
-            first = False
+            revlogs = upgrade_engine.UPGRADE_ALL_REVLOGS
 
-    def printrequirements():
-        ui.write(_(b'requirements\n'))
-        ui.write(_(b'   preserved: '))
-        write_labeled(
-            newreqs & repo.requirements, "upgrade-repo.requirement.preserved"
-        )
-        ui.write((b'\n'))
-        removed = repo.requirements - newreqs
-        if repo.requirements - newreqs:
-            ui.write(_(b'   removed: '))
-            write_labeled(removed, "upgrade-repo.requirement.removed")
-            ui.write((b'\n'))
-        added = newreqs - repo.requirements
-        if added:
-            ui.write(_(b'   added: '))
-            write_labeled(added, "upgrade-repo.requirement.added")
-            ui.write((b'\n'))
-        ui.write(b'\n')
-
-    def printoptimisations():
-        optimisations = [a for a in actions if a.type == optimisation]
-        optimisations.sort(key=lambda a: a.name)
-        if optimisations:
-            ui.write(_(b'optimisations: '))
-            write_labeled(
-                [a.name for a in optimisations],
-                "upgrade-repo.optimisation.performed",
-            )
-            ui.write(b'\n\n')
-
-    def printupgradeactions():
-        for a in actions:
-            ui.status(b'%s\n   %s\n\n' % (a.name, a.upgrademessage))
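+    # gather everything into a single operation object that the rest of the
+    # upgrade code (printing and the engine) works from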
+    upgrade_op = upgrade_actions.UpgradeOperation(
+        ui,
+        newreqs,
+        repo.requirements,
+        up_actions,
+        removed_actions,
+        revlogs,
+    )
 
     if not run:
         fromconfig = []
         onlydefault = []
 
-        for d in deficiencies:
+        for d in format_upgrades:
             if d.fromconfig(repo):
                 fromconfig.append(d)
             elif d.default:
@@ -1346,12 +154,7 @@
 
             ui.status(b'\n')
         else:
-            ui.status(
-                _(
-                    b'(no feature deficiencies found in existing '
-                    b'repository)\n'
-                )
-            )
+            ui.status(_(b'(no format upgrades found in existing repository)\n'))
 
         ui.status(
             _(
@@ -1360,30 +163,30 @@
             )
         )
 
-        printrequirements()
-        printoptimisations()
-        printupgradeactions()
+        upgrade_op.print_requirements()
+        upgrade_op.print_optimisations()
+        upgrade_op.print_upgrade_actions()
+        upgrade_op.print_affected_revlogs()
 
-        unusedoptimize = [i for i in alloptimizations if i not in actions]
-
-        if unusedoptimize:
+        if upgrade_op.unused_optimizations:
             ui.status(
                 _(
                     b'additional optimizations are available by specifying '
                     b'"--optimize <name>":\n\n'
                 )
             )
-            for i in unusedoptimize:
-                ui.status(_(b'%s\n   %s\n\n') % (i.name, i.description))
+            upgrade_op.print_unused_optimizations()
         return
 
+    if not (upgrade_op.upgrade_actions or upgrade_op.removed_actions):
+        ui.status(_(b'nothing to do\n'))
+        return
     # Else we're in the run=true case.
     ui.write(_(b'upgrade will perform the following actions:\n\n'))
-    printrequirements()
-    printoptimisations()
-    printupgradeactions()
-
-    upgradeactions = [a.name for a in actions]
+    upgrade_op.print_requirements()
+    upgrade_op.print_optimisations()
+    upgrade_op.print_upgrade_actions()
+    upgrade_op.print_affected_revlogs()
 
     ui.status(_(b'beginning upgrade...\n'))
     with repo.wlock(), repo.lock():
@@ -1398,7 +201,7 @@
         try:
             ui.status(
                 _(
-                    b'creating temporary repository to stage migrated '
+                    b'creating temporary repository to stage upgraded '
                     b'data: %s\n'
                 )
                 % tmppath
@@ -1409,12 +212,12 @@
             dstrepo = hg.repository(repoui, path=tmppath, create=True)
 
             with dstrepo.wlock(), dstrepo.lock():
-                backuppath = _upgraderepo(
-                    ui, repo, dstrepo, newreqs, upgradeactions, revlogs=revlogs
+                backuppath = upgrade_engine.upgrade(
+                    ui, repo, dstrepo, upgrade_op
                 )
-            if not (backup or backuppath is None):
+            if not backup:
                 ui.status(
-                    _(b'removing old repository content%s\n') % backuppath
+                    _(b'removing old repository content %s\n') % backuppath
                 )
                 repo.vfs.rmtree(backuppath, forcibly=True)
                 backuppath = None
@@ -1434,3 +237,115 @@
                         b'repository is verified\n'
                     )
                 )
+
+            upgrade_op.print_post_op_messages()
+
+
+def upgrade_share_to_safe(
+    ui,
+    hgvfs,
+    storevfs,
+    current_requirements,
+    mismatch_config,
+    mismatch_warn,
+):
+    """Upgrades a share to use share-safe mechanism"""
+    wlock = None
+    store_requirements = localrepo._readrequires(storevfs, False)
+    original_crequirements = current_requirements.copy()
+    # after upgrade, store requires will be shared, so let's find
+    # the requirements which are not present in store and
+    # write them to share's .hg/requires
+    diffrequires = current_requirements - store_requirements
+    # add share-safe requirement as it will mark the share as share-safe
+    diffrequires.add(requirementsmod.SHARESAFE_REQUIREMENT)
+    current_requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
+    # in `allow` case, we don't try to upgrade, we just respect the source
+    # state, update requirements and continue
+    if mismatch_config == b'allow':
+        return
+    try:
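+        # a zero timeout means we do not wait for the lock; if another
+        # process holds the wlock, LockError is raised and handled below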
+        wlock = lockmod.trylock(ui, hgvfs, b'wlock', 0, 0)
+        # some process might change the requirement in between, re-read
+        # and update current_requirements
+        locked_requirements = localrepo._readrequires(hgvfs, True)
+        if locked_requirements != original_crequirements:
+            removed = current_requirements - locked_requirements
+            # update current_requirements in place because it's passed
+            # as reference
+            current_requirements -= removed
+            current_requirements |= locked_requirements
+            diffrequires = current_requirements - store_requirements
+            # add share-safe requirement as it will mark the share as share-safe
+            diffrequires.add(requirementsmod.SHARESAFE_REQUIREMENT)
+            current_requirements.add(requirementsmod.SHARESAFE_REQUIREMENT)
+        scmutil.writerequires(hgvfs, diffrequires)
+        ui.warn(_(b'repository upgraded to use share-safe mode\n'))
+    except error.LockError as e:
+        if mismatch_config == b'upgrade-abort':
+            raise error.Abort(
+                _(b'failed to upgrade share, got error: %s')
+                % stringutil.forcebytestr(e.strerror)
+            )
+        elif mismatch_warn:
+            ui.warn(
+                _(b'failed to upgrade share, got error: %s\n')
+                % stringutil.forcebytestr(e.strerror)
+            )
+    finally:
+        if wlock:
+            wlock.release()
+
+
+def downgrade_share_to_non_safe(
+    ui,
+    hgvfs,
+    sharedvfs,
+    current_requirements,
+    mismatch_config,
+    mismatch_warn,
+):
+    """Downgrades a share which use share-safe to not use it"""
+    wlock = None
+    source_requirements = localrepo._readrequires(sharedvfs, True)
+    original_crequirements = current_requirements.copy()
+    # we cannot be 100% sure on which requirements were present in store when
+    # the source supported share-safe. However, we do know that working
+    # directory requirements were not there. Hence we remove them
+    source_requirements -= requirementsmod.WORKING_DIR_REQUIREMENTS
+    current_requirements |= source_requirements
+    current_requirements.remove(requirementsmod.SHARESAFE_REQUIREMENT)
+    if mismatch_config == b'allow':
+        return
+
+    try:
+        wlock = lockmod.trylock(ui, hgvfs, b'wlock', 0, 0)
+        # some process might change the requirement in between, re-read
+        # and update current_requirements
+        locked_requirements = localrepo._readrequires(hgvfs, True)
+        if locked_requirements != original_crequirements:
+            removed = current_requirements - locked_requirements
+            # update current_requirements in place because it's passed
+            # as reference
+            current_requirements -= removed
+            current_requirements |= locked_requirements
+            current_requirements |= source_requirements
+            current_requirements -= {requirementsmod.SHARESAFE_REQUIREMENT}
+        scmutil.writerequires(hgvfs, current_requirements)
+        ui.warn(_(b'repository downgraded to not use share-safe mode\n'))
+    except error.LockError as e:
+        # If upgrade-abort is set, abort when upgrade fails, else let the
+        # process continue as `upgrade-allow` is set
+        if mismatch_config == b'downgrade-abort':
+            raise error.Abort(
+                _(b'failed to downgrade share, got error: %s')
+                % stringutil.forcebytestr(e.strerror)
+            )
+        elif mismatch_warn:
+            ui.warn(
+                _(b'failed to downgrade share, got error: %s\n')
+                % stringutil.forcebytestr(e.strerror)
+            )
+    finally:
+        if wlock:
+            wlock.release()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/upgrade_utils/actions.py	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,911 @@
+# upgrade.py - functions for in place upgrade of Mercurial repository
+#
+# Copyright (c) 2016-present, Gregory Szorc
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+from ..i18n import _
+from .. import (
+    error,
+    localrepo,
+    requirements,
+    revlog,
+    util,
+)
+
+from ..utils import compression
+
+# list of requirements that request a clone of all revlog if added/removed
+RECLONES_REQUIREMENTS = {
+    b'generaldelta',
+    requirements.SPARSEREVLOG_REQUIREMENT,
+}
+
+
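+# requirements that must be carried over from the existing repository in
+# addition to the defaults of a new repository (currently none)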
+def preservedrequirements(repo):
+    return set()
+
+
+FORMAT_VARIANT = b'deficiency'
+OPTIMISATION = b'optimization'
+
+
+class improvement(object):
+    """Represents an improvement that can be made as part of an upgrade.
+
+    The following attributes are defined on each instance:
+
+    name
+       Machine-readable string uniquely identifying this improvement. It
+       will be mapped to an action later in the upgrade process.
+
+    type
+       Either ``FORMAT_VARIANT`` or ``OPTIMISATION``.
+       A format variant is where we change the storage format. Not all format
+       variant changes are an obvious problem.
+       An optimization is an action (sometimes optional) that
+       can be taken to further improve the state of the repository.
+
+    description
+       Message intended for humans explaining the improvement in more detail,
+       including the implications of it. For ``FORMAT_VARIANT`` types, should be
+       worded in the present tense. For ``OPTIMISATION`` types, should be
+       worded in the future tense.
+
+    upgrademessage
+       Message intended for humans explaining what an upgrade addressing this
+       issue will do. Should be worded in the future tense.
+
+    postupgrademessage
+       Message intended for humans which will be shown after an upgrade
+       operation in which the improvement was added
+
+    postdowngrademessage
+       Message intended for humans which will be shown after an upgrade
+       operation in which this improvement was removed
+    """
+
+    def __init__(self, name, type, description, upgrademessage):
+        self.name = name
+        self.type = type
+        self.description = description
+        self.upgrademessage = upgrademessage
+        self.postupgrademessage = None
+        self.postdowngrademessage = None
+
+    def __eq__(self, other):
+        if not isinstance(other, improvement):
+            # This is what Python tells us to do
+            return NotImplemented
+        return self.name == other.name
+
+    def __ne__(self, other):
+        return not (self == other)
+
+    def __hash__(self):
+        return hash(self.name)
+
+
+allformatvariant = []
+
+
+def registerformatvariant(cls):
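+    """class decorator registering a format variant in ``allformatvariant``"""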
+    allformatvariant.append(cls)
+    return cls
+
+
+class formatvariant(improvement):
+    """an improvement subclass dedicated to repository format"""
+
+    type = FORMAT_VARIANT
+    ### The following attributes should be defined for each class:
+
+    # machine-readable string uniquely identifying this improvement. it will be
+    # mapped to an action later in the upgrade process.
+    name = None
+
+    # message intended for humans explaining the improvement in more detail,
+    # including the implications of it. For ``FORMAT_VARIANT`` types, it
+    # should be worded in the present tense.
+    description = None
+
+    # message intended for humans explaining what an upgrade addressing this
+    # issue will do. should be worded in the future tense.
+    upgrademessage = None
+
+    # value of current Mercurial default for new repository
+    default = None
+
+    # Message intended for humans which will be shown after an upgrade
+    # operation in which the improvement was added
+    postupgrademessage = None
+
+    # Message intended for humans which will be shown after an upgrade
+    # operation in which this improvement was removed
+    postdowngrademessage = None
+
+    def __init__(self):
+        raise NotImplementedError()
+
+    @staticmethod
+    def fromrepo(repo):
+        """current value of the variant in the repository"""
+        raise NotImplementedError()
+
+    @staticmethod
+    def fromconfig(repo):
+        """current value of the variant in the configuration"""
+        raise NotImplementedError()
+
+
+class requirementformatvariant(formatvariant):
+    """formatvariant based on a 'requirement' name.
+
+    Many format variants are controlled by a 'requirement'. We define a small
+    subclass to factor out the common code.
+    """
+
+    # the requirement that controls this format variant
+    _requirement = None
+
+    @staticmethod
+    def _newreporequirements(ui):
+        return localrepo.newreporequirements(
+            ui, localrepo.defaultcreateopts(ui)
+        )
+
+    @classmethod
+    def fromrepo(cls, repo):
+        assert cls._requirement is not None
+        return cls._requirement in repo.requirements
+
+    @classmethod
+    def fromconfig(cls, repo):
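+        # a requirement is "enabled by config" if a brand new repository
+        # created with the current config would get it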
+        assert cls._requirement is not None
+        return cls._requirement in cls._newreporequirements(repo.ui)
+
+
+@registerformatvariant
+class fncache(requirementformatvariant):
+    name = b'fncache'
+
+    _requirement = b'fncache'
+
+    default = True
+
+    description = _(
+        b'long and reserved filenames may not work correctly; '
+        b'repository performance is sub-optimal'
+    )
+
+    upgrademessage = _(
+        b'repository will be more resilient to storing '
+        b'certain paths and performance of certain '
+        b'operations should be improved'
+    )
+
+
+@registerformatvariant
+class dotencode(requirementformatvariant):
+    name = b'dotencode'
+
+    _requirement = b'dotencode'
+
+    default = True
+
+    description = _(
+        b'storage of filenames beginning with a period or '
+        b'space may not work correctly'
+    )
+
+    upgrademessage = _(
+        b'repository will be better able to store files '
+        b'beginning with a space or period'
+    )
+
+
+@registerformatvariant
+class generaldelta(requirementformatvariant):
+    name = b'generaldelta'
+
+    _requirement = b'generaldelta'
+
+    default = True
+
+    description = _(
+        b'deltas within internal storage are unable to '
+        b'choose optimal revisions; repository is larger and '
+        b'slower than it could be; interaction with other '
+        b'repositories may require extra network and CPU '
+        b'resources, making "hg push" and "hg pull" slower'
+    )
+
+    upgrademessage = _(
+        b'repository storage will be able to create '
+        b'optimal deltas; new repository data will be '
+        b'smaller and read times should decrease; '
+        b'interacting with other repositories using this '
+        b'storage model should require less network and '
+        b'CPU resources, making "hg push" and "hg pull" '
+        b'faster'
+    )
+
+
+@registerformatvariant
+class sharesafe(requirementformatvariant):
+    name = b'share-safe'
+    _requirement = requirements.SHARESAFE_REQUIREMENT
+
+    default = False
+
+    description = _(
+        b'old shared repositories do not share source repository '
+        b'requirements and config. This leads to various problems '
+        b'when the source repository format is upgraded or some new '
+        b'extensions are enabled.'
+    )
+
+    upgrademessage = _(
+        b'Upgrades a repository to share-safe format so that future '
+        b'shares of this repository share its requirements and configs.'
+    )
+
+    postdowngrademessage = _(
+        b'repository downgraded to not use share safe mode, '
+        b'existing shares will not work and need to'
+        b' be reshared.'
+    )
+
+    postupgrademessage = _(
+        b'repository upgraded to share safe mode, existing'
+        b' shares will still work in old non-safe mode. '
+        b'Re-share existing shares to use them in safe mode.'
+        b' New shares will be created in safe mode.'
+    )
+
+
+@registerformatvariant
+class sparserevlog(requirementformatvariant):
+    name = b'sparserevlog'
+
+    _requirement = requirements.SPARSEREVLOG_REQUIREMENT
+
+    default = True
+
+    description = _(
+        b'in order to limit disk reading and memory usage on older '
+        b'versions, the span of a delta chain from its root to its '
+        b'end is limited, regardless of the relevant data in this span. '
+        b'This can severely limit the ability of Mercurial to build '
+        b'good delta chains, resulting in much more storage space '
+        b'being used and limiting the reusability of on-disk deltas '
+        b'during exchange.'
+    )
+
+    upgrademessage = _(
+        b'Revlog will support delta chains with more unused data '
+        b'between payloads. These gaps will be skipped at read '
+        b'time. This allows for better delta chains, yielding '
+        b'better compression and faster exchange with the server.'
+    )
+
+
+@registerformatvariant
+class sidedata(requirementformatvariant):
+    name = b'sidedata'
+
+    _requirement = requirements.SIDEDATA_REQUIREMENT
+
+    default = False
+
+    description = _(
+        b'Allows storage of extra data alongside a revision, '
+        b'unlocking various caching options.'
+    )
+
+    upgrademessage = _(b'Allows storage of extra data alongside a revision.')
+
+
+@registerformatvariant
+class persistentnodemap(requirementformatvariant):
+    name = b'persistent-nodemap'
+
+    _requirement = requirements.NODEMAP_REQUIREMENT
+
+    default = False
+
+    description = _(
+        b'persist the node -> rev mapping on disk to speed up lookups'
+    )
+
+    upgrademessage = _(b'Speed up revision lookup by node id.')
+
+
+@registerformatvariant
+class copiessdc(requirementformatvariant):
+    name = b'copies-sdc'
+
+    _requirement = requirements.COPIESSDC_REQUIREMENT
+
+    default = False
+
+    description = _(b'Stores copies information alongside changesets.')
+
+    upgrademessage = _(
+        b'Allows using a more efficient algorithm to deal with copy tracing.'
+    )
+
+
+@registerformatvariant
+class removecldeltachain(formatvariant):
+    name = b'plain-cl-delta'
+
+    default = True
+
+    description = _(
+        b'changelog storage is using deltas instead of '
+        b'raw entries; changelog reading and any '
+        b'operation relying on changelog data are slower '
+        b'than they could be'
+    )
+
+    upgrademessage = _(
+        b'changelog storage will be reformatted to '
+        b'store raw entries; changelog reading will be '
+        b'faster; changelog size may be reduced'
+    )
+
+    @staticmethod
+    def fromrepo(repo):
+        # Mercurial 4.0 changed changelogs to not use delta chains. Search for
+        # changelogs with deltas.
+        cl = repo.changelog
+        chainbase = cl.chainbase
+        return all(rev == chainbase(rev) for rev in cl)
+
+    @staticmethod
+    def fromconfig(repo):
+        return True
+
+
+@registerformatvariant
+class compressionengine(formatvariant):
+    name = b'compression'
+    default = b'zlib'
+
+    description = _(
+        b'Compression algorithm used to compress data. '
+        b'Some engines are faster than others'
+    )
+
+    upgrademessage = _(
+        b'revlog content will be recompressed with the new algorithm.'
+    )
+
+    @classmethod
+    def fromrepo(cls, repo):
+        # we allow multiple compression engine requirements to co-exist
+        # because, strictly speaking, revlogs seem to support mixed styles.
+        #
+        # The compression used for new entries will be "the last one"
+        compression = b'zlib'
+        for req in repo.requirements:
+            prefix = req.startswith
+            if prefix(b'revlog-compression-') or prefix(b'exp-compression-'):
+                compression = req.split(b'-', 2)[2]
+        return compression
+
+    @classmethod
+    def fromconfig(cls, repo):
+        compengines = repo.ui.configlist(b'format', b'revlog-compression')
+        # return the first valid value as the selection code would do
+        for comp in compengines:
+            if comp in util.compengines:
+                return comp
+
+        # no valid compression found; display them all for clarity
+        return b','.join(compengines)
+
+
+@registerformatvariant
+class compressionlevel(formatvariant):
+    name = b'compression-level'
+    default = b'default'
+
+    description = _(b'compression level')
+
+    upgrademessage = _(b'revlog content will be recompressed')
+
+    @classmethod
+    def fromrepo(cls, repo):
+        comp = compressionengine.fromrepo(repo)
+        level = None
+        if comp == b'zlib':
+            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
+        elif comp == b'zstd':
+            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
+        if level is None:
+            return b'default'
+        return bytes(level)
+
+    @classmethod
+    def fromconfig(cls, repo):
+        comp = compressionengine.fromconfig(repo)
+        level = None
+        if comp == b'zlib':
+            level = repo.ui.configint(b'storage', b'revlog.zlib.level')
+        elif comp == b'zstd':
+            level = repo.ui.configint(b'storage', b'revlog.zstd.level')
+        if level is None:
+            return b'default'
+        return bytes(level)
+
+
+def find_format_upgrades(repo):
+    """returns a list of format upgrades which can be perform on the repo"""
+    upgrades = []
+
+    # We could detect lack of revlogv1 and store here, but they were added
+    # in 0.9.2 and we don't support upgrading repos without these
+    # requirements, so let's not bother.
+
+    for fv in allformatvariant:
+        if not fv.fromrepo(repo):
+            upgrades.append(fv)
+
+    return upgrades
+
+
+def find_format_downgrades(repo):
+    """returns a list of format downgrades which will be performed on the repo
+    because of disabled config option for them"""
+
+    downgrades = []
+
+    for fv in allformatvariant:
+        if fv.name == b'compression':
+            # If there is a compression change between the repository and
+            # the config, the destination repository's compression will
+            # change and the current compression will be removed.
+            if fv.fromrepo(repo) != fv.fromconfig(repo):
+                downgrades.append(fv)
+            continue
+        # the format variant exists in the repo but is absent from the new
+        # repository config
+        if fv.fromrepo(repo) and not fv.fromconfig(repo):
+            downgrades.append(fv)
+
+    return downgrades
+
+
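For illustration, here is a minimal sketch of the fromrepo()/fromconfig() contract that the two finder functions above rely on. The toy class below is purely hypothetical and stands in for the real variants registered earlier in this file.

# Hypothetical toy variant: fromrepo() reports the repository's current
# state, fromconfig() reports what a freshly created repository would use.
class _toyvariant(object):
    name = b'toy-variant'

    @staticmethod
    def fromrepo(repo):
        return False  # pretend the repository lacks the feature

    @staticmethod
    def fromconfig(repo):
        return True  # pretend the config enables it for new repositories

# Enabled in config but missing from the repo: find_format_upgrades() would
# report it; the reverse situation would land in the downgrade list.
assert _toyvariant.fromconfig(None) and not _toyvariant.fromrepo(None)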
+ALL_OPTIMISATIONS = []
+
+
+def register_optimization(obj):
+    ALL_OPTIMISATIONS.append(obj)
+    return obj
+
+
+register_optimization(
+    improvement(
+        name=b're-delta-parent',
+        type=OPTIMISATION,
+        description=_(
+            b'deltas within internal storage will be recalculated to '
+            b'choose an optimal base revision where this was not '
+            b'already done; the size of the repository may shrink and '
+            b'various operations may become faster; the first time '
+            b'this optimization is performed could slow down upgrade '
+            b'execution considerably; subsequent invocations should '
+            b'not run noticeably slower'
+        ),
+        upgrademessage=_(
+            b'deltas within internal storage will choose a new '
+            b'base revision if needed'
+        ),
+    )
+)
+
+register_optimization(
+    improvement(
+        name=b're-delta-multibase',
+        type=OPTIMISATION,
+        description=_(
+            b'deltas within internal storage will be recalculated '
+            b'against multiple base revision and the smallest '
+            b'difference will be used; the size of the repository may '
+            b'shrink significantly when there are many merges; this '
+            b'optimization will slow down execution in proportion to '
+            b'the number of merges in the repository and the amount '
+            b'of files in the repository; this slow down should not '
+            b'be significant unless there are tens of thousands of '
+            b'files and thousands of merges'
+        ),
+        upgrademessage=_(
+            b'deltas within internal storage will choose an '
+            b'optimal delta by computing deltas against multiple '
+            b'parents; may slow down execution time '
+            b'significantly'
+        ),
+    )
+)
+
+register_optimization(
+    improvement(
+        name=b're-delta-all',
+        type=OPTIMISATION,
+        description=_(
+            b'deltas within internal storage will always be '
+            b'recalculated without reusing prior deltas; this will '
+            b'likely make execution run several times slower; this '
+            b'optimization is typically not needed'
+        ),
+        upgrademessage=_(
+            b'deltas within internal storage will be fully '
+            b'recomputed; this will likely drastically slow down '
+            b'execution time'
+        ),
+    )
+)
+
+register_optimization(
+    improvement(
+        name=b're-delta-fulladd',
+        type=OPTIMISATION,
+        description=_(
+            b'every revision will be re-added as if it was new '
+            b'content. It will go through the full storage '
+            b'mechanism giving extensions a chance to process it '
+            b'(eg. lfs). This is similar to "re-delta-all" but even '
+            b'slower since more logic is involved.'
+        ),
+        upgrademessage=_(
+            b'each revision will be added as new content to the '
+            b'internal storage; this will likely drastically slow '
+            b'down execution time, but some extensions might need '
+            b'it'
+        ),
+    )
+)
+
+
+def findoptimizations(repo):
+    """Determine optimisation that could be used during upgrade"""
+    # These are unconditionally added. There is logic later that figures out
+    # which ones to apply.
+    return list(ALL_OPTIMISATIONS)
+
+
+def determine_upgrade_actions(
+    repo, format_upgrades, optimizations, sourcereqs, destreqs
+):
+    """Determine upgrade actions that will be performed.
+
+    Given a list of improvements as returned by ``find_format_upgrades`` and
+    ``findoptimizations``, determine the list of upgrade actions that
+    will be performed.
+
+    The role of this function is to filter improvements if needed, apply
+    recommended optimizations from the improvements list that make sense,
+    etc.
+
+    Returns a list of action names.
+    """
+    newactions = []
+
+    for d in format_upgrades:
+        name = d._requirement
+
+        # If the action is a requirement that doesn't show up in the
+        # destination requirements, prune the action.
+        if name is not None and name not in destreqs:
+            continue
+
+        newactions.append(d)
+
+    newactions.extend(o for o in sorted(optimizations) if o not in newactions)
+
+    # FUTURE consider adding some optimizations here for certain transitions.
+    # e.g. adding generaldelta could schedule parent redeltas.
+
+    return newactions
+
+
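A hedged sketch of how the planning helpers above could be chained; `plan_upgrade` and its arguments are illustrative names rather than part of this module, and the source and destination requirement sets are assumed to come from the caller.

# Assumed caller-provided inputs: a repo object plus the source and
# destination requirement sets computed by the upgrade command.
def plan_upgrade(repo, sourcereqs, destreqs):
    format_upgrades = find_format_upgrades(repo)
    optimizations = findoptimizations(repo)
    return determine_upgrade_actions(
        repo, format_upgrades, optimizations, sourcereqs, destreqs
    )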
+class UpgradeOperation(object):
+    """represent the work to be done during an upgrade"""
+
+    def __init__(
+        self,
+        ui,
+        new_requirements,
+        current_requirements,
+        upgrade_actions,
+        removed_actions,
+        revlogs_to_process,
+    ):
+        self.ui = ui
+        self.new_requirements = new_requirements
+        self.current_requirements = current_requirements
+        # list of upgrade actions the operation will perform
+        self.upgrade_actions = upgrade_actions
+        self._upgrade_actions_names = set([a.name for a in upgrade_actions])
+        self.removed_actions = removed_actions
+        self.revlogs_to_process = revlogs_to_process
+        # requirements which will be added by the operation
+        self._added_requirements = (
+            self.new_requirements - self.current_requirements
+        )
+        # requirements which will be removed by the operation
+        self._removed_requirements = (
+            self.current_requirements - self.new_requirements
+        )
+        # requirements which will be preserved by the operation
+        self._preserved_requirements = (
+            self.current_requirements & self.new_requirements
+        )
+        # optimizations which are not part of this operation but which
+        # the user may want to enable
+        all_optimizations = findoptimizations(None)
+        self.unused_optimizations = [
+            i for i in all_optimizations if i not in self.upgrade_actions
+        ]
+
+        # delta reuse mode of this upgrade operation
+        self.delta_reuse_mode = revlog.revlog.DELTAREUSEALWAYS
+        if b're-delta-all' in self._upgrade_actions_names:
+            self.delta_reuse_mode = revlog.revlog.DELTAREUSENEVER
+        elif b're-delta-parent' in self._upgrade_actions_names:
+            self.delta_reuse_mode = revlog.revlog.DELTAREUSESAMEREVS
+        elif b're-delta-multibase' in self._upgrade_actions_names:
+            self.delta_reuse_mode = revlog.revlog.DELTAREUSESAMEREVS
+        elif b're-delta-fulladd' in self._upgrade_actions_names:
+            self.delta_reuse_mode = revlog.revlog.DELTAREUSEFULLADD
+
+        # should this operation force re-delta of both parents
+        self.force_re_delta_both_parents = (
+            b're-delta-multibase' in self._upgrade_actions_names
+        )
+
+    def _write_labeled(self, l, label):
+        """
+        Utility function to aid writing of a list under one label
+        """
+        first = True
+        for r in sorted(l):
+            if not first:
+                self.ui.write(b', ')
+            self.ui.write(r, label=label)
+            first = False
+
+    def print_requirements(self):
+        self.ui.write(_(b'requirements\n'))
+        self.ui.write(_(b'   preserved: '))
+        self._write_labeled(
+            self._preserved_requirements, "upgrade-repo.requirement.preserved"
+        )
+        self.ui.write((b'\n'))
+        if self._removed_requirements:
+            self.ui.write(_(b'   removed: '))
+            self._write_labeled(
+                self._removed_requirements, "upgrade-repo.requirement.removed"
+            )
+            self.ui.write((b'\n'))
+        if self._added_requirements:
+            self.ui.write(_(b'   added: '))
+            self._write_labeled(
+                self._added_requirements, "upgrade-repo.requirement.added"
+            )
+            self.ui.write((b'\n'))
+        self.ui.write(b'\n')
+
+    def print_optimisations(self):
+        optimisations = [
+            a for a in self.upgrade_actions if a.type == OPTIMISATION
+        ]
+        optimisations.sort(key=lambda a: a.name)
+        if optimisations:
+            self.ui.write(_(b'optimisations: '))
+            self._write_labeled(
+                [a.name for a in optimisations],
+                "upgrade-repo.optimisation.performed",
+            )
+            self.ui.write(b'\n\n')
+
+    def print_upgrade_actions(self):
+        for a in self.upgrade_actions:
+            self.ui.status(b'%s\n   %s\n\n' % (a.name, a.upgrademessage))
+
+    def print_affected_revlogs(self):
+        if not self.revlogs_to_process:
+            self.ui.write((b'no revlogs to process\n'))
+        else:
+            self.ui.write((b'processed revlogs:\n'))
+            for r in sorted(self.revlogs_to_process):
+                self.ui.write((b'  - %s\n' % r))
+        self.ui.write((b'\n'))
+
+    def print_unused_optimizations(self):
+        for i in self.unused_optimizations:
+            self.ui.status(_(b'%s\n   %s\n\n') % (i.name, i.description))
+
+    def has_upgrade_action(self, name):
+        """ Check whether the upgrade operation will perform this action """
+        return name in self._upgrade_actions_names
+
+    def print_post_op_messages(self):
+        """ print post upgrade operation warning messages """
+        for a in self.upgrade_actions:
+            if a.postupgrademessage is not None:
+                self.ui.warn(b'%s\n' % a.postupgrademessage)
+        for a in self.removed_actions:
+            if a.postdowngrademessage is not None:
+                self.ui.warn(b'%s\n' % a.postdowngrademessage)
+
+
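As a rough usage sketch (assuming the requirement sets, action lists, and revlog filter have already been computed elsewhere), the reporting methods above might be driven like this; the wrapper name is illustrative only.

# Illustrative only: build the operation object and emit its summary.
def describe_operation(ui, new_reqs, old_reqs, actions, removed, revlogs):
    op = UpgradeOperation(ui, new_reqs, old_reqs, actions, removed, revlogs)
    op.print_requirements()
    op.print_optimisations()
    op.print_upgrade_actions()
    op.print_affected_revlogs()
    op.print_unused_optimizations()
    return op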
+###  Code checking if a repository can go through the upgrade process at all. #
+
+
+def requiredsourcerequirements(repo):
+    """Obtain requirements required to be present to upgrade a repo.
+
+    An upgrade will not be allowed if the repository doesn't have the
+    requirements returned by this function.
+    """
+    return {
+        # Introduced in Mercurial 0.9.2.
+        b'revlogv1',
+        # Introduced in Mercurial 0.9.2.
+        b'store',
+    }
+
+
+def blocksourcerequirements(repo):
+    """Obtain requirements that will prevent an upgrade from occurring.
+
+    An upgrade cannot be performed if the source repository contains a
+    requirement in the returned set.
+    """
+    return {
+        # The upgrade code does not yet support these experimental features.
+        # This is an artificial limitation.
+        requirements.TREEMANIFEST_REQUIREMENT,
+        # This was a precursor to generaldelta and was never enabled by default.
+        # It should (hopefully) not exist in the wild.
+        b'parentdelta',
+        # Upgrade should operate on the actual store, not the shared link.
+        requirements.SHARED_REQUIREMENT,
+    }
+
+
+def check_source_requirements(repo):
+    """Ensure that no existing requirements prevent the repository upgrade"""
+
+    required = requiredsourcerequirements(repo)
+    missingreqs = required - repo.requirements
+    if missingreqs:
+        msg = _(b'cannot upgrade repository; requirement missing: %s')
+        missingreqs = b', '.join(sorted(missingreqs))
+        raise error.Abort(msg % missingreqs)
+
+    blocking = blocksourcerequirements(repo)
+    blockingreqs = blocking & repo.requirements
+    if blockingreqs:
+        m = _(b'cannot upgrade repository; unsupported source requirement: %s')
+        blockingreqs = b', '.join(sorted(blockingreqs))
+        raise error.Abort(m % blockingreqs)
+
+
+### Verify the validity of the planned requirement changes ####################
+
+
+def supportremovedrequirements(repo):
+    """Obtain requirements that can be removed during an upgrade.
+
+    If an upgrade were to create a repository that dropped a requirement,
+    the dropped requirement must appear in the returned set for the upgrade
+    to be allowed.
+    """
+    supported = {
+        requirements.SPARSEREVLOG_REQUIREMENT,
+        requirements.SIDEDATA_REQUIREMENT,
+        requirements.COPIESSDC_REQUIREMENT,
+        requirements.NODEMAP_REQUIREMENT,
+        requirements.SHARESAFE_REQUIREMENT,
+    }
+    for name in compression.compengines:
+        engine = compression.compengines[name]
+        if engine.available() and engine.revlogheader():
+            supported.add(b'exp-compression-%s' % name)
+            if engine.name() == b'zstd':
+                supported.add(b'revlog-compression-zstd')
+    return supported
+
+
+def supporteddestrequirements(repo):
+    """Obtain requirements that upgrade supports in the destination.
+
+    If the result of the upgrade would create requirements not in this set,
+    the upgrade is disallowed.
+
+    Extensions should monkeypatch this to add their custom requirements.
+    """
+    supported = {
+        b'dotencode',
+        b'fncache',
+        b'generaldelta',
+        b'revlogv1',
+        b'store',
+        requirements.SPARSEREVLOG_REQUIREMENT,
+        requirements.SIDEDATA_REQUIREMENT,
+        requirements.COPIESSDC_REQUIREMENT,
+        requirements.NODEMAP_REQUIREMENT,
+        requirements.SHARESAFE_REQUIREMENT,
+    }
+    for name in compression.compengines:
+        engine = compression.compengines[name]
+        if engine.available() and engine.revlogheader():
+            supported.add(b'exp-compression-%s' % name)
+            if engine.name() == b'zstd':
+                supported.add(b'revlog-compression-zstd')
+    return supported
+
+
+def allowednewrequirements(repo):
+    """Obtain requirements that can be added to a repository during upgrade.
+
+    This is used to disallow proposed requirements from being added when
+    they weren't present before.
+
+    We use a list of allowed requirement additions instead of a list of known
+    bad additions because the whitelist approach is safer and will prevent
+    future, unknown requirements from accidentally being added.
+    """
+    supported = {
+        b'dotencode',
+        b'fncache',
+        b'generaldelta',
+        requirements.SPARSEREVLOG_REQUIREMENT,
+        requirements.SIDEDATA_REQUIREMENT,
+        requirements.COPIESSDC_REQUIREMENT,
+        requirements.NODEMAP_REQUIREMENT,
+        requirements.SHARESAFE_REQUIREMENT,
+    }
+    for name in compression.compengines:
+        engine = compression.compengines[name]
+        if engine.available() and engine.revlogheader():
+            supported.add(b'exp-compression-%s' % name)
+            if engine.name() == b'zstd':
+                supported.add(b'revlog-compression-zstd')
+    return supported
+
+
+def check_requirements_changes(repo, new_reqs):
+    old_reqs = repo.requirements
+
+    support_removal = supportremovedrequirements(repo)
+    no_remove_reqs = old_reqs - new_reqs - support_removal
+    if no_remove_reqs:
+        msg = _(b'cannot upgrade repository; requirement would be removed: %s')
+        no_remove_reqs = b', '.join(sorted(no_remove_reqs))
+        raise error.Abort(msg % no_remove_reqs)
+
+    support_addition = allowednewrequirements(repo)
+    no_add_reqs = new_reqs - old_reqs - support_addition
+    if no_add_reqs:
+        m = _(b'cannot upgrade repository; do not support adding requirement: ')
+        no_add_reqs = b', '.join(sorted(no_add_reqs))
+        raise error.Abort(m + no_add_reqs)
+
+    supported = supporteddestrequirements(repo)
+    unsupported_reqs = new_reqs - supported
+    if unsupported_reqs:
+        msg = _(
+            b'cannot upgrade repository; do not support destination '
+            b'requirement: %s'
+        )
+        unsupported_reqs = b', '.join(sorted(unsupported_reqs))
+        raise error.Abort(msg % unsupported_reqs)
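A hedged sketch of how the validation helpers above fit together; `validate_upgrade` is an illustrative name and `new_reqs` is assumed to be the proposed destination requirement set.

# Sketch: vet the source repository first, then the proposed requirement
# changes, before any data is touched. Both helpers raise error.Abort on
# failure, so reaching the end means the upgrade may proceed.
def validate_upgrade(repo, new_reqs):
    check_source_requirements(repo)
    check_requirements_changes(repo, new_reqs)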
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/upgrade_utils/engine.py	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,521 @@
+# engine.py - functions for in-place upgrade of a Mercurial repository
+#
+# Copyright (c) 2016-present, Gregory Szorc
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from __future__ import absolute_import
+
+import stat
+
+from ..i18n import _
+from ..pycompat import getattr
+from .. import (
+    changelog,
+    error,
+    filelog,
+    manifest,
+    metadata,
+    pycompat,
+    requirements,
+    revlog,
+    scmutil,
+    util,
+    vfs as vfsmod,
+)
+
+
+def _revlogfrompath(repo, path):
+    """Obtain a revlog from a repo path.
+
+    An instance of the appropriate class is returned.
+    """
+    if path == b'00changelog.i':
+        return changelog.changelog(repo.svfs)
+    elif path.endswith(b'00manifest.i'):
+        mandir = path[: -len(b'00manifest.i')]
+        return manifest.manifestrevlog(repo.svfs, tree=mandir)
+    else:
+        # reverse of "/".join(("data", path + ".i"))
+        return filelog.filelog(repo.svfs, path[5:-2])
+
+
+def _copyrevlog(tr, destrepo, oldrl, unencodedname):
+    """copy all relevant files for `oldrl` into `destrepo` store
+
+    Files are copied "as is" without any transformation. The copy is performed
+    without extra checks. Callers are responsible for making sure the copied
+    content is compatible with the format of the destination repository.
+    """
+    oldrl = getattr(oldrl, '_revlog', oldrl)
+    newrl = _revlogfrompath(destrepo, unencodedname)
+    newrl = getattr(newrl, '_revlog', newrl)
+
+    oldvfs = oldrl.opener
+    newvfs = newrl.opener
+    oldindex = oldvfs.join(oldrl.indexfile)
+    newindex = newvfs.join(newrl.indexfile)
+    olddata = oldvfs.join(oldrl.datafile)
+    newdata = newvfs.join(newrl.datafile)
+
+    with newvfs(newrl.indexfile, b'w'):
+        pass  # create all the directories
+
+    util.copyfile(oldindex, newindex)
+    copydata = oldrl.opener.exists(oldrl.datafile)
+    if copydata:
+        util.copyfile(olddata, newdata)
+
+    if not (
+        unencodedname.endswith(b'00changelog.i')
+        or unencodedname.endswith(b'00manifest.i')
+    ):
+        destrepo.svfs.fncache.add(unencodedname)
+        if copydata:
+            destrepo.svfs.fncache.add(unencodedname[:-2] + b'.d')
+
+
+UPGRADE_CHANGELOG = b"changelog"
+UPGRADE_MANIFEST = b"manifest"
+UPGRADE_FILELOGS = b"all-filelogs"
+
+UPGRADE_ALL_REVLOGS = frozenset(
+    [UPGRADE_CHANGELOG, UPGRADE_MANIFEST, UPGRADE_FILELOGS]
+)
+
+
+def getsidedatacompanion(srcrepo, dstrepo):
+    sidedatacompanion = None
+    removedreqs = srcrepo.requirements - dstrepo.requirements
+    addedreqs = dstrepo.requirements - srcrepo.requirements
+    if requirements.SIDEDATA_REQUIREMENT in removedreqs:
+
+        def sidedatacompanion(rl, rev):
+            rl = getattr(rl, '_revlog', rl)
+            if rl.flags(rev) & revlog.REVIDX_SIDEDATA:
+                return True, (), {}, 0, 0
+            return False, (), {}, 0, 0
+
+    elif requirements.COPIESSDC_REQUIREMENT in addedreqs:
+        sidedatacompanion = metadata.getsidedataadder(srcrepo, dstrepo)
+    elif requirements.COPIESSDC_REQUIREMENT in removedreqs:
+        sidedatacompanion = metadata.getsidedataremover(srcrepo, dstrepo)
+    return sidedatacompanion
+
+
+def matchrevlog(revlogfilter, entry):
+    """check if a revlog is selected for cloning.
+
+    In other words, are there any updates which need to be done on the
+    revlog, or can it be blindly copied?
+
+    The store entry is checked against the passed filter"""
+    if entry.endswith(b'00changelog.i'):
+        return UPGRADE_CHANGELOG in revlogfilter
+    elif entry.endswith(b'00manifest.i'):
+        return UPGRADE_MANIFEST in revlogfilter
+    return UPGRADE_FILELOGS in revlogfilter
+
+
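A few hedged examples of the filter semantics, using the constants defined earlier in this file; the selections shown are hypothetical.

# With only the changelog selected, manifests and filelogs fall through to
# the blind-copy path in _perform_clone() below.
only_changelog = {UPGRADE_CHANGELOG}
assert matchrevlog(only_changelog, b'00changelog.i')
assert not matchrevlog(only_changelog, b'00manifest.i')
assert not matchrevlog(only_changelog, b'data/some/file.i')
assert matchrevlog(UPGRADE_ALL_REVLOGS, b'data/some/file.i')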
+def _perform_clone(
+    ui,
+    dstrepo,
+    tr,
+    old_revlog,
+    unencoded,
+    upgrade_op,
+    sidedatacompanion,
+    oncopiedrevision,
+):
+    """ returns the new revlog object created"""
+    newrl = None
+    if matchrevlog(upgrade_op.revlogs_to_process, unencoded):
+        ui.note(
+            _(b'cloning %d revisions from %s\n') % (len(old_revlog), unencoded)
+        )
+        newrl = _revlogfrompath(dstrepo, unencoded)
+        old_revlog.clone(
+            tr,
+            newrl,
+            addrevisioncb=oncopiedrevision,
+            deltareuse=upgrade_op.delta_reuse_mode,
+            forcedeltabothparents=upgrade_op.force_re_delta_both_parents,
+            sidedatacompanion=sidedatacompanion,
+        )
+    else:
+        msg = _(b'blindly copying %s containing %i revisions\n')
+        ui.note(msg % (unencoded, len(old_revlog)))
+        _copyrevlog(tr, dstrepo, old_revlog, unencoded)
+
+        newrl = _revlogfrompath(dstrepo, unencoded)
+    return newrl
+
+
+def _clonerevlogs(
+    ui,
+    srcrepo,
+    dstrepo,
+    tr,
+    upgrade_op,
+):
+    """Copy revlogs between 2 repos."""
+    revcount = 0
+    srcsize = 0
+    srcrawsize = 0
+    dstsize = 0
+    fcount = 0
+    frevcount = 0
+    fsrcsize = 0
+    frawsize = 0
+    fdstsize = 0
+    mcount = 0
+    mrevcount = 0
+    msrcsize = 0
+    mrawsize = 0
+    mdstsize = 0
+    crevcount = 0
+    csrcsize = 0
+    crawsize = 0
+    cdstsize = 0
+
+    alldatafiles = list(srcrepo.store.walk())
+    # mapping of data files which need to be cloned
+    # key is the unencoded filename
+    # value is the revlog object from srcrepo
+    manifests = {}
+    changelogs = {}
+    filelogs = {}
+
+    # Perform a pass to collect metadata. This validates we can open all
+    # source files and allows a unified progress bar to be displayed.
+    for unencoded, encoded, size in alldatafiles:
+        if not unencoded.endswith(b'.i'):
+            continue
+
+        rl = _revlogfrompath(srcrepo, unencoded)
+
+        info = rl.storageinfo(
+            exclusivefiles=True,
+            revisionscount=True,
+            trackedsize=True,
+            storedsize=True,
+        )
+
+        revcount += info[b'revisionscount'] or 0
+        datasize = info[b'storedsize'] or 0
+        rawsize = info[b'trackedsize'] or 0
+
+        srcsize += datasize
+        srcrawsize += rawsize
+
+        # This is for the separate progress bars.
+        if isinstance(rl, changelog.changelog):
+            changelogs[unencoded] = rl
+            crevcount += len(rl)
+            csrcsize += datasize
+            crawsize += rawsize
+        elif isinstance(rl, manifest.manifestrevlog):
+            manifests[unencoded] = rl
+            mcount += 1
+            mrevcount += len(rl)
+            msrcsize += datasize
+            mrawsize += rawsize
+        elif isinstance(rl, filelog.filelog):
+            filelogs[unencoded] = rl
+            fcount += 1
+            frevcount += len(rl)
+            fsrcsize += datasize
+            frawsize += rawsize
+        else:
+            raise error.ProgrammingError(b'unknown revlog type')
+
+    if not revcount:
+        return
+
+    ui.status(
+        _(
+            b'migrating %d total revisions (%d in filelogs, %d in manifests, '
+            b'%d in changelog)\n'
+        )
+        % (revcount, frevcount, mrevcount, crevcount)
+    )
+    ui.status(
+        _(b'migrating %s in store; %s tracked data\n')
+        % ((util.bytecount(srcsize), util.bytecount(srcrawsize)))
+    )
+
+    # Used to keep track of progress.
+    progress = None
+
+    def oncopiedrevision(rl, rev, node):
+        progress.increment()
+
+    sidedatacompanion = getsidedatacompanion(srcrepo, dstrepo)
+
+    # Migrating filelogs
+    ui.status(
+        _(
+            b'migrating %d filelogs containing %d revisions '
+            b'(%s in store; %s tracked data)\n'
+        )
+        % (
+            fcount,
+            frevcount,
+            util.bytecount(fsrcsize),
+            util.bytecount(frawsize),
+        )
+    )
+    progress = srcrepo.ui.makeprogress(_(b'file revisions'), total=frevcount)
+    for unencoded, oldrl in sorted(filelogs.items()):
+        newrl = _perform_clone(
+            ui,
+            dstrepo,
+            tr,
+            oldrl,
+            unencoded,
+            upgrade_op,
+            sidedatacompanion,
+            oncopiedrevision,
+        )
+        info = newrl.storageinfo(storedsize=True)
+        fdstsize += info[b'storedsize'] or 0
+    ui.status(
+        _(
+            b'finished migrating %d filelog revisions across %d '
+            b'filelogs; change in size: %s\n'
+        )
+        % (frevcount, fcount, util.bytecount(fdstsize - fsrcsize))
+    )
+
+    # Migrating manifests
+    ui.status(
+        _(
+            b'migrating %d manifests containing %d revisions '
+            b'(%s in store; %s tracked data)\n'
+        )
+        % (
+            mcount,
+            mrevcount,
+            util.bytecount(msrcsize),
+            util.bytecount(mrawsize),
+        )
+    )
+    if progress:
+        progress.complete()
+    progress = srcrepo.ui.makeprogress(
+        _(b'manifest revisions'), total=mrevcount
+    )
+    for unencoded, oldrl in sorted(manifests.items()):
+        newrl = _perform_clone(
+            ui,
+            dstrepo,
+            tr,
+            oldrl,
+            unencoded,
+            upgrade_op,
+            sidedatacompanion,
+            oncopiedrevision,
+        )
+        info = newrl.storageinfo(storedsize=True)
+        mdstsize += info[b'storedsize'] or 0
+    ui.status(
+        _(
+            b'finished migrating %d manifest revisions across %d '
+            b'manifests; change in size: %s\n'
+        )
+        % (mrevcount, mcount, util.bytecount(mdstsize - msrcsize))
+    )
+
+    # Migrating changelog
+    ui.status(
+        _(
+            b'migrating changelog containing %d revisions '
+            b'(%s in store; %s tracked data)\n'
+        )
+        % (
+            crevcount,
+            util.bytecount(csrcsize),
+            util.bytecount(crawsize),
+        )
+    )
+    if progress:
+        progress.complete()
+    progress = srcrepo.ui.makeprogress(
+        _(b'changelog revisions'), total=crevcount
+    )
+    for unencoded, oldrl in sorted(changelogs.items()):
+        newrl = _perform_clone(
+            ui,
+            dstrepo,
+            tr,
+            oldrl,
+            unencoded,
+            upgrade_op,
+            sidedatacompanion,
+            oncopiedrevision,
+        )
+        info = newrl.storageinfo(storedsize=True)
+        cdstsize += info[b'storedsize'] or 0
+    progress.complete()
+    ui.status(
+        _(
+            b'finished migrating %d changelog revisions; change in size: '
+            b'%s\n'
+        )
+        % (crevcount, util.bytecount(cdstsize - csrcsize))
+    )
+
+    dstsize = fdstsize + mdstsize + cdstsize
+    ui.status(
+        _(
+            b'finished migrating %d total revisions; total change in store '
+            b'size: %s\n'
+        )
+        % (revcount, util.bytecount(dstsize - srcsize))
+    )
+
+
+def _files_to_copy_post_revlog_clone(srcrepo):
+    """yields files which should be copied to destination after revlogs
+    are cloned"""
+    for path, kind, st in sorted(srcrepo.store.vfs.readdir(b'', stat=True)):
+        # don't copy revlogs as they are already cloned
+        if path.endswith((b'.i', b'.d', b'.n', b'.nd')):
+            continue
+        # Skip transaction related files.
+        if path.startswith(b'undo'):
+            continue
+        # Only copy regular files.
+        if kind != stat.S_IFREG:
+            continue
+        # Skip other skipped files.
+        if path in (b'lock', b'fncache'):
+            continue
+        # TODO: should we skip cache too?
+
+        yield path
+
+
+def _replacestores(currentrepo, upgradedrepo, backupvfs, upgrade_op):
+    """Replace the stores after current repository is upgraded
+
+    Creates a backup of current repository store at backup path
+    Replaces upgraded store files in current repo from upgraded one
+
+    Arguments:
+      currentrepo: repo object of current repository
+      upgradedrepo: repo object of the upgraded data
+      backupvfs: vfs object for the backup path
+      upgrade_op: upgrade operation object
+                  used to decide what is being upgraded
+    """
+    # TODO: don't blindly rename everything in store
+    # There can be upgrades where store is not touched at all
+    util.rename(currentrepo.spath, backupvfs.join(b'store'))
+    util.rename(upgradedrepo.spath, currentrepo.spath)
+
+
+def finishdatamigration(ui, srcrepo, dstrepo, requirements):
+    """Hook point for extensions to perform additional actions during upgrade.
+
+    This function is called after revlogs and store files have been copied but
+    before the new store is swapped into the original location.
+    """
+
+
+def upgrade(ui, srcrepo, dstrepo, upgrade_op):
+    """Do the low-level work of upgrading a repository.
+
+    The upgrade is effectively performed as a copy between a source
+    repository and a temporary destination repository.
+
+    The source repository is unmodified for as long as possible so the
+    upgrade can abort at any time without causing loss of service for
+    readers and without corrupting the source repository.
+    """
+    assert srcrepo.currentwlock()
+    assert dstrepo.currentwlock()
+
+    ui.status(
+        _(
+            b'(it is safe to interrupt this process any time before '
+            b'data migration completes)\n'
+        )
+    )
+
+    with dstrepo.transaction(b'upgrade') as tr:
+        _clonerevlogs(
+            ui,
+            srcrepo,
+            dstrepo,
+            tr,
+            upgrade_op,
+        )
+
+    # Now copy other files in the store directory.
+    for p in _files_to_copy_post_revlog_clone(srcrepo):
+        srcrepo.ui.status(_(b'copying %s\n') % p)
+        src = srcrepo.store.rawvfs.join(p)
+        dst = dstrepo.store.rawvfs.join(p)
+        util.copyfile(src, dst, copystat=True)
+
+    finishdatamigration(ui, srcrepo, dstrepo, requirements)
+
+    ui.status(_(b'data fully upgraded in a temporary repository\n'))
+
+    backuppath = pycompat.mkdtemp(prefix=b'upgradebackup.', dir=srcrepo.path)
+    backupvfs = vfsmod.vfs(backuppath)
+
+    # Make a backup of requires file first, as it is the first to be modified.
+    util.copyfile(srcrepo.vfs.join(b'requires'), backupvfs.join(b'requires'))
+
+    # We install an arbitrary requirement that clients must not support
+    # as a mechanism to lock out new clients during the data swap. This is
+    # better than allowing a client to continue while the repository is in
+    # an inconsistent state.
+    ui.status(
+        _(
+            b'marking source repository as being upgraded; clients will be '
+            b'unable to read from repository\n'
+        )
+    )
+    scmutil.writereporequirements(
+        srcrepo, srcrepo.requirements | {b'upgradeinprogress'}
+    )
+
+    ui.status(_(b'starting in-place swap of repository data\n'))
+    ui.status(_(b'replaced files will be backed up at %s\n') % backuppath)
+
+    # Now swap in the new store directory. Doing it as a rename should make
+    # the operation nearly instantaneous and atomic (at least in well-behaved
+    # environments).
+    ui.status(_(b'replacing store...\n'))
+    tstart = util.timer()
+    _replacestores(srcrepo, dstrepo, backupvfs, upgrade_op)
+    elapsed = util.timer() - tstart
+    ui.status(
+        _(
+            b'store replacement complete; repository was inconsistent for '
+            b'%0.1fs\n'
+        )
+        % elapsed
+    )
+
+    # We first write the requirements file. Any new requirements will lock
+    # out legacy clients.
+    ui.status(
+        _(
+            b'finalizing requirements file and making repository readable '
+            b'again\n'
+        )
+    )
+    scmutil.writereporequirements(srcrepo, upgrade_op.new_requirements)
+
+    # The lock file from the old store won't be removed because nothing has a
+    # reference to its new location. So clean it up manually. Alternatively, we
+    # could update srcrepo.svfs and other variables to point to the new
+    # location. This is simpler.
+    backupvfs.unlink(b'store/lock')
+
+    return backuppath
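A hedged sketch of the expected calling convention, inferred from the assertions at the top of upgrade(); the wrapper name and status message are illustrative, and the temporary dstrepo is assumed to have been created with the new requirements by the caller.

# Both repositories must be write-locked before upgrade() runs; the returned
# path points at the backup of the old store.
def run_engine(ui, srcrepo, dstrepo, upgrade_op):
    with srcrepo.wlock(), dstrepo.wlock():
        backuppath = upgrade(ui, srcrepo, dstrepo, upgrade_op)
    ui.status(_(b'old repository data backed up at %s\n') % backuppath)
    return backuppath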
--- a/mercurial/url.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/url.py	Tue Jan 19 21:48:43 2021 +0530
@@ -35,13 +35,13 @@
 
 
 def escape(s, quote=None):
-    '''Replace special characters "&", "<" and ">" to HTML-safe sequences.
+    """Replace special characters "&", "<" and ">" to HTML-safe sequences.
     If the optional flag quote is true, the quotation mark character (")
     is also translated.
 
     This is the same as cgi.escape in Python, but always operates on
     bytes, whereas cgi.escape in Python 3 only works on unicodes.
-    '''
+    """
     s = s.replace(b"&", b"&amp;")
     s = s.replace(b"<", b"&lt;")
     s = s.replace(b">", b"&gt;")
@@ -586,7 +586,7 @@
     loggingopts=None,
     sendaccept=True,
 ):
-    '''
+    """
     construct an opener suitable for urllib2
     authinfo will be added to the password manager
 
@@ -600,7 +600,7 @@
 
     ``sendaccept`` allows controlling whether the ``Accept`` request header
     is sent. The header is sent by default.
-    '''
+    """
     timeout = ui.configwith(float, b'http', b'timeout')
     handlers = []
 
--- a/mercurial/urllibcompat.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/urllibcompat.py	Tue Jan 19 21:48:43 2021 +0530
@@ -83,10 +83,22 @@
     )
     import urllib.response
 
-    urlreq._registeraliases(urllib.response, (b"addclosehook", b"addinfourl",))
+    urlreq._registeraliases(
+        urllib.response,
+        (
+            b"addclosehook",
+            b"addinfourl",
+        ),
+    )
     import urllib.error
 
-    urlerr._registeraliases(urllib.error, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib.error,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     import http.server
 
     httpserver._registeraliases(
@@ -179,12 +191,28 @@
             b"urlopen",
         ),
     )
-    urlreq._registeraliases(urlparse, (b"urlparse", b"urlunparse",))
+    urlreq._registeraliases(
+        urlparse,
+        (
+            b"urlparse",
+            b"urlunparse",
+        ),
+    )
     urlreq._registeralias(urlparse, b"parse_qs", b"parseqs")
     urlreq._registeralias(urlparse, b"parse_qsl", b"parseqsl")
-    urlerr._registeraliases(urllib2, (b"HTTPError", b"URLError",))
+    urlerr._registeraliases(
+        urllib2,
+        (
+            b"HTTPError",
+            b"URLError",
+        ),
+    )
     httpserver._registeraliases(
-        BaseHTTPServer, (b"HTTPServer", b"BaseHTTPRequestHandler",)
+        BaseHTTPServer,
+        (
+            b"HTTPServer",
+            b"BaseHTTPRequestHandler",
+        ),
     )
     httpserver._registeraliases(
         SimpleHTTPServer, (b"SimpleHTTPRequestHandler",)
--- a/mercurial/util.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/util.py	Tue Jan 19 21:48:43 2021 +0530
@@ -42,12 +42,12 @@
     open,
     setattr,
 )
+from .node import hex
 from hgdemandimport import tracing
 from . import (
     encoding,
     error,
     i18n,
-    node as nodemod,
     policy,
     pycompat,
     urllibcompat,
@@ -254,7 +254,7 @@
     def __getitem__(self, key):
         if key not in DIGESTS:
             raise error.Abort(_(b'unknown digest type: %s') % k)
-        return nodemod.hex(self._hashes[key].digest())
+        return hex(self._hashes[key].digest())
 
     def __iter__(self):
         return iter(self._hashes)
@@ -1264,7 +1264,7 @@
 
 
 class sortdict(collections.OrderedDict):
-    '''a simple sorted dictionary
+    """a simple sorted dictionary
 
     >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
     >>> d2 = d1.copy()
@@ -1276,7 +1276,7 @@
     >>> d1.insert(1, b'a.5', 0.5)
     >>> d1
     sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
-    '''
+    """
 
     def __setitem__(self, key, value):
         if key in self:
@@ -1761,8 +1761,8 @@
 
 
 def increasingchunks(source, min=1024, max=65536):
-    '''return no less than min bytes per chunk while data remains,
-    doubling min after each chunk until it reaches max'''
+    """return no less than min bytes per chunk while data remains,
+    doubling min after each chunk until it reaches max"""
 
     def log2(x):
         if not x:
@@ -1833,7 +1833,7 @@
 
 
 def pathto(root, n1, n2):
-    '''return the relative path from one place to another.
+    """return the relative path from one place to another.
     root should use os.sep to separate directories
     n1 should use os.sep to separate directories
     n2 should use "/" to separate directories
@@ -1842,7 +1842,7 @@
     If n1 is a relative path, it's assumed it's
     relative to root.
     n2 should always be relative to root.
-    '''
+    """
     if not n1:
         return localpath(n2)
     if os.path.isabs(n1):
@@ -1892,7 +1892,7 @@
 
 
 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
-    '''copy a file, preserving mode and optionally other stat info like
+    """copy a file, preserving mode and optionally other stat info like
     atime/mtime
 
     checkambig argument is used with filestat, and is useful only if
@@ -1900,7 +1900,7 @@
     repo.wlock).
 
     copystat and checkambig should be exclusive.
-    '''
+    """
     assert not (copystat and checkambig)
     oldstat = None
     if os.path.lexists(dest):
@@ -2017,7 +2017,7 @@
 
 
 def checkwinfilename(path):
-    r'''Check that the base-relative path is a valid filename on Windows.
+    r"""Check that the base-relative path is a valid filename on Windows.
     Returns None if the path is ok, or a UI string describing the problem.
 
     >>> checkwinfilename(b"just/a/normal/path")
@@ -2039,7 +2039,7 @@
     "filename ends with '\\', which is invalid on Windows"
     >>> checkwinfilename(b"foo\\/bar")
     "directory name ends with '\\', which is invalid on Windows"
-    '''
+    """
     if path.endswith(b'\\'):
         return _(b"filename ends with '\\', which is invalid on Windows")
     if b'\\/' in path:
@@ -2175,11 +2175,11 @@
             _re2 = False
 
     def compile(self, pat, flags=0):
-        '''Compile a regular expression, using re2 if possible
+        """Compile a regular expression, using re2 if possible
 
         For best performance, use only re2-compatible regexp features. The
         only flags from the re module that are re2-compatible are
-        IGNORECASE and MULTILINE.'''
+        IGNORECASE and MULTILINE."""
         if _re2 is None:
             self._checkre2()
         if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
@@ -2195,11 +2195,11 @@
 
     @propertycache
     def escape(self):
-        '''Return the version of escape corresponding to self.compile.
+        """Return the version of escape corresponding to self.compile.
 
         This is imperfect because whether re2 or re is used for a particular
         function depends on the flags, etc, but it's the best we can do.
-        '''
+        """
         global _re2
         if _re2 is None:
             self._checkre2()
@@ -2215,7 +2215,7 @@
 
 
 def fspath(name, root):
-    '''Get name in the case stored in the filesystem
+    """Get name in the case stored in the filesystem
 
     The name should be relative to root, and be normcase-ed for efficiency.
 
@@ -2223,7 +2223,7 @@
     called, for case-sensitive filesystems (simply because it's expensive).
 
     The root should be normcase-ed, too.
-    '''
+    """
 
     def _makefspathcacheentry(dir):
         return {normcase(n): n for n in os.listdir(dir)}
@@ -2301,11 +2301,11 @@
 
 
 def splitpath(path):
-    '''Split path by os.sep.
+    """Split path by os.sep.
     Note that this function does not use os.altsep because this is
     an alternative of simple "xxx.split(os.sep)".
     It is recommended to use os.path.normpath() before using this
-    function if need.'''
+    function if need."""
     return path.split(pycompat.ossep)
 
 
@@ -2459,7 +2459,7 @@
 
 
 class atomictempfile(object):
-    '''writable file object that atomically updates a file
+    """writable file object that atomically updates a file
 
     All writes will go to a temporary copy of the original file. Call
     close() when you are done writing, and atomictempfile will rename
@@ -2470,7 +2470,7 @@
     checkambig argument of constructor is used with filestat, and is
     useful only if target file is guarded by any lock (e.g. repo.lock
     or repo.wlock).
-    '''
+    """
 
     def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
         self.__name = name  # permanent name
@@ -3365,7 +3365,7 @@
 
 
 def timed(func):
-    '''Report the execution time of a function call to stderr.
+    """Report the execution time of a function call to stderr.
 
     During development, use as a decorator when you need to measure
     the cost of a function, e.g. as follows:
@@ -3373,7 +3373,7 @@
     @util.timed
     def foo(a, b, c):
         pass
-    '''
+    """
 
     def wrapper(*args, **kwargs):
         with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
@@ -3404,7 +3404,7 @@
 
 
 def sizetoint(s):
-    '''Convert a space specifier to a byte count.
+    """Convert a space specifier to a byte count.
 
     >>> sizetoint(b'30')
     30
@@ -3412,7 +3412,7 @@
     2252
     >>> sizetoint(b'6M')
     6291456
-    '''
+    """
     t = s.strip().lower()
     try:
         for k, u in _sizeunits:
@@ -3424,9 +3424,9 @@
 
 
 class hooks(object):
-    '''A collection of hook functions that can be used to extend a
+    """A collection of hook functions that can be used to extend a
     function's behavior. Hooks are called in lexicographic order,
-    based on the names of their sources.'''
+    based on the names of their sources."""
 
     def __init__(self):
         self._hooks = []
@@ -3443,7 +3443,7 @@
 
 
 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
-    '''Yields lines for a nicely formatted stacktrace.
+    """Yields lines for a nicely formatted stacktrace.
     Skips the 'skip' last entries, then return the last 'depth' entries.
     Each file+linenumber is formatted according to fileline.
     Each line is formatted according to line.
@@ -3453,7 +3453,7 @@
       function
 
     Not be used in production code but very convenient while developing.
-    '''
+    """
     entries = [
         (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
         for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
@@ -3475,12 +3475,12 @@
     depth=0,
     prefix=b'',
 ):
-    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
+    """Writes a message to f (stderr) with a nicely formatted stacktrace.
     Skips the 'skip' entries closest to the call, then show 'depth' entries.
     By default it will flush stdout first.
     It can be used everywhere and intentionally does not require an ui object.
     Not be used in production code but very convenient while developing.
-    '''
+    """
     if otherf:
         otherf.flush()
     f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
--- a/mercurial/utils/cborutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/utils/cborutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -641,19 +641,28 @@
 
                 elif special == SPECIAL_START_ARRAY:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': [],}
+                        {
+                            b'remaining': value,
+                            b'v': [],
+                        }
                     )
                     self._state = self._STATE_WANT_ARRAY_VALUE
 
                 elif special == SPECIAL_START_MAP:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': {},}
+                        {
+                            b'remaining': value,
+                            b'v': {},
+                        }
                     )
                     self._state = self._STATE_WANT_MAP_KEY
 
                 elif special == SPECIAL_START_SET:
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': set(),}
+                        {
+                            b'remaining': value,
+                            b'v': set(),
+                        }
                     )
                     self._state = self._STATE_WANT_SET_VALUE
 
@@ -684,7 +693,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     # self._state doesn't need changed.
@@ -711,7 +723,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
@@ -775,7 +790,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_ARRAY_VALUE
@@ -789,7 +807,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_MAP_KEY
@@ -803,7 +824,10 @@
                     lastc[b'remaining'] -= 1
 
                     self._collectionstack.append(
-                        {b'remaining': value, b'v': newvalue,}
+                        {
+                            b'remaining': value,
+                            b'v': newvalue,
+                        }
                     )
 
                     self._state = self._STATE_WANT_SET_VALUE
--- a/mercurial/utils/compression.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/utils/compression.py	Tue Jan 19 21:48:43 2021 +0530
@@ -29,7 +29,8 @@
 CLIENTROLE = b'client'
 
 compewireprotosupport = collections.namedtuple(
-    'compenginewireprotosupport', ('name', 'serverpriority', 'clientpriority'),
+    'compenginewireprotosupport',
+    ('name', 'serverpriority', 'clientpriority'),
 )
 
 
@@ -616,8 +617,10 @@
     def wireprotosupport(self):
         return compewireprotosupport(b'none', 0, 10)
 
-    # We don't implement revlogheader because it is handled specially
-    # in the revlog class.
+    # revlog special cases the uncompressed case, but implementing
+    # revlogheader allows forcing uncompressed storage.
+    def revlogheader(self):
+        return b'\0'
 
     def compressstream(self, it, opts=None):
         return it
--- a/mercurial/utils/dateutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/utils/dateutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -53,12 +53,17 @@
     b'%I:%M%p',
 )
 
-extendeddateformats = defaultdateformats + (b"%Y", b"%Y-%m", b"%b", b"%b %Y",)
+extendeddateformats = defaultdateformats + (
+    b"%Y",
+    b"%Y-%m",
+    b"%b",
+    b"%b %Y",
+)
 
 
 def makedate(timestamp=None):
-    '''Return a unix timestamp (or the current time) as a (unixtime,
-    offset) tuple based off the local timezone.'''
+    """Return a unix timestamp (or the current time) as a (unixtime,
+    offset) tuple based off the local timezone."""
     if timestamp is None:
         timestamp = time.time()
     if timestamp < 0:
@@ -115,7 +120,7 @@
 
 def parsetimezone(s):
     """find a trailing timezone, if any, in string, and return a
-       (offset, remainder) pair"""
+    (offset, remainder) pair"""
     s = pycompat.bytestr(s)
 
     if s.endswith(b"GMT") or s.endswith(b"UTC"):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/utils/memorytop.py	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,44 @@
+# memorytop requires Python 3.4
+#
+# Usage: set PYTHONTRACEMALLOC=n in the environment of the hg invocation,
+# where n>=1 is the number of frames to show in the backtrace. Put calls to
+# memorytop in strategic places to show the current memory use by allocation
+# site.
+
+import gc
+import tracemalloc
+
+
+def memorytop(limit=10):
+    gc.collect()
+    snapshot = tracemalloc.take_snapshot()
+
+    snapshot = snapshot.filter_traces(
+        (
+            tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
+            tracemalloc.Filter(False, "<frozen importlib._bootstrap_external>"),
+            tracemalloc.Filter(False, "<unknown>"),
+        )
+    )
+    stats = snapshot.statistics('traceback')
+
+    total = sum(stat.size for stat in stats)
+    print("\nTotal allocated size: %.1f KiB\n" % (total / 1024))
+    print("Lines with the biggest net allocations")
+    for index, stat in enumerate(stats[:limit], 1):
+        print(
+            "#%d: %d objects using %.1f KiB"
+            % (index, stat.count, stat.size / 1024)
+        )
+        for line in stat.traceback.format(most_recent_first=True):
+            print('    ', line)
+
+    other = stats[limit:]
+    if other:
+        size = sum(stat.size for stat in other)
+        count = sum(stat.count for stat in other)
+        print(
+            "%s other: %d objects using %.1f KiB"
+            % (len(other), count, size / 1024)
+        )
+    print()
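
A usage sketch for the new helper, following the comment at the top of the file (the hg command and the limit value are only examples):

    # in the environment of the hg invocation:
    #   PYTHONTRACEMALLOC=10 hg status
    # and at the point of interest in the code being profiled:
    from mercurial.utils import memorytop
    memorytop.memorytop(limit=20)  # show the 20 largest allocation sites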
--- a/mercurial/utils/procutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/utils/procutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -50,6 +50,16 @@
         return False
 
 
+class BadFile(io.RawIOBase):
+    """Dummy file object to simulate closed stdio behavior"""
+
+    def readinto(self, b):
+        raise IOError(errno.EBADF, 'Bad file descriptor')
+
+    def write(self, b):
+        raise IOError(errno.EBADF, 'Bad file descriptor')
+
+
 class LineBufferedWrapper(object):
     def __init__(self, orig):
         self.orig = orig
@@ -80,6 +90,13 @@
     return LineBufferedWrapper(stream)
 
 
+def unwrap_line_buffered(stream):
+    if isinstance(stream, LineBufferedWrapper):
+        assert not isinstance(stream.orig, LineBufferedWrapper)
+        return stream.orig
+    return stream
+
+
 class WriteAllWrapper(object):
     def __init__(self, orig):
         self.orig = orig
@@ -114,12 +131,25 @@
 
 
 if pycompat.ispy3:
-    # Python 3 implements its own I/O streams.
+    # Python 3 implements its own I/O streams. Unlike the C library's stdio,
+    # sys.stdin/stdout/stderr may be None if the underlying fd is closed.
+
     # TODO: .buffer might not exist if std streams were replaced; we'll need
     # a silly wrapper to make a bytes stream backed by a unicode one.
-    stdin = sys.stdin.buffer
-    stdout = _make_write_all(sys.stdout.buffer)
-    stderr = _make_write_all(sys.stderr.buffer)
+
+    if sys.stdin is None:
+        stdin = BadFile()
+    else:
+        stdin = sys.stdin.buffer
+    if sys.stdout is None:
+        stdout = BadFile()
+    else:
+        stdout = _make_write_all(sys.stdout.buffer)
+    if sys.stderr is None:
+        stderr = BadFile()
+    else:
+        stderr = _make_write_all(sys.stderr.buffer)
+
     if pycompat.iswindows:
         # Work around Windows bugs.
         stdout = platform.winstdout(stdout)
@@ -285,10 +315,10 @@
 
 
 def tempfilter(s, cmd):
-    '''filter string S through a pair of temporary files with CMD.
+    """filter string S through a pair of temporary files with CMD.
     CMD is used as a template to create the real command to be run,
     with the strings INFILE and OUTFILE replaced by the real names of
-    the temporary files generated.'''
+    the temporary files generated."""
     inname, outname = None, None
     try:
         infd, inname = pycompat.mkstemp(prefix=b'hg-filter-in-')
@@ -458,17 +488,16 @@
 
 
 def tonativeenv(env):
-    '''convert the environment from bytes to strings suitable for Popen(), etc.
-    '''
+    """convert the environment from bytes to strings suitable for Popen(), etc."""
     return pycompat.rapply(tonativestr, env)
 
 
 def system(cmd, environ=None, cwd=None, out=None):
-    '''enhanced shell command execution.
+    """enhanced shell command execution.
     run with environment maybe modified, maybe in different dir.
 
     if out is specified, it is assumed to be a file-like object that has a
-    write() method. stdout and stderr will be redirected to out.'''
+    write() method. stdout and stderr will be redirected to out."""
     try:
         stdout.flush()
     except Exception:
@@ -635,21 +664,35 @@
         stderr=None,
         ensurestart=True,
         record_wait=None,
+        stdin_bytes=None,
     ):
         '''Spawn a command without waiting for it to finish.'''
         # we can't use close_fds *and* redirect stdin. I'm not sure that we
         # need to because the detached process has no console connection.
-        p = subprocess.Popen(
-            pycompat.rapply(tonativestr, script),
-            shell=shell,
-            env=tonativeenv(env),
-            close_fds=True,
-            creationflags=_creationflags,
-            stdout=stdout,
-            stderr=stderr,
-        )
-        if record_wait is not None:
-            record_wait(p.wait)
+
+        try:
+            stdin = None
+            if stdin_bytes is not None:
+                stdin = pycompat.unnamedtempfile()
+                stdin.write(stdin_bytes)
+                stdin.flush()
+                stdin.seek(0)
+
+            p = subprocess.Popen(
+                pycompat.rapply(tonativestr, script),
+                shell=shell,
+                env=tonativeenv(env),
+                close_fds=True,
+                creationflags=_creationflags,
+                stdin=stdin,
+                stdout=stdout,
+                stderr=stderr,
+            )
+            if record_wait is not None:
+                record_wait(p.wait)
+        finally:
+            if stdin is not None:
+                stdin.close()
 
 
 else:
@@ -662,15 +705,16 @@
         stderr=None,
         ensurestart=True,
         record_wait=None,
+        stdin_bytes=None,
     ):
-        '''Spawn a command without waiting for it to finish.
+        """Spawn a command without waiting for it to finish.
 
 
         When `record_wait` is not None, the spawned process will not be fully
         detached and the `record_wait` argument will be called with the
         `Subprocess.wait` function for the spawned process.  This is mostly
         useful for developers that need to make sure the spawned process
-        finished before a certain point. (eg: writing test)'''
+        finished before a certain point. (eg: writing test)"""
         if pycompat.isdarwin:
             # avoid crash in CoreFoundation in case another thread
             # calls gui() while we're calling fork().
@@ -722,15 +766,21 @@
             if record_wait is None:
                 # Start a new session
                 os.setsid()
+            # connect stdin to devnull to make sure the subprocess can't
+            # muck up that stream for mercurial.
+            if stdin_bytes is None:
+                stdin = open(os.devnull, b'r')
+            else:
+                stdin = pycompat.unnamedtempfile()
+                stdin.write(stdin_bytes)
+                stdin.flush()
+                stdin.seek(0)
 
-            stdin = open(os.devnull, b'r')
             if stdout is None:
                 stdout = open(os.devnull, b'w')
             if stderr is None:
                 stderr = open(os.devnull, b'w')
 
-            # connect stdin to devnull to make sure the subprocess can't
-            # muck up that stream for mercurial.
             p = subprocess.Popen(
                 cmd,
                 shell=shell,
@@ -754,5 +804,6 @@
         finally:
             # mission accomplished, this child needs to exit and not
             # continue the hg process here.
+            stdin.close()
             if record_wait is None:
                 os._exit(returncode)
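
A hedged sketch of the new stdin_bytes argument added in the hunks above; the helper's name is not visible here, so procutil.runbgcommand is assumed. The bytes are staged in an unnamed temporary file and connected to the detached child's stdin:

    from mercurial.utils import procutil

    procutil.runbgcommand(              # helper name assumed, not shown above
        [b'cat'],                       # hypothetical command to run detached
        {b'PATH': b'/usr/bin:/bin'},    # environment handed to the child
        stdin_bytes=b'data for the background process\n',
    )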
--- a/mercurial/utils/stringutil.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/utils/stringutil.py	Tue Jan 19 21:48:43 2021 +0530
@@ -494,15 +494,15 @@
 
 @attr.s(hash=True)
 class mailmapping(object):
-    '''Represents a username/email key or value in
-    a mailmap file'''
+    """Represents a username/email key or value in
+    a mailmap file"""
 
     email = attr.ib()
     name = attr.ib(default=None)
 
 
 def _ismailmaplineinvalid(names, emails):
-    '''Returns True if the parsed names and emails
+    """Returns True if the parsed names and emails
     in a mailmap entry are invalid.
 
     >>> # No names or emails fails
@@ -522,7 +522,7 @@
     >>> emails = [b'proper@email.com', b'commit@email.com']
     >>> _ismailmaplineinvalid(names, emails)
     False
-    '''
+    """
     return not emails or not names and len(emails) < 2
 
 
@@ -597,11 +597,13 @@
             continue
 
         mailmapkey = mailmapping(
-            email=emails[-1], name=names[-1] if len(names) == 2 else None,
+            email=emails[-1],
+            name=names[-1] if len(names) == 2 else None,
         )
 
         mailmap[mailmapkey] = mailmapping(
-            email=emails[0], name=names[0] if names else None,
+            email=emails[0],
+            name=names[0] if names else None,
         )
 
     return mailmap
@@ -659,7 +661,7 @@
 
 
 def isauthorwellformed(author):
-    '''Return True if the author field is well formed
+    """Return True if the author field is well formed
     (ie "Contributor Name <contrib@email.dom>")
 
     >>> isauthorwellformed(b'Good Author <good@author.com>')
@@ -676,7 +678,7 @@
     False
     >>> isauthorwellformed(b'Bad Author <author>')
     False
-    '''
+    """
     return _correctauthorformat.match(author) is not None
 
 
--- a/mercurial/vfs.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/vfs.py	Tue Jan 19 21:48:43 2021 +0530
@@ -83,12 +83,12 @@
 
     @util.propertycache
     def open(self):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         Newly created directories are marked as "not to be indexed by
         the content indexing service", if ``notindexed`` is specified
         for "write" mode access.
-        '''
+        """
         return self.__call__
 
     def read(self, path):
@@ -142,9 +142,9 @@
         return os.path.islink(self.join(path))
 
     def isfileorlink(self, path=None):
-        '''return whether path is a regular file or a symlink
+        """return whether path is a regular file or a symlink
 
-        Unlike isfile, this doesn't follow symlinks.'''
+        Unlike isfile, this doesn't follow symlinks."""
         try:
             st = self.lstat(path)
         except OSError:
@@ -228,8 +228,7 @@
         return util.readlink(self.join(path))
 
     def removedirs(self, path=None):
-        """Remove a leaf directory and all empty intermediate ones
-        """
+        """Remove a leaf directory and all empty intermediate ones"""
         return util.removedirs(self.join(path))
 
     def rmdir(self, path=None):
@@ -332,7 +331,7 @@
 
 
 class vfs(abstractvfs):
-    '''Operate files relative to a base directory
+    """Operate files relative to a base directory
 
     This class is used to hide the details of COW semantics and
     remote file access from higher level code.
@@ -340,7 +339,7 @@
     'cacheaudited' should be enabled only if (a) vfs object is short-lived, or
     (b) the base directory is managed by hg and considered sort-of append-only.
     See pathutil.pathauditor() for details.
-    '''
+    """
 
     def __init__(
         self,
@@ -397,7 +396,7 @@
         auditpath=True,
         makeparentdirs=True,
     ):
-        '''Open ``path`` file, which is relative to vfs root.
+        """Open ``path`` file, which is relative to vfs root.
 
         By default, parent directories are created as needed. Newly created
         directories are marked as "not to be indexed by the content indexing
@@ -426,7 +425,7 @@
         truncation), if it is owned by another. Therefore, use
         combination of append mode and checkambig=True only in limited
         cases (see also issue5418 and issue5584 for detail).
-        '''
+        """
         if auditpath:
             self._auditpath(path, mode)
         f = self.join(path)
--- a/mercurial/win32.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/win32.py	Tue Jan 19 21:48:43 2021 +0530
@@ -385,13 +385,13 @@
 
 
 def checkcertificatechain(cert, build=True):
-    '''Tests the given certificate to see if there is a complete chain to a
-       trusted root certificate.  As a side effect, missing certificates are
-       downloaded and installed unless ``build=False``.  True is returned if a
-       chain to a trusted root exists (even if built on the fly), otherwise
-       False.  NB: A chain to a trusted root does NOT imply that the certificate
-       is valid.
-    '''
+    """Tests the given certificate to see if there is a complete chain to a
+    trusted root certificate.  As a side effect, missing certificates are
+    downloaded and installed unless ``build=False``.  True is returned if a
+    chain to a trusted root exists (even if built on the fly), otherwise
+    False.  NB: A chain to a trusted root does NOT imply that the certificate
+    is valid.
+    """
 
     chainctxptr = ctypes.POINTER(CERT_CHAIN_CONTEXT)
 
@@ -488,8 +488,8 @@
 
 
 def testpid(pid):
-    '''return True if pid is still running or unable to
-    determine, False otherwise'''
+    """return True if pid is still running or unable to
+    determine, False otherwise"""
     h = _kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid)
     if h:
         try:
@@ -576,10 +576,10 @@
 
 
 def setsignalhandler():
-    '''Register a termination handler for console events including
+    """Register a termination handler for console events including
     CTRL+C. python signal handlers do not work well with socket
     operations.
-    '''
+    """
 
     def handler(event):
         _kernel32.ExitProcess(1)
@@ -627,8 +627,8 @@
 
 
 def enablevtmode():
-    '''Enable virtual terminal mode for the associated console.  Return True if
-    enabled, else False.'''
+    """Enable virtual terminal mode for the associated console.  Return True if
+    enabled, else False."""
 
     ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4
 
--- a/mercurial/windows.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/windows.py	Tue Jan 19 21:48:43 2021 +0530
@@ -195,13 +195,13 @@
 
 
 class winstdout(object):
-    '''Some files on Windows misbehave.
+    """Some files on Windows misbehave.
 
     When writing to a broken pipe, EINVAL instead of EPIPE may be raised.
 
     When writing too many bytes to a console at the same time, a "Not enough space"
     error may happen. Python 3 already works around that.
-    '''
+    """
 
     def __init__(self, fp):
         self.fp = fp
@@ -497,11 +497,11 @@
 
 
 def findexe(command):
-    '''Find executable for command searching like cmd.exe does.
+    """Find executable for command searching like cmd.exe does.
     If command is a basename then PATH is searched for command.
     PATH isn't searched if command is an absolute or relative path.
     An extension from PATHEXT is found and added if not present.
-    If command isn't found None is returned.'''
+    If command isn't found None is returned."""
     pathext = encoding.environ.get(b'PATHEXT', b'.COM;.EXE;.BAT;.CMD')
     pathexts = [ext for ext in pathext.lower().split(pycompat.ospathsep)]
     if os.path.splitext(command)[1].lower() in pathexts:
@@ -529,10 +529,10 @@
 
 
 def statfiles(files):
-    '''Stat each file in files. Yield each stat, or None if a file
+    """Stat each file in files. Yield each stat, or None if a file
     does not exist or has a type we don't care about.
 
-    Cluster and cache stat per directory to minimize number of OS stat calls.'''
+    Cluster and cache stat per directory to minimize number of OS stat calls."""
     dircache = {}  # dirname -> filename -> status | None if file does not exist
     getkind = stat.S_IFMT
     for nf in files:
@@ -630,14 +630,14 @@
 
 
 def lookupreg(key, valname=None, scope=None):
-    ''' Look up a key/value name in the Windows registry.
+    """Look up a key/value name in the Windows registry.
 
     valname: value name. If unspecified, the default value for the key
     is used.
     scope: optionally specify scope for registry lookup, this can be
     a sequence of scopes to look up in order. Default (CURRENT_USER,
     LOCAL_MACHINE).
-    '''
+    """
     if scope is None:
         scope = (winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE)
     elif not isinstance(scope, (list, tuple)):
--- a/mercurial/wireprotoframing.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprotoframing.py	Tue Jan 19 21:48:43 2021 +0530
@@ -456,7 +456,10 @@
 def createalternatelocationresponseframe(stream, requestid, location):
     data = {
         b'status': b'redirect',
-        b'location': {b'url': location.url, b'mediatype': location.mediatype,},
+        b'location': {
+            b'url': location.url,
+            b'mediatype': location.mediatype,
+        },
     }
 
     for a in (
@@ -490,7 +493,12 @@
 def createcommanderrorresponse(stream, requestid, message, args=None):
     # TODO should this be using a list of {'msg': ..., 'args': {}} so atom
     # formatting works consistently?
-    m = {b'status': b'error', b'error': {b'message': message,}}
+    m = {
+        b'status': b'error',
+        b'error': {
+            b'message': message,
+        },
+    }
 
     if args:
         m[b'error'][b'args'] = args
@@ -510,7 +518,12 @@
     assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
 
     payload = b''.join(
-        cborutil.streamencode({b'type': errtype, b'message': [{b'msg': msg}],})
+        cborutil.streamencode(
+            {
+                b'type': errtype,
+                b'message': [{b'msg': msg}],
+            }
+        )
     )
 
     yield stream.makeframe(
@@ -1292,14 +1305,18 @@
                 for frame in gen:
                     yield frame
 
-        return b'sendframes', {b'framegen': makegen(),}
+        return b'sendframes', {
+            b'framegen': makegen(),
+        }
 
     def _handlesendframes(self, framegen):
         if self._deferoutput:
             self._bufferedframegens.append(framegen)
             return b'noop', {}
         else:
-            return b'sendframes', {b'framegen': framegen,}
+            return b'sendframes', {
+                b'framegen': framegen,
+            }
 
     def onservererror(self, stream, requestid, msg):
         ensureserverstream(stream)
@@ -1351,7 +1368,9 @@
         return s
 
     def _makeerrorresult(self, msg):
-        return b'error', {b'message': msg,}
+        return b'error', {
+            b'message': msg,
+        }
 
     def _makeruncommandresult(self, requestid):
         entry = self._receivingcommands[requestid]
@@ -1397,7 +1416,9 @@
         )
 
     def _makewantframeresult(self):
-        return b'wantframe', {b'state': self._state,}
+        return b'wantframe', {
+            b'state': self._state,
+        }
 
     def _validatecommandrequestframe(self, frame):
         new = frame.flags & FLAG_COMMAND_REQUEST_NEW
@@ -1802,7 +1823,9 @@
             return (
                 request,
                 b'sendframes',
-                {b'framegen': self._makecommandframes(request),},
+                {
+                    b'framegen': self._makecommandframes(request),
+                },
             )
 
     def flushcommands(self):
@@ -1835,7 +1858,9 @@
                 for frame in self._makecommandframes(request):
                     yield frame
 
-        return b'sendframes', {b'framegen': makeframes(),}
+        return b'sendframes', {
+            b'framegen': makeframes(),
+        }
 
     def _makecommandframes(self, request):
         """Emit frames to issue a command request.
@@ -1851,7 +1876,9 @@
 
             payload = b''.join(
                 cborutil.streamencode(
-                    {b'contentencodings': self._clientcontentencoders,}
+                    {
+                        b'contentencodings': self._clientcontentencoders,
+                    }
                 )
             )
 
--- a/mercurial/wireprotoserver.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprotoserver.py	Tue Jan 19 21:48:43 2021 +0530
@@ -8,7 +8,6 @@
 
 import contextlib
 import struct
-import sys
 import threading
 
 from .i18n import _
@@ -851,7 +850,6 @@
     def serve_forever(self):
         self.serveuntil(threading.Event())
         self._ui.restorefinout(self._fin, self._fout)
-        sys.exit(0)
 
     def serveuntil(self, ev):
         """Serve until a threading.Event is set."""
--- a/mercurial/wireprototypes.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprototypes.py	Tue Jan 19 21:48:43 2021 +0530
@@ -33,14 +33,23 @@
 
 # All available wire protocol transports.
 TRANSPORTS = {
-    SSHV1: {b'transport': b'ssh', b'version': 1,},
+    SSHV1: {
+        b'transport': b'ssh',
+        b'version': 1,
+    },
     SSHV2: {
         b'transport': b'ssh',
         # TODO mark as version 2 once all commands are implemented.
         b'version': 1,
     },
-    b'http-v1': {b'transport': b'http', b'version': 1,},
-    HTTP_WIREPROTO_V2: {b'transport': b'http', b'version': 2,},
+    b'http-v1': {
+        b'transport': b'http',
+        b'version': 1,
+    },
+    HTTP_WIREPROTO_V2: {
+        b'transport': b'http',
+        b'version': 2,
+    },
 }
 
 
--- a/mercurial/wireprotov1peer.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprotov1peer.py	Tue Jan 19 21:48:43 2021 +0530
@@ -36,7 +36,7 @@
 
 
 def batchable(f):
-    '''annotation for batchable methods
+    """annotation for batchable methods
 
     Such methods must implement a coroutine as follows:
 
@@ -56,7 +56,7 @@
     method, but adds the original method as an attribute called "batchable",
     which is used by remotebatch to split the call into separate encoding and
     decoding phases.
-    '''
+    """
 
     def plain(*args, **opts):
         batchable = f(*args, **opts)
@@ -474,7 +474,7 @@
             return changegroupmod.cg1unpacker(f, b'UN')
 
     def unbundle(self, bundle, heads, url):
-        '''Send cg (a readable file-like object representing the
+        """Send cg (a readable file-like object representing the
         changegroup to push, typically a chunkbuffer object) to the
         remote server as a bundle.
 
@@ -485,7 +485,7 @@
 
         `url` is the url the client thinks it's pushing to, which is
         visible to hooks.
-        '''
+        """
 
         if heads != [b'force'] and self.capable(b'unbundlehash'):
             heads = wireprototypes.encodelist(
@@ -655,6 +655,5 @@
         raise NotImplementedError()
 
     def _abort(self, exception):
-        """clearly abort the wire protocol connection and raise the exception
-        """
+        """clearly abort the wire protocol connection and raise the exception"""
         raise NotImplementedError()
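
The batchable docstring above describes a two-step coroutine, but the concrete example it contains falls outside the hunk context. A hedged sketch of the shape it describes (future is this module's helper class; decode stands in for whatever decoding the method needs):

    @batchable
    def sample(self, one, two=None):
        # step 1: yield the encoded arguments plus a future for the raw result
        encoded_args = [(b'one', one), (b'two', two)]
        result = future()
        yield encoded_args, result
        # step 2: the batched request has filled the future; decode and yield
        yield decode(result.value)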
--- a/mercurial/wireprotov1server.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprotov1server.py	Tue Jan 19 21:48:43 2021 +0530
@@ -19,6 +19,7 @@
 
 from . import (
     bundle2,
+    bundlecaches,
     changegroup as changegroupmod,
     discovery,
     encoding,
@@ -272,7 +273,7 @@
     data center given the client's IP address.
     """
     return wireprototypes.bytesresponse(
-        repo.vfs.tryread(b'clonebundles.manifest')
+        repo.vfs.tryread(bundlecaches.CB_MANIFEST_FILE)
     )
 
 
@@ -387,8 +388,8 @@
     manifest = repo.vfs.tryread(b'pullbundles.manifest')
     if not manifest:
         return None
-    res = exchange.parseclonebundlesmanifest(repo, manifest)
-    res = exchange.filterclonebundleentries(repo, res)
+    res = bundlecaches.parseclonebundlesmanifest(repo, manifest)
+    res = bundlecaches.filterclonebundleentries(repo, res)
     if not res:
         return None
     cl = repo.unfiltered().changelog
@@ -601,10 +602,10 @@
 
 @wireprotocommand(b'stream_out', permission=b'pull')
 def stream(repo, proto):
-    '''If the server supports streaming clone, it advertises the "stream"
+    """If the server supports streaming clone, it advertises the "stream"
     capability with a value representing the version and flags of the repo
     it is serving. Client checks to see if it understands the format.
-    '''
+    """
     return wireprototypes.streamreslegacy(streamclone.generatev1wireproto(repo))
 
 
@@ -684,9 +685,7 @@
                     # We did not change it to minimise code change.
                     # This needs to be moved to something proper.
                     # Feel free to do it.
-                    procutil.stderr.write(b"abort: %s\n" % exc.message)
-                    if exc.hint is not None:
-                        procutil.stderr.write(b"(%s)\n" % exc.hint)
+                    procutil.stderr.write(exc.format())
                     procutil.stderr.flush()
                     return wireprototypes.pushres(
                         0, output.getvalue() if output else b''
--- a/mercurial/wireprotov2server.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/wireprotov2server.py	Tue Jan 19 21:48:43 2021 +0530
@@ -982,7 +982,10 @@
         b'revisions': {
             b'type': b'list',
             b'example': [
-                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
+                {
+                    b'type': b'changesetexplicit',
+                    b'nodes': [b'abcdef...'],
+                }
             ],
         },
         b'fields': {
@@ -1046,7 +1049,7 @@
         followingdata = []
 
         if b'revision' in fields:
-            revisiondata = cl.rawdata(node)
+            revisiondata = cl.revision(node)
             followingmeta.append((b'revision', len(revisiondata)))
             followingdata.append(revisiondata)
 
@@ -1166,14 +1169,20 @@
             b'default': lambda: False,
             b'example': True,
         },
-        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'nodes': {
+            b'type': b'list',
+            b'example': [b'0123456...'],
+        },
         b'fields': {
             b'type': b'set',
             b'default': set,
             b'example': {b'parents', b'revision'},
             b'validvalues': {b'parents', b'revision', b'linknode'},
         },
-        b'path': {b'type': b'bytes', b'example': b'foo.txt',},
+        b'path': {
+            b'type': b'bytes',
+            b'example': b'foo.txt',
+        },
     },
     permission=b'pull',
     # TODO censoring a file revision won't invalidate the cache.
@@ -1262,7 +1271,10 @@
         b'revisions': {
             b'type': b'list',
             b'example': [
-                {b'type': b'changesetexplicit', b'nodes': [b'abcdef...'],}
+                {
+                    b'type': b'changesetexplicit',
+                    b'nodes': [b'abcdef...'],
+                }
             ],
         },
     },
@@ -1375,7 +1387,12 @@
 
 @wireprotocommand(
     b'listkeys',
-    args={b'namespace': {b'type': b'bytes', b'example': b'ns',},},
+    args={
+        b'namespace': {
+            b'type': b'bytes',
+            b'example': b'ns',
+        },
+    },
     permission=b'pull',
 )
 def listkeysv2(repo, proto, namespace):
@@ -1390,7 +1407,12 @@
 
 @wireprotocommand(
     b'lookup',
-    args={b'key': {b'type': b'bytes', b'example': b'foo',},},
+    args={
+        b'key': {
+            b'type': b'bytes',
+            b'example': b'foo',
+        },
+    },
     permission=b'pull',
 )
 def lookupv2(repo, proto, key):
@@ -1415,7 +1437,10 @@
 @wireprotocommand(
     b'manifestdata',
     args={
-        b'nodes': {b'type': b'list', b'example': [b'0123456...'],},
+        b'nodes': {
+            b'type': b'list',
+            b'example': [b'0123456...'],
+        },
         b'haveparents': {
             b'type': b'bool',
             b'default': lambda: False,
@@ -1427,7 +1452,10 @@
             b'example': {b'parents', b'revision'},
             b'validvalues': {b'parents', b'revision'},
         },
-        b'tree': {b'type': b'bytes', b'example': b'',},
+        b'tree': {
+            b'type': b'bytes',
+            b'example': b'',
+        },
     },
     permission=b'pull',
     cachekeyfn=makecommandcachekeyfn(b'manifestdata', 1, allargs=True),
@@ -1485,10 +1513,22 @@
 @wireprotocommand(
     b'pushkey',
     args={
-        b'namespace': {b'type': b'bytes', b'example': b'ns',},
-        b'key': {b'type': b'bytes', b'example': b'key',},
-        b'old': {b'type': b'bytes', b'example': b'old',},
-        b'new': {b'type': b'bytes', b'example': b'new',},
+        b'namespace': {
+            b'type': b'bytes',
+            b'example': b'ns',
+        },
+        b'key': {
+            b'type': b'bytes',
+            b'example': b'key',
+        },
+        b'old': {
+            b'type': b'bytes',
+            b'example': b'old',
+        },
+        b'new': {
+            b'type': b'bytes',
+            b'example': b'new',
+        },
     },
     permission=b'push',
 )
--- a/mercurial/worker.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/mercurial/worker.py	Tue Jan 19 21:48:43 2021 +0530
@@ -67,6 +67,9 @@
 
 if pycompat.ispy3:
 
+    def ismainthread():
+        return threading.current_thread() == threading.main_thread()
+
     class _blockingreader(object):
         def __init__(self, wrapped):
             self._wrapped = wrapped
@@ -100,6 +103,9 @@
 
 else:
 
+    def ismainthread():
+        return isinstance(threading.current_thread(), threading._MainThread)
+
     def _blockingreader(wrapped):
         return wrapped
 
@@ -116,8 +122,8 @@
 
 
 def worthwhile(ui, costperop, nops, threadsafe=True):
-    '''try to determine whether the benefit of multiple processes can
-    outweigh the cost of starting them'''
+    """try to determine whether the benefit of multiple processes can
+    outweigh the cost of starting them"""
 
     if not threadsafe and _DISALLOW_THREAD_UNSAFE:
         return False
@@ -131,7 +137,7 @@
 def worker(
     ui, costperarg, func, staticargs, args, hasretval=False, threadsafe=True
 ):
-    '''run a function, possibly in parallel in multiple worker
+    """run a function, possibly in parallel in multiple worker
     processes.
 
     returns a progress iterator
@@ -153,8 +159,13 @@
     threadsafe - whether work items are thread safe and can be executed using
     a thread-based worker. Should be disabled for CPU heavy tasks that don't
     release the GIL.
-    '''
+    """
     enabled = ui.configbool(b'worker', b'enabled')
+    if enabled and _platformworker is _posixworker and not ismainthread():
+        # The POSIX worker has to install a handler for SIGCHLD.
+        # Python up to 3.9 only allows this in the main thread.
+        enabled = False
+
     if enabled and worthwhile(ui, costperarg, len(args), threadsafe=threadsafe):
         return _platformworker(ui, func, staticargs, args, hasretval)
     return func(*staticargs + (args,))
@@ -300,16 +311,16 @@
     if status:
         if status < 0:
             os.kill(os.getpid(), -status)
-        sys.exit(status)
+        raise error.WorkerError(status)
     if hasretval:
         yield True, retval
 
 
 def _posixexitstatus(code):
-    '''convert a posix exit status into the same form returned by
+    """convert a posix exit status into the same form returned by
     os.spawnv
 
-    returns None if the process was stopped instead of exiting'''
+    returns None if the process was stopped instead of exiting"""
     if os.WIFEXITED(code):
         return os.WEXITSTATUS(code)
     elif os.WIFSIGNALED(code):
@@ -423,7 +434,7 @@
 
 
 def partition(lst, nslices):
-    '''partition a list into N slices of roughly equal size
+    """partition a list into N slices of roughly equal size
 
     The current strategy takes every Nth element from the input. If
     we ever write workers that need to preserve grouping in input
@@ -450,6 +461,6 @@
         What we should really be doing is have workers read filenames from an
         ordered queue. This preserves locality and also keeps any worker from
         getting more than one file out of balance.
-    '''
+    """
     for i in range(nslices):
         yield lst[i::nslices]
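
A minimal sketch of the guard introduced above: the POSIX worker installs a SIGCHLD handler, and Python up to 3.9 only allows that from the main thread, so the worker pool is skipped anywhere else:

    import threading

    def safe_to_use_posix_worker():
        # signal.signal() raises ValueError outside the main thread (<= 3.9),
        # so fall back to serial execution in that case.
        return threading.current_thread() == threading.main_thread()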
--- a/relnotes/5.6	Thu Dec 24 15:58:08 2020 +0900
+++ b/relnotes/5.6	Tue Jan 19 21:48:43 2021 +0530
@@ -8,6 +8,9 @@
  * New revset predicate `diffcontains(pattern)` for filtering revisions
    in the same way as `hg grep --diff pattern`.
 
+ * The memory footprint per changeset and per file during pull/unbundle
+   operations has been significantly reduced.
+
 
 == New Experimental Features ==
 
@@ -27,3 +30,15 @@
    the higher-level functions available in the same module cover your
    use cases.
 
+ * `phases.registernew` now takes a set of revisions instead of a list
+   of nodes. `phases.advanceboundary` takes an optional set of revisions
+   in addition to the list of nodes. The corresponding members of the
+   `phasecache` class follow this change.
+
+ * The `addgroup` member of `revlog` classes no longer keeps a list of
+   all found nodes. It now returns True iff a node was found in the group.
+   An optional callback for duplicated nodes can be used by callers to keep
+   track of all nodes themselves.
+
+ * The `_chaininfocache` of `revlog` classes has been changed from a dict
+   to an LRU cache.
--- a/relnotes/next	Thu Dec 24 15:58:08 2020 +0900
+++ b/relnotes/next	Tue Jan 19 21:48:43 2021 +0530
@@ -1,9 +1,54 @@
 == New Features ==
 
+ * There is a new config section for templates used by hg commands. It
+   is called `[command-templates]`. Some existing config options have
+   been deprecated in favor of config options in the new
+   section. These are: `ui.logtemplate` to `command-templates.log`,
+   `ui.graphnodetemplate` to `command-templates.graphnode`,
+   `ui.mergemarkertemplate` to `command-templates.mergemarker`,
+   `ui.pre-merge-tool-output-template` to
+   `command-templates.pre-merge-tool-output`.
+
+ * There is a new set of config options for the template used for the
+   one-line commit summary displayed by various commands, such as `hg
+   rebase`. The main one is `command-templates.oneline-summary`. That
+   can be overridden per command with
+   `command-templates.oneline-summary.<command>`, where `<command>`
+   can be e.g. `rebase`. As part of this effort, the default format
+   from `hg rebase` was reorganized a bit.
+
+ * `hg strip`, from the strip extension, is now a core command, `hg
+   debugstrip`. The extension remains for compatibility.
+
+ * `hg diff` and `hg extdiff` now support `--from <rev>` and `--to <rev>`
+   arguments as clearer alternatives to `-r <revs>`. `-r <revs>` has been
+   deprecated.
+
+ * The memory footprint per changeset during pull/unbundle
+   operations has been further reduced.
+
+ * There is a new internal merge tool called `internal:mergediff` (can
+   be set as the value for the `merge` config in the `[ui]`
+   section). It resolves merges the same way as `internal:merge` and
+   `internal:merge3`, but it shows conflicts differently. Instead of
+   showing 2 or 3 snapshots of the conflicting pieces of code, it
+   shows one snapshot and a diff. This may be useful when at least one
+   side of the conflict is similar to the base. The new marker style
+   is also supported by "premerge" as
+   `merge-tools.<tool>.premerge=keep-mergediff`.
+
+ * External hooks are now called with `HGPLAIN=1` preset.
+
+ * The `branchmap` cache is updated more intelligently and can be
+   significantly faster for repositories with many branches and changesets.
 
 
 == New Experimental Features ==
 
+* `experimental.single-head-per-branch:public-changes-only` can be used to
+  restrict the single-head check to public revisions. This is useful for
+  overlay repositories that have both a publishing and a non-publishing view
+  of the same storage.
 
 
 == Bug Fixes ==
@@ -12,6 +57,8 @@
 
 == Backwards Compatibility Changes ==
 
+ * `--force-lock` and `--force-wlock` options on `hg debuglock` command are
+   renamed to `--force-free-lock` and `--force-free-wlock` respectively.
 
 
 == Internal API Changes ==
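
A hedged hgrc sketch tying the new options above together (the template values and the tool name are illustrative only):

    [command-templates]
    log = {rev}:{node|short} {desc|firstline}\n
    oneline-summary.rebase = {rev} {desc|firstline}

    [ui]
    merge = internal:mergediff

    [merge-tools]
    mymergetool.premerge = keep-mergediff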
--- a/rust/Cargo.lock	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/Cargo.lock	Tue Jan 19 21:48:43 2021 +0530
@@ -7,10 +7,10 @@
 
 [[package]]
 name = "aho-corasick"
-version = "0.7.13"
+version = "0.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -26,8 +26,8 @@
 version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "hermit-abi 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -42,13 +42,21 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "bitmaps"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "byteorder"
 version = "1.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "cc"
-version = "1.0.60"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -60,6 +68,11 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "clap"
 version = "2.33.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -74,35 +87,27 @@
 ]
 
 [[package]]
+name = "const_fn"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "cpython"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "crc32fast"
-version = "1.2.0"
+version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "crossbeam"
-version = "0.7.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-queue 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -115,40 +120,38 @@
 ]
 
 [[package]]
-name = "crossbeam-deque"
-version = "0.7.3"
+name = "crossbeam-channel"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.8.2"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "memoffset 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
-name = "crossbeam-queue"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
 name = "crossbeam-utils"
 version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -159,12 +162,22 @@
 ]
 
 [[package]]
+name = "crossbeam-utils"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "ctor"
 version = "0.1.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -185,20 +198,40 @@
  "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "flate2"
-version = "1.0.17"
+version = "1.0.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "miniz_oxide 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "format-bytes"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "format-bytes-macros"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -217,7 +250,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -228,10 +261,10 @@
 
 [[package]]
 name = "hermit-abi"
-version = "0.1.16"
+version = "0.1.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -245,24 +278,26 @@
 dependencies = [
  "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rayon 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
  "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "zstd 0.5.3+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -273,7 +308,7 @@
  "cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "hg-core 0.1.0",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -286,6 +321,19 @@
 ]
 
 [[package]]
+name = "im-rc"
+version = "15.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "itertools"
 version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -298,7 +346,7 @@
 version = "0.1.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -308,7 +356,7 @@
 
 [[package]]
 name = "libc"
-version = "0.2.77"
+version = "0.2.81"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -316,9 +364,9 @@
 version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "cc 1.0.60 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
- "vcpkg 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -336,7 +384,7 @@
 
 [[package]]
 name = "memchr"
-version = "2.3.3"
+version = "2.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -344,13 +392,13 @@
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "memoffset"
-version = "0.5.6"
+version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -370,15 +418,15 @@
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "miniz_oxide"
-version = "0.4.2"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "adler 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -387,7 +435,7 @@
 
 [[package]]
 name = "num-traits"
-version = "0.2.12"
+version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -398,8 +446,8 @@
 version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "hermit-abi 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -412,12 +460,12 @@
 
 [[package]]
 name = "pkg-config"
-version = "0.3.18"
+version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "ppv-lite86"
-version = "0.2.9"
+version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -432,8 +480,13 @@
 ]
 
 [[package]]
+name = "proc-macro-hack"
+version = "0.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "proc-macro2"
-version = "1.0.21"
+version = "1.0.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -444,8 +497,8 @@
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -453,8 +506,8 @@
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -467,7 +520,7 @@
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -475,7 +528,7 @@
 version = "0.3.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -485,7 +538,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -497,7 +550,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -508,7 +561,7 @@
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "ppv-lite86 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -558,24 +611,32 @@
 ]
 
 [[package]]
+name = "rand_xoshiro"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "rayon"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rayon-core 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rayon-core"
-version = "1.8.1"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -595,18 +656,18 @@
 
 [[package]]
 name = "regex"
-version = "1.3.9"
+version = "1.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "aho-corasick 0.7.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.6.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.18"
+version = "0.6.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -623,6 +684,7 @@
 dependencies = [
  "clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "hg-core 0.1.0",
  "log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -634,7 +696,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -659,16 +721,30 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "sized-chunks"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "strsim"
 version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "syn"
-version = "1.0.41"
+version = "1.0.54"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "proc-macro2 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -679,7 +755,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)",
  "remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -688,7 +764,7 @@
 
 [[package]]
 name = "termcolor"
-version = "1.1.0"
+version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -715,20 +791,27 @@
 version = "0.1.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "twox-hash"
-version = "1.5.0"
+version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "static_assertions 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "typenum"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "unicode-width"
 version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -740,7 +823,7 @@
 
 [[package]]
 name = "vcpkg"
-version = "0.2.10"
+version = "0.2.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -749,6 +832,11 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
+name = "version_check"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
 name = "wasi"
 version = "0.9.0+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -798,7 +886,7 @@
 version = "2.0.5+zstd.1.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
  "zstd-sys 1.4.17+zstd.1.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -807,63 +895,70 @@
 version = "1.4.17+zstd.1.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "cc 1.0.60 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [metadata]
 "checksum adler 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
-"checksum aho-corasick 0.7.13 (registry+https://github.com/rust-lang/crates.io-index)" = "043164d8ba5c4c3035fec9bbee8647c0261d788f3474306f93bb65901cae0e86"
+"checksum aho-corasick 0.7.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
 "checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
 "checksum autocfg 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
 "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
+"checksum bitmaps 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
 "checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
-"checksum cc 1.0.60 (registry+https://github.com/rust-lang/crates.io-index)" = "ef611cc68ff783f18535d77ddd080185275713d852c4f5cbb6122c462a7a825c"
+"checksum cc 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"
 "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
+"checksum cfg-if 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 "checksum clap 2.33.3 (registry+https://github.com/rust-lang/crates.io-index)" = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002"
+"checksum const_fn 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd51eab21ab4fd6a3bf889e2d0958c0a6e3a61ad04260325e919e652a2a62826"
 "checksum cpython 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bfaf3847ab963e40c4f6dd8d6be279bdf74007ae2413786a0dcbb28c52139a95"
-"checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
-"checksum crossbeam 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e"
+"checksum crc32fast 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a"
 "checksum crossbeam-channel 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
-"checksum crossbeam-deque 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9f02af974daeee82218205558e51ec8768b48cf524bd01d550abe5573a608285"
-"checksum crossbeam-epoch 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace"
-"checksum crossbeam-queue 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570"
+"checksum crossbeam-channel 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
+"checksum crossbeam-deque 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9"
+"checksum crossbeam-epoch 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a1aaa739f95311c2c7887a76863f500026092fb1dce0161dab577e559ef3569d"
 "checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
+"checksum crossbeam-utils 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "02d96d1e189ef58269ebe5b97953da3274d83a93af647c2ddd6f9dab28cedb8d"
 "checksum ctor 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "7fbaabec2c953050352311293be5c6aba8e141ba19d6811862b232d6fd020484"
 "checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
 "checksum either 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
 "checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
-"checksum flate2 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "766d0e77a2c1502169d4a93ff3b8c15a71fd946cd0126309752104e5f3c46d94"
+"checksum flate2 1.0.19 (registry+https://github.com/rust-lang/crates.io-index)" = "7411863d55df97a419aa64cb4d2f167103ea9d767e2c54a1868b7ac3f6b47129"
+"checksum format-bytes 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1a7374eb574cd29ae45878554298091c554c3286a17b3afa440a3e2710ae0790"
+"checksum format-bytes-macros 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4edcc04201cea17a0e6b937adebd46b93fba09924c7e6ed8c515a35ce8432cbc"
 "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
 "checksum gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2"
 "checksum getrandom 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "fc587bc0ec293155d5bfa6b9891ec18a1e330c234f896ea47fbada4cadbe47e6"
 "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
-"checksum hermit-abi 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c30f6d0bc6b00693347368a67d41b58f2fb851215ff1da49e90fe2c5c667151"
+"checksum hermit-abi 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
 "checksum hex 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "644f9158b2f133fd50f5fb3242878846d9eb792e445c893805ff0e3824006e35"
 "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f"
+"checksum im-rc 15.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ca8957e71f04a205cb162508f9326aea04676c8dfd0711220190d6b83664f3f"
 "checksum itertools 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b"
 "checksum jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2"
 "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-"checksum libc 0.2.77 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f96b10ec2560088a8e76961b00d47107b3a625fecb76dedb29ee7ccbf98235"
+"checksum libc 0.2.81 (registry+https://github.com/rust-lang/crates.io-index)" = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
 "checksum libz-sys 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
 "checksum log 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
 "checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
-"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400"
+"checksum memchr 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525"
 "checksum memmap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6585fd95e7bb50d6cc31e20d4cf9afb4e2ba16c5846fc76793f11218da9c475b"
-"checksum memoffset 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa"
+"checksum memoffset 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87"
 "checksum micro-timer 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2620153e1d903d26b72b89f0e9c48d8c4756cba941c185461dddc234980c298c"
 "checksum micro-timer-macros 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e28a3473e6abd6e9aab36aaeef32ad22ae0bd34e79f376643594c2b152ec1c5d"
-"checksum miniz_oxide 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c60c0dfe32c10b43a144bad8fc83538c52f58302c92300ea7ec7bf7b38d5a7b9"
-"checksum num-traits 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ac267bcc07f48ee5f8935ab0d24f316fb722d7a1292e2913f0cc196b29ffd611"
+"checksum miniz_oxide 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
+"checksum num-traits 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
 "checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3"
 "checksum output_vt100 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53cdc5b785b7a58c5aad8216b3dfa114df64b0b06ae6e1501cef91df2fbdf8f9"
-"checksum pkg-config 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "d36492546b6af1463394d46f0c834346f31548646f6ba10849802c9c9a27ac33"
-"checksum ppv-lite86 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c36fa947111f5c62a733b652544dd0016a43ce89619538a8ef92724a6f501a20"
+"checksum pkg-config 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c"
+"checksum ppv-lite86 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"
 "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427"
-"checksum proc-macro2 1.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "36e28516df94f3dd551a587da5357459d9b36d945a7c37c3557928c1c2ff2a2c"
+"checksum proc-macro-hack 0.5.19 (registry+https://github.com/rust-lang/crates.io-index)" = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
+"checksum proc-macro2 1.0.24 (registry+https://github.com/rust-lang/crates.io-index)" = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71"
 "checksum python27-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67cb041de8615111bf224dd75667af5f25c6e032118251426fed7f1b70ce4c8c"
 "checksum python3-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90af11779515a1e530af60782d273b59ac79d33b0e253c071a728563957c76d4"
 "checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
@@ -878,29 +973,34 @@
 "checksum rand_distr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2"
 "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
 "checksum rand_pcg 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
-"checksum rayon 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cfd016f0c045ad38b5251be2c9c0ab806917f82da4d36b2a327e5166adad9270"
-"checksum rayon-core 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e8c4fec834fb6e6d2dd5eece3c7b432a52f0ba887cf40e595190c4107edc08bf"
+"checksum rand_xoshiro 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
+"checksum rayon 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674"
+"checksum rayon-core 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a"
 "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
 "checksum redox_syscall 0.1.57 (registry+https://github.com/rust-lang/crates.io-index)" = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
-"checksum regex 1.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6"
-"checksum regex-syntax 0.6.18 (registry+https://github.com/rust-lang/crates.io-index)" = "26412eb97c6b088a6997e05f69403a802a92d520de2f8e63c2b65f9e0f47c4e8"
+"checksum regex 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "38cf2c13ed4745de91a5eb834e11c00bcc3709e773173b2ce4c56c9fbde04b9c"
+"checksum regex-syntax 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "3b181ba2dcf07aaccad5448e8ead58db5b742cf85dfe035e2227f137a539a189"
 "checksum remove_dir_all 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
 "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
 "checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
 "checksum scopeguard 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+"checksum sized-chunks 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1ec31ceca5644fa6d444cc77548b88b67f46db6f7c71683b0f9336e671830d2f"
+"checksum static_assertions 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
 "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
-"checksum syn 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "6690e3e9f692504b941dc6c3b188fd28df054f7fb8469ab40680df52fdcc842b"
+"checksum syn 1.0.54 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2af957a63d6bd42255c359c93d9bfdb97076bd3b820897ce55ffbfbf107f44"
 "checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9"
-"checksum termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb6bfa289a4d7c5766392812c0a1f4c1ba45afa1ad47803c11e1f407d846d75f"
+"checksum termcolor 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4"
 "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
 "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
 "checksum time 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
-"checksum twox-hash 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bfd5b7557925ce778ff9b9ef90e3ade34c524b5ff10e239c69a42d546d2af56"
+"checksum twox-hash 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "04f8ab788026715fa63b31960869617cba39117e520eb415b0139543e325ab59"
+"checksum typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33"
 "checksum unicode-width 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
 "checksum unicode-xid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564"
-"checksum vcpkg 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "6454029bf181f092ad1b853286f23e2c507d8e8194d01d92da4a55c274a5508c"
+"checksum vcpkg 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb"
 "checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+"checksum version_check 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed"
 "checksum wasi 0.10.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
 "checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
 "checksum winapi 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
--- a/rust/README.rst	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/README.rst	Tue Jan 19 21:48:43 2021 +0530
@@ -40,7 +40,7 @@
 You might want to check the `features` section in ``hg-cpython/Cargo.toml``.
 It may contain features that might be interesting to try out.
 
-To use features from the Makefile, use the `HG_RUST_FEATURES` environment 
+To use features from the Makefile, use the `HG_RUST_FEATURES` environment
 variable: for instance `HG_RUST_FEATURES="some-feature other-feature"`
 
 Profiling
@@ -51,6 +51,14 @@
 indicate why the rust code cannot be used (say, using lookarounds in
 hgignore).
 
+Creating a ``.cargo/config`` file with the following content enables
+debug information in optimized builds. This makes profiles more informative,
+with source file names and line numbers for Rust stack frames and
+(in some cases) stack frames for Rust functions that have been inlined.
+
+  [profile.release]
+  debug = true
+
 ``py-spy`` (https://github.com/benfred/py-spy) can be used to
 construct a single profile with rust functions and python functions
 (as opposed to ``hg --profile``, which attributes time spent in rust
@@ -58,6 +66,11 @@
 as opposed to tools for native code like ``perf``, which attribute
 time to the python interpreter instead of python functions).
 
+Example usage:
+
+  $ make PURE=--rust local # Don't forget to recompile after a code change
+  $ py-spy record --native --output /tmp/profile.svg -- ./hg ...
+
 Developing Rust
 ===============
 
--- a/rust/hg-core/Cargo.toml	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/Cargo.toml	Tue Jan 19 21:48:43 2021 +0530
@@ -11,6 +11,7 @@
 [dependencies]
 byteorder = "1.3.4"
 hex = "0.4.2"
+im-rc = "15.0.*"
 lazy_static = "1.4.0"
 memchr = "2.3.3"
 rand = "0.7.3"
@@ -20,12 +21,13 @@
 regex = "1.3.9"
 twox-hash = "1.5.0"
 same-file = "1.0.6"
-crossbeam = "0.7.3"
+crossbeam-channel = "0.4"
 micro-timer = "0.3.0"
 log = "0.4.8"
 memmap = "0.7.0"
 zstd = "0.5.3"
 rust-crypto = "0.2.36"
+format-bytes = "0.1.2"
 
 # We don't use the `miniz-oxide` backend to not change rhg benchmarks and until
 # we have a clearer view of which backend is the fastest.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,14 @@
+// config.rs
+//
+// Copyright 2020
+//      Valentin Gatien-Baron,
+//      Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+//! Mercurial config parsing and interfaces.
+
+mod config;
+mod layer;
+pub use config::Config;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config/config.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,197 @@
+// config.rs
+//
+// Copyright 2020
+//      Valentin Gatien-Baron,
+//      Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use super::layer;
+use crate::config::layer::{ConfigError, ConfigLayer, ConfigValue};
+use std::path::PathBuf;
+
+use crate::operations::find_root;
+use crate::utils::files::read_whole_file;
+
+/// Holds the config values for the current repository
+/// TODO update this docstring once we support more sources
+pub struct Config {
+    layers: Vec<layer::ConfigLayer>,
+}
+
+impl std::fmt::Debug for Config {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        for (index, layer) in self.layers.iter().rev().enumerate() {
+            write!(
+                f,
+                "==== Layer {} (trusted: {}) ====\n{:?}",
+                index, layer.trusted, layer
+            )?;
+        }
+        Ok(())
+    }
+}
+
+pub enum ConfigSource {
+    /// Absolute path to a config file
+    AbsPath(PathBuf),
+    /// Already parsed (from the CLI, env, Python resources, etc.)
+    Parsed(layer::ConfigLayer),
+}
+
+pub fn parse_bool(v: &[u8]) -> Option<bool> {
+    match v.to_ascii_lowercase().as_slice() {
+        b"1" | b"yes" | b"true" | b"on" | b"always" => Some(true),
+        b"0" | b"no" | b"false" | b"off" | b"never" => Some(false),
+        _ => None,
+    }
+}
+
+impl Config {
+    /// Loads in order, which means that the precedence is the same
+    /// as the order of `sources`.
+    pub fn load_from_explicit_sources(
+        sources: Vec<ConfigSource>,
+    ) -> Result<Self, ConfigError> {
+        let mut layers = vec![];
+
+        for source in sources.into_iter() {
+            match source {
+                ConfigSource::Parsed(c) => layers.push(c),
+                ConfigSource::AbsPath(c) => {
+                    // TODO check if it should be trusted
+                    // mercurial/ui.py:427
+                    let data = match read_whole_file(&c) {
+                        Err(_) => continue, // same as the python code
+                        Ok(data) => data,
+                    };
+                    layers.extend(ConfigLayer::parse(&c, &data)?)
+                }
+            }
+        }
+
+        Ok(Config { layers })
+    }
+
+    /// Loads the local config. In a future version, this will also load the
+    /// `$HOME/.hgrc` and more to mirror the Python implementation.
+    pub fn load() -> Result<Self, ConfigError> {
+        let root = find_root().unwrap();
+        Ok(Self::load_from_explicit_sources(vec![
+            ConfigSource::AbsPath(root.join(".hg/hgrc")),
+        ])?)
+    }
+
+    /// Returns an `Err` if the first value found is not a valid boolean.
+    /// Otherwise, returns an `Ok(option)`, where `option` is the boolean if
+    /// found, or `None`.
+    pub fn get_option(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<Option<bool>, ConfigError> {
+        match self.get_inner(&section, &item) {
+            Some((layer, v)) => match parse_bool(&v.bytes) {
+                Some(b) => Ok(Some(b)),
+                None => Err(ConfigError::Parse {
+                    origin: layer.origin.to_owned(),
+                    line: v.line,
+                    bytes: v.bytes.to_owned(),
+                }),
+            },
+            None => Ok(None),
+        }
+    }
+
+    /// Returns the corresponding boolean in the config. Returns `Ok(false)`
+    /// if the value is not found, and an `Err` if it is not a valid boolean.
+    pub fn get_bool(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Result<bool, ConfigError> {
+        Ok(self.get_option(section, item)?.unwrap_or(false))
+    }
+
+    /// Returns the raw value bytes of the first one found, or `None`.
+    pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&[u8]> {
+        self.get_inner(section, item)
+            .map(|(_, value)| value.bytes.as_ref())
+    }
+
+    /// Returns the layer and the value of the first one found, or `None`.
+    fn get_inner(
+        &self,
+        section: &[u8],
+        item: &[u8],
+    ) -> Option<(&ConfigLayer, &ConfigValue)> {
+        for layer in self.layers.iter().rev() {
+            if !layer.trusted {
+                continue;
+            }
+            if let Some(v) = layer.get(&section, &item) {
+                return Some((&layer, v));
+            }
+        }
+        None
+    }
+
+    /// Get the raw value bytes from all layers (even untrusted ones) in
+    /// order of precedence.
+    #[cfg(test)]
+    fn get_all(&self, section: &[u8], item: &[u8]) -> Vec<&[u8]> {
+        let mut res = vec![];
+        for layer in self.layers.iter().rev() {
+            if let Some(v) = layer.get(&section, &item) {
+                res.push(v.bytes.as_ref());
+            }
+        }
+        res
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use pretty_assertions::assert_eq;
+    use std::fs::File;
+    use std::io::Write;
+
+    #[test]
+    fn test_include_layer_ordering() {
+        let tmpdir = tempfile::tempdir().unwrap();
+        let tmpdir_path = tmpdir.path();
+        let mut included_file =
+            File::create(&tmpdir_path.join("included.rc")).unwrap();
+
+        included_file.write_all(b"[section]\nitem=value1").unwrap();
+        let base_config_path = tmpdir_path.join("base.rc");
+        let mut config_file = File::create(&base_config_path).unwrap();
+        let data =
+            b"[section]\nitem=value0\n%include included.rc\nitem=value2";
+        config_file.write_all(data).unwrap();
+
+        let sources = vec![ConfigSource::AbsPath(base_config_path)];
+        let config = Config::load_from_explicit_sources(sources)
+            .expect("expected valid config");
+
+        dbg!(&config);
+
+        let (_, value) = config.get_inner(b"section", b"item").unwrap();
+        assert_eq!(
+            value,
+            &ConfigValue {
+                bytes: b"value2".to_vec(),
+                line: Some(4)
+            }
+        );
+
+        let value = config.get(b"section", b"item").unwrap();
+        assert_eq!(value, b"value2",);
+        assert_eq!(
+            config.get_all(b"section", b"item"),
+            [b"value2", b"value1", b"value0"]
+        );
+    }
+}
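
The ``Config`` API added above is byte-oriented end to end: sections, items
and values are all ``&[u8]``/``Vec<u8>``, and only ``parse_bool``,
``get_option`` and ``get_bool`` interpret the bytes. Below is a minimal,
hedged sketch of how the lookup methods fit together, written as if it were an
extra test in this module (the ``ui.paginate`` item is purely illustrative,
nothing in this file defines it):

  #[test]
  fn test_parsed_layer_lookup() {
      // Build an in-memory layer instead of reading an hgrc from disk.
      let mut layer = ConfigLayer::new(layer::ConfigOrigin::Environment(
          b"test".to_vec(),
      ));
      layer.add(b"ui".to_vec(), b"paginate".to_vec(), b"no".to_vec(), None);

      let config = Config::load_from_explicit_sources(vec![
          ConfigSource::Parsed(layer),
      ])
      .expect("expected valid config");

      // Raw bytes of the first value found.
      assert_eq!(config.get(b"ui", b"paginate"), Some(&b"no"[..]));
      // `get_bool` parses the value and defaults to `false` for missing items.
      assert_eq!(config.get_bool(b"ui", b"paginate").unwrap(), false);
      assert_eq!(config.get_bool(b"ui", b"missing").unwrap(), false);
  }
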
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/config/layer.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,268 @@
+// layer.rs
+//
+// Copyright 2020
+//      Valentin Gatien-Baron,
+//      Raphaël Gomès <rgomes@octobus.net>
+//
+// This software may be used and distributed according to the terms of the
+// GNU General Public License version 2 or any later version.
+
+use crate::utils::files::{
+    get_bytes_from_path, get_path_from_bytes, read_whole_file,
+};
+use format_bytes::format_bytes;
+use lazy_static::lazy_static;
+use regex::bytes::Regex;
+use std::collections::HashMap;
+use std::io;
+use std::path::{Path, PathBuf};
+
+lazy_static! {
+    /// A section header, e.g. `[ui]`
+    static ref SECTION_RE: Regex = make_regex(r"^\[([^\[]+)\]");
+    /// An `item = value` assignment
+    static ref ITEM_RE: Regex = make_regex(r"^([^=\s][^=]*?)\s*=\s*((.*\S)?)");
+    /// Continuation whitespace
+    static ref CONT_RE: Regex = make_regex(r"^\s+(\S|\S.*\S)\s*$");
+    /// An empty or comment-only line
+    static ref EMPTY_RE: Regex = make_regex(r"^(;|#|\s*$)");
+    /// A comment line, introduced by `;` or `#`
+    static ref COMMENT_RE: Regex = make_regex(r"^(;|#)");
+    /// A directive that allows for removing previous entries
+    static ref UNSET_RE: Regex = make_regex(r"^%unset\s+(\S+)");
+    /// A directive that allows for including other config files
+    static ref INCLUDE_RE: Regex = make_regex(r"^%include\s+(\S|\S.*\S)\s*$");
+}
+
+/// All config values separated by layers of precedence.
+/// Each config source may be split in multiple layers if `%include` directives
+/// are used.
+/// TODO detail the general precedence
+#[derive(Clone)]
+pub struct ConfigLayer {
+    /// Mapping of the sections to their items
+    sections: HashMap<Vec<u8>, ConfigItem>,
+    /// All sections (and their items/values) in a layer share the same origin
+    pub origin: ConfigOrigin,
+    /// Whether this layer comes from a trusted user or group
+    pub trusted: bool,
+}
+
+impl ConfigLayer {
+    pub fn new(origin: ConfigOrigin) -> Self {
+        ConfigLayer {
+            sections: HashMap::new(),
+            trusted: true, // TODO check
+            origin,
+        }
+    }
+
+    /// Add an entry to the config, overwriting the old one if already present.
+    pub fn add(
+        &mut self,
+        section: Vec<u8>,
+        item: Vec<u8>,
+        value: Vec<u8>,
+        line: Option<usize>,
+    ) {
+        self.sections
+            .entry(section)
+            .or_insert_with(|| HashMap::new())
+            .insert(item, ConfigValue { bytes: value, line });
+    }
+
+    /// Returns the config value in `<section>.<item>` if it exists
+    pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&ConfigValue> {
+        Some(self.sections.get(section)?.get(item)?)
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.sections.is_empty()
+    }
+
+    /// Returns a `Vec` of layers in order of precedence (so, in read order),
+    /// recursively parsing the `%include` directives if any.
+    pub fn parse(src: &Path, data: &[u8]) -> Result<Vec<Self>, ConfigError> {
+        let mut layers = vec![];
+
+        // Discard byte order mark if any
+        let data = if data.starts_with(b"\xef\xbb\xbf") {
+            &data[3..]
+        } else {
+            data
+        };
+
+        // TODO check if it's trusted
+        let mut current_layer = Self::new(ConfigOrigin::File(src.to_owned()));
+
+        let mut lines_iter =
+            data.split(|b| *b == b'\n').enumerate().peekable();
+        let mut section = b"".to_vec();
+
+        while let Some((index, bytes)) = lines_iter.next() {
+            if let Some(m) = INCLUDE_RE.captures(&bytes) {
+                let filename_bytes = &m[1];
+                let filename_to_include = get_path_from_bytes(&filename_bytes);
+                match read_include(&src, &filename_to_include) {
+                    (include_src, Ok(data)) => {
+                        layers.push(current_layer);
+                        layers.extend(Self::parse(&include_src, &data)?);
+                        current_layer =
+                            Self::new(ConfigOrigin::File(src.to_owned()));
+                    }
+                    (_, Err(e)) => {
+                        return Err(ConfigError::IncludeError {
+                            path: filename_to_include.to_owned(),
+                            io_error: e,
+                        })
+                    }
+                }
+            } else if let Some(_) = EMPTY_RE.captures(&bytes) {
+            } else if let Some(m) = SECTION_RE.captures(&bytes) {
+                section = m[1].to_vec();
+            } else if let Some(m) = ITEM_RE.captures(&bytes) {
+                let item = m[1].to_vec();
+                let mut value = m[2].to_vec();
+                loop {
+                    match lines_iter.peek() {
+                        None => break,
+                        Some((_, v)) => {
+                            if let Some(_) = COMMENT_RE.captures(&v) {
+                            } else if let Some(c) = CONT_RE.captures(&v) {
+                                // Append the continuation line's content (not
+                                // the item name) to the value being built.
+                                value.extend(b"\n");
+                                value.extend(&c[1]);
+                            } else {
+                                break;
+                            }
+                        }
+                    };
+                    lines_iter.next();
+                }
+                current_layer.add(
+                    section.clone(),
+                    item,
+                    value,
+                    Some(index + 1),
+                );
+            } else if let Some(m) = UNSET_RE.captures(&bytes) {
+                if let Some(map) = current_layer.sections.get_mut(&section) {
+                    map.remove(&m[1]);
+                }
+            } else {
+                return Err(ConfigError::Parse {
+                    origin: ConfigOrigin::File(src.to_owned()),
+                    line: Some(index + 1),
+                    bytes: bytes.to_owned(),
+                });
+            }
+        }
+        if !current_layer.is_empty() {
+            layers.push(current_layer);
+        }
+        Ok(layers)
+    }
+}
+
+impl std::fmt::Debug for ConfigLayer {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        let mut sections: Vec<_> = self.sections.iter().collect();
+        sections.sort_by(|e0, e1| e0.0.cmp(e1.0));
+
+        for (section, items) in sections.into_iter() {
+            let mut items: Vec<_> = items.into_iter().collect();
+            items.sort_by(|e0, e1| e0.0.cmp(e1.0));
+
+            for (item, config_entry) in items {
+                writeln!(
+                    f,
+                    "{}",
+                    String::from_utf8_lossy(&format_bytes!(
+                        b"{}.{}={} # {}",
+                        section,
+                        item,
+                        &config_entry.bytes,
+                        &self.origin.to_bytes(),
+                    ))
+                )?
+            }
+        }
+        Ok(())
+    }
+}
+
+/// Mapping of section item to value.
+/// In the following:
+/// ```text
+/// [ui]
+/// paginate=no
+/// ```
+/// "paginate" is the section item and "no" the value.
+pub type ConfigItem = HashMap<Vec<u8>, ConfigValue>;
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct ConfigValue {
+    /// The raw bytes of the value (be it from the CLI, env or from a file)
+    pub bytes: Vec<u8>,
+    /// Only present if the value comes from a file, 1-indexed.
+    pub line: Option<usize>,
+}
+
+#[derive(Clone, Debug)]
+pub enum ConfigOrigin {
+    /// The value comes from a configuration file
+    File(PathBuf),
+    /// The value comes from the environment like `$PAGER` or `$EDITOR`
+    Environment(Vec<u8>),
+    /* TODO cli
+     * TODO defaults (configitems.py)
+     * TODO extensions
+     * TODO Python resources?
+     * Others? */
+}
+
+impl ConfigOrigin {
+    /// TODO use some kind of dedicated trait?
+    pub fn to_bytes(&self) -> Vec<u8> {
+        match self {
+            ConfigOrigin::File(p) => get_bytes_from_path(p),
+            ConfigOrigin::Environment(e) => e.to_owned(),
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum ConfigError {
+    Parse {
+        origin: ConfigOrigin,
+        line: Option<usize>,
+        bytes: Vec<u8>,
+    },
+    /// Failed to include a sub config file
+    IncludeError {
+        path: PathBuf,
+        io_error: std::io::Error,
+    },
+    /// Any IO error that isn't expected
+    IO(std::io::Error),
+}
+
+impl From<std::io::Error> for ConfigError {
+    fn from(e: std::io::Error) -> Self {
+        Self::IO(e)
+    }
+}
+
+fn make_regex(pattern: &'static str) -> Regex {
+    Regex::new(pattern).expect("expected a valid regex")
+}
+
+/// Includes are relative to the file they're defined in, unless they're
+/// absolute.
+fn read_include(
+    old_src: &Path,
+    new_src: &Path,
+) -> (PathBuf, io::Result<Vec<u8>>) {
+    if new_src.is_absolute() {
+        (new_src.to_path_buf(), read_whole_file(&new_src))
+    } else {
+        let dir = old_src.parent().unwrap();
+        let new_src = dir.join(&new_src);
+        (new_src.to_owned(), read_whole_file(&new_src))
+    }
+}
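
To make the layering concrete, here is a hedged walk-through of
``ConfigLayer::parse`` on a made-up hgrc (the file names are illustrative
only):

  [section]
  item = value0
  %include included.rc
  item = value2
  %unset item

Parsing this yields, in read order: a first layer containing
``section.item = value0``, then the layer(s) parsed from ``included.rc``, then
a final layer for the remainder of the file. The ``%unset`` directive only
removes ``item`` from the layer currently being built, so it cancels the
``item = value2`` entry but leaves the earlier layers untouched; a lookup
through ``Config`` then falls back to whatever ``included.rc`` or the first
layer defines, since layers are consulted in reverse order of reading.
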
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/copy_tracing.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,797 @@
+use crate::utils::hg_path::HgPath;
+use crate::utils::hg_path::HgPathBuf;
+use crate::Revision;
+use crate::NULL_REVISION;
+
+use im_rc::ordmap::DiffItem;
+use im_rc::ordmap::Entry;
+use im_rc::ordmap::OrdMap;
+
+use std::cmp::Ordering;
+use std::collections::HashMap;
+use std::convert::TryInto;
+
+pub type PathCopies = HashMap<HgPathBuf, HgPathBuf>;
+
+type PathToken = usize;
+
+#[derive(Clone, Debug, PartialEq, Copy)]
+struct TimeStampedPathCopy {
+    /// revision at which the copy information was added
+    rev: Revision,
+    /// the copy source (set to `None` in case of deletion of the associated
+    /// key)
+    path: Option<PathToken>,
+}
+
+/// Maps a copy destination to its copy source (+ a "timestamp" for the
+/// operation)
+type TimeStampedPathCopies = OrdMap<PathToken, TimeStampedPathCopy>;
+
+/// Holds parent 1, parent 2, and the relevant file actions.
+pub type RevInfo<'a> = (Revision, Revision, ChangedFiles<'a>);
+
+/// Represents the files affected by a changeset
+///
+/// This holds a subset of mercurial.metadata.ChangingFiles as we do not need
+/// all the data categories tracked by it.
+pub struct ChangedFiles<'a> {
+    nb_items: u32,
+    index: &'a [u8],
+    data: &'a [u8],
+}
+
+/// Represents active changes that affect copy tracing.
+enum Action<'a> {
+    /// The parent → child edge removes a file
+    ///
+    /// (actually, this could be the edge from the other parent, but it does
+    /// not matter)
+    Removed(&'a HgPath),
+    /// The parent → child edge introduces copy information between (dest,
+    /// source)
+    Copied(&'a HgPath, &'a HgPath),
+}
+
+/// This expresses the possible "special" cases we can get in a merge
+///
+/// See mercurial/metadata.py for details on these values.
+#[derive(PartialEq)]
+enum MergeCase {
+    /// Merged: the file had history on both sides that needed to be merged
+    Merged,
+    /// Salvaged: file was candidate for deletion, but survived the merge
+    Salvaged,
+    /// Normal: Not one of the two cases above
+    Normal,
+}
+
+type FileChange<'a> = (u8, &'a HgPath, &'a HgPath);
+
+const EMPTY: &[u8] = b"";
+const COPY_MASK: u8 = 3;
+const P1_COPY: u8 = 2;
+const P2_COPY: u8 = 3;
+const ACTION_MASK: u8 = 28;
+const REMOVED: u8 = 12;
+const MERGED: u8 = 8;
+const SALVAGED: u8 = 16;
+
+impl<'a> ChangedFiles<'a> {
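+    // Layout of the buffer handed to `new`:
+    //   bytes 0..4                 big-endian number of entries (`nb_items`)
+    //   bytes 4..4 + nb_items * 9  the fixed-size index
+    //   remaining bytes            concatenation of all filenames
+    // Each 9-byte index entry is one flag byte, a 4-byte big-endian end
+    // offset of the filename inside the data block, and a 4-byte big-endian
+    // index of the entry holding the copy source.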
+    const INDEX_START: usize = 4;
+    const ENTRY_SIZE: u32 = 9;
+    const FILENAME_START: u32 = 1;
+    const COPY_SOURCE_START: u32 = 5;
+
+    pub fn new(data: &'a [u8]) -> Self {
+        assert!(
+            data.len() >= 4,
+            "data size ({}) is too small to contain the header (4)",
+            data.len()
+        );
+        let nb_items_raw: [u8; 4] = (&data[0..=3])
+            .try_into()
+            .expect("failed to turn 4 bytes into 4 bytes");
+        let nb_items = u32::from_be_bytes(nb_items_raw);
+
+        let index_size = (nb_items * Self::ENTRY_SIZE) as usize;
+        let index_end = Self::INDEX_START + index_size;
+
+        assert!(
+            data.len() >= index_end,
+            "data size ({}) is too small to fit the index_data ({})",
+            data.len(),
+            index_end
+        );
+
+        let ret = ChangedFiles {
+            nb_items,
+            index: &data[Self::INDEX_START..index_end],
+            data: &data[index_end..],
+        };
+        let max_data = ret.filename_end(nb_items - 1) as usize;
+        assert!(
+            ret.data.len() >= max_data,
+            "data size ({}) is too small to fit all data ({})",
+            data.len(),
+            index_end + max_data
+        );
+        ret
+    }
+
+    pub fn new_empty() -> Self {
+        ChangedFiles {
+            nb_items: 0,
+            index: EMPTY,
+            data: EMPTY,
+        }
+    }
+
+    /// internal function to return an individual entry at a given index
+    fn entry(&'a self, idx: u32) -> FileChange<'a> {
+        if idx >= self.nb_items {
+            panic!(
+                "index for entry is higher than the number of files {} >= {}",
+                idx, self.nb_items
+            )
+        }
+        let flags = self.flags(idx);
+        let filename = self.filename(idx);
+        let copy_idx = self.copy_idx(idx);
+        let copy_source = self.filename(copy_idx);
+        (flags, filename, copy_source)
+    }
+
+    /// internal function to return the filename of the entry at a given index
+    fn filename(&self, idx: u32) -> &HgPath {
+        let filename_start;
+        if idx == 0 {
+            filename_start = 0;
+        } else {
+            filename_start = self.filename_end(idx - 1)
+        }
+        let filename_end = self.filename_end(idx);
+        let filename_start = filename_start as usize;
+        let filename_end = filename_end as usize;
+        HgPath::new(&self.data[filename_start..filename_end])
+    }
+
+    /// internal function to return the flag field of the entry at a given
+    /// index
+    fn flags(&self, idx: u32) -> u8 {
+        let idx = idx as usize;
+        self.index[idx * (Self::ENTRY_SIZE as usize)]
+    }
+
+    /// internal function to return the end of a filename part at a given index
+    fn filename_end(&self, idx: u32) -> u32 {
+        let start = (idx * Self::ENTRY_SIZE) + Self::FILENAME_START;
+        let end = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
+        let start = start as usize;
+        let end = end as usize;
+        let raw = (&self.index[start..end])
+            .try_into()
+            .expect("failed to turn 4 bytes into 4 bytes");
+        u32::from_be_bytes(raw)
+    }
+
+    /// internal function to return index of the copy source of the entry at a
+    /// given index
+    fn copy_idx(&self, idx: u32) -> u32 {
+        let start = (idx * Self::ENTRY_SIZE) + Self::COPY_SOURCE_START;
+        let end = (idx + 1) * Self::ENTRY_SIZE;
+        let start = start as usize;
+        let end = end as usize;
+        let raw = (&self.index[start..end])
+            .try_into()
+            .expect("failed to turn 4 bytes into 4 bytes");
+        u32::from_be_bytes(raw)
+    }
+
+    /// Return an iterator over all the `Action` in this instance.
+    fn iter_actions(&self, parent: Parent) -> ActionsIterator {
+        ActionsIterator {
+            changes: &self,
+            parent: parent,
+            current: 0,
+        }
+    }
+
+    /// return the MergeCase value associated with a filename
+    fn get_merge_case(&self, path: &HgPath) -> MergeCase {
+        if self.nb_items == 0 {
+            return MergeCase::Normal;
+        }
+        let mut low_part = 0;
+        let mut high_part = self.nb_items;
+
+        while low_part < high_part {
+            let cursor = (low_part + high_part - 1) / 2;
+            let (flags, filename, _source) = self.entry(cursor);
+            match path.cmp(filename) {
+                Ordering::Less => low_part = cursor + 1,
+                Ordering::Greater => high_part = cursor,
+                Ordering::Equal => {
+                    return match flags & ACTION_MASK {
+                        MERGED => MergeCase::Merged,
+                        SALVAGED => MergeCase::Salvaged,
+                        _ => MergeCase::Normal,
+                    };
+                }
+            }
+        }
+        MergeCase::Normal
+    }
+}
+
+/// A struct responsible for answering "is X an ancestor of Y" quickly
+///
+/// The structure delegates ancestry calls to a callback, and caches the
+/// results.
+#[derive(Debug)]
+struct AncestorOracle<'a, A: Fn(Revision, Revision) -> bool> {
+    inner: &'a A,
+    pairs: HashMap<(Revision, Revision), bool>,
+}
+
+impl<'a, A: Fn(Revision, Revision) -> bool> AncestorOracle<'a, A> {
+    fn new(func: &'a A) -> Self {
+        Self {
+            inner: func,
+            pairs: HashMap::default(),
+        }
+    }
+
+    fn record_overwrite(&mut self, anc: Revision, desc: Revision) {
+        self.pairs.insert((anc, desc), true);
+    }
+
+    /// returns `true` if `anc` is an ancestor of `desc`, `false` otherwise
+    fn is_overwrite(&mut self, anc: Revision, desc: Revision) -> bool {
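+        // A revision's ancestors always have smaller revision numbers, so
+        // `anc > desc` can be ruled out without consulting the callback.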
+        if anc > desc {
+            false
+        } else if anc == desc {
+            true
+        } else {
+            if let Some(b) = self.pairs.get(&(anc, desc)) {
+                *b
+            } else {
+                let b = (self.inner)(anc, desc);
+                self.pairs.insert((anc, desc), b);
+                b
+            }
+        }
+    }
+}
+
+struct ActionsIterator<'a> {
+    changes: &'a ChangedFiles<'a>,
+    parent: Parent,
+    current: u32,
+}
+
+impl<'a> Iterator for ActionsIterator<'a> {
+    type Item = Action<'a>;
+
+    fn next(&mut self) -> Option<Action<'a>> {
+        let copy_flag = match self.parent {
+            Parent::FirstParent => P1_COPY,
+            Parent::SecondParent => P2_COPY,
+        };
+        while self.current < self.changes.nb_items {
+            let (flags, file, source) = self.changes.entry(self.current);
+            self.current += 1;
+            if (flags & ACTION_MASK) == REMOVED {
+                return Some(Action::Removed(file));
+            }
+            let copy = flags & COPY_MASK;
+            if copy == copy_flag {
+                return Some(Action::Copied(file, source));
+            }
+        }
+        return None;
+    }
+}
+
+/// A small struct whose purpose is to ensure the lifetime of the bytes
+/// referenced by a ChangedFiles
+///
+/// It is passed to the RevInfoMaker callback, which can assign any necessary
+/// content to the `data` attribute. The copy tracing code is responsible for
+/// keeping the DataHolder alive at least as long as the ChangedFiles object.
+pub struct DataHolder<D> {
+    /// The RevInfoMaker callback should assign the data referenced by the
+    /// ChangedFiles struct it returns to this attribute. The DataHolder
+    /// lifetime will be at least as long as the ChangedFiles one.
+    pub data: Option<D>,
+}
+
+pub type RevInfoMaker<'a, D> =
+    Box<dyn for<'r> Fn(Revision, &'r mut DataHolder<D>) -> RevInfo<'r> + 'a>;
+
+/// enum used to carry information about the parent → child edge currently
+/// being processed
+#[derive(Copy, Clone, Debug)]
+enum Parent {
+    /// The `p1(x) → x` edge
+    FirstParent,
+    /// The `p2(x) → x` edge
+    SecondParent,
+}
+
+/// A small "tokenizer" responsible for turning full HgPaths into lighter
+/// PathTokens
+///
+/// Dealing with small objects like integers is much faster, so HgPath inputs
+/// are turned into integer "PathTokens" and converted back at the end.
+#[derive(Clone, Debug, Default)]
+struct TwoWayPathMap {
+    token: HashMap<HgPathBuf, PathToken>,
+    path: Vec<HgPathBuf>,
+}
+
+impl TwoWayPathMap {
+    fn tokenize(&mut self, path: &HgPath) -> PathToken {
+        match self.token.get(path) {
+            Some(a) => *a,
+            None => {
+                let a = self.token.len();
+                let buf = path.to_owned();
+                self.path.push(buf.clone());
+                self.token.insert(buf, a);
+                a
+            }
+        }
+    }
+
+    fn untokenize(&self, token: PathToken) -> &HgPathBuf {
+        assert!(token < self.path.len(), format!("Unknown token: {}", token));
+        &self.path[token]
+    }
+}
+
+/// Same as mercurial.copies._combine_changeset_copies, but in Rust.
+///
+/// Arguments are:
+///
+/// revs: all revisions to be considered
+/// children_count: a {parent → number of children} mapping
+/// target_rev: the final revision we are combining copies to
+/// rev_info(rev): callback to get revision information:
+///   * first parent
+///   * second parent
+///   * ChangedFiles
+/// is_ancestor(low_rev, high_rev): callback to check if a revision is an
+///                                 ancestor of another
+pub fn combine_changeset_copies<A: Fn(Revision, Revision) -> bool, D>(
+    revs: Vec<Revision>,
+    mut children_count: HashMap<Revision, usize>,
+    target_rev: Revision,
+    rev_info: RevInfoMaker<D>,
+    is_ancestor: &A,
+) -> PathCopies {
+    let mut all_copies = HashMap::new();
+    let mut oracle = AncestorOracle::new(is_ancestor);
+
+    let mut path_map = TwoWayPathMap::default();
+
+    for rev in revs {
+        let mut d: DataHolder<D> = DataHolder { data: None };
+        let (p1, p2, changes) = rev_info(rev, &mut d);
+
+        // We will chain the copies information accumulated for the parent
+        // with the individual copies information of the current revision,
+        // creating a new TimeStampedPath for each `rev` → `children` vertex.
+        let mut copies: Option<TimeStampedPathCopies> = None;
+        if p1 != NULL_REVISION {
+            // Retrieve data computed in a previous iteration
+            let parent_copies = get_and_clean_parent_copies(
+                &mut all_copies,
+                &mut children_count,
+                p1,
+            );
+            if let Some(parent_copies) = parent_copies {
+                // combine it with data for that revision
+                let vertex_copies = add_from_changes(
+                    &mut path_map,
+                    &mut oracle,
+                    &parent_copies,
+                    &changes,
+                    Parent::FirstParent,
+                    rev,
+                );
+                // keep that data around for potential later combination
+                copies = Some(vertex_copies);
+            }
+        }
+        if p2 != NULL_REVISION {
+            // Retrieve data computed in a previous iteration
+            let parent_copies = get_and_clean_parent_copies(
+                &mut all_copies,
+                &mut children_count,
+                p2,
+            );
+            if let Some(parent_copies) = parent_copies {
+                // combine it with data for that revision
+                let vertex_copies = add_from_changes(
+                    &mut path_map,
+                    &mut oracle,
+                    &parent_copies,
+                    &changes,
+                    Parent::SecondParent,
+                    rev,
+                );
+
+                copies = match copies {
+                    None => Some(vertex_copies),
+                    // A merge has two parents and needs to combine their
+                    // copy information.
+                    //
+                    // If we got data from both parents, we need to combine
+                    // them.
+                    Some(copies) => Some(merge_copies_dict(
+                        &path_map,
+                        rev,
+                        vertex_copies,
+                        copies,
+                        &changes,
+                        &mut oracle,
+                    )),
+                };
+            }
+        }
+        if let Some(copies) = copies {
+            all_copies.insert(rev, copies);
+        }
+    }
+
+    // Drop internal information (like the timestamp) and return the final
+    // mapping.
+    let tt_result = all_copies
+        .remove(&target_rev)
+        .expect("target revision was not processed");
+    let mut result = PathCopies::default();
+    for (dest, tt_source) in tt_result {
+        if let Some(path) = tt_source.path {
+            let path_dest = path_map.untokenize(dest).to_owned();
+            let path_path = path_map.untokenize(path).to_owned();
+            result.insert(path_dest, path_path);
+        }
+    }
+    result
+}
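One part of the setup that is easy to get wrong is the `children_count` argument. As a hedged sketch (made-up revisions and a hypothetical `parents` callback, with -1 standing in for the null revision as in Mercurial): count, inside the walked set, how many children each revision has, so the loop above knows when a parent's cached copies can be released.

    use std::collections::HashMap;

    // Count how many children each revision has among the walked revisions.
    fn children_count(
        revs: &[i32],
        parents: impl Fn(i32) -> (i32, i32),
    ) -> HashMap<i32, usize> {
        let mut count = HashMap::new();
        for &rev in revs {
            let (p1, p2) = parents(rev);
            for p in [p1, p2] {
                if p != -1 {
                    *count.entry(p).or_insert(0) += 1;
                }
            }
        }
        count
    }

    fn main() {
        // 0 has two children (1 and 2); 3 is a merge of 1 and 2.
        let parents = |rev: i32| match rev {
            1 | 2 => (0, -1),
            3 => (1, 2),
            _ => (-1, -1),
        };
        let counts = children_count(&[1, 2, 3], parents);
        assert_eq!(counts[&0], 2);
        assert_eq!(counts[&1], 1);
        assert_eq!(counts[&2], 1);
    }
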
+
+/// Fetch previously computed information
+///
+/// If no other children are expected to need this information, we drop it from
+/// the cache.
+///
+/// If the parent is not part of the set we are expected to walk, return None.
+fn get_and_clean_parent_copies(
+    all_copies: &mut HashMap<Revision, TimeStampedPathCopies>,
+    children_count: &mut HashMap<Revision, usize>,
+    parent_rev: Revision,
+) -> Option<TimeStampedPathCopies> {
+    let count = children_count.get_mut(&parent_rev)?;
+    *count -= 1;
+    if *count == 0 {
+        match all_copies.remove(&parent_rev) {
+            Some(c) => Some(c),
+            None => Some(TimeStampedPathCopies::default()),
+        }
+    } else {
+        match all_copies.get(&parent_rev) {
+            Some(c) => Some(c.clone()),
+            None => Some(TimeStampedPathCopies::default()),
+        }
+    }
+}
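The bookkeeping above is plain reference counting: each parent's cached result is cloned while other children still need it, and moved out (and evicted) on its last expected use. A hedged sketch of the same pattern over a toy cache (hypothetical names, simple types):

    use std::collections::HashMap;

    // Return the cached value for `parent`: clone it while other children
    // still need it, move it out of the cache on the last expected use.
    fn take_or_clone(
        cache: &mut HashMap<i32, Vec<u32>>,
        remaining_children: &mut HashMap<i32, usize>,
        parent: i32,
    ) -> Option<Vec<u32>> {
        let count = remaining_children.get_mut(&parent)?;
        *count -= 1;
        if *count == 0 {
            Some(cache.remove(&parent).unwrap_or_default())
        } else {
            Some(cache.get(&parent).cloned().unwrap_or_default())
        }
    }

    fn main() {
        let mut cache = HashMap::from([(1, vec![10, 20])]);
        let mut remaining = HashMap::from([(1, 2usize)]);

        assert_eq!(take_or_clone(&mut cache, &mut remaining, 1).unwrap(), [10, 20]);
        assert!(cache.contains_key(&1)); // a second child still needs it

        assert_eq!(take_or_clone(&mut cache, &mut remaining, 1).unwrap(), [10, 20]);
        assert!(!cache.contains_key(&1)); // released after the last child
    }
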
+
+/// Combine ChangedFiles with some existing PathCopies information and return
+/// the result
+fn add_from_changes<A: Fn(Revision, Revision) -> bool>(
+    path_map: &mut TwoWayPathMap,
+    oracle: &mut AncestorOracle<A>,
+    base_copies: &TimeStampedPathCopies,
+    changes: &ChangedFiles,
+    parent: Parent,
+    current_rev: Revision,
+) -> TimeStampedPathCopies {
+    let mut copies = base_copies.clone();
+    for action in changes.iter_actions(parent) {
+        match action {
+            Action::Copied(path_dest, path_source) => {
+                let dest = path_map.tokenize(path_dest);
+                let source = path_map.tokenize(path_source);
+                let entry = match base_copies.get(&source) {
+                    Some(v) => match &v.path {
+                        Some(path) => Some((*path).to_owned()),
+                        None => Some(source.to_owned()),
+                    },
+                    None => Some(source.to_owned()),
+                };
+                // Each new entry is introduced by the child revision; we
+                // record this information as we will need it to make the
+                // right decision when merging conflicting copy
+                // information. See merge_copies_dict for details.
+                match copies.entry(dest) {
+                    Entry::Vacant(slot) => {
+                        let ttpc = TimeStampedPathCopy {
+                            rev: current_rev,
+                            path: entry,
+                        };
+                        slot.insert(ttpc);
+                    }
+                    Entry::Occupied(mut slot) => {
+                        let ttpc = slot.get_mut();
+                        oracle.record_overwrite(ttpc.rev, current_rev);
+                        ttpc.rev = current_rev;
+                        ttpc.path = entry;
+                    }
+                }
+            }
+            Action::Removed(deleted_path) => {
+                // We must drop copy information for removed files.
+                //
+                // We need to explicitly record them as dropped to
+                // propagate this information when merging two
+                // TimeStampedPathCopies objects.
+                let deleted = path_map.tokenize(deleted_path);
+                copies.entry(deleted).and_modify(|old| {
+                    oracle.record_overwrite(old.rev, current_rev);
+                    old.rev = current_rev;
+                    old.path = None;
+                });
+            }
+        }
+    }
+    copies
+}
+
+/// Merge two copies mappings together, minor and major
+///
+/// In case of conflict, the value from "major" will be picked, except in some
+/// cases. See inline documentation for details.
+fn merge_copies_dict<A: Fn(Revision, Revision) -> bool>(
+    path_map: &TwoWayPathMap,
+    current_merge: Revision,
+    mut minor: TimeStampedPathCopies,
+    mut major: TimeStampedPathCopies,
+    changes: &ChangedFiles,
+    oracle: &mut AncestorOracle<A>,
+) -> TimeStampedPathCopies {
+    // This closure exists as a temporary help while multiple developers are
+    // actively working on this code. Feel free to re-inline it once this
+    // code is more settled.
+    let mut cmp_value =
+        |dest: &PathToken,
+         src_minor: &TimeStampedPathCopy,
+         src_major: &TimeStampedPathCopy| {
+            compare_value(
+                path_map,
+                current_merge,
+                changes,
+                oracle,
+                dest,
+                src_minor,
+                src_major,
+            )
+        };
+    if minor.is_empty() {
+        major
+    } else if major.is_empty() {
+        minor
+    } else if minor.len() * 2 < major.len() {
+        // Let's say we are merging two TimeStampedPathCopies instances A
+        // and B.
+        //
+        // If A contains N items, the merge result will never contain more
+        // than N values different from the ones in A.
+        //
+        // If B contains M items, with M > N, the merge result will always
+        // contain at least M - N values different from the ones in A.
+        //
+        // As a result, if N < (M - N), we know that simply iterating over A
+        // will yield fewer differences than iterating over the difference
+        // between A and B.
+        //
+        // This helps performance a lot in cases where a tiny
+        // TimeStampedPathCopies is merged with a much larger one.
+        for (dest, src_minor) in minor {
+            let src_major = major.get(&dest);
+            match src_major {
+                None => major.insert(dest, src_minor),
+                Some(src_major) => {
+                    match cmp_value(&dest, &src_minor, src_major) {
+                        MergePick::Any | MergePick::Major => None,
+                        MergePick::Minor => major.insert(dest, src_minor),
+                    }
+                }
+            };
+        }
+        major
+    } else if major.len() * 2 < minor.len() {
+        // This uses the same rationale as the previous block.
+        // (Check previous block documentation for details.)
+        for (dest, src_major) in major {
+            let src_minor = minor.get(&dest);
+            match src_minor {
+                None => minor.insert(dest, src_major),
+                Some(src_minor) => {
+                    match cmp_value(&dest, src_minor, &src_major) {
+                        MergePick::Any | MergePick::Minor => None,
+                        MergePick::Major => minor.insert(dest, src_major),
+                    }
+                }
+            };
+        }
+        minor
+    } else {
+        let mut override_minor = Vec::new();
+        let mut override_major = Vec::new();
+
+        let mut to_major = |k: &PathToken, v: &TimeStampedPathCopy| {
+            override_major.push((k.clone(), v.clone()))
+        };
+        let mut to_minor = |k: &PathToken, v: &TimeStampedPathCopy| {
+            override_minor.push((k.clone(), v.clone()))
+        };
+
+        // The diff function leverages detection of identical subparts when
+        // minor and major have some common ancestors. This makes it very
+        // fast in most cases.
+        //
+        // In cases where the two maps are vastly different in size, the
+        // current approach is still slowish because the iteration will walk
+        // over all the "exclusive" content of the larger one. This situation
+        // can be frequent when the subgraph of revisions we are processing
+        // has a lot of roots, each root adding its own fully new map to the
+        // mix (and likely a small map, if the path from the root to the
+        // "main path" is short).
+        //
+        // We could do better by detecting such situation and processing them
+        // differently.
+        for d in minor.diff(&major) {
+            match d {
+                DiffItem::Add(k, v) => to_minor(k, v),
+                DiffItem::Remove(k, v) => to_major(k, v),
+                DiffItem::Update { old, new } => {
+                    let (dest, src_major) = new;
+                    let (_, src_minor) = old;
+                    match cmp_value(dest, src_minor, src_major) {
+                        MergePick::Major => to_minor(dest, src_major),
+                        MergePick::Minor => to_major(dest, src_minor),
+                        // If the two entries are identical, there is no
+                        // need to do anything (but diff should not have
+                        // yielded them).
+                        MergePick::Any => unreachable!(),
+                    }
+                }
+            };
+        }
+
+        let updates;
+        let mut result;
+        if override_major.is_empty() {
+            result = major
+        } else if override_minor.is_empty() {
+            result = minor
+        } else {
+            if override_minor.len() < override_major.len() {
+                updates = override_minor;
+                result = minor;
+            } else {
+                updates = override_major;
+                result = major;
+            }
+            for (k, v) in updates {
+                result.insert(k, v);
+            }
+        }
+        result
+    }
+}
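The size heuristic above boils down to this: when one side is much smaller, iterate only the small map and fold it into the large one, so the work is proportional to the small side. A hedged, simplified sketch (toy types, and a fixed "large side wins" rule in place of compare_value):

    use std::collections::HashMap;

    // Fold the small map into the large one; the cost is bounded by `small`.
    fn merge_prefer_large(
        mut large: HashMap<u32, &'static str>,
        small: HashMap<u32, &'static str>,
    ) -> HashMap<u32, &'static str> {
        for (k, v) in small {
            // On conflict the "large" side wins here; the real code asks
            // compare_value() which side should prevail.
            large.entry(k).or_insert(v);
        }
        large
    }

    fn main() {
        let large = HashMap::from([(1, "a"), (2, "b"), (3, "c")]);
        let small = HashMap::from([(2, "B"), (4, "d")]);
        let merged = merge_prefer_large(large, small);
        assert_eq!(merged[&2], "b"); // conflict: kept from the large side
        assert_eq!(merged[&4], "d"); // exclusive to the small side: added
    }
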
+
+/// represent the side that should prevail when merging two
+/// TimeStampedPathCopies
+enum MergePick {
+    /// The "major" (p1) side prevails
+    Major,
+    /// The "minor" (p2) side prevails
+    Minor,
+    /// Any side could be used (because they are the same)
+    Any,
+}
+
+/// decide which side prevails in case of conflicting values
+#[allow(clippy::if_same_then_else)]
+fn compare_value<A: Fn(Revision, Revision) -> bool>(
+    path_map: &TwoWayPathMap,
+    current_merge: Revision,
+    changes: &ChangedFiles,
+    oracle: &mut AncestorOracle<A>,
+    dest: &PathToken,
+    src_minor: &TimeStampedPathCopy,
+    src_major: &TimeStampedPathCopy,
+) -> MergePick {
+    if src_major.rev == current_merge {
+        if src_minor.rev == current_merge {
+            if src_major.path.is_none() {
+                // We cannot get different copy information for both p1 and p2
+                // from the same revision, unless this was a deletion.
+                MergePick::Any
+            } else {
+                unreachable!();
+            }
+        } else {
+            // The last value comes from the current merge; this value -will-
+            // win eventually.
+            oracle.record_overwrite(src_minor.rev, src_major.rev);
+            MergePick::Major
+        }
+    } else if src_minor.rev == current_merge {
+        // The last value comes from the current merge; this value -will- win
+        // eventually.
+        oracle.record_overwrite(src_major.rev, src_minor.rev);
+        MergePick::Minor
+    } else if src_major.path == src_minor.path {
+        // we have the same value, but from another source;
+        if src_major.rev == src_minor.rev {
+            // If the two entries are identical, they are both valid
+            MergePick::Any
+        } else if oracle.is_overwrite(src_major.rev, src_minor.rev) {
+            MergePick::Minor
+        } else {
+            MergePick::Major
+        }
+    } else if src_major.rev == src_minor.rev {
+        // We cannot get copy information for both p1 and p2 in the
+        // same rev. So this is the same value.
+        unreachable!(
+            "conflict information from p1 and p2 in the same revision"
+        );
+    } else {
+        let dest_path = path_map.untokenize(*dest);
+        let action = changes.get_merge_case(dest_path);
+        if src_major.path.is_none() && action == MergeCase::Salvaged {
+            // If the file is "deleted" in the major side but was
+            // salvaged by the merge, we keep the minor side alive
+            MergePick::Minor
+        } else if src_minor.path.is_none() && action == MergeCase::Salvaged {
+            // If the file is "deleted" in the minor side but was
+            // salvaged by the merge, unconditionally preserve the
+            // major side.
+            MergePick::Major
+        } else if action == MergeCase::Merged {
+            // If the file was actively merged, copy information
+            // from each side might conflict.  The major side will
+            // win such conflict.
+            MergePick::Major
+        } else if oracle.is_overwrite(src_major.rev, src_minor.rev) {
+            // If the minor side is strictly newer than the major
+            // side, it should be kept.
+            MergePick::Minor
+        } else if src_major.path.is_some() {
+            // Without any special case, the "major" value wins
+            // over the "minor" one.
+            MergePick::Major
+        } else if oracle.is_overwrite(src_minor.rev, src_major.rev) {
+            // the "major" rev is a direct ancestors of "minor",
+            // any different value should
+            // overwrite
+            MergePick::Major
+        } else {
+            // The major version is None (so the file was deleted on
+            // that branch) and that branch is independent (neither
+            // minor nor major is an ancestor of the other one).
+            // We preserve the new information about the new file.
+            MergePick::Minor
+        }
+    }
+}
--- a/rust/hg-core/src/dirstate/dirstate_map.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/dirstate/dirstate_map.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -431,7 +431,7 @@
         let mut new_file_fold_map = FileFoldMap::default();
 
         for (filename, DirstateEntry { state, .. }) in self.state_map.iter() {
-            if *state == EntryState::Removed {
+            if *state != EntryState::Removed {
                 new_file_fold_map
                     .insert(normalize_case(&filename), filename.to_owned());
             }
@@ -447,7 +447,7 @@
         let mut new_file_fold_map = FileFoldMap::default();
 
         for (filename, DirstateEntry { state, .. }) in self.state_map.iter() {
-            if state == EntryState::Removed {
+            if state != EntryState::Removed {
                 new_file_fold_map
                     .insert(normalize_case(&filename), filename.to_owned());
             }
--- a/rust/hg-core/src/dirstate/dirstate_tree/node.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/dirstate/dirstate_tree/node.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -60,43 +60,46 @@
         // Are we're modifying the current file ? Is the the end of the path ?
         let is_current_file = tail.is_empty() && head.is_empty();
 
-        if let NodeKind::File(file) = &mut self.kind {
-            if is_current_file {
-                let new = Self {
-                    kind: NodeKind::File(File {
-                        entry: new_entry,
-                        ..file.clone()
-                    }),
-                };
-                return InsertResult {
-                    did_insert: false,
-                    old_entry: Some(std::mem::replace(self, new)),
-                };
-            } else {
-                match file.entry.state {
-                    // Only replace the current file with a directory if it's
-                    // marked as `Removed`
-                    EntryState::Removed => {
-                        self.kind = NodeKind::Directory(Directory {
-                            was_file: Some(Box::from(file.clone())),
-                            children: Default::default(),
-                        })
-                    }
-                    _ => {
-                        return Node::insert_in_file(
-                            file, new_entry, head, tail,
-                        )
-                    }
+        // Potentially replace the current file with a directory if it's
+        // marked as `Removed`
+        if !is_current_file {
+            if let NodeKind::File(file) = &mut self.kind {
+                if file.entry.state == EntryState::Removed {
+                    self.kind = NodeKind::Directory(Directory {
+                        was_file: Some(Box::from(file.clone())),
+                        children: Default::default(),
+                    })
                 }
             }
         }
-
         match &mut self.kind {
             NodeKind::Directory(directory) => {
                 Node::insert_in_directory(directory, new_entry, head, tail)
             }
-            NodeKind::File(_) => {
-                unreachable!("The file case has already been handled")
+            NodeKind::File(file) => {
+                if is_current_file {
+                    let new = Self {
+                        kind: NodeKind::File(File {
+                            entry: new_entry,
+                            ..file.clone()
+                        }),
+                    };
+                    InsertResult {
+                        did_insert: false,
+                        old_entry: Some(std::mem::replace(self, new)),
+                    }
+                } else {
+                    match file.entry.state {
+                        EntryState::Removed => {
+                            unreachable!("Removed file turning into a directory was dealt with earlier")
+                        }
+                        _ => {
+                            Node::insert_in_file(
+                                file, new_entry, head, tail,
+                            )
+                        }
+                    }
+                }
             }
         }
     }
--- a/rust/hg-core/src/dirstate/status.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/dirstate/status.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -109,6 +109,10 @@
 /// A path with its computed ``Dispatch`` information
 type DispatchedPath<'a> = (HgPathCow<'a>, Dispatch);
 
+/// The conversion from `HgPath` to a real fs path failed.
+/// `22` is the error code for "Invalid argument"
+const INVALID_PATH_DISPATCH: Dispatch = Dispatch::Bad(BadMatch::OsError(22));
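As a quick sanity check of that constant (assuming a Unix errno table, where 22 is EINVAL), the standard library maps raw OS error 22 to ErrorKind::InvalidInput:

    fn main() {
        // On Unix, raw OS error 22 is EINVAL ("Invalid argument").
        let err = std::io::Error::from_raw_os_error(22);
        assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
        println!("{}", err); // "Invalid argument (os error 22)"
    }
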
+
 /// Dates and times that are outside the 31-bit signed range are compared
 /// modulo 2^31. This should prevent hg from behaving badly with very large
 /// files or corrupt dates while still having a high probability of detecting
@@ -217,6 +221,12 @@
     }
 }
 
+fn dispatch_os_error(e: &std::io::Error) -> Dispatch {
+    Dispatch::Bad(BadMatch::OsError(
+        e.raw_os_error().expect("expected real OS error"),
+    ))
+}
+
 lazy_static! {
     static ref DEFAULT_WORK: HashSet<&'static HgPath> = {
         let mut h = HashSet::new();
@@ -366,19 +376,24 @@
     #[timed]
     pub fn walk_explicit(
         &self,
-        traversed_sender: crossbeam::Sender<HgPathBuf>,
+        traversed_sender: crossbeam_channel::Sender<HgPathBuf>,
     ) -> (Vec<DispatchedPath<'a>>, Vec<DispatchedPath<'a>>) {
         self.matcher
             .file_set()
             .unwrap_or(&DEFAULT_WORK)
             .par_iter()
-            .map(|&filename| -> Option<IoResult<_>> {
+            .flat_map(|&filename| -> Option<_> {
                 // TODO normalization
                 let normalized = filename;
 
                 let buf = match hg_path_to_path_buf(normalized) {
                     Ok(x) => x,
-                    Err(e) => return Some(Err(e.into())),
+                    Err(_) => {
+                        return Some((
+                            Cow::Borrowed(normalized),
+                            INVALID_PATH_DISPATCH,
+                        ))
+                    }
                 };
                 let target = self.root_dir.join(buf);
                 let st = target.symlink_metadata();
@@ -389,7 +404,7 @@
                         return if file_type.is_file() || file_type.is_symlink()
                         {
                             if let Some(entry) = in_dmap {
-                                return Some(Ok((
+                                return Some((
                                     Cow::Borrowed(normalized),
                                     dispatch_found(
                                         &normalized,
@@ -398,26 +413,26 @@
                                         &self.dmap.copy_map,
                                         self.options,
                                     ),
-                                )));
+                                ));
                             }
-                            Some(Ok((
+                            Some((
                                 Cow::Borrowed(normalized),
                                 Dispatch::Unknown,
-                            )))
+                            ))
                         } else if file_type.is_dir() {
                             if self.options.collect_traversed_dirs {
                                 traversed_sender
                                     .send(normalized.to_owned())
                                     .expect("receiver should outlive sender");
                             }
-                            Some(Ok((
+                            Some((
                                 Cow::Borrowed(normalized),
                                 Dispatch::Directory {
                                     was_file: in_dmap.is_some(),
                                 },
-                            )))
+                            ))
                         } else {
-                            Some(Ok((
+                            Some((
                                 Cow::Borrowed(normalized),
                                 Dispatch::Bad(BadMatch::BadType(
                                     // TODO do more than unknown
@@ -428,22 +443,20 @@
                                     // users.
                                     BadType::Unknown,
                                 )),
-                            )))
+                            ))
                         };
                     }
                     Err(_) => {
                         if let Some(entry) = in_dmap {
-                            return Some(Ok((
+                            return Some((
                                 Cow::Borrowed(normalized),
                                 dispatch_missing(entry.state),
-                            )));
+                            ));
                         }
                     }
                 };
                 None
             })
-            .flatten()
-            .filter_map(Result::ok)
             .partition(|(_, dispatch)| match dispatch {
                 Dispatch::Directory { .. } => true,
                 _ => false,
@@ -461,38 +474,30 @@
         path: impl AsRef<HgPath>,
         old_results: &FastHashMap<HgPathCow<'a>, Dispatch>,
         results: &mut Vec<DispatchedPath<'a>>,
-        traversed_sender: crossbeam::Sender<HgPathBuf>,
-    ) -> IoResult<()> {
+        traversed_sender: crossbeam_channel::Sender<HgPathBuf>,
+    ) {
         // The traversal is done in parallel, so use a channel to gather
-        // entries. `crossbeam::Sender` is `Sync`, while `mpsc::Sender`
+        // entries. `crossbeam_channel::Sender` is `Sync`, while `mpsc::Sender`
         // is not.
         let (files_transmitter, files_receiver) =
-            crossbeam::channel::unbounded();
+            crossbeam_channel::unbounded();
 
         self.traverse_dir(
             &files_transmitter,
             path,
             &old_results,
             traversed_sender,
-        )?;
+        );
 
         // Disconnect the channel so the receiver stops waiting
         drop(files_transmitter);
 
-        // TODO don't collect. Find a way of replicating the behavior of
-        // `itertools::process_results`, but for `rayon::ParallelIterator`
-        let new_results: IoResult<Vec<(Cow<HgPath>, Dispatch)>> =
-            files_receiver
-                .into_iter()
-                .map(|item| {
-                    let (f, d) = item?;
-                    Ok((Cow::Owned(f), d))
-                })
-                .collect();
+        let new_results = files_receiver
+            .into_iter()
+            .par_bridge()
+            .map(|(f, d)| (Cow::Owned(f), d));
 
-        results.par_extend(new_results?);
-
-        Ok(())
+        results.par_extend(new_results);
     }
 
     /// Dispatch a single entry (file, folder, symlink...) found during
@@ -501,11 +506,11 @@
     fn handle_traversed_entry<'b>(
         &'a self,
         scope: &rayon::Scope<'b>,
-        files_sender: &'b crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+        files_sender: &'b crossbeam_channel::Sender<(HgPathBuf, Dispatch)>,
         old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>,
         filename: HgPathBuf,
         dir_entry: DirEntry,
-        traversed_sender: crossbeam::Sender<HgPathBuf>,
+        traversed_sender: crossbeam_channel::Sender<HgPathBuf>,
     ) -> IoResult<()>
     where
         'a: 'b,
@@ -534,7 +539,7 @@
                 {
                     let metadata = dir_entry.metadata()?;
                     files_sender
-                        .send(Ok((
+                        .send((
                             filename.to_owned(),
                             dispatch_found(
                                 &filename,
@@ -543,7 +548,7 @@
                                 &self.dmap.copy_map,
                                 self.options,
                             ),
-                        )))
+                        ))
                         .unwrap();
                 }
             } else if (self.matcher.matches_everything()
@@ -556,17 +561,17 @@
                 {
                     if self.options.list_ignored {
                         files_sender
-                            .send(Ok((filename.to_owned(), Dispatch::Ignored)))
+                            .send((filename.to_owned(), Dispatch::Ignored))
                             .unwrap();
                     }
                 } else if self.options.list_unknown {
                     files_sender
-                        .send(Ok((filename.to_owned(), Dispatch::Unknown)))
+                        .send((filename.to_owned(), Dispatch::Unknown))
                         .unwrap();
                 }
             } else if self.is_ignored(&filename) && self.options.list_ignored {
                 files_sender
-                    .send(Ok((filename.to_owned(), Dispatch::Ignored)))
+                    .send((filename.to_owned(), Dispatch::Ignored))
                     .unwrap();
             }
         } else if let Some(entry) = entry_option {
@@ -575,10 +580,7 @@
                 || self.matcher.matches(&filename)
             {
                 files_sender
-                    .send(Ok((
-                        filename.to_owned(),
-                        dispatch_missing(entry.state),
-                    )))
+                    .send((filename.to_owned(), dispatch_missing(entry.state)))
                     .unwrap();
             }
         }
@@ -590,11 +592,11 @@
     fn handle_traversed_dir<'b>(
         &'a self,
         scope: &rayon::Scope<'b>,
-        files_sender: &'b crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+        files_sender: &'b crossbeam_channel::Sender<(HgPathBuf, Dispatch)>,
         old_results: &'a FastHashMap<Cow<HgPath>, Dispatch>,
         entry_option: Option<&'a DirstateEntry>,
         directory: HgPathBuf,
-        traversed_sender: crossbeam::Sender<HgPathBuf>,
+        traversed_sender: crossbeam_channel::Sender<HgPathBuf>,
     ) where
         'a: 'b,
     {
@@ -606,10 +608,10 @@
                     || self.matcher.matches(&directory)
                 {
                     files_sender
-                        .send(Ok((
+                        .send((
                             directory.to_owned(),
                             dispatch_missing(entry.state),
-                        )))
+                        ))
                         .unwrap();
                 }
             }
@@ -621,7 +623,6 @@
                     &old_results,
                     traversed_sender,
                 )
-                .unwrap_or_else(|e| files_sender.send(Err(e)).unwrap())
             }
         });
     }
@@ -630,11 +631,11 @@
     /// entries in a separate thread.
     fn traverse_dir(
         &self,
-        files_sender: &crossbeam::Sender<IoResult<(HgPathBuf, Dispatch)>>,
+        files_sender: &crossbeam_channel::Sender<(HgPathBuf, Dispatch)>,
         directory: impl AsRef<HgPath>,
         old_results: &FastHashMap<Cow<HgPath>, Dispatch>,
-        traversed_sender: crossbeam::Sender<HgPathBuf>,
-    ) -> IoResult<()> {
+        traversed_sender: crossbeam_channel::Sender<HgPathBuf>,
+    ) {
         let directory = directory.as_ref();
 
         if self.options.collect_traversed_dirs {
@@ -644,38 +645,33 @@
         }
 
         let visit_entries = match self.matcher.visit_children_set(directory) {
-            VisitChildrenSet::Empty => return Ok(()),
+            VisitChildrenSet::Empty => return,
             VisitChildrenSet::This | VisitChildrenSet::Recursive => None,
             VisitChildrenSet::Set(set) => Some(set),
         };
-        let buf = hg_path_to_path_buf(directory)?;
+        let buf = match hg_path_to_path_buf(directory) {
+            Ok(b) => b,
+            Err(_) => {
+                files_sender
+                    .send((directory.to_owned(), INVALID_PATH_DISPATCH))
+                    .expect("receiver should outlive sender");
+                return;
+            }
+        };
         let dir_path = self.root_dir.join(buf);
 
         let skip_dot_hg = !directory.as_bytes().is_empty();
         let entries = match list_directory(dir_path, skip_dot_hg) {
             Err(e) => {
-                return match e.kind() {
-                    ErrorKind::NotFound | ErrorKind::PermissionDenied => {
-                        files_sender
-                            .send(Ok((
-                                directory.to_owned(),
-                                Dispatch::Bad(BadMatch::OsError(
-                                    // Unwrapping here is OK because the error
-                                    // always is a
-                                    // real os error
-                                    e.raw_os_error().unwrap(),
-                                )),
-                            )))
-                            .expect("receiver should outlive sender");
-                        Ok(())
-                    }
-                    _ => Err(e),
-                };
+                files_sender
+                    .send((directory.to_owned(), dispatch_os_error(&e)))
+                    .expect("receiver should outlive sender");
+                return;
             }
             Ok(entries) => entries,
         };
 
-        rayon::scope(|scope| -> IoResult<()> {
+        rayon::scope(|scope| {
             for (filename, dir_entry) in entries {
                 if let Some(ref set) = visit_entries {
                     if !set.contains(filename.deref()) {
@@ -690,17 +686,26 @@
                 };
 
                 if !old_results.contains_key(filename.deref()) {
-                    self.handle_traversed_entry(
+                    match self.handle_traversed_entry(
                         scope,
                         files_sender,
                         old_results,
                         filename,
                         dir_entry,
                         traversed_sender.clone(),
-                    )?;
+                    ) {
+                        Err(e) => {
+                            files_sender
+                                .send((
+                                    directory.to_owned(),
+                                    dispatch_os_error(&e),
+                                ))
+                                .expect("receiver should outlive sender");
+                        }
+                        Ok(_) => {}
+                    }
                 }
             }
-            Ok(())
         })
     }
 
@@ -716,14 +721,23 @@
                 .fs_iter(self.root_dir.clone())
                 .par_bridge()
                 .filter(|(path, _)| self.matcher.matches(path))
-                .flat_map(move |(filename, shortcut)| {
+                .map(move |(filename, shortcut)| {
                     let entry = match shortcut {
                         StatusShortcut::Entry(e) => e,
                         StatusShortcut::Dispatch(d) => {
-                            return Ok((Cow::Owned(filename), d))
+                            return (Cow::Owned(filename), d)
                         }
                     };
-                    let filename_as_path = hg_path_to_path_buf(&filename)?;
+                    let filename_as_path = match hg_path_to_path_buf(&filename)
+                    {
+                        Ok(f) => f,
+                        Err(_) => {
+                            return (
+                                Cow::Owned(filename),
+                                INVALID_PATH_DISPATCH,
+                            )
+                        }
+                    };
                     let meta = self
                         .root_dir
                         .join(filename_as_path)
@@ -734,10 +748,10 @@
                             if !(m.file_type().is_file()
                                 || m.file_type().is_symlink()) =>
                         {
-                            Ok((
+                            (
                                 Cow::Owned(filename),
                                 dispatch_missing(entry.state),
-                            ))
+                            )
                         }
                         Ok(m) => {
                             let dispatch = dispatch_found(
@@ -747,7 +761,7 @@
                                 &self.dmap.copy_map,
                                 self.options,
                             );
-                            Ok((Cow::Owned(filename), dispatch))
+                            (Cow::Owned(filename), dispatch)
                         }
                         Err(e)
                             if e.kind() == ErrorKind::NotFound
@@ -758,12 +772,14 @@
                             // It happens if the dirstate contains `foo/bar`
                             // and foo is not a
                             // directory
-                            Ok((
+                            (
                                 Cow::Owned(filename),
                                 dispatch_missing(entry.state),
-                            ))
+                            )
                         }
-                        Err(e) => Err(e),
+                        Err(e) => {
+                            (Cow::Owned(filename), dispatch_os_error(&e))
+                        }
                     }
                 }),
         );
@@ -776,10 +792,18 @@
     #[cfg(not(feature = "dirstate-tree"))]
     #[timed]
     pub fn extend_from_dmap(&self, results: &mut Vec<DispatchedPath<'a>>) {
-        results.par_extend(self.dmap.par_iter().flat_map(
+        results.par_extend(self.dmap.par_iter().map(
             move |(filename, entry)| {
                 let filename: &HgPath = filename;
-                let filename_as_path = hg_path_to_path_buf(filename)?;
+                let filename_as_path = match hg_path_to_path_buf(filename) {
+                    Ok(f) => f,
+                    Err(_) => {
+                        return (
+                            Cow::Borrowed(filename),
+                            INVALID_PATH_DISPATCH,
+                        )
+                    }
+                };
                 let meta =
                     self.root_dir.join(filename_as_path).symlink_metadata();
                 match meta {
@@ -787,12 +811,12 @@
                         if !(m.file_type().is_file()
                             || m.file_type().is_symlink()) =>
                     {
-                        Ok((
+                        (
                             Cow::Borrowed(filename),
                             dispatch_missing(entry.state),
-                        ))
+                        )
                     }
-                    Ok(m) => Ok((
+                    Ok(m) => (
                         Cow::Borrowed(filename),
                         dispatch_found(
                             filename,
@@ -801,7 +825,7 @@
                             &self.dmap.copy_map,
                             self.options,
                         ),
-                    )),
+                    ),
                     Err(e)
                         if e.kind() == ErrorKind::NotFound
                             || e.raw_os_error() == Some(20) =>
@@ -811,12 +835,12 @@
                         // It happens if the dirstate contains `foo/bar`
                         // and foo is not a
                         // directory
-                        Ok((
+                        (
                             Cow::Borrowed(filename),
                             dispatch_missing(entry.state),
-                        ))
+                        )
                     }
-                    Err(e) => Err(e),
+                    Err(e) => (Cow::Borrowed(filename), dispatch_os_error(&e)),
                 }
             },
         ));
@@ -830,10 +854,7 @@
     /// `extend` in timings
     #[cfg(not(feature = "dirstate-tree"))]
     #[timed]
-    pub fn handle_unknowns(
-        &self,
-        results: &mut Vec<DispatchedPath<'a>>,
-    ) -> IoResult<()> {
+    pub fn handle_unknowns(&self, results: &mut Vec<DispatchedPath<'a>>) {
         let to_visit: Vec<(&HgPath, &DirstateEntry)> =
             if results.is_empty() && self.matcher.matches_everything() {
                 self.dmap.iter().map(|(f, e)| (f.deref(), e)).collect()
@@ -857,21 +878,23 @@
 
         let path_auditor = PathAuditor::new(&self.root_dir);
 
-        // TODO don't collect. Find a way of replicating the behavior of
-        // `itertools::process_results`, but for `rayon::ParallelIterator`
-        let new_results: IoResult<Vec<_>> = to_visit
-            .into_par_iter()
-            .filter_map(|(filename, entry)| -> Option<IoResult<_>> {
+        let new_results = to_visit.into_par_iter().filter_map(
+            |(filename, entry)| -> Option<_> {
                 // Report ignored items in the dmap as long as they are not
                 // under a symlink directory.
                 if path_auditor.check(filename) {
                     // TODO normalize for case-insensitive filesystems
                     let buf = match hg_path_to_path_buf(filename) {
                         Ok(x) => x,
-                        Err(e) => return Some(Err(e.into())),
+                        Err(_) => {
+                            return Some((
+                                Cow::Owned(filename.to_owned()),
+                                INVALID_PATH_DISPATCH,
+                            ));
+                        }
                     };
-                    Some(Ok((
-                        Cow::Borrowed(filename),
+                    Some((
+                        Cow::Owned(filename.to_owned()),
                         match self.root_dir.join(&buf).symlink_metadata() {
                             // File was just ignored, no links, and exists
                             Ok(meta) => {
@@ -887,21 +910,19 @@
                             // File doesn't exist
                             Err(_) => dispatch_missing(entry.state),
                         },
-                    )))
+                    ))
                 } else {
                     // It's either missing or under a symlink directory which
                     // we, in this case, report as missing.
-                    Some(Ok((
-                        Cow::Borrowed(filename),
+                    Some((
+                        Cow::Owned(filename.to_owned()),
                         dispatch_missing(entry.state),
-                    )))
+                    ))
                 }
-            })
-            .collect();
+            },
+        );
 
-        results.par_extend(new_results?);
-
-        Ok(())
+        results.par_extend(new_results);
     }
 }
 
--- a/rust/hg-core/src/lib.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/lib.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -8,6 +8,7 @@
 pub use ancestors::{AncestorsIterator, LazyAncestors, MissingAncestors};
 mod dirstate;
 pub mod discovery;
+pub mod requirements;
 pub mod testing; // unconditionally built, for use from integration tests
 pub use dirstate::{
     dirs_multiset::{DirsMultiset, DirsMultisetIter},
@@ -19,10 +20,13 @@
     CopyMap, CopyMapIter, DirstateEntry, DirstateParents, EntryState,
     StateMap, StateMapIter,
 };
+pub mod copy_tracing;
 mod filepatterns;
 pub mod matchers;
+pub mod repo;
 pub mod revlog;
 pub use revlog::*;
+pub mod config;
 pub mod operations;
 pub mod utils;
 
--- a/rust/hg-core/src/operations/cat.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/cat.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -8,13 +8,19 @@
 use std::convert::From;
 use std::path::PathBuf;
 
+use crate::repo::Repo;
 use crate::revlog::changelog::Changelog;
-use crate::revlog::manifest::{Manifest, ManifestEntry};
+use crate::revlog::manifest::Manifest;
 use crate::revlog::path_encode::path_encode;
 use crate::revlog::revlog::Revlog;
 use crate::revlog::revlog::RevlogError;
+use crate::revlog::Node;
+use crate::revlog::NodePrefix;
 use crate::revlog::Revision;
-use crate::utils::hg_path::HgPathBuf;
+use crate::utils::files::get_path_from_bytes;
+use crate::utils::hg_path::{HgPath, HgPathBuf};
+
+const METADATA_DELIMITER: [u8; 2] = [b'\x01', b'\n'];
 
 /// Kind of error encountered by `CatRev`
 #[derive(Debug)]
@@ -23,6 +29,8 @@
     IoError(std::io::Error),
     /// The revision has not been found.
     InvalidRevision,
+    /// Found more than one revision whose ID match the requested prefix
+    AmbiguousPrefix,
     /// A `revlog` file is corrupted.
     CorruptedRevlog,
     /// The `revlog` format version is not supported.
@@ -52,6 +60,7 @@
                 CatRevErrorKind::UnsuportedRevlogVersion(version)
             }
             RevlogError::InvalidRevision => CatRevErrorKind::InvalidRevision,
+            RevlogError::AmbiguousPrefix => CatRevErrorKind::AmbiguousPrefix,
             RevlogError::Corrupted => CatRevErrorKind::CorruptedRevlog,
             RevlogError::UnknowDataFormat(format) => {
                 CatRevErrorKind::UnknowRevlogDataFormat(format)
@@ -62,84 +71,65 @@
 }
 
 /// List files under Mercurial control at a given revision.
-pub struct CatRev<'a> {
-    root: &'a PathBuf,
-    /// The revision to cat the files from.
-    rev: &'a str,
-    /// The files to output.
-    files: &'a [HgPathBuf],
-    /// The changelog file
-    changelog: Changelog,
-    /// The manifest file
-    manifest: Manifest,
-    /// The manifest entry corresponding to the revision.
-    ///
-    /// Used to hold the owner of the returned references.
-    manifest_entry: Option<ManifestEntry>,
-}
+///
+/// * `repo`: The repository
+/// * `rev`: The revision to cat the files from.
+/// * `files`: The files to output.
+pub fn cat(
+    repo: &Repo,
+    rev: &str,
+    files: &[HgPathBuf],
+) -> Result<Vec<u8>, CatRevError> {
+    let changelog = Changelog::open(repo)?;
+    let manifest = Manifest::open(repo)?;
+
+    let changelog_entry = match rev.parse::<Revision>() {
+        Ok(rev) => changelog.get_rev(rev)?,
+        _ => {
+            let changelog_node = NodePrefix::from_hex(&rev)
+                .map_err(|_| CatRevErrorKind::InvalidRevision)?;
+            changelog.get_node(changelog_node.borrow())?
+        }
+    };
+    let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?)
+        .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
+
+    let manifest_entry = manifest.get_node((&manifest_node).into())?;
+    let mut bytes = vec![];
 
-impl<'a> CatRev<'a> {
-    pub fn new(
-        root: &'a PathBuf,
-        rev: &'a str,
-        files: &'a [HgPathBuf],
-    ) -> Result<Self, CatRevError> {
-        let changelog = Changelog::open(&root)?;
-        let manifest = Manifest::open(&root)?;
-        let manifest_entry = None;
+    for (manifest_file, node_bytes) in manifest_entry.files_with_nodes() {
+        for cat_file in files.iter() {
+            if cat_file.as_bytes() == manifest_file.as_bytes() {
+                let index_path = store_path(manifest_file, b".i");
+                let data_path = store_path(manifest_file, b".d");
 
-        Ok(Self {
-            root,
-            rev,
-            files,
-            changelog,
-            manifest,
-            manifest_entry,
-        })
+                let file_log =
+                    Revlog::open(repo, &index_path, Some(&data_path))?;
+                let file_node = Node::from_hex(node_bytes)
+                    .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
+                let file_rev = file_log.get_node_rev((&file_node).into())?;
+                let data = file_log.get_rev_data(file_rev)?;
+                if data.starts_with(&METADATA_DELIMITER) {
+                    let end_delimiter_position = data
+                        [METADATA_DELIMITER.len()..]
+                        .windows(METADATA_DELIMITER.len())
+                        .position(|bytes| bytes == METADATA_DELIMITER);
+                    if let Some(position) = end_delimiter_position {
+                        let offset = METADATA_DELIMITER.len() * 2;
+                        bytes.extend(data[position + offset..].iter());
+                    }
+                } else {
+                    bytes.extend(data);
+                }
+            }
+        }
     }
 
-    pub fn run(&mut self) -> Result<Vec<u8>, CatRevError> {
-        let changelog_entry = match self.rev.parse::<Revision>() {
-            Ok(rev) => self.changelog.get_rev(rev)?,
-            _ => {
-                let changelog_node = hex::decode(&self.rev)
-                    .map_err(|_| CatRevErrorKind::InvalidRevision)?;
-                self.changelog.get_node(&changelog_node)?
-            }
-        };
-        let manifest_node = hex::decode(&changelog_entry.manifest_node()?)
-            .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
-
-        self.manifest_entry = Some(self.manifest.get_node(&manifest_node)?);
-        if let Some(ref manifest_entry) = self.manifest_entry {
-            let mut bytes = vec![];
+    Ok(bytes)
+}
 
-            for (manifest_file, node_bytes) in
-                manifest_entry.files_with_nodes()
-            {
-                for cat_file in self.files.iter() {
-                    if cat_file.as_bytes() == manifest_file.as_bytes() {
-                        let encoded_bytes =
-                            path_encode(manifest_file.as_bytes());
-                        let revlog_index_string = format!(
-                            ".hg/store/data/{}.i",
-                            String::from_utf8_lossy(&encoded_bytes),
-                        );
-                        let revlog_index_path =
-                            self.root.join(&revlog_index_string);
-                        let file_log = Revlog::open(&revlog_index_path)?;
-                        let file_node = hex::decode(&node_bytes)
-                            .map_err(|_| CatRevErrorKind::CorruptedRevlog)?;
-                        let file_rev = file_log.get_node_rev(&file_node)?;
-                        let data = file_log.get_rev_data(file_rev)?;
-                        bytes.extend(data);
-                    }
-                }
-            }
-
-            Ok(bytes)
-        } else {
-            unreachable!("manifest_entry should have been stored");
-        }
-    }
+fn store_path(hg_path: &HgPath, suffix: &[u8]) -> PathBuf {
+    let encoded_bytes =
+        path_encode(&[b"data/", hg_path.as_bytes(), suffix].concat());
+    get_path_from_bytes(&encoded_bytes).into()
 }
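The delimiter handling in `cat` above relies on the filelog convention that revisions carrying copy metadata start with a `\x01\n` marker, the metadata block, and a second `\x01\n`. A hedged standalone sketch of that stripping step (same delimiter, hypothetical function name):

    // Drop a leading "\x01\n ... \x01\n" metadata block, if present.
    const DELIMITER: [u8; 2] = [b'\x01', b'\n'];

    fn strip_metadata(data: &[u8]) -> &[u8] {
        if !data.starts_with(&DELIMITER) {
            return data;
        }
        match data[DELIMITER.len()..]
            .windows(DELIMITER.len())
            .position(|bytes| bytes == DELIMITER)
        {
            // Skip the opening marker, the metadata and the closing marker.
            Some(position) => &data[position + 2 * DELIMITER.len()..],
            None => data, // unterminated metadata block: leave it alone
        }
    }

    fn main() {
        let raw = b"\x01\ncopy: a.txt\n\x01\nfile contents";
        assert_eq!(strip_metadata(raw), &b"file contents"[..]);
        assert_eq!(strip_metadata(b"plain"), &b"plain"[..]);
    }
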
--- a/rust/hg-core/src/operations/debugdata.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/debugdata.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -5,8 +5,9 @@
 // This software may be used and distributed according to the terms of the
 // GNU General Public License version 2 or any later version.
 
-use super::find_root;
+use crate::repo::Repo;
 use crate::revlog::revlog::{Revlog, RevlogError};
+use crate::revlog::NodePrefix;
 use crate::revlog::Revision;
 
 /// Kind of data to debug
@@ -19,11 +20,12 @@
 /// Kind of error encountered by DebugData
 #[derive(Debug)]
 pub enum DebugDataErrorKind {
-    FindRootError(find_root::FindRootError),
     /// Error when reading a `revlog` file.
     IoError(std::io::Error),
     /// The revision has not been found.
     InvalidRevision,
+    /// Found more than one revision whose ID match the requested prefix
+    AmbiguousPrefix,
     /// A `revlog` file is corrupted.
     CorruptedRevlog,
     /// The `revlog` format version is not supported.
@@ -45,13 +47,6 @@
     }
 }
 
-impl From<find_root::FindRootError> for DebugDataError {
-    fn from(err: find_root::FindRootError) -> Self {
-        let kind = DebugDataErrorKind::FindRootError(err);
-        DebugDataError { kind }
-    }
-}
-
 impl From<std::io::Error> for DebugDataError {
     fn from(err: std::io::Error) -> Self {
         let kind = DebugDataErrorKind::IoError(err);
@@ -69,6 +64,9 @@
             RevlogError::InvalidRevision => {
                 DebugDataErrorKind::InvalidRevision
             }
+            RevlogError::AmbiguousPrefix => {
+                DebugDataErrorKind::AmbiguousPrefix
+            }
             RevlogError::Corrupted => DebugDataErrorKind::CorruptedRevlog,
             RevlogError::UnknowDataFormat(format) => {
                 DebugDataErrorKind::UnknowRevlogDataFormat(format)
@@ -79,32 +77,26 @@
 }
 
 /// Dump the contents data of a revision.
-pub struct DebugData<'a> {
-    /// Revision or hash of the revision.
-    rev: &'a str,
-    /// Kind of data to debug.
+pub fn debug_data(
+    repo: &Repo,
+    rev: &str,
     kind: DebugDataKind,
-}
-
-impl<'a> DebugData<'a> {
-    pub fn new(rev: &'a str, kind: DebugDataKind) -> Self {
-        DebugData { rev, kind }
-    }
+) -> Result<Vec<u8>, DebugDataError> {
+    let index_file = match kind {
+        DebugDataKind::Changelog => "00changelog.i",
+        DebugDataKind::Manifest => "00manifest.i",
+    };
+    let revlog = Revlog::open(repo, index_file, None)?;
 
-    pub fn run(&mut self) -> Result<Vec<u8>, DebugDataError> {
-        let rev = self
-            .rev
-            .parse::<Revision>()
-            .or(Err(DebugDataErrorKind::InvalidRevision))?;
+    let data = match rev.parse::<Revision>() {
+        Ok(rev) => revlog.get_rev_data(rev)?,
+        _ => {
+            let node = NodePrefix::from_hex(&rev)
+                .map_err(|_| DebugDataErrorKind::InvalidRevision)?;
+            let rev = revlog.get_node_rev(node.borrow())?;
+            revlog.get_rev_data(rev)?
+        }
+    };
 
-        let root = find_root::FindRoot::new().run()?;
-        let index_file = match self.kind {
-            DebugDataKind::Changelog => root.join(".hg/store/00changelog.i"),
-            DebugDataKind::Manifest => root.join(".hg/store/00manifest.i"),
-        };
-        let revlog = Revlog::open(&index_file)?;
-        let data = revlog.get_rev_data(rev)?;
-
-        Ok(data)
-    }
+    Ok(data)
 }
--- a/rust/hg-core/src/operations/dirstate_status.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/dirstate_status.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -18,7 +18,7 @@
 impl<'a, M: Matcher + Sync> Status<'a, M> {
     pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> {
         let (traversed_sender, traversed_receiver) =
-            crossbeam::channel::unbounded();
+            crossbeam_channel::unbounded();
 
         // Step 1: check the files explicitly mentioned by the user
         let (work, mut results) = self.walk_explicit(traversed_sender.clone());
@@ -56,7 +56,7 @@
                                     .expect("old results should exist"),
                                 &mut results,
                                 traversed_sender.clone(),
-                            )?;
+                            );
                         }
                     }
                     _ => {
@@ -77,7 +77,7 @@
 impl<'a, M: Matcher + Sync> Status<'a, M> {
     pub(crate) fn run(&self) -> Result<LookupAndStatus<'a>, StatusError> {
         let (traversed_sender, traversed_receiver) =
-            crossbeam::channel::unbounded();
+            crossbeam_channel::unbounded();
 
         // Step 1: check the files explicitly mentioned by the user
         let (work, mut results) = self.walk_explicit(traversed_sender.clone());
@@ -104,7 +104,7 @@
                                 &old_results,
                                 &mut results,
                                 traversed_sender.clone(),
-                            )?;
+                            );
                         }
                     }
                     _ => {
@@ -116,7 +116,7 @@
 
         if !self.matcher.is_exact() {
             if self.options.list_unknown {
-                self.handle_unknowns(&mut results)?;
+                self.handle_unknowns(&mut results);
             } else {
                 // TODO this is incorrect, see issue6335
                 // This requires a fix in both Python and Rust that can happen
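
Editor's note: the status code now depends on the standalone `crossbeam-channel` crate rather than the `crossbeam` facade. A minimal sketch of the unbounded-channel pattern it relies on, assuming `crossbeam-channel` is available as a dependency:

use crossbeam_channel::unbounded;

fn main() {
    // One sender per worker thread, one receiver collecting traversed paths.
    let (traversed_sender, traversed_receiver) = unbounded::<String>();
    traversed_sender
        .send("some/traversed/path".to_owned())
        .unwrap();
    // Dropping every sender closes the channel and ends the receive loop.
    drop(traversed_sender);
    for path in traversed_receiver {
        println!("traversed: {}", path);
    }
}
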
--- a/rust/hg-core/src/operations/find_root.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/find_root.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -28,46 +28,29 @@
 }
 
 /// Find the root of the repository
-/// by searching for a .hg directory in the current directory and its
+/// by searching for a .hg directory in the process’ current directory and its
 /// ancestors
-pub struct FindRoot<'a> {
-    current_dir: Option<&'a Path>,
+pub fn find_root() -> Result<PathBuf, FindRootError> {
+    let current_dir = std::env::current_dir().map_err(|e| FindRootError {
+        kind: FindRootErrorKind::GetCurrentDirError(e),
+    })?;
+    Ok(find_root_from_path(&current_dir)?.into())
 }
 
-impl<'a> FindRoot<'a> {
-    pub fn new() -> Self {
-        Self { current_dir: None }
+/// Find the root of the repository
+/// by searching for a .hg directory in the given directory and its ancestors
+pub fn find_root_from_path(start: &Path) -> Result<&Path, FindRootError> {
+    if start.join(".hg").exists() {
+        return Ok(start);
     }
-
-    pub fn new_from_path(current_dir: &'a Path) -> Self {
-        Self {
-            current_dir: Some(current_dir),
+    for ancestor in start.ancestors() {
+        if ancestor.join(".hg").exists() {
+            return Ok(ancestor);
         }
     }
-
-    pub fn run(&self) -> Result<PathBuf, FindRootError> {
-        let current_dir = match self.current_dir {
-            None => std::env::current_dir().or_else(|e| {
-                Err(FindRootError {
-                    kind: FindRootErrorKind::GetCurrentDirError(e),
-                })
-            })?,
-            Some(path) => path.into(),
-        };
-
-        if current_dir.join(".hg").exists() {
-            return Ok(current_dir);
-        }
-        let ancestors = current_dir.ancestors();
-        for parent in ancestors {
-            if parent.join(".hg").exists() {
-                return Ok(parent.into());
-            }
-        }
-        Err(FindRootError {
-            kind: FindRootErrorKind::RootNotFound(current_dir.to_path_buf()),
-        })
-    }
+    Err(FindRootError {
+        kind: FindRootErrorKind::RootNotFound(start.into()),
+    })
 }
 
 #[cfg(test)]
@@ -81,7 +64,7 @@
         let tmp_dir = tempfile::tempdir().unwrap();
         let path = tmp_dir.path();
 
-        let err = FindRoot::new_from_path(&path).run().unwrap_err();
+        let err = find_root_from_path(&path).unwrap_err();
 
         // TODO do something better
         assert!(match err {
@@ -98,7 +81,7 @@
         let root = tmp_dir.path();
         fs::create_dir_all(root.join(".hg")).unwrap();
 
-        let result = FindRoot::new_from_path(&root).run().unwrap();
+        let result = find_root_from_path(&root).unwrap();
 
         assert_eq!(result, root)
     }
@@ -109,10 +92,8 @@
         let root = tmp_dir.path();
         fs::create_dir_all(root.join(".hg")).unwrap();
 
-        let result =
-            FindRoot::new_from_path(&root.join("some/nested/directory"))
-                .run()
-                .unwrap();
+        let directory = root.join("some/nested/directory");
+        let result = find_root_from_path(&directory).unwrap();
 
         assert_eq!(result, root)
     }
--- a/rust/hg-core/src/operations/list_tracked_files.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/list_tracked_files.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -6,16 +6,16 @@
 // GNU General Public License version 2 or any later version.
 
 use crate::dirstate::parsers::parse_dirstate;
+use crate::repo::Repo;
 use crate::revlog::changelog::Changelog;
 use crate::revlog::manifest::{Manifest, ManifestEntry};
+use crate::revlog::node::{Node, NodePrefix};
 use crate::revlog::revlog::RevlogError;
 use crate::revlog::Revision;
 use crate::utils::hg_path::HgPath;
 use crate::{DirstateParseError, EntryState};
 use rayon::prelude::*;
 use std::convert::From;
-use std::fs;
-use std::path::PathBuf;
 
 /// Kind of error encountered by `ListDirstateTrackedFiles`
 #[derive(Debug)]
@@ -50,20 +50,19 @@
 
 /// List files under Mercurial control in the working directory
 /// by reading the dirstate
-pub struct ListDirstateTrackedFiles {
+pub struct Dirstate {
     /// The `dirstate` content.
     content: Vec<u8>,
 }
 
-impl ListDirstateTrackedFiles {
-    pub fn new(root: &PathBuf) -> Result<Self, ListDirstateTrackedFilesError> {
-        let dirstate = root.join(".hg/dirstate");
-        let content = fs::read(&dirstate)?;
+impl Dirstate {
+    pub fn new(repo: &Repo) -> Result<Self, ListDirstateTrackedFilesError> {
+        let content = repo.hg_vfs().read("dirstate")?;
         Ok(Self { content })
     }
 
-    pub fn run(
-        &mut self,
+    pub fn tracked_files(
+        &self,
     ) -> Result<Vec<&HgPath>, ListDirstateTrackedFilesError> {
         let (_, entries, _) = parse_dirstate(&self.content)
             .map_err(ListDirstateTrackedFilesErrorKind::ParseError)?;
@@ -86,6 +85,8 @@
     IoError(std::io::Error),
     /// The revision has not been found.
     InvalidRevision,
+    /// Found more than one revision whose ID matches the requested prefix
+    AmbiguousPrefix,
     /// A `revlog` file is corrupted.
     CorruptedRevlog,
     /// The `revlog` format version is not supported.
@@ -119,6 +120,9 @@
             RevlogError::InvalidRevision => {
                 ListRevTrackedFilesErrorKind::InvalidRevision
             }
+            RevlogError::AmbiguousPrefix => {
+                ListRevTrackedFilesErrorKind::AmbiguousPrefix
+            }
             RevlogError::Corrupted => {
                 ListRevTrackedFilesErrorKind::CorruptedRevlog
             }
@@ -131,57 +135,31 @@
 }
 
 /// List files under Mercurial control at a given revision.
-pub struct ListRevTrackedFiles<'a> {
-    /// The revision to list the files from.
-    rev: &'a str,
-    /// The changelog file
-    changelog: Changelog,
-    /// The manifest file
-    manifest: Manifest,
-    /// The manifest entry corresponding to the revision.
-    ///
-    /// Used to hold the owner of the returned references.
-    manifest_entry: Option<ManifestEntry>,
+pub fn list_rev_tracked_files(
+    repo: &Repo,
+    rev: &str,
+) -> Result<FilesForRev, ListRevTrackedFilesError> {
+    let changelog = Changelog::open(repo)?;
+    let manifest = Manifest::open(repo)?;
+
+    let changelog_entry = match rev.parse::<Revision>() {
+        Ok(rev) => changelog.get_rev(rev)?,
+        _ => {
+            let changelog_node = NodePrefix::from_hex(&rev)
+                .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?;
+            changelog.get_node(changelog_node.borrow())?
+        }
+    };
+    let manifest_node = Node::from_hex(&changelog_entry.manifest_node()?)
+        .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?;
+    let manifest_entry = manifest.get_node((&manifest_node).into())?;
+    Ok(FilesForRev(manifest_entry))
 }
 
-impl<'a> ListRevTrackedFiles<'a> {
-    pub fn new(
-        root: &PathBuf,
-        rev: &'a str,
-    ) -> Result<Self, ListRevTrackedFilesError> {
-        let changelog = Changelog::open(&root)?;
-        let manifest = Manifest::open(&root)?;
-
-        Ok(Self {
-            rev,
-            changelog,
-            manifest,
-            manifest_entry: None,
-        })
-    }
+pub struct FilesForRev(ManifestEntry);
 
-    pub fn run(
-        &mut self,
-    ) -> Result<impl Iterator<Item = &HgPath>, ListRevTrackedFilesError> {
-        let changelog_entry = match self.rev.parse::<Revision>() {
-            Ok(rev) => self.changelog.get_rev(rev)?,
-            _ => {
-                let changelog_node = hex::decode(&self.rev)
-                    .or(Err(ListRevTrackedFilesErrorKind::InvalidRevision))?;
-                self.changelog.get_node(&changelog_node)?
-            }
-        };
-        let manifest_node = hex::decode(&changelog_entry.manifest_node()?)
-            .or(Err(ListRevTrackedFilesErrorKind::CorruptedRevlog))?;
-
-        self.manifest_entry = Some(self.manifest.get_node(&manifest_node)?);
-
-        if let Some(ref manifest_entry) = self.manifest_entry {
-            Ok(manifest_entry.files())
-        } else {
-            panic!(
-                "manifest entry should have been stored in self.manifest_node to ensure its lifetime since references are returned from it"
-            )
-        }
+impl FilesForRev {
+    pub fn iter(&self) -> impl Iterator<Item = &HgPath> {
+        self.0.files()
     }
 }
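
Editor's note: callers now use this functional API directly, as rhg's `files` command does further below. A hedged usage sketch, assuming the `hg` (hg-core) crate as a dependency and eliding error reporting:

use hg::operations::{list_rev_tracked_files, Dirstate};
use hg::repo::Repo;

fn main() {
    // Locate the repository from the current working directory.
    let repo = match Repo::find() {
        Ok(repo) => repo,
        Err(_) => return,
    };
    // Working-directory view: parse `.hg/dirstate` through the repo VFS.
    if let Ok(dirstate) = Dirstate::new(&repo) {
        if let Ok(files) = dirstate.tracked_files() {
            for path in files {
                println!("{}", String::from_utf8_lossy(path.as_bytes()));
            }
        }
    }
    // Revision view: resolve "0" through the changelog and manifest.
    if let Ok(files) = list_rev_tracked_files(&repo, "0") {
        for path in files.iter() {
            println!("{}", String::from_utf8_lossy(path.as_bytes()));
        }
    }
}
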
--- a/rust/hg-core/src/operations/mod.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/operations/mod.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -7,22 +7,17 @@
 mod dirstate_status;
 mod find_root;
 mod list_tracked_files;
-pub use cat::{CatRev, CatRevError, CatRevErrorKind};
+pub use cat::{cat, CatRevError, CatRevErrorKind};
 pub use debugdata::{
-    DebugData, DebugDataError, DebugDataErrorKind, DebugDataKind,
+    debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind,
 };
-pub use find_root::{FindRoot, FindRootError, FindRootErrorKind};
-pub use list_tracked_files::{
-    ListDirstateTrackedFiles, ListDirstateTrackedFilesError,
-    ListDirstateTrackedFilesErrorKind,
+pub use find_root::{
+    find_root, find_root_from_path, FindRootError, FindRootErrorKind,
 };
 pub use list_tracked_files::{
-    ListRevTrackedFiles, ListRevTrackedFilesError,
+    list_rev_tracked_files, FilesForRev, ListRevTrackedFilesError,
     ListRevTrackedFilesErrorKind,
 };
-
-// TODO add an `Operation` trait when GAT have landed (rust #44265):
-// there is no way to currently define a trait which can both return
-// references to `self` and to passed data, which is what we would need.
-// Generic Associated Types may fix this and allow us to have a unified
-// interface.
+pub use list_tracked_files::{
+    Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind,
+};
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/repo.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,92 @@
+use crate::operations::{find_root, FindRootError};
+use crate::requirements;
+use memmap::{Mmap, MmapOptions};
+use std::path::{Path, PathBuf};
+
+/// A repository on disk
+pub struct Repo {
+    working_directory: PathBuf,
+    dot_hg: PathBuf,
+    store: PathBuf,
+}
+
+/// Filesystem access abstraction for the contents of a given "base" directory
+#[derive(Clone, Copy)]
+pub(crate) struct Vfs<'a> {
+    base: &'a Path,
+}
+
+impl Repo {
+    /// Returns `None` if the given path doesn’t look like a repository
+    /// (doesn’t contain a `.hg` sub-directory).
+    pub fn for_path(root: impl Into<PathBuf>) -> Self {
+        let working_directory = root.into();
+        let dot_hg = working_directory.join(".hg");
+        Self {
+            store: dot_hg.join("store"),
+            dot_hg,
+            working_directory,
+        }
+    }
+
+    pub fn find() -> Result<Self, FindRootError> {
+        find_root().map(Self::for_path)
+    }
+
+    pub fn check_requirements(
+        &self,
+    ) -> Result<(), requirements::RequirementsError> {
+        requirements::check(self)
+    }
+
+    pub fn working_directory_path(&self) -> &Path {
+        &self.working_directory
+    }
+
+    /// For accessing repository files (in `.hg`), except for the store
+    /// (`.hg/store`).
+    pub(crate) fn hg_vfs(&self) -> Vfs<'_> {
+        Vfs { base: &self.dot_hg }
+    }
+
+    /// For accessing repository store files (in `.hg/store`)
+    pub(crate) fn store_vfs(&self) -> Vfs<'_> {
+        Vfs { base: &self.store }
+    }
+
+    /// For accessing the working copy
+
+    // The underscore prefix silences the "never used" warning. Remove before
+    // using.
+    pub(crate) fn _working_directory_vfs(&self) -> Vfs<'_> {
+        Vfs {
+            base: &self.working_directory,
+        }
+    }
+}
+
+impl Vfs<'_> {
+    pub(crate) fn read(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> std::io::Result<Vec<u8>> {
+        std::fs::read(self.base.join(relative_path))
+    }
+
+    pub(crate) fn open(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> std::io::Result<std::fs::File> {
+        std::fs::File::open(self.base.join(relative_path))
+    }
+
+    pub(crate) fn mmap_open(
+        &self,
+        relative_path: impl AsRef<Path>,
+    ) -> std::io::Result<Mmap> {
+        let file = self.open(relative_path)?;
+        // TODO: what are the safety requirements here?
+        let mmap = unsafe { MmapOptions::new().map(&file) }?;
+        Ok(mmap)
+    }
+}
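
Editor's note: `Vfs` is `pub(crate)`, so external callers are expected to go through `Repo`. A small usage sketch of the public surface introduced here, assuming hg-core as a dependency:

use hg::repo::Repo;

fn main() {
    // Locate the repository from the current directory, as rhg commands do.
    let repo = match Repo::find() {
        Ok(repo) => repo,
        Err(_) => {
            eprintln!("no repository found");
            return;
        }
    };
    // Refuse to proceed if `.hg/requires` lists an unsupported feature.
    if repo.check_requirements().is_err() {
        eprintln!("unsupported repository requirements");
        return;
    }
    println!("root: {}", repo.working_directory_path().display());
}
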
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/requirements.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,76 @@
+use crate::repo::Repo;
+use std::io;
+
+#[derive(Debug)]
+pub enum RequirementsError {
+    // TODO: include a path?
+    Io(io::Error),
+    /// The `requires` file is corrupted
+    Corrupted,
+    /// The repository requires a feature that we don't support
+    Unsupported {
+        feature: String,
+    },
+}
+
+fn parse(bytes: &[u8]) -> Result<Vec<String>, ()> {
+    // The Python code reading this file uses `str.splitlines`
+    // which looks for a number of line separators (even including a couple of
+    // non-ASCII ones), but Python code writing it always uses `\n`.
+    let lines = bytes.split(|&byte| byte == b'\n');
+
+    lines
+        .filter(|line| !line.is_empty())
+        .map(|line| {
+            // Python uses Unicode `str.isalnum` but feature names are all
+            // ASCII
+            if line[0].is_ascii_alphanumeric() && line.is_ascii() {
+                Ok(String::from_utf8(line.into()).unwrap())
+            } else {
+                Err(())
+            }
+        })
+        .collect()
+}
+
+pub fn load(repo: &Repo) -> Result<Vec<String>, RequirementsError> {
+    match repo.hg_vfs().read("requires") {
+        Ok(bytes) => parse(&bytes).map_err(|()| RequirementsError::Corrupted),
+
+        // Treat a missing file the same as an empty file.
+        // From `mercurial/localrepo.py`:
+        // > requires file contains a newline-delimited list of
+        // > features/capabilities the opener (us) must have in order to use
+        // > the repository. This file was introduced in Mercurial 0.9.2,
+        // > which means very old repositories may not have one. We assume
+        // > a missing file translates to no requirements.
+        Err(error) if error.kind() == std::io::ErrorKind::NotFound => {
+            Ok(Vec::new())
+        }
+
+        Err(error) => Err(RequirementsError::Io(error))?,
+    }
+}
+
+pub fn check(repo: &Repo) -> Result<(), RequirementsError> {
+    for feature in load(repo)? {
+        if !SUPPORTED.contains(&&*feature) {
+            return Err(RequirementsError::Unsupported { feature });
+        }
+    }
+    Ok(())
+}
+
+// TODO: set this to actually-supported features
+const SUPPORTED: &[&str] = &[
+    "dotencode",
+    "fncache",
+    "generaldelta",
+    "revlogv1",
+    "sparserevlog",
+    "store",
+    // As of this writing everything rhg does is read-only.
+    // When it starts writing to the repository, it’ll need to either keep the
+    // persistent nodemap up to date or remove this entry:
+    "persistent-nodemap",
+];
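
Editor's note: for illustration, a standalone restatement of the load-and-check logic above with inline sample data; the `requires` contents and supported list here are examples only:

fn main() {
    // Example contents of a `.hg/requires` file (illustrative only).
    let bytes = b"dotencode\nfncache\ngeneraldelta\nrevlogv1\nstore\n";
    // A local stand-in for the SUPPORTED list above.
    let supported = [
        "dotencode", "fncache", "generaldelta", "revlogv1", "sparserevlog", "store",
    ];
    // Newline-delimited entries; empty lines are ignored, as in `parse()`.
    for line in bytes.split(|&b| b == b'\n').filter(|l| !l.is_empty()) {
        let feature = std::str::from_utf8(line).expect("requires entries are ASCII");
        if !supported.contains(&feature) {
            eprintln!("unsupported feature: {}", feature);
            return;
        }
    }
    println!("all requirements supported");
}
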
--- a/rust/hg-core/src/revlog.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -7,6 +7,7 @@
 
 pub mod node;
 pub mod nodemap;
+mod nodemap_docket;
 pub mod path_encode;
 pub use node::{Node, NodeError, NodePrefix, NodePrefixRef};
 pub mod changelog;
--- a/rust/hg-core/src/revlog/changelog.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog/changelog.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,6 +1,7 @@
+use crate::repo::Repo;
 use crate::revlog::revlog::{Revlog, RevlogError};
+use crate::revlog::NodePrefixRef;
 use crate::revlog::Revision;
-use std::path::PathBuf;
 
 /// A specialized `Revlog` to work with `changelog` data format.
 pub struct Changelog {
@@ -10,16 +11,15 @@
 
 impl Changelog {
     /// Open the `changelog` of a repository given by its root.
-    pub fn open(root: &PathBuf) -> Result<Self, RevlogError> {
-        let index_file = root.join(".hg/store/00changelog.i");
-        let revlog = Revlog::open(&index_file)?;
+    pub fn open(repo: &Repo) -> Result<Self, RevlogError> {
+        let revlog = Revlog::open(repo, "00changelog.i", None)?;
         Ok(Self { revlog })
     }
 
     /// Return the `ChangelogEntry` a given node id.
     pub fn get_node(
         &self,
-        node: &[u8],
+        node: NodePrefixRef,
     ) -> Result<ChangelogEntry, RevlogError> {
         let rev = self.revlog.get_node_rev(node)?;
         self.get_rev(rev)
--- a/rust/hg-core/src/revlog/index.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog/index.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,7 +1,9 @@
+use std::convert::TryInto;
 use std::ops::Deref;
 
 use byteorder::{BigEndian, ByteOrder};
 
+use crate::revlog::node::Node;
 use crate::revlog::revlog::RevlogError;
 use crate::revlog::{Revision, NULL_REVISION};
 
@@ -130,6 +132,16 @@
     }
 }
 
+impl super::RevlogIndex for Index {
+    fn len(&self) -> usize {
+        self.len()
+    }
+
+    fn node(&self, rev: Revision) -> Option<&Node> {
+        self.get_entry(rev).map(|entry| entry.hash())
+    }
+}
+
 #[derive(Debug)]
 pub struct IndexEntry<'a> {
     bytes: &'a [u8],
@@ -188,8 +200,8 @@
     ///
     /// Currently, SHA-1 is used and only the first 20 bytes of this field
     /// are used.
-    pub fn hash(&self) -> &[u8] {
-        &self.bytes[32..52]
+    pub fn hash(&self) -> &'a Node {
+        (&self.bytes[32..52]).try_into().unwrap()
     }
 }
 
--- a/rust/hg-core/src/revlog/manifest.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog/manifest.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,7 +1,8 @@
+use crate::repo::Repo;
 use crate::revlog::revlog::{Revlog, RevlogError};
+use crate::revlog::NodePrefixRef;
 use crate::revlog::Revision;
 use crate::utils::hg_path::HgPath;
-use std::path::PathBuf;
 
 /// A specialized `Revlog` to work with `manifest` data format.
 pub struct Manifest {
@@ -11,14 +12,16 @@
 
 impl Manifest {
     /// Open the `manifest` of a repository given by its root.
-    pub fn open(root: &PathBuf) -> Result<Self, RevlogError> {
-        let index_file = root.join(".hg/store/00manifest.i");
-        let revlog = Revlog::open(&index_file)?;
+    pub fn open(repo: &Repo) -> Result<Self, RevlogError> {
+        let revlog = Revlog::open(repo, "00manifest.i", None)?;
         Ok(Self { revlog })
     }
 
     /// Return the `ManifestEntry` of a given node id.
-    pub fn get_node(&self, node: &[u8]) -> Result<ManifestEntry, RevlogError> {
+    pub fn get_node(
+        &self,
+        node: NodePrefixRef,
+    ) -> Result<ManifestEntry, RevlogError> {
         let rev = self.revlog.get_node_rev(node)?;
         self.get_rev(rev)
     }
--- a/rust/hg-core/src/revlog/node.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog/node.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -9,6 +9,7 @@
 //! of a revision.
 
 use hex::{self, FromHex, FromHexError};
+use std::convert::{TryFrom, TryInto};
 
 /// The length in bytes of a `Node`
 ///
@@ -65,6 +66,19 @@
     }
 }
 
+/// Return an error if the slice has an unexpected length
+impl<'a> TryFrom<&'a [u8]> for &'a Node {
+    type Error = std::array::TryFromSliceError;
+
+    #[inline]
+    fn try_from(bytes: &'a [u8]) -> Result<&'a Node, Self::Error> {
+        let data = bytes.try_into()?;
+        // Safety: `#[repr(transparent)]` makes it ok to "wrap" the target
+        // of a reference to the type of the single field.
+        Ok(unsafe { std::mem::transmute::<&NodeData, &Node>(data) })
+    }
+}
+
 #[derive(Debug, PartialEq)]
 pub enum NodeError {
     ExactLengthRequired(usize, String),
@@ -103,8 +117,8 @@
     ///
     /// To be used in FFI and I/O only, in order to facilitate future
     /// changes of hash format.
-    pub fn from_hex(hex: &str) -> Result<Node, NodeError> {
-        Ok(NodeData::from_hex(hex)
+    pub fn from_hex(hex: impl AsRef<[u8]>) -> Result<Node, NodeError> {
+        Ok(NodeData::from_hex(hex.as_ref())
             .map_err(|e| NodeError::from((e, hex)))?
             .into())
     }
@@ -126,17 +140,15 @@
     }
 }
 
-impl<T: AsRef<str>> From<(FromHexError, T)> for NodeError {
+impl<T: AsRef<[u8]>> From<(FromHexError, T)> for NodeError {
     fn from(err_offender: (FromHexError, T)) -> Self {
         let (err, offender) = err_offender;
+        let offender = String::from_utf8_lossy(offender.as_ref()).into_owned();
         match err {
             FromHexError::InvalidStringLength => {
-                NodeError::ExactLengthRequired(
-                    NODE_NYBBLES_LENGTH,
-                    offender.as_ref().to_owned(),
-                )
+                NodeError::ExactLengthRequired(NODE_NYBBLES_LENGTH, offender)
             }
-            _ => NodeError::HexError(err, offender.as_ref().to_owned()),
+            _ => NodeError::HexError(err, offender),
         }
     }
 }
@@ -171,8 +183,8 @@
 
         let is_odd = len % 2 == 1;
         let even_part = if is_odd { &hex[..len - 1] } else { hex };
-        let mut buf: Vec<u8> = Vec::from_hex(&even_part)
-            .map_err(|e| (e, String::from_utf8_lossy(hex)))?;
+        let mut buf: Vec<u8> =
+            Vec::from_hex(&even_part).map_err(|e| (e, hex))?;
 
         if is_odd {
             let latest_char = char::from(hex[len - 1]);
@@ -182,7 +194,7 @@
                         c: latest_char,
                         index: len - 1,
                     },
-                    String::from_utf8_lossy(hex),
+                    hex,
                 )
             })? as u8;
             buf.push(latest_nybble << 4);
@@ -278,6 +290,12 @@
     }
 }
 
+impl PartialEq<Node> for NodePrefixRef<'_> {
+    fn eq(&self, other: &Node) -> bool {
+        !self.is_odd && self.buf == other.data
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -292,7 +310,7 @@
     }
 
     /// Pad an hexadecimal string to reach `NODE_NYBBLES_LENGTH`
-    ///
+    ///
     /// The padding is made with zeros
     pub fn hex_pad_right(hex: &str) -> String {
         let mut res = hex.to_string();
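
Editor's note: the `TryFrom<&[u8]> for &Node` impl above relies on `#[repr(transparent)]` to reinterpret a reference to a correctly sized slice as a reference to the wrapper type. A standalone sketch of that pattern with a stand-in `Hash20` type (not the real `Node`):

use std::convert::TryInto;

// Stand-in for hg-core's 20-byte `Node`; illustrative only.
#[repr(transparent)]
struct Hash20([u8; 20]);

impl Hash20 {
    fn as_bytes(&self) -> &[u8] {
        &self.0
    }
}

fn as_hash(bytes: &[u8]) -> Result<&Hash20, std::array::TryFromSliceError> {
    // Fails when the slice is not exactly 20 bytes long.
    let array: &[u8; 20] = bytes.try_into()?;
    // Safety: `#[repr(transparent)]` guarantees Hash20 and [u8; 20] share a layout.
    Ok(unsafe { &*(array as *const [u8; 20] as *const Hash20) })
}

fn main() {
    let raw = [0u8; 32];
    assert_eq!(as_hash(&raw[..20]).unwrap().as_bytes().len(), 20);
    assert!(as_hash(&raw[..10]).is_err());
}
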
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-core/src/revlog/nodemap_docket.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,121 @@
+use memmap::Mmap;
+use std::convert::TryInto;
+use std::path::{Path, PathBuf};
+
+use super::revlog::RevlogError;
+use crate::repo::Repo;
+use crate::utils::strip_suffix;
+
+const ONDISK_VERSION: u8 = 1;
+
+pub(super) struct NodeMapDocket {
+    pub data_length: usize,
+    // TODO: keep here more of the data from `parse()` when we need it
+}
+
+impl NodeMapDocket {
+    /// Return `Ok(None)` when the caller should proceed without a persistent
+    /// nodemap:
+    ///
+    /// * This revlog does not have a `.n` docket file (it is not generated for
+    ///   small revlogs), or
+    /// * The docket has an unsupported version number (repositories created by
+    ///   later hg, maybe that should be a requirement instead?), or
+    /// * The docket file points to a missing (likely deleted) data file (this
+    ///   can happen in a rare race condition).
+    pub fn read_from_file(
+        repo: &Repo,
+        index_path: &Path,
+    ) -> Result<Option<(Self, Mmap)>, RevlogError> {
+        let docket_path = index_path.with_extension("n");
+        let docket_bytes = match repo.store_vfs().read(&docket_path) {
+            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
+                return Ok(None)
+            }
+            Err(e) => return Err(RevlogError::IoError(e)),
+            Ok(bytes) => bytes,
+        };
+
+        let mut input = if let Some((&ONDISK_VERSION, rest)) =
+            docket_bytes.split_first()
+        {
+            rest
+        } else {
+            return Ok(None);
+        };
+        let input = &mut input;
+
+        let uid_size = read_u8(input)? as usize;
+        let _tip_rev = read_be_u64(input)?;
+        // TODO: do we care about overflow for 4 GB+ nodemap files on 32-bit
+        // systems?
+        let data_length = read_be_u64(input)? as usize;
+        let _data_unused = read_be_u64(input)?;
+        let tip_node_size = read_be_u64(input)? as usize;
+        let uid = read_bytes(input, uid_size)?;
+        let _tip_node = read_bytes(input, tip_node_size)?;
+
+        let uid =
+            std::str::from_utf8(uid).map_err(|_| RevlogError::Corrupted)?;
+        let docket = NodeMapDocket { data_length };
+
+        let data_path = rawdata_path(&docket_path, uid);
+        // TODO: use `std::fs::read` here when the `persistent-nodemap.mmap`
+        // config is false?
+        match repo.store_vfs().mmap_open(&data_path) {
+            Ok(mmap) => {
+                if mmap.len() >= data_length {
+                    Ok(Some((docket, mmap)))
+                } else {
+                    Err(RevlogError::Corrupted)
+                }
+            }
+            Err(error) => {
+                if error.kind() == std::io::ErrorKind::NotFound {
+                    Ok(None)
+                } else {
+                    Err(RevlogError::IoError(error))
+                }
+            }
+        }
+    }
+}
+
+fn read_bytes<'a>(
+    input: &mut &'a [u8],
+    count: usize,
+) -> Result<&'a [u8], RevlogError> {
+    if let Some(start) = input.get(..count) {
+        *input = &input[count..];
+        Ok(start)
+    } else {
+        Err(RevlogError::Corrupted)
+    }
+}
+
+fn read_u8<'a>(input: &mut &[u8]) -> Result<u8, RevlogError> {
+    Ok(read_bytes(input, 1)?[0])
+}
+
+fn read_be_u64<'a>(input: &mut &[u8]) -> Result<u64, RevlogError> {
+    let array = read_bytes(input, std::mem::size_of::<u64>())?
+        .try_into()
+        .unwrap();
+    Ok(u64::from_be_bytes(array))
+}
+
+fn rawdata_path(docket_path: &Path, uid: &str) -> PathBuf {
+    let docket_name = docket_path
+        .file_name()
+        .expect("expected a base name")
+        .to_str()
+        .expect("expected an ASCII file name in the store");
+    let prefix = strip_suffix(docket_name, ".n.a")
+        .or_else(|| strip_suffix(docket_name, ".n"))
+        .expect("expected docket path in .n or .n.a");
+    let name = format!("{}-{}.nd", prefix, uid);
+    docket_path
+        .parent()
+        .expect("expected a non-root path")
+        .join(name)
+}
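
Editor's note: `read_from_file` parses the docket with small helpers that consume bytes from the front of a `&mut &[u8]`. A standalone sketch of that cursor style, using `Option` instead of `RevlogError` for brevity:

use std::convert::TryInto;

fn read_bytes<'a>(input: &mut &'a [u8], count: usize) -> Option<&'a [u8]> {
    if input.len() < count {
        return None;
    }
    // Split off `count` bytes and advance the cursor past them.
    let (head, rest) = input.split_at(count);
    *input = rest;
    Some(head)
}

fn read_be_u64(input: &mut &[u8]) -> Option<u64> {
    let bytes = read_bytes(input, std::mem::size_of::<u64>())?;
    Some(u64::from_be_bytes(bytes.try_into().unwrap()))
}

fn main() {
    let data = [0u8, 0, 0, 0, 0, 0, 0, 42, b'h', b'i'];
    let mut input = &data[..];
    assert_eq!(read_be_u64(&mut input), Some(42));
    assert_eq!(read_bytes(&mut input, 2), Some(&b"hi"[..]));
    assert!(read_bytes(&mut input, 1).is_none());
}
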
--- a/rust/hg-core/src/revlog/revlog.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/revlog/revlog.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,5 +1,4 @@
 use std::borrow::Cow;
-use std::fs::File;
 use std::io::Read;
 use std::ops::Deref;
 use std::path::Path;
@@ -8,29 +7,28 @@
 use crypto::digest::Digest;
 use crypto::sha1::Sha1;
 use flate2::read::ZlibDecoder;
-use memmap::{Mmap, MmapOptions};
 use micro_timer::timed;
 use zstd;
 
 use super::index::Index;
-use super::node::{NODE_BYTES_LENGTH, NULL_NODE_ID};
+use super::node::{NodePrefixRef, NODE_BYTES_LENGTH, NULL_NODE};
+use super::nodemap;
+use super::nodemap::NodeMap;
+use super::nodemap_docket::NodeMapDocket;
 use super::patch;
+use crate::repo::Repo;
 use crate::revlog::Revision;
 
 pub enum RevlogError {
     IoError(std::io::Error),
     UnsuportedVersion(u16),
     InvalidRevision,
+    /// Found more than one entry whose ID matches the requested prefix
+    AmbiguousPrefix,
     Corrupted,
     UnknowDataFormat(u8),
 }
 
-fn mmap_open(path: &Path) -> Result<Mmap, std::io::Error> {
-    let file = File::open(path)?;
-    let mmap = unsafe { MmapOptions::new().map(&file) }?;
-    Ok(mmap)
-}
-
 /// Read only implementation of revlog.
 pub struct Revlog {
     /// When index and data are not interleaved: bytes of the revlog index.
@@ -39,6 +37,8 @@
     index: Index,
     /// When index and data are not interleaved: bytes of the revlog data
     data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>>,
+    /// When present on disk: the persistent nodemap for this revlog
+    nodemap: Option<nodemap::NodeTree>,
 }
 
 impl Revlog {
@@ -47,9 +47,16 @@
     /// It will also open the associated data file if index and data are not
     /// interleaved.
     #[timed]
-    pub fn open(index_path: &Path) -> Result<Self, RevlogError> {
-        let index_mmap =
-            mmap_open(&index_path).map_err(RevlogError::IoError)?;
+    pub fn open(
+        repo: &Repo,
+        index_path: impl AsRef<Path>,
+        data_path: Option<&Path>,
+    ) -> Result<Self, RevlogError> {
+        let index_path = index_path.as_ref();
+        let index_mmap = repo
+            .store_vfs()
+            .mmap_open(&index_path)
+            .map_err(RevlogError::IoError)?;
 
         let version = get_version(&index_mmap);
         if version != 1 {
@@ -58,20 +65,36 @@
 
         let index = Index::new(Box::new(index_mmap))?;
 
-        // TODO load data only when needed //
+        let default_data_path = index_path.with_extension("d");
+
         // type annotation required
         // won't recognize Mmap as Deref<Target = [u8]>
         let data_bytes: Option<Box<dyn Deref<Target = [u8]> + Send>> =
             if index.is_inline() {
                 None
             } else {
-                let data_path = index_path.with_extension("d");
-                let data_mmap =
-                    mmap_open(&data_path).map_err(RevlogError::IoError)?;
+                let data_path = data_path.unwrap_or(&default_data_path);
+                let data_mmap = repo
+                    .store_vfs()
+                    .mmap_open(data_path)
+                    .map_err(RevlogError::IoError)?;
                 Some(Box::new(data_mmap))
             };
 
-        Ok(Revlog { index, data_bytes })
+        let nodemap = NodeMapDocket::read_from_file(repo, index_path)?.map(
+            |(docket, data)| {
+                nodemap::NodeTree::load_bytes(
+                    Box::new(data),
+                    docket.data_length,
+                )
+            },
+        );
+
+        Ok(Revlog {
+            index,
+            data_bytes,
+            nodemap,
+        })
     }
 
     /// Return number of entries of the `Revlog`.
@@ -86,17 +109,39 @@
 
     /// Return the full data associated to a node.
     #[timed]
-    pub fn get_node_rev(&self, node: &[u8]) -> Result<Revision, RevlogError> {
-        // This is brute force. But it is fast enough for now.
-        // Optimization will come later.
+    pub fn get_node_rev(
+        &self,
+        node: NodePrefixRef,
+    ) -> Result<Revision, RevlogError> {
+        if let Some(nodemap) = &self.nodemap {
+            return nodemap
+                .find_bin(&self.index, node)
+                // TODO: propagate details of this error:
+                .map_err(|_| RevlogError::Corrupted)?
+                .ok_or(RevlogError::InvalidRevision);
+        }
+
+        // Fallback to linear scan when a persistent nodemap is not present.
+        // This happens when the persistent-nodemap experimental feature is not
+        // enabled, or for small revlogs.
+        //
+        // TODO: consider building a non-persistent nodemap in memory to
+        // optimize these cases.
+        let mut found_by_prefix = None;
         for rev in (0..self.len() as Revision).rev() {
             let index_entry =
                 self.index.get_entry(rev).ok_or(RevlogError::Corrupted)?;
-            if node == index_entry.hash() {
+            if node == *index_entry.hash() {
                 return Ok(rev);
             }
+            if node.is_prefix_of(index_entry.hash()) {
+                if found_by_prefix.is_some() {
+                    return Err(RevlogError::AmbiguousPrefix);
+                }
+                found_by_prefix = Some(rev)
+            }
         }
-        Err(RevlogError::InvalidRevision)
+        found_by_prefix.ok_or(RevlogError::InvalidRevision)
     }
 
     /// Return the full data associated to a revision.
@@ -130,7 +175,7 @@
         if self.check_hash(
             index_entry.p1(),
             index_entry.p2(),
-            index_entry.hash(),
+            index_entry.hash().as_bytes(),
             &data,
         ) {
             Ok(data)
@@ -150,15 +195,15 @@
         let e1 = self.index.get_entry(p1);
         let h1 = match e1 {
             Some(ref entry) => entry.hash(),
-            None => &NULL_NODE_ID,
+            None => &NULL_NODE,
         };
         let e2 = self.index.get_entry(p2);
         let h2 = match e2 {
             Some(ref entry) => entry.hash(),
-            None => &NULL_NODE_ID,
+            None => &NULL_NODE,
         };
 
-        hash(data, &h1, &h2).as_slice() == expected
+        hash(data, h1.as_bytes(), h2.as_bytes()).as_slice() == expected
     }
 
     /// Build the full data of a revision out its snapshot
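
Editor's note: when no persistent nodemap is present, `get_node_rev` falls back to a reverse linear scan that must also detect ambiguous prefixes. A simplified standalone sketch of that scan over plain hex strings (the real code also short-circuits on an exact full-node match):

#[derive(Debug, PartialEq)]
enum LookupError {
    NotFound,
    AmbiguousPrefix,
}

// `nodes` stands in for the revlog index, ordered by revision number.
fn find_by_prefix(nodes: &[&str], prefix: &str) -> Result<usize, LookupError> {
    let mut found_by_prefix = None;
    for (rev, node) in nodes.iter().enumerate().rev() {
        if node.starts_with(prefix) {
            if found_by_prefix.is_some() {
                return Err(LookupError::AmbiguousPrefix);
            }
            found_by_prefix = Some(rev);
        }
    }
    found_by_prefix.ok_or(LookupError::NotFound)
}

fn main() {
    let nodes = ["d1b4c0ff", "d2e8aa11", "d2e91234"];
    assert_eq!(find_by_prefix(&nodes, "d1"), Ok(0));
    assert_eq!(find_by_prefix(&nodes, "d2e9"), Ok(2));
    assert_eq!(find_by_prefix(&nodes, "d2e"), Err(LookupError::AmbiguousPrefix));
    assert_eq!(find_by_prefix(&nodes, "ff"), Err(LookupError::NotFound));
}
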
--- a/rust/hg-core/src/utils.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/utils.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -167,3 +167,12 @@
         self.as_bytes().escaped_bytes()
     }
 }
+
+// TODO: use the str method when we require Rust 1.45
+pub(crate) fn strip_suffix<'a>(s: &'a str, suffix: &str) -> Option<&'a str> {
+    if s.ends_with(suffix) {
+        Some(&s[..s.len() - suffix.len()])
+    } else {
+        None
+    }
+}
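
Editor's note: as the TODO says, `str::strip_suffix` in the standard library replaces this helper from Rust 1.45 onwards. A standalone sketch of how `rawdata_path` in nodemap_docket.rs uses it to derive data-file names:

// Same behavior as the pub(crate) helper above, repeated here so the
// example is self-contained.
fn strip_suffix<'a>(s: &'a str, suffix: &str) -> Option<&'a str> {
    if s.ends_with(suffix) {
        Some(&s[..s.len() - suffix.len()])
    } else {
        None
    }
}

fn main() {
    // "00changelog.n" -> "00changelog", which then becomes "00changelog-<uid>.nd".
    assert_eq!(strip_suffix("00changelog.n", ".n"), Some("00changelog"));
    assert_eq!(strip_suffix("00changelog.n.a", ".n.a"), Some("00changelog"));
    assert_eq!(strip_suffix("00changelog.i", ".n"), None);
}
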
--- a/rust/hg-core/src/utils/files.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/utils/files.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -18,6 +18,7 @@
 use same_file::is_same_file;
 use std::borrow::{Cow, ToOwned};
 use std::fs::Metadata;
+use std::io::Read;
 use std::iter::FusedIterator;
 use std::ops::Deref;
 use std::path::{Path, PathBuf};
@@ -308,6 +309,17 @@
     }
 }
 
+/// Reads a file in one big chunk instead of doing multiple reads
+pub fn read_whole_file(filepath: &Path) -> std::io::Result<Vec<u8>> {
+    let mut file = std::fs::File::open(filepath)?;
+    let size = file.metadata()?.len();
+
+    let mut res = vec![0; size as usize];
+    file.read_exact(&mut res)?;
+
+    Ok(res)
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
--- a/rust/hg-core/src/utils/path_auditor.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-core/src/utils/path_auditor.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -206,6 +206,7 @@
 
         let base_dir = tempdir().unwrap();
         let base_dir_path = base_dir.path();
+        let skip = base_dir_path.components().count() - 1;
         let a = base_dir_path.join("a");
         let b = base_dir_path.join("b");
         create_dir(&a).unwrap();
@@ -215,7 +216,7 @@
         // TODO make portable
         std::os::unix::fs::symlink(&a, &b).unwrap();
 
-        let buf = b.join("in_a").components().skip(2).collect::<PathBuf>();
+        let buf = b.join("in_a").components().skip(skip).collect::<PathBuf>();
         eprintln!("buf: {}", buf.display());
         let path = path_to_hg_path_buf(buf).unwrap();
         assert_eq!(
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/hg-cpython/src/copy_tracing.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,148 @@
+use cpython::ObjectProtocol;
+use cpython::PyBool;
+use cpython::PyBytes;
+use cpython::PyDict;
+use cpython::PyList;
+use cpython::PyModule;
+use cpython::PyObject;
+use cpython::PyResult;
+use cpython::PyTuple;
+use cpython::Python;
+
+use hg::copy_tracing::combine_changeset_copies;
+use hg::copy_tracing::ChangedFiles;
+use hg::copy_tracing::DataHolder;
+use hg::copy_tracing::RevInfo;
+use hg::copy_tracing::RevInfoMaker;
+use hg::Revision;
+
+/// Combines copies information contained into revision `revs` to build a copy
+/// map.
+///
+/// See mercurial/copies.py for details
+pub fn combine_changeset_copies_wrapper(
+    py: Python,
+    revs: PyList,
+    children_count: PyDict,
+    target_rev: Revision,
+    rev_info: PyObject,
+    is_ancestor: PyObject,
+) -> PyResult<PyDict> {
+    let revs: PyResult<_> =
+        revs.iter(py).map(|r| Ok(r.extract(py)?)).collect();
+
+    // Wrap the `is_ancestor` python callback as a Rust closure
+    //
+    // No errors are expected from the Python side, and they should only
+    // happen in case of a programming error or severe data corruption. Such
+    // errors will raise a panic and the rust-cpython harness will turn them
+    // into Python exceptions.
+    let is_ancestor_wrap = |anc: Revision, desc: Revision| -> bool {
+        is_ancestor
+            .call(py, (anc, desc), None)
+            .expect(
+                "rust-copy-tracing: python call to `is_ancestor` \
+                failed",
+            )
+            .cast_into::<PyBool>(py)
+            .expect(
+                "rust-copy-tracing: python call to `is_ancestor` \
+                returned unexpected non-Bool value",
+            )
+            .is_true()
+    };
+
+    // Wrap the `rev_info_maker` python callback as a Rust closure
+    //
+    // No errors are expected from the Python side, and they will should only
+    // happens in case of programing error or severe data corruption. Such
+    // errors will raise panic and the rust-cpython harness will turn them into
+    // Python exception.
+    let rev_info_maker: RevInfoMaker<PyBytes> =
+        Box::new(|rev: Revision, d: &mut DataHolder<PyBytes>| -> RevInfo {
+            let res: PyTuple = rev_info
+                .call(py, (rev,), None)
+                .expect("rust-copy-tracing: python call to `rev_info` failed")
+                .cast_into(py)
+                .expect(
+                    "rust-copy-tracing: python call to `rev_info` returned \
+                    unexpected non-Tuple value",
+                );
+            let p1 = res.get_item(py, 0).extract(py).expect(
+                "rust-copy-tracing: rev_info return is invalid, first item \
+                is not a revision",
+            );
+            let p2 = res.get_item(py, 1).extract(py).expect(
+                "rust-copy-tracing: rev_info return is invalid, second item \
+                is not a revision",
+            );
+
+            let files = match res.get_item(py, 2).extract::<PyBytes>(py) {
+                Ok(raw) => {
+                    // Give responsibility for the raw bytes lifetime to
+                    // hg-core
+                    d.data = Some(raw);
+                    let addrs = d.data.as_ref().expect(
+                        "rust-copy-tracing: failed to get a reference to the \
+                        raw bytes for copy data").data(py);
+                    ChangedFiles::new(addrs)
+                }
+                // The value was presumably None, meaning there was no copy data.
+                Err(_) => ChangedFiles::new_empty(),
+            };
+
+            (p1, p2, files)
+        });
+    let children_count: PyResult<_> = children_count
+        .items(py)
+        .iter()
+        .map(|(k, v)| Ok((k.extract(py)?, v.extract(py)?)))
+        .collect();
+
+    let res = combine_changeset_copies(
+        revs?,
+        children_count?,
+        target_rev,
+        rev_info_maker,
+        &is_ancestor_wrap,
+    );
+    let out = PyDict::new(py);
+    for (dest, source) in res.into_iter() {
+        out.set_item(
+            py,
+            PyBytes::new(py, &dest.into_vec()),
+            PyBytes::new(py, &source.into_vec()),
+        )?;
+    }
+    Ok(out)
+}
+
+/// Create the module, with `__package__` given from parent
+pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
+    let dotted_name = &format!("{}.copy_tracing", package);
+    let m = PyModule::new(py, dotted_name)?;
+
+    m.add(py, "__package__", package)?;
+    m.add(py, "__doc__", "Copy tracing - Rust implementation")?;
+
+    m.add(
+        py,
+        "combine_changeset_copies",
+        py_fn!(
+            py,
+            combine_changeset_copies_wrapper(
+                revs: PyList,
+                children: PyDict,
+                target_rev: Revision,
+                rev_info: PyObject,
+                is_ancestor: PyObject
+            )
+        ),
+    )?;
+
+    let sys = PyModule::import(py, "sys")?;
+    let sys_modules: PyDict = sys.get(py, "modules")?.extract(py)?;
+    sys_modules.set_item(py, dotted_name, &m)?;
+
+    Ok(m)
+}
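
Editor's note: the wrapper above turns fallible Python callbacks into infallible Rust closures by panicking on error and letting the rust-cpython harness convert the panic into a Python exception. A generic std-only sketch of that design choice (the ancestry check here is a toy, illustrative only):

fn main() {
    // A fallible callback, standing in for the Python `is_ancestor` callable.
    let fallible_is_ancestor = |anc: i32, desc: i32| -> Result<bool, String> {
        if anc < 0 || desc < 0 {
            return Err("invalid revision".to_owned());
        }
        Ok(anc <= desc) // toy ancestry check
    };
    // The wrapper exposes the infallible signature the core algorithm expects;
    // unexpected errors become panics handled by the surrounding harness.
    let is_ancestor_wrap = |anc: i32, desc: i32| -> bool {
        fallible_is_ancestor(anc, desc).expect("is_ancestor callback failed")
    };
    assert!(is_ancestor_wrap(1, 2));
}
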
--- a/rust/hg-cpython/src/dirstate/dirstate_map.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-cpython/src/dirstate/dirstate_map.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -350,8 +350,8 @@
         {
             dict.set_item(
                 py,
-                key.as_bytes().to_vec(),
-                value.as_bytes().to_vec(),
+                PyBytes::new(py, key.as_bytes()).into_object(),
+                PyBytes::new(py, value.as_bytes()).into_object(),
             )?;
         }
         Ok(dict)
--- a/rust/hg-cpython/src/dirstate/status.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-cpython/src/dirstate/status.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -10,6 +10,7 @@
 //! `rustext.dirstate.status`.
 
 use crate::{dirstate::DirstateMap, exceptions::FallbackError};
+use cpython::exc::OSError;
 use cpython::{
     exc::ValueError, ObjectProtocol, PyBytes, PyErr, PyList, PyObject,
     PyResult, PyTuple, Python, PythonObject, ToPyObject,
@@ -89,6 +90,7 @@
 
             PyErr::new::<FallbackError, _>(py, &as_string)
         }
+        StatusError::IO(e) => PyErr::new::<OSError, _>(py, e.to_string()),
         e => PyErr::new::<ValueError, _>(py, e.to_string()),
     }
 }
--- a/rust/hg-cpython/src/lib.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hg-cpython/src/lib.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -29,6 +29,7 @@
 mod conversion;
 #[macro_use]
 pub mod ref_sharing;
+pub mod copy_tracing;
 pub mod dagops;
 pub mod debug;
 pub mod dirstate;
@@ -49,6 +50,11 @@
     m.add(py, "ancestor", ancestors::init_module(py, &dotted_name)?)?;
     m.add(py, "dagop", dagops::init_module(py, &dotted_name)?)?;
     m.add(py, "debug", debug::init_module(py, &dotted_name)?)?;
+    m.add(
+        py,
+        "copy_tracing",
+        copy_tracing::init_module(py, &dotted_name)?,
+    )?;
     m.add(py, "discovery", discovery::init_module(py, &dotted_name)?)?;
     m.add(py, "dirstate", dirstate::init_module(py, &dotted_name)?)?;
     m.add(py, "revlog", revlog::init_module(py, &dotted_name)?)?;
--- a/rust/hgcli/pyoxidizer.bzl	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/hgcli/pyoxidizer.bzl	Tue Jan 19 21:48:43 2021 +0530
@@ -14,6 +14,10 @@
     return default_python_distribution(flavor = "standalone_dynamic")
 
 def resource_callback(policy, resource):
+    if not IS_WINDOWS:
+        resource.add_location = "in-memory"
+        return
+
     # We use a custom resource routing policy to influence where things are loaded
     # from.
     #
@@ -36,18 +40,22 @@
 def make_exe(dist):
     """Builds a Rust-wrapped Mercurial binary."""
     packaging_policy = dist.make_python_packaging_policy()
+
     # Extension may depend on any Python functionality. Include all
     # extensions.
     packaging_policy.extension_module_filter = "all"
     packaging_policy.resources_location = "in-memory"
-    packaging_policy.resources_location_fallback = "filesystem-relative:lib"
+    if IS_WINDOWS:
+        packaging_policy.resources_location_fallback = "filesystem-relative:lib"
     packaging_policy.register_resource_callback(resource_callback)
 
     config = dist.make_python_interpreter_config()
     config.raw_allocator = "system"
-    config.run_mode = "eval:%s" % RUN_CODE
+    config.run_command = RUN_CODE
+
     # We want to let the user load extensions from the file system
     config.filesystem_importer = True
+
     # We need this to make resourceutil happy, since it looks for sys.frozen.
     config.sys_frozen = True
     config.legacy_windows_stdio = True
--- a/rust/rhg/Cargo.toml	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/Cargo.toml	Tue Jan 19 21:48:43 2021 +0530
@@ -1,7 +1,10 @@
 [package]
 name = "rhg"
 version = "0.1.0"
-authors = ["Antoine Cezar <antoine.cezar@octobus.net>"]
+authors = [
+    "Antoine Cezar <antoine.cezar@octobus.net>",
+    "Raphaël Gomès <raphael.gomes@octobus.net>",
+]
 edition = "2018"
 
 [dependencies]
@@ -10,3 +13,4 @@
 log = "0.4.11"
 micro-timer = "0.3.1"
 env_logger = "0.7.1"
+format-bytes = "0.1.3"
--- a/rust/rhg/src/commands.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/commands.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,5 +1,6 @@
 pub mod cat;
 pub mod debugdata;
+pub mod debugrequirements;
 pub mod files;
 pub mod root;
 use crate::error::CommandError;
--- a/rust/rhg/src/commands/cat.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/commands/cat.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -2,8 +2,8 @@
 use crate::error::{CommandError, CommandErrorKind};
 use crate::ui::utf8_to_local;
 use crate::ui::Ui;
-use hg::operations::FindRoot;
-use hg::operations::{CatRev, CatRevError, CatRevErrorKind};
+use hg::operations::{cat, CatRevError, CatRevErrorKind};
+use hg::repo::Repo;
 use hg::utils::hg_path::HgPathBuf;
 use micro_timer::timed;
 use std::convert::TryFrom;
@@ -31,7 +31,8 @@
 impl<'a> Command for CatCommand<'a> {
     #[timed]
     fn run(&self, ui: &Ui) -> Result<(), CommandError> {
-        let root = FindRoot::new().run()?;
+        let repo = Repo::find()?;
+        repo.check_requirements()?;
         let cwd = std::env::current_dir()
             .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?;
 
@@ -39,7 +40,7 @@
         for file in self.files.iter() {
             let normalized = cwd.join(&file);
             let stripped = normalized
-                .strip_prefix(&root)
+                .strip_prefix(&repo.working_directory_path())
                 .or(Err(CommandErrorKind::Abort(None)))?;
             let hg_file = HgPathBuf::try_from(stripped.to_path_buf())
                 .or(Err(CommandErrorKind::Abort(None)))?;
@@ -48,10 +49,8 @@
 
         match self.rev {
             Some(rev) => {
-                let mut operation = CatRev::new(&root, rev, &files)
+                let data = cat(&repo, rev, &files)
                     .map_err(|e| map_rev_error(rev, e))?;
-                let data =
-                    operation.run().map_err(|e| map_rev_error(rev, e))?;
                 self.display(ui, &data)
             }
             None => Err(CommandErrorKind::Unimplemented.into()),
@@ -68,7 +67,14 @@
             )),
             CatRevErrorKind::InvalidRevision => CommandErrorKind::Abort(Some(
                 utf8_to_local(&format!(
-                    "abort: invalid revision identifier{}\n",
+                    "abort: invalid revision identifier {}\n",
+                    rev
+                ))
+                .into(),
+            )),
+            CatRevErrorKind::AmbiguousPrefix => CommandErrorKind::Abort(Some(
+                utf8_to_local(&format!(
+                    "abort: ambiguous revision identifier {}\n",
                     rev
                 ))
                 .into(),
--- a/rust/rhg/src/commands/debugdata.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/commands/debugdata.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -3,8 +3,9 @@
 use crate::ui::utf8_to_local;
 use crate::ui::Ui;
 use hg::operations::{
-    DebugData, DebugDataError, DebugDataErrorKind, DebugDataKind,
+    debug_data, DebugDataError, DebugDataErrorKind, DebugDataKind,
 };
+use hg::repo::Repo;
 use micro_timer::timed;
 
 pub const HELP_TEXT: &str = "
@@ -25,9 +26,9 @@
 impl<'a> Command for DebugDataCommand<'a> {
     #[timed]
     fn run(&self, ui: &Ui) -> Result<(), CommandError> {
-        let mut operation = DebugData::new(self.rev, self.kind);
-        let data =
-            operation.run().map_err(|e| to_command_error(self.rev, e))?;
+        let repo = Repo::find()?;
+        let data = debug_data(&repo, self.rev, self.kind)
+            .map_err(|e| to_command_error(self.rev, e))?;
 
         let mut stdout = ui.stdout_buffer();
         stdout.write_all(&data)?;
@@ -40,7 +41,6 @@
 /// Convert operation errors to command errors
 fn to_command_error(rev: &str, err: DebugDataError) -> CommandError {
     match err.kind {
-        DebugDataErrorKind::FindRootError(err) => CommandError::from(err),
         DebugDataErrorKind::IoError(err) => CommandError {
             kind: CommandErrorKind::Abort(Some(
                 utf8_to_local(&format!("abort: {}\n", err)).into(),
@@ -55,6 +55,15 @@
                 .into(),
             )),
         },
+        DebugDataErrorKind::AmbiguousPrefix => CommandError {
+            kind: CommandErrorKind::Abort(Some(
+                utf8_to_local(&format!(
+                    "abort: ambiguous revision identifier {}\n",
+                    rev
+                ))
+                .into(),
+            )),
+        },
         DebugDataErrorKind::UnsuportedRevlogVersion(version) => CommandError {
             kind: CommandErrorKind::Abort(Some(
                 utf8_to_local(&format!(
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/rhg/src/commands/debugrequirements.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,30 @@
+use crate::commands::Command;
+use crate::error::CommandError;
+use crate::ui::Ui;
+use hg::repo::Repo;
+use hg::requirements;
+
+pub const HELP_TEXT: &str = "
+Print the current repo requirements.
+";
+
+pub struct DebugRequirementsCommand {}
+
+impl DebugRequirementsCommand {
+    pub fn new() -> Self {
+        DebugRequirementsCommand {}
+    }
+}
+
+impl Command for DebugRequirementsCommand {
+    fn run(&self, ui: &Ui) -> Result<(), CommandError> {
+        let repo = Repo::find()?;
+        let mut output = String::new();
+        for req in requirements::load(&repo)? {
+            output.push_str(&req);
+            output.push('\n');
+        }
+        ui.write_stdout(output.as_bytes())?;
+        Ok(())
+    }
+}
--- a/rust/rhg/src/commands/files.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/commands/files.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -2,18 +2,16 @@
 use crate::error::{CommandError, CommandErrorKind};
 use crate::ui::utf8_to_local;
 use crate::ui::Ui;
-use hg::operations::FindRoot;
 use hg::operations::{
-    ListDirstateTrackedFiles, ListDirstateTrackedFilesError,
-    ListDirstateTrackedFilesErrorKind,
+    list_rev_tracked_files, ListRevTrackedFilesError,
+    ListRevTrackedFilesErrorKind,
 };
 use hg::operations::{
-    ListRevTrackedFiles, ListRevTrackedFilesError,
-    ListRevTrackedFilesErrorKind,
+    Dirstate, ListDirstateTrackedFilesError, ListDirstateTrackedFilesErrorKind,
 };
+use hg::repo::Repo;
 use hg::utils::files::{get_bytes_from_path, relativize_path};
 use hg::utils::hg_path::{HgPath, HgPathBuf};
-use std::path::PathBuf;
 
 pub const HELP_TEXT: &str = "
 List tracked files.
@@ -33,13 +31,13 @@
     fn display_files(
         &self,
         ui: &Ui,
-        root: &PathBuf,
+        repo: &Repo,
         files: impl IntoIterator<Item = &'a HgPath>,
     ) -> Result<(), CommandError> {
         let cwd = std::env::current_dir()
             .or_else(|e| Err(CommandErrorKind::CurrentDirNotFound(e)))?;
         let rooted_cwd = cwd
-            .strip_prefix(&root)
+            .strip_prefix(repo.working_directory_path())
             .expect("cwd was already checked within the repository");
         let rooted_cwd = HgPathBuf::from(get_bytes_from_path(rooted_cwd));
 
@@ -56,17 +54,16 @@
 
 impl<'a> Command for FilesCommand<'a> {
     fn run(&self, ui: &Ui) -> Result<(), CommandError> {
-        let root = FindRoot::new().run()?;
+        let repo = Repo::find()?;
+        repo.check_requirements()?;
         if let Some(rev) = self.rev {
-            let mut operation = ListRevTrackedFiles::new(&root, rev)
+            let files = list_rev_tracked_files(&repo, rev)
                 .map_err(|e| map_rev_error(rev, e))?;
-            let files = operation.run().map_err(|e| map_rev_error(rev, e))?;
-            self.display_files(ui, &root, files)
+            self.display_files(ui, &repo, files.iter())
         } else {
-            let mut operation = ListDirstateTrackedFiles::new(&root)
-                .map_err(map_dirstate_error)?;
-            let files = operation.run().map_err(map_dirstate_error)?;
-            self.display_files(ui, &root, files)
+            let dirstate = Dirstate::new(&repo).map_err(map_dirstate_error)?;
+            let files = dirstate.tracked_files().map_err(map_dirstate_error)?;
+            self.display_files(ui, &repo, files)
         }
     }
 }
@@ -83,7 +80,16 @@
             ListRevTrackedFilesErrorKind::InvalidRevision => {
                 CommandErrorKind::Abort(Some(
                     utf8_to_local(&format!(
-                        "abort: invalid revision identifier{}\n",
+                        "abort: invalid revision identifier {}\n",
+                        rev
+                    ))
+                    .into(),
+                ))
+            }
+            ListRevTrackedFilesErrorKind::AmbiguousPrefix => {
+                CommandErrorKind::Abort(Some(
+                    utf8_to_local(&format!(
+                        "abort: ambiguous revision identifier {}\n",
                         rev
                     ))
                     .into(),
--- a/rust/rhg/src/commands/root.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/commands/root.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,7 +1,8 @@
 use crate::commands::Command;
 use crate::error::CommandError;
 use crate::ui::Ui;
-use hg::operations::FindRoot;
+use format_bytes::format_bytes;
+use hg::repo::Repo;
 use hg::utils::files::get_bytes_from_path;
 
 pub const HELP_TEXT: &str = "
@@ -20,13 +21,9 @@
 
 impl Command for RootCommand {
     fn run(&self, ui: &Ui) -> Result<(), CommandError> {
-        let path_buf = FindRoot::new().run()?;
-
-        let bytes = get_bytes_from_path(path_buf);
-
-        // TODO use formating macro
-        ui.write_stdout(&[bytes.as_slice(), b"\n"].concat())?;
-
+        let repo = Repo::find()?;
+        let bytes = get_bytes_from_path(repo.working_directory_path());
+        ui.write_stdout(&format_bytes!(b"{}\n", bytes.as_slice()))?;
         Ok(())
     }
 }
--- a/rust/rhg/src/error.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/error.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,6 +1,8 @@
 use crate::exitcode;
 use crate::ui::UiError;
+use format_bytes::format_bytes;
 use hg::operations::{FindRootError, FindRootErrorKind};
+use hg::requirements::RequirementsError;
 use hg::utils::files::get_bytes_from_path;
 use std::convert::From;
 use std::path::PathBuf;
@@ -12,6 +14,8 @@
     RootNotFound(PathBuf),
     /// The current directory cannot be found
     CurrentDirNotFound(std::io::Error),
+    /// An error occurred while reading or checking `.hg/requires`
+    RequirementsError(RequirementsError),
     /// The standard output stream cannot be written to
     StdoutError,
     /// The standard error stream cannot be written to
@@ -27,6 +31,10 @@
         match self {
             CommandErrorKind::RootNotFound(_) => exitcode::ABORT,
             CommandErrorKind::CurrentDirNotFound(_) => exitcode::ABORT,
+            CommandErrorKind::RequirementsError(
+                RequirementsError::Unsupported { .. },
+            ) => exitcode::UNIMPLEMENTED_COMMAND,
+            CommandErrorKind::RequirementsError(_) => exitcode::ABORT,
             CommandErrorKind::StdoutError => exitcode::ABORT,
             CommandErrorKind::StderrError => exitcode::ABORT,
             CommandErrorKind::Abort(_) => exitcode::ABORT,
@@ -37,26 +45,21 @@
     /// Return the message corresponding to the error kind if any
     pub fn get_error_message_bytes(&self) -> Option<Vec<u8>> {
         match self {
-            // TODO use formating macro
             CommandErrorKind::RootNotFound(path) => {
                 let bytes = get_bytes_from_path(path);
-                Some(
-                    [
-                        b"abort: no repository found in '",
-                        bytes.as_slice(),
-                        b"' (.hg not found)!\n",
-                    ]
-                    .concat(),
-                )
+                Some(format_bytes!(
+                    b"abort: no repository found in '{}' (.hg not found)!\n",
+                    bytes.as_slice()
+                ))
             }
-            // TODO use formating macro
-            CommandErrorKind::CurrentDirNotFound(e) => Some(
-                [
-                    b"abort: error getting current working directory: ",
-                    e.to_string().as_bytes(),
-                    b"\n",
-                ]
-                .concat(),
+            CommandErrorKind::CurrentDirNotFound(e) => Some(format_bytes!(
+                b"abort: error getting current working directory: {}\n",
+                e.to_string().as_bytes(),
+            )),
+            CommandErrorKind::RequirementsError(
+                RequirementsError::Corrupted,
+            ) => Some(
+                "abort: .hg/requires is corrupted\n".as_bytes().to_owned(),
             ),
             CommandErrorKind::Abort(message) => message.to_owned(),
             _ => None,
@@ -111,3 +114,11 @@
         }
     }
 }
+
+impl From<RequirementsError> for CommandError {
+    fn from(err: RequirementsError) -> Self {
+        CommandError {
+            kind: CommandErrorKind::RequirementsError(err),
+        }
+    }
+}
--- a/rust/rhg/src/main.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/main.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -83,6 +83,10 @@
                         .required(true)
                         .value_name("REV"),
                 ),
+        )
+        .subcommand(
+            SubCommand::with_name("debugrequirements")
+                .about(commands::debugrequirements::HELP_TEXT),
         );
 
     let matches = app.clone().get_matches_safe().unwrap_or_else(|err| {
@@ -124,6 +128,10 @@
         ("debugdata", Some(matches)) => {
             commands::debugdata::DebugDataCommand::try_from(matches)?.run(&ui)
         }
+        ("debugrequirements", _) => {
+            commands::debugrequirements::DebugRequirementsCommand::new()
+                .run(&ui)
+        }
         _ => unreachable!(), // Because of AppSettings::SubcommandRequired,
     }
 }
--- a/rust/rhg/src/ui.rs	Thu Dec 24 15:58:08 2020 +0900
+++ b/rust/rhg/src/ui.rs	Tue Jan 19 21:48:43 2021 +0530
@@ -1,3 +1,4 @@
+use format_bytes::format_bytes;
 use std::borrow::Cow;
 use std::io;
 use std::io::{ErrorKind, Write};
@@ -87,7 +88,10 @@
     let mut stderr = io::stderr();
 
     stderr
-        .write_all(&[b"abort: ", error.to_string().as_bytes(), b"\n"].concat())
+        .write_all(&format_bytes!(
+            b"abort: {}\n",
+            error.to_string().as_bytes()
+        ))
         .map_err(UiError::StderrError)?;
 
     stderr.flush().map_err(UiError::StderrError)?;
--- a/setup.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/setup.py	Tue Jan 19 21:48:43 2021 +0530
@@ -179,7 +179,6 @@
 import re
 import shutil
 import tempfile
-from distutils import log
 
 # We have issues with setuptools on some platforms and builders. Until
 # those are resolved, setuptools is opt-in except for platforms where
@@ -199,6 +198,7 @@
 from distutils.command.install import install
 from distutils.command.install_lib import install_lib
 from distutils.command.install_scripts import install_scripts
+from distutils import log
 from distutils.spawn import spawn, find_executable
 from distutils import file_util
 from distutils.errors import (
@@ -782,6 +782,9 @@
 
         pythonlib = None
 
+        dir = os.path.dirname(self.get_ext_fullpath('dummy'))
+        self.hgtarget = os.path.join(dir, 'hg')
+
         if getattr(sys, 'dllhandle', None):
             # Different Python installs can have different Python library
             # names. e.g. the official CPython distribution uses pythonXY.dll
@@ -807,6 +810,19 @@
                     )
                 pythonlib = dllbasename[:-4]
 
+                # Copy the pythonXY.dll next to the binary so that it runs
+                # without tampering with PATH.
+                fsdecode = lambda x: x
+                if sys.version_info[0] >= 3:
+                    fsdecode = os.fsdecode
+                dest = os.path.join(
+                    os.path.dirname(self.hgtarget),
+                    fsdecode(dllbasename),
+                )
+
+                if not os.path.exists(dest):
+                    shutil.copy(buf.value, dest)
+
         if not pythonlib:
             log.warn(
                 'could not determine Python DLL filename; assuming pythonXY'
@@ -829,8 +845,6 @@
             output_dir=self.build_temp,
             macros=macros,
         )
-        dir = os.path.dirname(self.get_ext_fullpath('dummy'))
-        self.hgtarget = os.path.join(dir, 'hg')
         self.compiler.link_executable(
             objects, self.hgtarget, libraries=[], output_dir=self.build_temp
         )
@@ -1053,7 +1067,7 @@
 
 
 class hginstalllib(install_lib):
-    '''
+    """
     This is a specialization of install_lib that replaces the copy_file used
     there so that it supports setting the mode of files after copying them,
     instead of just preserving the mode that the files originally had.  If your
@@ -1062,7 +1076,7 @@
 
     Note that just passing keep_permissions=False to copy_file would be
     insufficient, as it might still be applying a umask.
-    '''
+    """
 
     def run(self):
         realcopyfile = file_util.copy_file
@@ -1090,11 +1104,11 @@
 
 
 class hginstallscripts(install_scripts):
-    '''
+    """
     This is a specialization of install_scripts that replaces the @LIBDIR@ with
     the configured directory for modules. If possible, the path is made relative
     to the directory for scripts.
-    '''
+    """
 
     def initialize_options(self):
         install_scripts.initialize_options(self)
@@ -1273,6 +1287,7 @@
     'mercurial.thirdparty.attr',
     'mercurial.thirdparty.zope',
     'mercurial.thirdparty.zope.interface',
+    'mercurial.upgrade_utils',
     'mercurial.utils',
     'mercurial.revlogutils',
     'mercurial.testing',
@@ -1294,6 +1309,13 @@
     'hgdemandimport',
 ]
 
+# The pygit2 dependency dropped py2 support with the 1.0 release in Dec 2019.
+# Prior releases do not build at all on Windows, because Visual Studio 2008
+# doesn't understand C 11.  Older Linux releases are buggy.
+if sys.version_info[0] == 2:
+    packages.remove('hgext.git')
+
+
 for name in os.listdir(os.path.join('mercurial', 'templates')):
     if name != '__pycache__' and os.path.isdir(
         os.path.join('mercurial', 'templates', name)
@@ -1387,8 +1409,7 @@
 
 
 class RustExtension(Extension):
-    """Base classes for concrete Rust Extension classes.
-    """
+    """Base classes for concrete Rust Extension classes."""
 
     rusttargetdir = os.path.join('rust', 'target', 'release')
 
@@ -1534,7 +1555,10 @@
         include_dirs=common_include_dirs,
         extra_compile_args=common_cflags,
         depends=common_depends
-        + ['mercurial/cext/charencode.h', 'mercurial/cext/revlog.h',],
+        + [
+            'mercurial/cext/charencode.h',
+            'mercurial/cext/revlog.h',
+        ],
     ),
     Extension(
         'mercurial.cext.osutil',
@@ -1622,10 +1646,19 @@
     msvccompiler.MSVCCompiler = HackedMSVCCompiler
 
 packagedata = {
-    'mercurial': ['locale/*/LC_MESSAGES/hg.mo', 'dummycert.pem',],
-    'mercurial.defaultrc': ['*.rc',],
-    'mercurial.helptext': ['*.txt',],
-    'mercurial.helptext.internals': ['*.txt',],
+    'mercurial': [
+        'locale/*/LC_MESSAGES/hg.mo',
+        'dummycert.pem',
+    ],
+    'mercurial.defaultrc': [
+        '*.rc',
+    ],
+    'mercurial.helptext': [
+        '*.txt',
+    ],
+    'mercurial.helptext.internals': [
+        '*.txt',
+    ],
 }
 
 
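
The setup.py hunk above arranges for the CPython DLL to be copied next to the freshly linked hg.exe so the binary starts without the Python install directory being on PATH. Below is a minimal standalone sketch of that idea; the function name and both path arguments are illustrative assumptions, not values taken from the build.

import os
import shutil

def copy_python_dll_next_to(hg_exe_path, python_dll_path):
    # Place the interpreter DLL in the directory holding hg.exe, mirroring
    # the intent of the setup.py change above.  An existing copy is assumed
    # to be current, so the copy only happens once.
    dest = os.path.join(os.path.dirname(hg_exe_path),
                        os.path.basename(python_dll_path))
    if not os.path.exists(dest):
        shutil.copy(python_dll_path, dest)
    return dest
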
--- a/tests/artifacts/scripts/generate-churning-bundle.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/artifacts/scripts/generate-churning-bundle.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # generate-branchy-bundle - generate a branch for a "large" branchy repository
 #
--- a/tests/badserverext.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/badserverext.py	Tue Jan 19 21:48:43 2021 +0530
@@ -44,16 +44,24 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'badserver', b'closeafteraccept', default=False,
+    b'badserver',
+    b'closeafteraccept',
+    default=False,
 )
 configitem(
-    b'badserver', b'closeafterrecvbytes', default=b'0',
+    b'badserver',
+    b'closeafterrecvbytes',
+    default=b'0',
 )
 configitem(
-    b'badserver', b'closeaftersendbytes', default=b'0',
+    b'badserver',
+    b'closeaftersendbytes',
+    default=b'0',
 )
 configitem(
-    b'badserver', b'closebeforeaccept', default=False,
+    b'badserver',
+    b'closebeforeaccept',
+    default=False,
 )
 
 # We can't adjust __class__ on a socket instance. So we define a proxy type.
--- a/tests/check-perf-code.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/check-perf-code.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # check-perf-code - (historical) portability checker for contrib/perf.py
 
--- a/tests/drawdag.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/drawdag.py	Tue Jan 19 21:48:43 2021 +0530
@@ -86,11 +86,11 @@
 import itertools
 import re
 
+from mercurial.node import nullid
 from mercurial.i18n import _
 from mercurial import (
     context,
     error,
-    node,
     obsolete,
     pycompat,
     registrar,
@@ -299,7 +299,7 @@
         self._added = added
         self._parents = parentctxs
         while len(self._parents) < 2:
-            self._parents.append(repo[node.nullid])
+            self._parents.append(repo[nullid])
 
     def filectx(self, key):
         return simplefilectx(key, self._added[key])
@@ -388,7 +388,7 @@
         content = content.replace(br'\n', b'\n').replace(br'\1', b'\1')
         files[name][path] = content
 
-    committed = {None: node.nullid}  # {name: node}
+    committed = {None: nullid}  # {name: node}
 
     # for leaf nodes, try to find existing nodes in repo
     for name, parents in edges.items():
--- a/tests/dumbhttp.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/dumbhttp.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import
 
--- a/tests/dummysmtpd.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/dummysmtpd.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 """dummy SMTP server for use in tests"""
 
--- a/tests/dummyssh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/dummyssh	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import
 
--- a/tests/f	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/f	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 """
 Utility for inspecting files in various ways.
--- a/tests/fakedirstatewritetime.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/fakedirstatewritetime.py	Tue Jan 19 21:48:43 2021 +0530
@@ -27,7 +27,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fakedirstatewritetime', b'fakenow', default=None,
+    b'fakedirstatewritetime',
+    b'fakenow',
+    default=None,
 )
 
 parsers = policy.importmod('parsers')
--- a/tests/fakepatchtime.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/fakepatchtime.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,7 +14,9 @@
 configitem = registrar.configitem(configtable)
 
 configitem(
-    b'fakepatchtime', b'fakenow', default=None,
+    b'fakepatchtime',
+    b'fakenow',
+    default=None,
 )
 
 
--- a/tests/filterpyflakes.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/filterpyflakes.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Filter output by pyflakes to control which warnings we check
 
--- a/tests/filtertraceback.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/filtertraceback.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # Filters traceback lines from stdin.
 
--- a/tests/flagprocessorext.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/flagprocessorext.py	Tue Jan 19 21:48:43 2021 +0530
@@ -6,8 +6,8 @@
 import zlib
 
 from mercurial import (
+    bundlecaches,
     changegroup,
-    exchange,
     extensions,
     revlog,
     util,
@@ -134,15 +134,25 @@
     revlog.REVIDX_FLAGS_ORDER.extend(flags)
 
     # Teach exchange to use changegroup 3
-    for k in exchange._bundlespeccontentopts.keys():
-        exchange._bundlespeccontentopts[k][b"cg.version"] = b"03"
+    for k in bundlecaches._bundlespeccontentopts.keys():
+        bundlecaches._bundlespeccontentopts[k][b"cg.version"] = b"03"
 
     # Register flag processors for each extension
     flagutil.addflagprocessor(
-        REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,)
+        REVIDX_NOOP,
+        (
+            noopdonothingread,
+            noopdonothing,
+            validatehash,
+        ),
     )
     flagutil.addflagprocessor(
-        REVIDX_BASE64, (b64decode, b64encode, bypass,),
+        REVIDX_BASE64,
+        (
+            b64decode,
+            b64encode,
+            bypass,
+        ),
     )
     flagutil.addflagprocessor(
         REVIDX_GZIP, (gzipdecompress, gzipcompress, bypass)
--- a/tests/fsmonitor-run-tests.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/fsmonitor-run-tests.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # fsmonitor-run-tests.py - Run Mercurial tests with fsmonitor enabled
 #
--- a/tests/get-with-headers.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/get-with-headers.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 """This does HTTP GET requests given a host:port and path and returns
 a subset of the headers plus the body of the result."""
--- a/tests/hghave	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/hghave	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """Test the running system for features availability. Exit with zero
 if all features are there, non-zero otherwise. If a feature name is
 prefixed with "no-", the absence of feature is tested.
--- a/tests/hghave.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/hghave.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,6 +14,8 @@
 checks = {
     "true": (lambda: True, "yak shaving"),
     "false": (lambda: False, "nail clipper"),
+    "known-bad-output": (lambda: True, "use for currently known bad output"),
+    "missing-correct-output": (lambda: False, "use for missing good output"),
 }
 
 try:
@@ -586,14 +588,14 @@
 
 @check("pylint", "Pylint python linter")
 def has_pylint():
-    return matchoutput("pylint --help", br"Usage:  pylint", True)
+    return matchoutput("pylint --help", br"Usage:[ ]+pylint", True)
 
 
 @check("clang-format", "clang-format C code formatter")
 def has_clang_format():
     m = matchoutput('clang-format --version', br'clang-format version (\d+)')
-    # style changed somewhere between 4.x and 6.x
-    return m and int(m.group(1)) >= 6
+    # style changed somewhere between 10.x and 11.x
+    return m and int(m.group(1)) >= 11
 
 
 @check("jshint", "JSHint static code analysis tool")
@@ -700,17 +702,29 @@
     return os.path.isdir(os.path.join(t, "..", ".hg"))
 
 
-@check("tic", "terminfo compiler and curses module")
-def has_tic():
+@check("curses", "terminfo compiler and curses module")
+def has_curses():
     try:
         import curses
 
         curses.COLOR_BLUE
-        return matchoutput('test -x "`which tic`"', br'')
+
+        # Windows doesn't have a `tic` executable, but the windows_curses
+        # package is sufficient to run the tests without it.
+        if os.name == 'nt':
+            return True
+
+        return has_tic()
+
     except (ImportError, AttributeError):
         return False
 
 
+@check("tic", "terminfo compiler")
+def has_tic():
+    return matchoutput('test -x "`which tic`"', br'')
+
+
 @check("xz", "xz compression utility")
 def has_xz():
     # When Windows invokes a subprocess in shell mode, it uses `cmd.exe`, which
@@ -886,17 +900,16 @@
         return False
 
 
-@check("py2virtualenv", "Python2 virtualenv support")
-def has_py2virtualenv():
-    if sys.version_info[0] != 2:
-        return False
-
+@check("virtualenv", "virtualenv support")
+def has_virtualenv():
     try:
         import virtualenv
 
-        virtualenv.ACTIVATE_SH
-        return True
-    except ImportError:
+        # --no-site-package became the default in 1.7 (Nov 2011), and the
+        # argument was removed in 20.0 (Feb 2020).  Rather than make the
+        # script complicated, just ignore ancient versions.
+        return int(virtualenv.__version__.split('.')[0]) > 1
+    except (AttributeError, ImportError, IndexError):
         return False
 
 
@@ -1005,7 +1018,7 @@
     return 'fncache' in getrepofeatures()
 
 
-@check('sqlite', 'sqlite3 module is available')
+@check('sqlite', 'sqlite3 module and matching cli is available')
 def has_sqlite():
     try:
         import sqlite3
@@ -1047,7 +1060,7 @@
     version_regex = b'black, version ([0-9a-b.]+)'
     version = matchoutput(blackcmd, version_regex)
     sv = distutils.version.StrictVersion
-    return version and sv(_bytes2sys(version.group(1))) >= sv('19.10b0')
+    return version and sv(_bytes2sys(version.group(1))) >= sv('20.8b1')
 
 
 @check('pytype', 'the pytype type checker')
@@ -1058,11 +1071,12 @@
     return version and sv(_bytes2sys(version.group(0))) >= sv('2019.10.17')
 
 
-@check("rustfmt", "rustfmt tool")
+@check("rustfmt", "rustfmt tool at version nightly-2020-10-04")
 def has_rustfmt():
     # We use Nightly's rustfmt due to current unstable config options.
     return matchoutput(
-        '`rustup which --toolchain nightly rustfmt` --version', b'rustfmt'
+        '`rustup which --toolchain nightly-2020-10-04 rustfmt` --version',
+        b'rustfmt',
     )
 
 
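
Several of the hghave.py updates above tighten version gates (clang-format >= 11, black >= 20.8b1, a pinned nightly rustfmt). The helper below is a loose, self-contained sketch of that kind of check; tool_at_least and its arguments are examples, not part of hghave's matchoutput() API.

import re
import subprocess

def tool_at_least(cmd, pattern, minimum):
    # Run `cmd`, look for a dotted version captured by `pattern`, and
    # compare it (as a tuple of integers) against `minimum`.
    try:
        out = subprocess.run(cmd, capture_output=True, text=True, check=False)
    except OSError:
        return False
    m = re.search(pattern, out.stdout + out.stderr)
    if not m:
        return False
    found = tuple(int(x) for x in m.group(1).split('.'))
    return found >= minimum

# Roughly equivalent to the updated clang-format gate above:
# tool_at_least(['clang-format', '--version'],
#               r'clang-format version (\d+(?:\.\d+)*)', (11,))
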
--- a/tests/hypothesishelpers.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/hypothesishelpers.py	Tue Jan 19 21:48:43 2021 +0530
@@ -44,8 +44,7 @@
 
 
 def roundtrips(data, decode, encode):
-    """helper to tests function that must do proper encode/decode roundtripping
-    """
+    """helper to tests function that must do proper encode/decode roundtripping"""
 
     @given(data)
     def testroundtrips(value):
@@ -71,6 +70,11 @@
     st.builds(
         lambda s, e: s.encode(e),
         st.text(),
-        st.sampled_from(['utf-8', 'utf-16',]),
+        st.sampled_from(
+            [
+                'utf-8',
+                'utf-16',
+            ]
+        ),
     )
 ) | st.binary()
--- a/tests/killdaemons.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/killdaemons.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import
 import errno
--- a/tests/library-infinitepush.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/library-infinitepush.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -15,7 +15,7 @@
 [extensions]
 infinitepush=
 [ui]
-ssh = python "$TESTDIR/dummyssh"
+ssh = $PYTHON "$TESTDIR/dummyssh"
 [infinitepush]
 branchpattern=re:scratch/.*
 EOF
--- a/tests/ls-l.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/ls-l.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 # like ls -l, but do not print date, user, or non-common mode bit, to avoid
 # using globs in tests.
--- a/tests/md5sum.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/md5sum.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Based on python's Tools/scripts/md5sum.py
 #
--- a/tests/narrow-library.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/narrow-library.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -2,7 +2,7 @@
 [extensions]
 narrow=
 [ui]
-ssh=python "$RUNTESTDIR/dummyssh"
+ssh=$PYTHON "$RUNTESTDIR/dummyssh"
 [experimental]
 changegroup3 = True
 EOF
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/phabricator/phabupdate-revs.json	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,218 @@
+{
+    "version": 1,
+    "interactions": [
+        {
+            "request": {
+                "method": "POST",
+                "uri": "https://phab.mercurial-scm.org//api/differential.query",
+                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22ids%22%3A+%5B7917%2C+7918%5D%7D&output=json&__conduit__=1",
+                "headers": {
+                    "accept": [
+                        "application/mercurial-0.1"
+                    ],
+                    "content-length": [
+                        "154"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
+                    "host": [
+                        "phab.mercurial-scm.org"
+                    ],
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.6+124-da178b816812+20201120)"
+                    ]
+                }
+            },
+            "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
+                "headers": {
+                    "X-XSS-Protection": [
+                        "1; mode=block"
+                    ],
+                    "Date": [
+                        "Sat, 21 Nov 2020 04:42:44 GMT"
+                    ],
+                    "Referrer-Policy": [
+                        "no-referrer"
+                    ],
+                    "Strict-Transport-Security": [
+                        "max-age=0; includeSubdomains; preload"
+                    ],
+                    "Cache-Control": [
+                        "no-store"
+                    ],
+                    "Set-Cookie": [
+                        "phsid=A%2Fw422dnwrfacwgvoi3igw4xdjyojfi67muc4ucxxs; expires=Thu, 20-Nov-2025 04:42:44 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
+                    ],
+                    "X-Content-Type-Options": [
+                        "nosniff"
+                    ],
+                    "X-Frame-Options": [
+                        "Deny"
+                    ],
+                    "Server": [
+                        "Apache/2.4.10 (Debian)"
+                    ],
+                    "Transfer-Encoding": [
+                        "chunked"
+                    ],
+                    "Content-Type": [
+                        "application/json"
+                    ],
+                    "Expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
+                    ]
+                },
+                "body": {
+                    "string": "{\"result\":[{\"id\":\"7918\",\"phid\":\"PHID-DREV-sfsckrwrwc77rdl3k5rz\",\"title\":\"create draft change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7918\",\"dateCreated\":\"1579221164\",\"dateModified\":\"1579222305\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":2},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"3\",\"activeDiffPHID\":\"PHID-DIFF-pqdlhei24n47fzeofjph\",\"diffs\":[\"19394\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[\"PHID-DREV-yhl3yvijs4jploa5iqm4\"]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"},{\"id\":\"7917\",\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\",\"title\":\"create public change for phabricator testing\",\"uri\":\"https:\\/\\/phab.mercurial-scm.org\\/D7917\",\"dateCreated\":\"1579221160\",\"dateModified\":\"1579222286\",\"authorPHID\":\"PHID-USER-tzhaient733lwrlbcag5\",\"status\":\"4\",\"statusName\":\"Abandoned\",\"properties\":{\"draft.broadcast\":true,\"lines.added\":1,\"lines.removed\":1},\"branch\":\"default\",\"summary\":\"\",\"testPlan\":\"\",\"lineCount\":\"2\",\"activeDiffPHID\":\"PHID-DIFF-e64weyerxtutv2jvj2dt\",\"diffs\":[\"19393\"],\"commits\":[],\"reviewers\":{\"PHID-PROJ-3dvcxzznrjru2xmmses3\":\"PHID-PROJ-3dvcxzznrjru2xmmses3\"},\"ccs\":[\"PHID-USER-q42dn7cc3donqriafhjx\"],\"hashes\":[[\"hgcm\",\"\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\"]],\"auxiliary\":{\"phabricator:projects\":[],\"phabricator:depends-on\":[]},\"repositoryPHID\":\"PHID-REPO-bvunnehri4u2isyr7bc3\",\"sourcePath\":\"\\/\"}],\"error_code\":null,\"error_info\":null}"
+                }
+            }
+        },
+        {
+            "request": {
+                "method": "POST",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+%22PHID-DREV-yhl3yvijs4jploa5iqm4%22%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22reclaim%22%2C+%22value%22%3A+true%7D%5D%7D&output=json&__conduit__=1",
+                "headers": {
+                    "accept": [
+                        "application/mercurial-0.1"
+                    ],
+                    "content-length": [
+                        "273"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
+                    "host": [
+                        "phab.mercurial-scm.org"
+                    ],
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.6+124-da178b816812+20201120)"
+                    ]
+                }
+            },
+            "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
+                "headers": {
+                    "X-XSS-Protection": [
+                        "1; mode=block"
+                    ],
+                    "Date": [
+                        "Sat, 21 Nov 2020 04:42:44 GMT"
+                    ],
+                    "Referrer-Policy": [
+                        "no-referrer"
+                    ],
+                    "Strict-Transport-Security": [
+                        "max-age=0; includeSubdomains; preload"
+                    ],
+                    "Cache-Control": [
+                        "no-store"
+                    ],
+                    "Set-Cookie": [
+                        "phsid=A%2Fx3q7clkv3ti5yodbqj65kwkptz476tzun2uauime; expires=Thu, 20-Nov-2025 04:42:44 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
+                    ],
+                    "X-Content-Type-Options": [
+                        "nosniff"
+                    ],
+                    "X-Frame-Options": [
+                        "Deny"
+                    ],
+                    "Server": [
+                        "Apache/2.4.10 (Debian)"
+                    ],
+                    "Transfer-Encoding": [
+                        "chunked"
+                    ],
+                    "Content-Type": [
+                        "application/json"
+                    ],
+                    "Expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
+                    ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"object\":{\"id\":7917,\"phid\":\"PHID-DREV-yhl3yvijs4jploa5iqm4\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-6alffj324fqa2nj\"},{\"phid\":\"PHID-XACT-DREV-p4bkxo6zaqhcnvp\"}]},\"error_code\":null,\"error_info\":null}"
+                }
+            }
+        },
+        {
+            "request": {
+                "method": "POST",
+                "uri": "https://phab.mercurial-scm.org//api/differential.revision.edit",
+                "body": "params=%7B%22__conduit__%22%3A+%7B%22token%22%3A+%22cli-hahayouwish%22%7D%2C+%22objectIdentifier%22%3A+%22PHID-DREV-sfsckrwrwc77rdl3k5rz%22%2C+%22transactions%22%3A+%5B%7B%22type%22%3A+%22reclaim%22%2C+%22value%22%3A+true%7D%5D%7D&output=json&__conduit__=1",
+                "headers": {
+                    "accept": [
+                        "application/mercurial-0.1"
+                    ],
+                    "content-length": [
+                        "273"
+                    ],
+                    "content-type": [
+                        "application/x-www-form-urlencoded"
+                    ],
+                    "host": [
+                        "phab.mercurial-scm.org"
+                    ],
+                    "user-agent": [
+                        "mercurial/proto-1.0 (Mercurial 5.6+124-da178b816812+20201120)"
+                    ]
+                }
+            },
+            "response": {
+                "status": {
+                    "code": 200,
+                    "message": "OK"
+                },
+                "headers": {
+                    "X-XSS-Protection": [
+                        "1; mode=block"
+                    ],
+                    "Date": [
+                        "Sat, 21 Nov 2020 04:42:45 GMT"
+                    ],
+                    "Referrer-Policy": [
+                        "no-referrer"
+                    ],
+                    "Strict-Transport-Security": [
+                        "max-age=0; includeSubdomains; preload"
+                    ],
+                    "Cache-Control": [
+                        "no-store"
+                    ],
+                    "Set-Cookie": [
+                        "phsid=A%2Fmgusoeqrp6o43ru5uhrtttwzxtf32aqb5siez3oo; expires=Thu, 20-Nov-2025 04:42:45 GMT; Max-Age=157680000; path=/; domain=phab.mercurial-scm.org; secure; httponly"
+                    ],
+                    "X-Content-Type-Options": [
+                        "nosniff"
+                    ],
+                    "X-Frame-Options": [
+                        "Deny"
+                    ],
+                    "Server": [
+                        "Apache/2.4.10 (Debian)"
+                    ],
+                    "Transfer-Encoding": [
+                        "chunked"
+                    ],
+                    "Content-Type": [
+                        "application/json"
+                    ],
+                    "Expires": [
+                        "Sat, 01 Jan 2000 00:00:00 GMT"
+                    ]
+                },
+                "body": {
+                    "string": "{\"result\":{\"object\":{\"id\":7918,\"phid\":\"PHID-DREV-sfsckrwrwc77rdl3k5rz\"},\"transactions\":[{\"phid\":\"PHID-XACT-DREV-wfo42n4hsabosy5\"},{\"phid\":\"PHID-XACT-DREV-aqluo36kykh2jae\"}]},\"error_code\":null,\"error_info\":null}"
+                }
+            }
+        }
+    ]
+}
\ No newline at end of file
--- a/tests/printenv.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/printenv.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # simple script to be used in hooks
 #
--- a/tests/readlink.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/readlink.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import, print_function
 
--- a/tests/remotefilelog-library.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/remotefilelog-library.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -8,7 +8,7 @@
 rebase=
 strip=
 [ui]
-ssh=python "$TESTDIR/dummyssh"
+ssh=$PYTHON "$TESTDIR/dummyssh"
 [server]
 preferuncompressed=True
 [experimental]
--- a/tests/revlog-formatv0.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/revlog-formatv0.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # Copyright 2010 Intevation GmbH
 # Author(s):
 # Thomas Arendsen Hein <thomas@intevation.de>
--- a/tests/run-tests.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/run-tests.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # run-tests.py - Run a set of tests on Mercurial
 #
@@ -47,6 +47,7 @@
 
 import argparse
 import collections
+import contextlib
 import difflib
 import distutils.version as version
 import errno
@@ -255,12 +256,13 @@
     else:
         family = socket.AF_INET
     try:
-        s = socket.socket(family, socket.SOCK_STREAM)
-        s.bind(('localhost', port))
-        s.close()
+        with contextlib.closing(socket.socket(family, socket.SOCK_STREAM)) as s:
+            s.bind(('localhost', port))
         return True
     except socket.error as exc:
-        if exc.errno not in (
+        if os.name == 'nt' and exc.errno == errno.WSAEACCES:
+            return False
+        elif exc.errno not in (
             errno.EADDRINUSE,
             errno.EADDRNOTAVAIL,
             errno.EPROTONOSUPPORT,
@@ -534,7 +536,9 @@
         help="install and use chg wrapper in place of hg",
     )
     hgconf.add_argument(
-        "--chg-debug", action="store_true", help="show chg debug logs",
+        "--chg-debug",
+        action="store_true",
+        help="show chg debug logs",
     )
     hgconf.add_argument("--compiler", help="compiler to build with")
     hgconf.add_argument(
@@ -1193,7 +1197,10 @@
         if self._keeptmpdir:
             log(
                 '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
-                % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
+                % (
+                    _bytes2sys(self._testtmp),
+                    _bytes2sys(self._threadtmp),
+                )
             )
         else:
             try:
@@ -1330,6 +1337,9 @@
         env['TESTTMP'] = _bytes2sys(self._testtmp)
         env['TESTNAME'] = self.name
         env['HOME'] = _bytes2sys(self._testtmp)
+        if os.name == 'nt':
+            # py3.8+ ignores HOME: https://bugs.python.org/issue36264
+            env['USERPROFILE'] = env['HOME']
         formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
         env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
         env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
@@ -1359,14 +1369,14 @@
 
         extraextensions = []
         for opt in self._extraconfigopts:
-            section, key = _sys2bytes(opt).split(b'.', 1)
+            section, key = opt.split('.', 1)
             if section != 'extensions':
                 continue
-            name = key.split(b'=', 1)[0]
+            name = key.split('=', 1)[0]
             extraextensions.append(name)
 
         if extraextensions:
-            env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
+            env['HGTESTEXTRAEXTENSIONS'] = ' '.join(extraextensions)
 
         # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
         # IP addresses.
@@ -1439,9 +1449,11 @@
             hgrc.write(b'[ui]\n')
             hgrc.write(b'slash = True\n')
             hgrc.write(b'interactive = False\n')
+            hgrc.write(b'detailed-exit-code = True\n')
             hgrc.write(b'merge = internal:merge\n')
             hgrc.write(b'mergemarkers = detailed\n')
             hgrc.write(b'promptecho = True\n')
+            hgrc.write(b'timeout.warn=15\n')
             hgrc.write(b'[defaults]\n')
             hgrc.write(b'[devel]\n')
             hgrc.write(b'all-warnings = true\n')
@@ -2090,11 +2102,11 @@
 
     @staticmethod
     def parsehghaveoutput(lines):
-        '''Parse hghave log lines.
+        """Parse hghave log lines.
 
         Return tuple of lists (missing, failed):
           * the missing/unknown features
-          * the features for which existence check failed'''
+          * the features for which existence check failed"""
         missing = []
         failed = []
         for line in lines:
@@ -2154,12 +2166,10 @@
             self.color = pygmentspresent
 
     def onStart(self, test):
-        """ Can be overriden by custom TestResult
-        """
+        """Can be overriden by custom TestResult"""
 
     def onEnd(self):
-        """ Can be overriden by custom TestResult
-        """
+        """Can be overriden by custom TestResult"""
 
     def addFailure(self, test, reason):
         self.failures.append((test, reason))
@@ -2943,7 +2953,7 @@
         self._hgtmp = None
         self._installdir = None
         self._bindir = None
-        self._tmpbinddir = None
+        self._tmpbindir = None
         self._pythondir = None
         self._coveragefile = None
         self._createdfiles = []
@@ -3167,7 +3177,9 @@
         vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
         vlog("# Using PATH", os.environ["PATH"])
         vlog(
-            "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
+            "# Using",
+            _bytes2sys(IMPL_PATH),
+            _bytes2sys(osenvironb[IMPL_PATH]),
         )
         vlog("# Writing to directory", _bytes2sys(self._outputdir))
 
@@ -3433,7 +3445,7 @@
     def _usecorrectpython(self):
         """Configure the environment to use the appropriate Python in tests."""
         # Tests must use the same interpreter as us or bad things will happen.
-        pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
+        pyexename = sys.platform == 'win32' and b'python.exe' or b'python3'
 
         # os.symlink() is a thing with py3 on Windows, but it requires
         # Administrator rights.
@@ -3459,6 +3471,15 @@
                     if err.errno != errno.EEXIST:
                         raise
         else:
+            # Windows doesn't have `python3.exe`, and MSYS cannot understand the
+            # reparse point with that name provided by Microsoft.  Create a
+            # simple script on PATH with that name that delegates to the py3
+            # launcher so the shebang lines work.
+            if os.getenv('MSYSTEM'):
+                with open(osenvironb[b'RUNTESTDIR'] + b'/python3', 'wb') as f:
+                    f.write(b'#!/bin/sh\n')
+                    f.write(b'py -3 "$@"\n')
+
             exedir, exename = os.path.split(sysexecutable)
             vlog(
                 "# Modifying search path to find %s as %s in '%s'"
@@ -3467,7 +3488,29 @@
             path = os.environ['PATH'].split(os.pathsep)
             while exedir in path:
                 path.remove(exedir)
-            os.environ['PATH'] = os.pathsep.join([exedir] + path)
+
+            # Binaries installed by pip into the user area like pylint.exe may
+            # not be in PATH by default.
+            extra_paths = [exedir]
+            vi = sys.version_info
+            if 'APPDATA' in os.environ:
+                scripts_dir = os.path.join(
+                    os.environ['APPDATA'],
+                    'Python',
+                    'Python%d%d' % (vi[0], vi[1]),
+                    'Scripts',
+                )
+
+                if vi.major == 2:
+                    scripts_dir = os.path.join(
+                        os.environ['APPDATA'],
+                        'Python',
+                        'Scripts',
+                    )
+
+                extra_paths.append(scripts_dir)
+
+            os.environ['PATH'] = os.pathsep.join(extra_paths + path)
             if not self._findprogram(pyexename):
                 print("WARNING: Cannot find %s in search path" % pyexename)
 
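
The run-tests.py hunk above rewrites the port-availability probe around contextlib.closing and adds a Windows-specific WSAEACCES case. Below is a simplified IPv4-only sketch of the same probe with hypothetical names; run-tests.py itself also handles IPv6 and the Windows errno.

import contextlib
import errno
import socket

def port_is_free(port, host='localhost'):
    # Best-effort check that `port` can be bound on `host`.
    # contextlib.closing guarantees the probe socket is closed even if
    # bind() raises, which is the point of the change above.
    try:
        with contextlib.closing(socket.socket(socket.AF_INET,
                                               socket.SOCK_STREAM)) as s:
            s.bind((host, port))
        return True
    except socket.error as exc:
        if exc.errno in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
                         errno.EPROTONOSUPPORT):
            return False
        raise
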
--- a/tests/seq.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/seq.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # A portable replacement for 'seq'
 #
--- a/tests/simplestorerepo.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/simplestorerepo.py	Tue Jan 19 21:48:43 2021 +0530
@@ -532,6 +532,7 @@
         linkmapper,
         transaction,
         addrevisioncb=None,
+        duplicaterevisioncb=None,
         maybemissingparents=False,
     ):
         if maybemissingparents:
@@ -539,7 +540,7 @@
                 _('simple store does not support missing parents ' 'write mode')
             )
 
-        nodes = []
+        empty = True
 
         transaction.addbackup(self._indexpath)
 
@@ -547,9 +548,10 @@
             linkrev = linkmapper(linknode)
             flags = flags or revlog.REVIDX_DEFAULT_FLAGS
 
-            nodes.append(node)
-
             if node in self._indexbynode:
+                if duplicaterevisioncb:
+                    duplicaterevisioncb(self, node)
+                empty = False
                 continue
 
             # Need to resolve the fulltext from the delta base.
@@ -564,7 +566,8 @@
 
             if addrevisioncb:
                 addrevisioncb(self, node)
-        return nodes
+            empty = False
+        return not empty
 
     def _headrevs(self):
         # Assume all revisions are heads by default.
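
The simplestorerepo.py change above follows the revised addgroup() contract: duplicates are reported through duplicaterevisioncb and the method returns a boolean ("did we receive anything") instead of the list of added nodes. A toy, storage-agnostic sketch of that contract follows; deltas and known are stand-ins for the real delta stream and index, not Mercurial APIs.

def addgroup(deltas, known, addrevisioncb=None, duplicaterevisioncb=None):
    # Report each incoming node through a callback and track whether the
    # group was empty, echoing the shape of the hunk above.
    empty = True
    for node in deltas:
        if node in known:
            if duplicaterevisioncb:
                duplicaterevisioncb(node)
            empty = False
            continue
        known.add(node)
        if addrevisioncb:
            addrevisioncb(node)
        empty = False
    return not empty
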
--- a/tests/svn-safe-append.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/svn-safe-append.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import
 
--- a/tests/test-absorb-filefixupstate.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-absorb-filefixupstate.py	Tue Jan 19 21:48:43 2021 +0530
@@ -78,7 +78,13 @@
 testfilefixup(case0, b'222', [b'', b'222'])
 
 # input case 1: 3 lines, each commit adds one line
-case1 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2, 3]), (b'3', [3]),])
+case1 = buildcontents(
+    [
+        (b'1', [1, 2, 3]),
+        (b'2', [2, 3]),
+        (b'3', [3]),
+    ]
+)
 
 # 1:1 line mapping
 testfilefixup(case1, b'123', case1)
@@ -121,7 +127,13 @@
 testfilefixup(case1, b'12b3', case1)
 
 # input case 2: delete in the middle
-case2 = buildcontents([(b'11', [1, 2]), (b'22', [1]), (b'33', [1, 2]),])
+case2 = buildcontents(
+    [
+        (b'11', [1, 2]),
+        (b'22', [1]),
+        (b'33', [1, 2]),
+    ]
+)
 
 # deletion (optimize code should make it 2 chunks)
 testfilefixup(
@@ -136,7 +148,13 @@
 testfilefixup(case2, b'aaa', case2)
 
 # input case 3: rev 3 reverts rev 2
-case3 = buildcontents([(b'1', [1, 2, 3]), (b'2', [2]), (b'3', [1, 2, 3]),])
+case3 = buildcontents(
+    [
+        (b'1', [1, 2, 3]),
+        (b'2', [2]),
+        (b'3', [1, 2, 3]),
+    ]
+)
 
 # 1:1 line mapping
 testfilefixup(case3, b'13', case3)
@@ -159,7 +177,13 @@
     [
         (b'1', [1, 2, 3]),
         (b'2', [2, 3]),
-        (b'3', [1, 2,]),
+        (
+            b'3',
+            [
+                1,
+                2,
+            ],
+        ),
         (b'4', [1, 3]),
         (b'5', [3]),
         (b'6', [2, 3]),
@@ -183,7 +207,13 @@
 testfilefixup(case4, b'', [b'', b'34', b'37', b''])
 
 # input case 5: replace a small chunk which is near a deleted line
-case5 = buildcontents([(b'12', [1, 2]), (b'3', [1]), (b'4', [1, 2]),])
+case5 = buildcontents(
+    [
+        (b'12', [1, 2]),
+        (b'3', [1]),
+        (b'4', [1, 2]),
+    ]
+)
 
 testfilefixup(case5, b'1cd4', [b'', b'1cd34', b'1cd4'])
 
--- a/tests/test-absorb-unfinished.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-absorb-unfinished.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,14 +17,14 @@
   $ hg commit -Aqm "foo 2"
 
   $ hg --config extensions.rebase= rebase -r 1 -d 0
-  rebasing 1:c3b6dc0e177a "foo 2" (tip)
+  rebasing 1:c3b6dc0e177a tip "foo 2"
   merging foo.whole
   warning: conflicts while merging foo.whole! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg --config extensions.rebase= absorb
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
 
--- a/tests/test-absorb.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-absorb.t	Tue Jan 19 21:48:43 2021 +0530
@@ -19,7 +19,7 @@
 
   $ hg absorb
   abort: no mutable changeset to change
-  [255]
+  [10]
 
 Make some commits:
 
@@ -229,7 +229,7 @@
   $ sedi 's/Insert/insert/' a
   $ hg absorb --apply-changes
   abort: no mutable changeset to change
-  [255]
+  [10]
 
 Make working copy clean:
 
@@ -267,7 +267,7 @@
   $ echo 2 >> m2
   $ hg absorb --apply-changes
   abort: cannot absorb into a merge
-  [255]
+  [10]
   $ hg revert -q -C m1 m2
 
 Use a new repo:
--- a/tests/test-acl.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-acl.t	Tue Jan 19 21:48:43 2021 +0530
@@ -807,6 +807,7 @@
   acl: bookmark access granted: "ef1ea85a6374b77d6da9dcda9541f498f2d17df7" on bookmark "moving-bookmark"
   bundle2-input-bundle: 7 parts total
   updating the branch cache
+  invalid branch cache (served.hidden): tip differs
   added 1 changesets with 1 changes to 1 files
   bundle2-output-bundle: "HG20", 1 parts total
   bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
@@ -1788,6 +1789,7 @@
   bundle2-input-part: total payload size 48
   bundle2-input-bundle: 5 parts total
   updating the branch cache
+  invalid branch cache (served.hidden): tip differs
   added 4 changesets with 4 changes to 4 files (+1 heads)
   bundle2-output-bundle: "HG20", 1 parts total
   bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
@@ -2094,6 +2096,7 @@
   bundle2-input-part: total payload size 48
   bundle2-input-bundle: 5 parts total
   updating the branch cache
+  invalid branch cache (served.hidden): tip differs
   added 4 changesets with 4 changes to 4 files (+1 heads)
   bundle2-output-bundle: "HG20", 1 parts total
   bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
@@ -2185,6 +2188,7 @@
   bundle2-input-part: total payload size 48
   bundle2-input-bundle: 5 parts total
   updating the branch cache
+  invalid branch cache (served.hidden): tip differs
   added 4 changesets with 4 changes to 4 files (+1 heads)
   bundle2-output-bundle: "HG20", 1 parts total
   bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
@@ -2348,6 +2352,7 @@
   bundle2-input-part: total payload size 48
   bundle2-input-bundle: 5 parts total
   updating the branch cache
+  invalid branch cache (served.hidden): tip differs
   added 4 changesets with 4 changes to 4 files (+1 heads)
   bundle2-output-bundle: "HG20", 1 parts total
   bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload
--- a/tests/test-add.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-add.t	Tue Jan 19 21:48:43 2021 +0530
@@ -44,11 +44,11 @@
 #if no-windows
   $ echo foo > con.xml
   $ hg --config ui.portablefilenames=jump add con.xml
-  abort: ui.portablefilenames value is invalid ('jump')
-  [255]
+  config error: ui.portablefilenames value is invalid ('jump')
+  [30]
   $ hg --config ui.portablefilenames=abort add con.xml
   abort: filename contains 'con', which is reserved on Windows: con.xml
-  [255]
+  [10]
   $ hg st
   A a
   A b
@@ -68,7 +68,7 @@
   $ hg --config ui.portablefilenames=abort add
   adding hello:world
   abort: filename contains ':', which is reserved on Windows: 'hello:world'
-  [255]
+  [10]
   $ hg st
   A a
   A b
@@ -286,7 +286,7 @@
   $ hg commit -qAm "bar"
   $ hg forget foo --dry-run -i
   abort: cannot specify both --dry-run and --interactive
-  [255]
+  [10]
 
   $ hg forget foo --config ui.interactive=True -i << EOF
   > ?
--- a/tests/test-alias.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-alias.t	Tue Jan 19 21:48:43 2021 +0530
@@ -208,8 +208,8 @@
 unknown
 
   $ hg unknown
-  abort: alias 'unknown' resolves to unknown command 'bargle'
-  [255]
+  config error: alias 'unknown' resolves to unknown command 'bargle'
+  [30]
   $ hg help unknown
   alias 'unknown' resolves to unknown command 'bargle'
 
@@ -217,8 +217,8 @@
 ambiguous
 
   $ hg ambiguous
-  abort: alias 'ambiguous' resolves to ambiguous command 's'
-  [255]
+  config error: alias 'ambiguous' resolves to ambiguous command 's'
+  [30]
   $ hg help ambiguous
   alias 'ambiguous' resolves to ambiguous command 's'
 
@@ -226,8 +226,8 @@
 recursive
 
   $ hg recursive
-  abort: alias 'recursive' resolves to unknown command 'recursive'
-  [255]
+  config error: alias 'recursive' resolves to unknown command 'recursive'
+  [30]
   $ hg help recursive
   alias 'recursive' resolves to unknown command 'recursive'
 
@@ -235,9 +235,9 @@
 disabled
 
   $ hg disabled
-  abort: alias 'disabled' resolves to unknown command 'email'
+  config error: alias 'disabled' resolves to unknown command 'email'
   ('email' is provided by 'patchbomb' extension)
-  [255]
+  [30]
   $ hg help disabled
   alias 'disabled' resolves to unknown command 'email'
   
@@ -251,8 +251,8 @@
 no definition
 
   $ hg nodef
-  abort: no definition for alias 'nodefinition'
-  [255]
+  config error: no definition for alias 'nodefinition'
+  [30]
   $ hg help nodef
   no definition for alias 'nodefinition'
 
@@ -260,8 +260,8 @@
 no closing quotation
 
   $ hg noclosing
-  abort: error in definition for alias 'noclosingquotation': No closing quotation
-  [255]
+  config error: error in definition for alias 'noclosingquotation': No closing quotation
+  [30]
   $ hg help noclosing
   error in definition for alias 'noclosingquotation': No closing quotation
 
@@ -275,37 +275,37 @@
 invalid options
 
   $ hg no--cwd
-  abort: error in definition for alias 'no--cwd': --cwd may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no--cwd': --cwd may only be given on the command line
+  [30]
   $ hg help no--cwd
   error in definition for alias 'no--cwd': --cwd may only be given on the
   command line
   $ hg no-R
-  abort: error in definition for alias 'no-R': -R may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no-R': -R may only be given on the command line
+  [30]
   $ hg help no-R
   error in definition for alias 'no-R': -R may only be given on the command line
   $ hg no--repo
-  abort: error in definition for alias 'no--repo': --repo may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no--repo': --repo may only be given on the command line
+  [30]
   $ hg help no--repo
   error in definition for alias 'no--repo': --repo may only be given on the
   command line
   $ hg no--repository
-  abort: error in definition for alias 'no--repository': --repository may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no--repository': --repository may only be given on the command line
+  [30]
   $ hg help no--repository
   error in definition for alias 'no--repository': --repository may only be given
   on the command line
   $ hg no--config
-  abort: error in definition for alias 'no--config': --config may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no--config': --config may only be given on the command line
+  [30]
   $ hg no --config alias.no='--repo elsewhere --cwd elsewhere status'
-  abort: error in definition for alias 'no': --repo/--cwd may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no': --repo/--cwd may only be given on the command line
+  [30]
   $ hg no --config alias.no='--repo elsewhere'
-  abort: error in definition for alias 'no': --repo may only be given on the command line
-  [255]
+  config error: error in definition for alias 'no': --repo may only be given on the command line
+  [30]
 
 optional repository
 
@@ -357,10 +357,10 @@
 
   $ hg positional
   abort: too few arguments for command alias
-  [255]
+  [10]
   $ hg positional a
   abort: too few arguments for command alias
-  [255]
+  [10]
   $ hg positional 'node|short' rev
   0 e63c23eaa88a | 1970-01-01 00:00 +0000
 
@@ -468,13 +468,13 @@
   $ hg i
   hg: command 'i' is ambiguous:
       idalias idaliaslong idaliasshell identify import incoming init
-  [255]
+  [10]
   $ hg id
   042423737847 tip
   $ hg ida
   hg: command 'ida' is ambiguous:
       idalias idaliaslong idaliasshell
-  [255]
+  [10]
   $ hg idalias
   042423737847 tip
   $ hg idaliasl
@@ -484,7 +484,7 @@
   $ hg parentsshell
   hg: command 'parentsshell' is ambiguous:
       parentsshell1 parentsshell2
-  [255]
+  [10]
   $ hg parentsshell1
   one
   $ hg parentsshell2
@@ -496,13 +496,13 @@
   $ hg init sub
   $ cd sub
   $ hg count 'branch(default)'
-  abort: unknown revision 'default'!
+  abort: unknown revision 'default'
   0
   $ hg -v count 'branch(default)'
-  abort: unknown revision 'default'!
+  abort: unknown revision 'default'
   0
   $ hg -R .. count 'branch(default)'
-  abort: unknown revision 'default'!
+  abort: unknown revision 'default'
   0
   $ hg --cwd .. count 'branch(default)'
   2
@@ -533,11 +533,11 @@
   $ hg --cwd .. subalias > /dev/null
   hg: unknown command 'subalias'
   (did you mean idalias?)
-  [255]
+  [10]
   $ hg -R .. subalias > /dev/null
   hg: unknown command 'subalias'
   (did you mean idalias?)
-  [255]
+  [10]
 
 
 shell alias defined in other repo
@@ -545,7 +545,7 @@
   $ hg mainalias > /dev/null
   hg: unknown command 'mainalias'
   (did you mean idalias?)
-  [255]
+  [10]
   $ hg -R .. mainalias
   main
   $ hg --cwd .. mainalias
@@ -555,7 +555,7 @@
   $ hg --cwd .. manalias
   hg: unknown command 'manalias'
   (did you mean one of idalias, mainalias, manifest?)
-  [255]
+  [10]
 
 shell aliases with escaped $ chars
 
@@ -593,7 +593,7 @@
   $ hg reba
   hg: command 'reba' is ambiguous:
       rebase rebate
-  [255]
+  [10]
   $ hg rebat
   this is rebate
   $ hg rebat --foo-bar
@@ -631,11 +631,11 @@
   $ hg help invalidhelp
   non-ASCII character in alias definition 'invalidhelp:help'
   $ hg invaliddoc
-  abort: non-ASCII character in alias definition 'invaliddoc:doc'
-  [255]
+  config error: non-ASCII character in alias definition 'invaliddoc:doc'
+  [30]
   $ hg invalidhelp
-  abort: non-ASCII character in alias definition 'invalidhelp:help'
-  [255]
+  config error: non-ASCII character in alias definition 'invalidhelp:help'
+  [30]
 
 invalid arguments
 
@@ -650,22 +650,22 @@
    -T --template TEMPLATE display with template
   
   (use 'hg rt -h' to show more help)
-  [255]
+  [10]
 
 invalid global arguments for normal commands, aliases, and shell aliases
 
   $ hg --invalid root
   hg: option --invalid not recognized
   (use 'hg help -v' for a list of global options)
-  [255]
+  [10]
   $ hg --invalid mylog
   hg: option --invalid not recognized
   (use 'hg help -v' for a list of global options)
-  [255]
+  [10]
   $ hg --invalid blank
   hg: option --invalid not recognized
   (use 'hg help -v' for a list of global options)
-  [255]
+  [10]
 
 environment variable changes in alias commands
 
@@ -714,9 +714,9 @@
 
 #if no-outer-repo
   $ hg root
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
   $ hg --config alias.hgroot='!hg root' hgroot
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
 #endif
--- a/tests/test-amend.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-amend.t	Tue Jan 19 21:48:43 2021 +0530
@@ -153,7 +153,7 @@
   $ echo FOO > $TESTTMP/msg
   $ hg amend -l $TESTTMP/msg -m BAR
   abort: cannot specify both --message and --logfile
-  [255]
+  [10]
   $ hg amend -l $TESTTMP/msg
   saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/974f07f28537-edb6470a-amend.hg (obsstore-off !)
   $ hg log -r . -T '{node|short} {desc}\n'
@@ -197,7 +197,7 @@
   $ echo 2 >> B
   $ hg amend
   abort: cannot amend changeset with children
-  [255]
+  [10]
 
 #if obsstore-on
 
@@ -226,7 +226,7 @@
   $ hg add bar
   $ hg amend --note 'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'
   abort: cannot store a note of more than 255 bytes
-  [255]
+  [10]
   $ hg amend --note "adding bar"
   $ hg debugobsolete -r .
   112478962961147124edd43549aedd1a335e44bf be169c7e8dbe21cd10b3d79691cbe7f241e3c21c 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
@@ -240,7 +240,7 @@
   $ hg amend -m AMEND
   abort: cannot amend public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
 Amend a merge changeset
 
@@ -403,10 +403,10 @@
   $ hg init $TESTTMP/repo5
   $ cd $TESTTMP/repo5
   $ cat <<'EOF' >> .hg/hgrc
-  > [ui]
-  > logtemplate = 'user:        {user}
-  >                date:        {date|date}
-  >                summary:     {desc|firstline}\n'
+  > [command-templates]
+  > log = 'user:        {user}
+  >        date:        {date|date}
+  >        summary:     {desc|firstline}\n'
   > EOF
 
   $ echo a>a
@@ -471,7 +471,7 @@
 
   $ hg amend -D --date '0 0'
   abort: cannot specify both --date and --currentdate
-  [255]
+  [10]
 
 Close branch
 
--- a/tests/test-ancestor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-ancestor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -24,13 +24,13 @@
 
 
 def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
-    '''nodes: total number of nodes in the graph
+    """nodes: total number of nodes in the graph
     rootprob: probability that a new node (not 0) will be a root
     mergeprob: probability that, excluding a root a node will be a merge
     prevprob: probability that p1 will be the previous node
 
     return value is a graph represented as an adjacency list.
-    '''
+    """
     graph = [None] * nodes
     for i in xrange(nodes):
         if i == 0 or rng.random() < rootprob:
@@ -228,7 +228,11 @@
         print("remaining (sorted): %s" % sorted(list(revs)))
 
     for i, (bases, revs) in enumerate(
-        (({10}, {11}), ({11}, {10}), ({7}, {9, 11}),)
+        (
+            ({10}, {11}),
+            ({11}, {10}),
+            ({7}, {9, 11}),
+        )
     ):
         print("%% missingancestors(), example %d" % (i + 1))
         missanc = ancestor.incrementalmissingancestors(graph.get, bases)
--- a/tests/test-annotate.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-annotate.t	Tue Jan 19 21:48:43 2021 +0530
@@ -930,45 +930,45 @@
   $ hg up 24 --quiet
   $ hg log -r 'followlines()'
   hg: parse error: followlines takes at least 1 positional arguments
-  [255]
+  [10]
   $ hg log -r 'followlines(baz)'
   hg: parse error: followlines requires a line range
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, x)'
   hg: parse error: followlines expects a line number or a range
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))'
   hg: parse error: followlines expects exactly one revision
-  [255]
+  [10]
   $ hg log -r 'followlines("glob:*", 1:2)'
   hg: parse error: followlines expects exactly one file
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 1:)'
   hg: parse error: line range bounds must be integers
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, :1)'
   hg: parse error: line range bounds must be integers
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, x:4)'
   hg: parse error: line range bounds must be integers
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 5:4)'
   hg: parse error: line range must be positive
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 0:4)'
   hg: parse error: fromline must be strictly positive
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 2:40)'
   abort: line range exceeds file size
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
   hg: parse error at 43: not a prefix: [
   (followlines(baz, 2:4, startrev=20, descend=[1])
                                               ^ here)
-  [255]
+  [10]
   $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
   hg: parse error: descend argument must be a boolean
-  [255]
+  [10]
 
 Test empty annotate output
 
--- a/tests/test-archive.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-archive.t	Tue Jan 19 21:48:43 2021 +0530
@@ -560,7 +560,7 @@
   $ cd ../empty
   $ hg archive ../test-empty
   abort: no working directory: please specify a revision
-  [255]
+  [10]
 
 old file -- date clamped to 1980
 
--- a/tests/test-audit-path.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-audit-path.t	Tue Jan 19 21:48:43 2021 +0530
@@ -177,7 +177,7 @@
 
   $ hg up -qC 2
   $ hg rebase -s 2 -d 1 --config extensions.rebase=
-  rebasing 2:e73c21d6b244 "file a/poisoned" (tip)
+  rebasing 2:e73c21d6b244 tip "file a/poisoned"
   abort: path 'a/poisoned' traverses symbolic link 'a'
   [255]
   $ ls ../merge-symlink-out
--- a/tests/test-audit-subrepo.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-audit-subrepo.t	Tue Jan 19 21:48:43 2021 +0530
@@ -120,8 +120,8 @@
   $ hg init sub
   $ echo '= sub' >> .hgsub
   $ hg ci -qAm 'add subrepo ""'
-  hg: parse error at .hgsub:1: = sub
-  [255]
+  config error at .hgsub:1: = sub
+  [30]
 
 prepare tampered repo (including the commit above):
 
@@ -144,8 +144,8 @@
 on clone (and update):
 
   $ hg clone -q emptypath emptypath2
-  hg: parse error at .hgsub:1: = sub
-  [255]
+  config error at .hgsub:1: = sub
+  [30]
 
 Test current path
 -----------------
--- a/tests/test-backout.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-backout.t	Tue Jan 19 21:48:43 2021 +0530
@@ -5,10 +5,10 @@
 
   $ hg backout
   abort: please specify a revision to backout
-  [255]
+  [10]
   $ hg backout -r 0 0
   abort: please specify just one revision
-  [255]
+  [10]
 
 basic operation
 (this also tests that editor is invoked if the commit message is not
@@ -210,7 +210,7 @@
 
   $ hg backout 1
   abort: cannot backout change that is not an ancestor
-  [255]
+  [10]
   $ echo c > c
   $ hg ci -Am2
   adding c
@@ -227,7 +227,7 @@
 
   $ hg backout 1
   abort: cannot backout change that is not an ancestor
-  [255]
+  [10]
   $ hg summary
   parent: 2:db815d6d32e6 tip
    2
@@ -464,19 +464,19 @@
 
   $ hg backout 4
   abort: cannot backout a merge changeset
-  [255]
+  [10]
 
 backout of merge with bad parent should fail
 
   $ hg backout --parent 0 4
   abort: cb9a9f314b8b is not a parent of b2f3bb92043e
-  [255]
+  [10]
 
 backout of non-merge with parent should fail
 
   $ hg backout --parent 0 3
   abort: cannot use --parent on non-merge changeset
-  [255]
+  [10]
 
 backout with valid parent should be ok
 
@@ -805,7 +805,7 @@
 
   $ hg backout --merge --no-commit 2
   abort: cannot specify both --no-commit and --merge
-  [255]
+  [10]
 
 Ensure that backout out the same changeset twice performs correctly:
 
@@ -819,5 +819,5 @@
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg backout 2
   removing 3
-  created new head
+  warning: commit already existed in the repository!
   changeset 3:8f188de730d9 backs out changeset 2:cccc23d9d68f
--- a/tests/test-bad-pull.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bad-pull.t	Tue Jan 19 21:48:43 2021 +0530
@@ -2,7 +2,7 @@
 
   $ hg clone http://localhost:$HGPORT/ copy
   abort: * (glob)
-  [255]
+  [100]
 
   $ test -d copy
   [1]
@@ -11,5 +11,5 @@
   $ cat dumb.pid >> $DAEMON_PIDS
   $ hg clone http://localhost:$HGPORT/foo copy2
   abort: HTTP Error 404: * (glob)
-  [255]
+  [100]
   $ killdaemons.py
--- a/tests/test-basic.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-basic.t	Tue Jan 19 21:48:43 2021 +0530
@@ -9,9 +9,11 @@
   lfs.usercache=$TESTTMP/.cache/lfs
   ui.slash=True
   ui.interactive=False
+  ui.detailed-exit-code=True
   ui.merge=internal:merge
   ui.mergemarkers=detailed
   ui.promptecho=True
+  ui.timeout.warn=15
   web.address=localhost
   web\.ipv6=(?:True|False) (re)
   web.server-header=testing stub value
@@ -47,6 +49,31 @@
   [255]
 #endif
 
+On Python 3, stdio may be None:
+
+  $ hg debuguiprompt --config ui.interactive=true 0<&-
+   abort: Bad file descriptor
+  [255]
+  $ hg version -q 0<&-
+  Mercurial Distributed SCM * (glob)
+
+#if py3
+  $ hg version -q 1>&-
+  abort: Bad file descriptor
+  [255]
+#else
+  $ hg version -q 1>&-
+#endif
+  $ hg unknown -q 1>&-
+  hg: unknown command 'unknown'
+  (did you mean debugknown?)
+  [10]
+
+  $ hg version -q 2>&-
+  Mercurial Distributed SCM * (glob)
+  $ hg unknown -q 2>&-
+  [10]
+
   $ hg commit -m test
 
 This command is ancient:
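
Many hunks in this patch replace the catch-all exit status 255 with the detailed exit codes that the test harness now enables through ui.detailed-exit-code (visible in the test-basic.t config dump above): 10 for input/usage errors, 20 for state errors, 30 for config errors, 50 for storage errors, 100 for remote errors, and 240 for unresolved merge conflicts. The sketch below only restates that mapping as inferred from the updated test expectations; the classify() helper and the sample invocation are illustrative and not part of the patch.

    import subprocess

    # Failure classes as observed in the updated .t expectations of this patch;
    # illustrative only, not an authoritative table.
    DETAILED_CODES = {
        10: "input/usage error (bad flags, unknown command, bad revset)",
        20: "state error (uncommitted changes, nothing to bisect, ...)",
        30: "config error (unparsable hgrc or .hgsub entry)",
        50: "storage error (e.g. unknown parent while unbundling)",
        100: "remote error (failed clone/pull, incompatible server)",
        240: "unresolved merge conflicts",
    }

    def classify(args):
        """Run an hg command and name the failure class its exit status maps to."""
        proc = subprocess.run(["hg"] + list(args), capture_output=True, text=True)
        kind = DETAILED_CODES.get(proc.returncode, "success or generic/legacy failure")
        return proc.returncode, kind

    # Expected to report 10, matching the 'hg unknown -q' case above, when
    # detailed exit codes are enabled as these tests do.
    print(classify(["--config", "ui.detailed-exit-code=true", "unknown"]))
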
--- a/tests/test-batching.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-batching.py	Tue Jan 19 21:48:43 2021 +0530
@@ -30,11 +30,17 @@
 class localthing(thing):
     def foo(self, one, two=None):
         if one:
-            return b"%s and %s" % (one, two,)
+            return b"%s and %s" % (
+                one,
+                two,
+            )
         return b"Nope"
 
     def bar(self, b, a):
-        return b"%s und %s" % (b, a,)
+        return b"%s und %s" % (
+            b,
+            a,
+        )
 
     def greet(self, name=None):
         return b"Hello, %s" % name
@@ -176,7 +182,15 @@
             args = b','.join(n + b'=' + escapearg(v) for n, v in args)
             req.append(name + b':' + args)
         req = b';'.join(req)
-        res = self._submitone(b'batch', [(b'cmds', req,)])
+        res = self._submitone(
+            b'batch',
+            [
+                (
+                    b'cmds',
+                    req,
+                )
+            ],
+        )
         for r in res.split(b';'):
             yield r
 
@@ -190,7 +204,16 @@
 
     @wireprotov1peer.batchable
     def foo(self, one, two=None):
-        encargs = [(b'one', mangle(one),), (b'two', mangle(two),)]
+        encargs = [
+            (
+                b'one',
+                mangle(one),
+            ),
+            (
+                b'two',
+                mangle(two),
+            ),
+        ]
         encresref = wireprotov1peer.future()
         yield encargs, encresref
         yield unmangle(encresref.value)
@@ -198,14 +221,33 @@
     @wireprotov1peer.batchable
     def bar(self, b, a):
         encresref = wireprotov1peer.future()
-        yield [(b'b', mangle(b),), (b'a', mangle(a),)], encresref
+        yield [
+            (
+                b'b',
+                mangle(b),
+            ),
+            (
+                b'a',
+                mangle(a),
+            ),
+        ], encresref
         yield unmangle(encresref.value)
 
     # greet is coded directly. It therefore does not support batching. If it
     # does appear in a batch, the batch is split around greet, and the call to
     # greet is done in its own roundtrip.
     def greet(self, name=None):
-        return unmangle(self._submitone(b'greet', [(b'name', mangle(name),)]))
+        return unmangle(
+            self._submitone(
+                b'greet',
+                [
+                    (
+                        b'name',
+                        mangle(name),
+                    )
+                ],
+            )
+        )
 
 
 # demo remote usage
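
The test-batching.py hunks above change no behaviour: collection literals that end in a trailing comma are simply re-wrapped so each element sits on its own line, consistent with a formatter that honours a "magic trailing comma" (Black behaves this way; the patch itself does not name the tool). A minimal before/after sketch with placeholder values standing in for the mangle() calls used in the test:

    # before: the trailing commas sit inside one-line literals
    encargs = [(b'one', b'1',), (b'two', b'2',)]

    # after: the same value, exploded one element per line because of those commas
    encargs = [
        (
            b'one',
            b'1',
        ),
        (
            b'two',
            b'2',
        ),
    ]

    assert len(encargs) == 2  # identical value either way
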
--- a/tests/test-bheads.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bheads.t	Tue Jan 19 21:48:43 2021 +0530
@@ -251,7 +251,7 @@
   -------
   7: Adding c branch (c)
   -------
-  abort: unknown revision 'z'!
+  abort: unknown revision 'z'
   -------
 
 =======
--- a/tests/test-bisect.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bisect.t	Tue Jan 19 21:48:43 2021 +0530
@@ -14,167 +14,167 @@
   adding a
 
 
-  $ hg log
-  changeset:   31:58c80a7c8a40
-  tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:31 1970 +0000
-  summary:     msg 31
-  
-  changeset:   30:ed2d2f24b11c
-  user:        test
-  date:        Thu Jan 01 00:00:30 1970 +0000
-  summary:     msg 30
-  
-  changeset:   29:b5bd63375ab9
-  user:        test
-  date:        Thu Jan 01 00:00:29 1970 +0000
-  summary:     msg 29
-  
-  changeset:   28:8e0c2264c8af
-  user:        test
-  date:        Thu Jan 01 00:00:28 1970 +0000
-  summary:     msg 28
-  
-  changeset:   27:288867a866e9
-  user:        test
-  date:        Thu Jan 01 00:00:27 1970 +0000
-  summary:     msg 27
-  
-  changeset:   26:3efc6fd51aeb
-  user:        test
-  date:        Thu Jan 01 00:00:26 1970 +0000
-  summary:     msg 26
-  
-  changeset:   25:02a84173a97a
-  user:        test
-  date:        Thu Jan 01 00:00:25 1970 +0000
-  summary:     msg 25
-  
-  changeset:   24:10e0acd3809e
-  user:        test
-  date:        Thu Jan 01 00:00:24 1970 +0000
-  summary:     msg 24
-  
-  changeset:   23:5ec79163bff4
-  user:        test
-  date:        Thu Jan 01 00:00:23 1970 +0000
-  summary:     msg 23
-  
-  changeset:   22:06c7993750ce
-  user:        test
-  date:        Thu Jan 01 00:00:22 1970 +0000
-  summary:     msg 22
-  
-  changeset:   21:e5db6aa3fe2a
-  user:        test
-  date:        Thu Jan 01 00:00:21 1970 +0000
-  summary:     msg 21
-  
-  changeset:   20:7128fb4fdbc9
-  user:        test
-  date:        Thu Jan 01 00:00:20 1970 +0000
-  summary:     msg 20
-  
-  changeset:   19:52798545b482
-  user:        test
-  date:        Thu Jan 01 00:00:19 1970 +0000
-  summary:     msg 19
-  
-  changeset:   18:86977a90077e
-  user:        test
-  date:        Thu Jan 01 00:00:18 1970 +0000
-  summary:     msg 18
-  
-  changeset:   17:03515f4a9080
-  user:        test
-  date:        Thu Jan 01 00:00:17 1970 +0000
-  summary:     msg 17
-  
-  changeset:   16:a2e6ea4973e9
-  user:        test
-  date:        Thu Jan 01 00:00:16 1970 +0000
-  summary:     msg 16
-  
-  changeset:   15:e7fa0811edb0
-  user:        test
-  date:        Thu Jan 01 00:00:15 1970 +0000
-  summary:     msg 15
-  
-  changeset:   14:ce8f0998e922
-  user:        test
-  date:        Thu Jan 01 00:00:14 1970 +0000
-  summary:     msg 14
-  
-  changeset:   13:9d7d07bc967c
-  user:        test
-  date:        Thu Jan 01 00:00:13 1970 +0000
-  summary:     msg 13
-  
-  changeset:   12:1941b52820a5
-  user:        test
-  date:        Thu Jan 01 00:00:12 1970 +0000
-  summary:     msg 12
-  
-  changeset:   11:7b4cd9578619
-  user:        test
-  date:        Thu Jan 01 00:00:11 1970 +0000
-  summary:     msg 11
-  
-  changeset:   10:7c5eff49a6b6
-  user:        test
-  date:        Thu Jan 01 00:00:10 1970 +0000
-  summary:     msg 10
-  
-  changeset:   9:eb44510ef29a
-  user:        test
-  date:        Thu Jan 01 00:00:09 1970 +0000
-  summary:     msg 9
-  
-  changeset:   8:453eb4dba229
-  user:        test
-  date:        Thu Jan 01 00:00:08 1970 +0000
-  summary:     msg 8
-  
-  changeset:   7:03750880c6b5
-  user:        test
-  date:        Thu Jan 01 00:00:07 1970 +0000
-  summary:     msg 7
-  
-  changeset:   6:a3d5c6fdf0d3
-  user:        test
-  date:        Thu Jan 01 00:00:06 1970 +0000
-  summary:     msg 6
-  
-  changeset:   5:7874a09ea728
-  user:        test
-  date:        Thu Jan 01 00:00:05 1970 +0000
-  summary:     msg 5
-  
-  changeset:   4:9b2ba8336a65
-  user:        test
-  date:        Thu Jan 01 00:00:04 1970 +0000
-  summary:     msg 4
-  
-  changeset:   3:b53bea5e2fcb
-  user:        test
-  date:        Thu Jan 01 00:00:03 1970 +0000
-  summary:     msg 3
-  
-  changeset:   2:db07c04beaca
-  user:        test
-  date:        Thu Jan 01 00:00:02 1970 +0000
-  summary:     msg 2
-  
-  changeset:   1:5cd978ea5149
-  user:        test
-  date:        Thu Jan 01 00:00:01 1970 +0000
-  summary:     msg 1
-  
-  changeset:   0:b99c7b9c8e11
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     msg 0
+  $ hg log -G
+  @  changeset:   31:58c80a7c8a40
+  |  tag:         tip
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:31 1970 +0000
+  |  summary:     msg 31
+  |
+  o  changeset:   30:ed2d2f24b11c
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:30 1970 +0000
+  |  summary:     msg 30
+  |
+  o  changeset:   29:b5bd63375ab9
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:29 1970 +0000
+  |  summary:     msg 29
+  |
+  o  changeset:   28:8e0c2264c8af
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:28 1970 +0000
+  |  summary:     msg 28
+  |
+  o  changeset:   27:288867a866e9
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:27 1970 +0000
+  |  summary:     msg 27
+  |
+  o  changeset:   26:3efc6fd51aeb
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:26 1970 +0000
+  |  summary:     msg 26
+  |
+  o  changeset:   25:02a84173a97a
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:25 1970 +0000
+  |  summary:     msg 25
+  |
+  o  changeset:   24:10e0acd3809e
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:24 1970 +0000
+  |  summary:     msg 24
+  |
+  o  changeset:   23:5ec79163bff4
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:23 1970 +0000
+  |  summary:     msg 23
+  |
+  o  changeset:   22:06c7993750ce
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:22 1970 +0000
+  |  summary:     msg 22
+  |
+  o  changeset:   21:e5db6aa3fe2a
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:21 1970 +0000
+  |  summary:     msg 21
+  |
+  o  changeset:   20:7128fb4fdbc9
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:20 1970 +0000
+  |  summary:     msg 20
+  |
+  o  changeset:   19:52798545b482
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:19 1970 +0000
+  |  summary:     msg 19
+  |
+  o  changeset:   18:86977a90077e
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:18 1970 +0000
+  |  summary:     msg 18
+  |
+  o  changeset:   17:03515f4a9080
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:17 1970 +0000
+  |  summary:     msg 17
+  |
+  o  changeset:   16:a2e6ea4973e9
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:16 1970 +0000
+  |  summary:     msg 16
+  |
+  o  changeset:   15:e7fa0811edb0
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:15 1970 +0000
+  |  summary:     msg 15
+  |
+  o  changeset:   14:ce8f0998e922
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:14 1970 +0000
+  |  summary:     msg 14
+  |
+  o  changeset:   13:9d7d07bc967c
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:13 1970 +0000
+  |  summary:     msg 13
+  |
+  o  changeset:   12:1941b52820a5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:12 1970 +0000
+  |  summary:     msg 12
+  |
+  o  changeset:   11:7b4cd9578619
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:11 1970 +0000
+  |  summary:     msg 11
+  |
+  o  changeset:   10:7c5eff49a6b6
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:10 1970 +0000
+  |  summary:     msg 10
+  |
+  o  changeset:   9:eb44510ef29a
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:09 1970 +0000
+  |  summary:     msg 9
+  |
+  o  changeset:   8:453eb4dba229
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:08 1970 +0000
+  |  summary:     msg 8
+  |
+  o  changeset:   7:03750880c6b5
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:07 1970 +0000
+  |  summary:     msg 7
+  |
+  o  changeset:   6:a3d5c6fdf0d3
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:06 1970 +0000
+  |  summary:     msg 6
+  |
+  o  changeset:   5:7874a09ea728
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:05 1970 +0000
+  |  summary:     msg 5
+  |
+  o  changeset:   4:9b2ba8336a65
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:04 1970 +0000
+  |  summary:     msg 4
+  |
+  o  changeset:   3:b53bea5e2fcb
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:03 1970 +0000
+  |  summary:     msg 3
+  |
+  o  changeset:   2:db07c04beaca
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:02 1970 +0000
+  |  summary:     msg 2
+  |
+  o  changeset:   1:5cd978ea5149
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:01 1970 +0000
+  |  summary:     msg 1
+  |
+  o  changeset:   0:b99c7b9c8e11
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     msg 0
   
 
   $ hg up -C
@@ -364,7 +364,7 @@
   $ hg bisect -r
   $ hg bisect
   abort: cannot bisect (no known good revisions)
-  [255]
+  [20]
 
 
 reproduce AssertionError, issue1445
@@ -475,7 +475,7 @@
   $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters"
   changeset 31:58c80a7c8a40: good
   abort: cannot bisect (no known bad revisions)
-  [255]
+  [20]
   $ hg up -qr 0
   $ hg bisect --command "\"$PYTHON\" \"$TESTTMP/script.py\" and some parameters"
   changeset 0:b99c7b9c8e11: bad
@@ -545,7 +545,7 @@
   $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params"
   changeset 31:58c80a7c8a40: good
   abort: cannot bisect (no known bad revisions)
-  [255]
+  [20]
   $ hg up -qr 0
   $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params"
   changeset 0:b99c7b9c8e11: bad
@@ -751,46 +751,46 @@
   $ hg bisect -r
   $ hg bisect -b -c false
   abort: --bad and --command are incompatible
-  [255]
+  [10]
   $ hg bisect -b -e
   abort: --bad and --extend are incompatible
-  [255]
+  [10]
   $ hg bisect -b -g
   abort: --bad and --good are incompatible
-  [255]
+  [10]
   $ hg bisect -b -r
   abort: --bad and --reset are incompatible
-  [255]
+  [10]
   $ hg bisect -b -s
   abort: --bad and --skip are incompatible
-  [255]
+  [10]
   $ hg bisect -c false -e
   abort: --command and --extend are incompatible
-  [255]
+  [10]
   $ hg bisect -c false -g
   abort: --command and --good are incompatible
-  [255]
+  [10]
   $ hg bisect -c false -r
   abort: --command and --reset are incompatible
-  [255]
+  [10]
   $ hg bisect -c false -s
   abort: --command and --skip are incompatible
-  [255]
+  [10]
   $ hg bisect -e -g
   abort: --extend and --good are incompatible
-  [255]
+  [10]
   $ hg bisect -e -r
   abort: --extend and --reset are incompatible
-  [255]
+  [10]
   $ hg bisect -e -s
   abort: --extend and --skip are incompatible
-  [255]
+  [10]
   $ hg bisect -g -r
   abort: --good and --reset are incompatible
-  [255]
+  [10]
   $ hg bisect -g -s
   abort: --good and --skip are incompatible
-  [255]
+  [10]
   $ hg bisect -r -s
   abort: --reset and --skip are incompatible
-  [255]
+  [10]
--- a/tests/test-blackbox.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-blackbox.t	Tue Jan 19 21:48:43 2021 +0530
@@ -128,9 +128,9 @@
   new changesets d02f48003e62
   (run 'hg update' to get a working copy)
   $ hg blackbox -l 6
-  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull
-  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served) in * seconds (glob)
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served) with 1 labels and 2 nodes
+  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (served.hidden) in * seconds (glob)
+  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (served.hidden) with 1 labels and 2 nodes
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> 1 incoming changes - new heads: d02f48003e62
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> pull exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
@@ -193,8 +193,8 @@
   $ hg blackbox -l 6
   1970/01/01 00:00:00 bob @73f6ee326b27d820b0472f1a825e3a50f3dc489b (5000)> strip tip
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/73f6ee326b27-7612e004-backup.hg
-  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (base) in * seconds (glob)
-  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (base) with 1 labels and 2 nodes
+  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> updated branch cache (immutable) in * seconds (glob)
+  1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> wrote branch cache (immutable) with 1 labels and 2 nodes
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> strip tip exited 0 after * seconds (glob)
   1970/01/01 00:00:00 bob @6563da9dcf87b1949716e38ff3e3dfaa3198eb06 (5000)> blackbox -l 6
 
@@ -482,7 +482,7 @@
 #endif
 
   $ head -1 .hg/blackbox.log
-  1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension mock
+  1970/01/01 00:00:00 bob @0000000000000000000000000000000000000000 (5000)> ** Unknown exception encountered with possibly-broken third-party extension "mock" (version N/A)
   $ tail -2 .hg/blackbox.log
   RuntimeError: raise
   
--- a/tests/test-bookflow.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bookflow.t	Tue Jan 19 21:48:43 2021 +0530
@@ -140,7 +140,7 @@
   $ cd ../b
   $ # make sure we cannot push after bookmarks diverged
   $ hg push -B X | grep abort
-  abort: push creates new remote head * with bookmark 'X'! (glob)
+  abort: push creates new remote head * with bookmark 'X' (glob)
   (pull and merge or see 'hg help push' for details about pushing new heads)
   [1]
   $ hg pull -u | grep divergent
--- a/tests/test-bookmarks-current.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bookmarks-current.t	Tue Jan 19 21:48:43 2021 +0530
@@ -244,5 +244,5 @@
   Y
   $ hg bookmarks --inactive
   $ hg bookmarks -ql .
-  abort: no active bookmark!
+  abort: no active bookmark
   [255]
--- a/tests/test-bookmarks-pushpull.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bookmarks-pushpull.t	Tue Jan 19 21:48:43 2021 +0530
@@ -10,8 +10,8 @@
 #require serve
 
   $ cat << EOF >> $HGRCPATH
-  > [ui]
-  > logtemplate={rev}:{node|short} {desc|firstline}
+  > [command-templates]
+  > log={rev}:{node|short} {desc|firstline}
   > [phases]
   > publish=False
   > [experimental]
@@ -356,7 +356,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (leaving bookmark V)
   $ hg push -B . ../a
-  abort: no active bookmark!
+  abort: no active bookmark
   [255]
   $ hg update -r V
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -386,7 +386,7 @@
   $ hg pull -B anotherbadname ../a
   pulling from ../a
   abort: remote bookmark anotherbadname not found!
-  [255]
+  [10]
 
 divergent bookmarks
 
@@ -740,9 +740,9 @@
   $ hg push http://localhost:$HGPORT2/
   pushing to http://localhost:$HGPORT2/
   searching for changes
-  abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
+  abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg -R ../a book
      @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
@@ -757,9 +757,9 @@
   $ hg push http://localhost:$HGPORT2/
   pushing to http://localhost:$HGPORT2/
   searching for changes
-  abort: push creates new remote head c922c0139ca0 with bookmark 'Y'!
+  abort: push creates new remote head c922c0139ca0 with bookmark 'Y'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg -R ../a book
      @                         1:0d2164f0ce0d
    * X                         1:0d2164f0ce0d
@@ -1053,9 +1053,9 @@
   pushing to $TESTTMP/addmarks
   searching for changes
   remote has heads on branch 'default' that are not known locally: a2a606d9ff1b
-  abort: push creates new remote head 54694f811df9 with bookmark 'X'!
+  abort: push creates new remote head 54694f811df9 with bookmark 'X'
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ cd ../addmarks
 
 Check summary output for incoming/outgoing bookmarks
@@ -1166,7 +1166,7 @@
   searching for changes
   no changes found
   pushkey-abort: prepushkey hook exited with status 1
-  abort: exporting bookmark @ failed!
+  abort: exporting bookmark @ failed
   [255]
 
 #endif
@@ -1194,7 +1194,7 @@
   searching for changes
   no changes found
   remote: pushkey-abort: prepushkey hook exited with status 1
-  abort: exporting bookmark @ failed!
+  abort: exporting bookmark @ failed
   [255]
 
   $ hg -R ../issue4455-dest/ bookmarks
@@ -1205,7 +1205,7 @@
   searching for changes
   no changes found
   remote: pushkey-abort: prepushkey hook exited with status 1
-  exporting bookmark @ failed!
+  exporting bookmark @ failed
   [1]
 
 #endif
@@ -1233,7 +1233,7 @@
   searching for changes
   no changes found
   remote: pushkey-abort: prepushkey hook exited with status 1
-  abort: exporting bookmark @ failed!
+  abort: exporting bookmark @ failed
   [255]
 
   $ hg -R ../issue4455-dest/ bookmarks
@@ -1244,7 +1244,7 @@
   searching for changes
   no changes found
   remote: pushkey-abort: prepushkey hook exited with status 1
-  exporting bookmark @ failed!
+  exporting bookmark @ failed
   [1]
 
 #endif
@@ -1324,7 +1324,7 @@
   searching for changes
   no changes found
   remote: pushkey-abort: prepushkey.no-bm-move hook exited with status 1
-  abort: updating bookmark foo failed!
+  abort: updating bookmark foo failed
   [255]
 #endif
 #if b2-binary
@@ -1364,3 +1364,33 @@
   no changes found (ignored 1 secret changesets)
   abort: cannot push bookmark foo as it points to a secret changeset
   [255]
+
+Test pushing all bookmarks
+
+  $ hg init $TESTTMP/ab1
+  $ cd $TESTTMP/ab1
+  $ "$PYTHON" $TESTDIR/seq.py 1 5 | while read i; do
+  > echo $i > test && hg ci -Am test
+  > done
+  adding test
+  $ hg clone -U . ../ab2
+  $ hg book -r 1 A; hg book -r 2 B; hg book -r 3 C
+  $ hg push ../ab2
+  pushing to ../ab2
+  searching for changes
+  no changes found
+  [1]
+  $ hg push --all-bookmarks -r 1 ../ab2
+  abort: cannot specify both --all-bookmarks and --rev
+  [10]
+  $ hg push --all-bookmarks -B A ../ab2
+  abort: cannot specify both --all-bookmarks and --bookmark
+  [10]
+  $ hg push --all-bookmarks ../ab2
+  pushing to ../ab2
+  searching for changes
+  no changes found
+  exporting bookmark A
+  exporting bookmark B
+  exporting bookmark C
+  [1]
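
The block added above introduces hg push --all-bookmarks: combining it with --rev or --bookmark is rejected with the input-error code 10, and when the destination already has every changeset the command still exports all local bookmarks while exiting with status 1 ("no changes found"). A small wrapper sketch; the helper name and destination path are illustrative only:

    import subprocess

    def push_all_bookmarks(dest):
        """Push every local bookmark to dest, tolerating the 'no changes found' exit."""
        proc = subprocess.run(["hg", "push", "--all-bookmarks", dest])
        # Per the transcript above, exit status 1 only means no changesets were
        # pushed; bookmarks were still exported, so treat 0 and 1 as success here.
        return proc.returncode in (0, 1)

    push_all_bookmarks("../ab2")  # path borrowed from the test; adjust as needed
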
--- a/tests/test-bookmarks-rebase.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bookmarks-rebase.t	Tue Jan 19 21:48:43 2021 +0530
@@ -37,7 +37,7 @@
 rebase
 
   $ hg rebase -s two -d one
-  rebasing 3:2ae46b1d99a7 "3" (two tip)
+  rebasing 3:2ae46b1d99a7 two tip "3"
   saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-rebase.hg
 
   $ hg log
@@ -77,11 +77,11 @@
   created new head
   $ hg bookmark three
   $ hg rebase -s three -d two
-  rebasing 4:dd7c838e8362 "4" (three tip)
+  rebasing 4:dd7c838e8362 three tip "4"
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --abort
   rebase aborted
   $ hg bookmark
@@ -92,11 +92,11 @@
 after aborted rebase, restoring a bookmark that has been removed should not fail
 
   $ hg rebase -s three -d two
-  rebasing 4:dd7c838e8362 "4" (three tip)
+  rebasing 4:dd7c838e8362 three tip "4"
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg bookmark -d three
   $ hg rebase --abort
   rebase aborted
--- a/tests/test-bookmarks.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bookmarks.t	Tue Jan 19 21:48:43 2021 +0530
@@ -79,13 +79,13 @@
    * X2                        0:f7b1eb17ad24
   $ hg bookmarks -l X A Y
   abort: bookmark 'A' does not exist
-  [255]
+  [10]
   $ hg bookmarks -l -r0
   abort: cannot specify both --list and --rev
-  [255]
+  [10]
   $ hg bookmarks -l --inactive
   abort: cannot specify both --inactive and --list
-  [255]
+  [10]
 
   $ hg log -T '{bookmarks % "{rev} {bookmark}\n"}'
   0 X
@@ -184,22 +184,22 @@
 but "literal:." is not since "." seems not a literal bookmark:
 
   $ hg log -r 'bookmark("literal:.")'
-  abort: bookmark '.' does not exist!
+  abort: bookmark '.' does not exist
   [255]
 
 "." should fail if there's no active bookmark:
 
   $ hg bookmark --inactive
   $ hg log -r 'bookmark(.)'
-  abort: no active bookmark!
+  abort: no active bookmark
   [255]
   $ hg log -r 'present(bookmark(.))'
 
   $ hg log -r 'bookmark(unknown)'
-  abort: bookmark 'unknown' does not exist!
+  abort: bookmark 'unknown' does not exist
   [255]
   $ hg log -r 'bookmark("literal:unknown")'
-  abort: bookmark 'unknown' does not exist!
+  abort: bookmark 'unknown' does not exist
   [255]
   $ hg log -r 'bookmark("re:unknown")'
   $ hg log -r 'present(bookmark("literal:unknown"))'
@@ -245,7 +245,7 @@
 
   $ hg bookmark -m A B
   abort: bookmark 'A' does not exist
-  [255]
+  [10]
 
 rename to existent bookmark
 
@@ -277,7 +277,7 @@
   $ hg book rename-me
   $ hg book -i rename-me
   $ hg book -m . renamed
-  abort: no active bookmark!
+  abort: no active bookmark
   [255]
   $ hg up -q Y
   $ hg book -d rename-me
@@ -297,7 +297,7 @@
   $ hg book delete-me
   $ hg book -i delete-me
   $ hg book -d .
-  abort: no active bookmark!
+  abort: no active bookmark
   [255]
   $ hg up -q Y
   $ hg book -d delete-me
@@ -327,28 +327,28 @@
 
   $ hg bookmark -m Y
   abort: new bookmark name required
-  [255]
+  [10]
   $ hg bookmark -m Y Y2 Y3
   abort: only one new bookmark name allowed
-  [255]
+  [10]
 
 delete without name
 
   $ hg bookmark -d
   abort: bookmark name required
-  [255]
+  [10]
 
 delete nonexistent bookmark
 
   $ hg bookmark -d A
   abort: bookmark 'A' does not exist
-  [255]
+  [10]
 
 delete with --inactive
 
   $ hg bookmark -d --inactive Y
   abort: cannot specify both --inactive and --delete
-  [255]
+  [10]
 
 bookmark name with spaces should be stripped
 
@@ -385,26 +385,26 @@
   $ hg bookmark '
   > '
   abort: bookmark names cannot consist entirely of whitespace
-  [255]
+  [10]
 
   $ hg bookmark -m Z '
   > '
   abort: bookmark names cannot consist entirely of whitespace
-  [255]
+  [10]
 
 bookmark with reserved name
 
   $ hg bookmark tip
   abort: the name 'tip' is reserved
-  [255]
+  [10]
 
   $ hg bookmark .
   abort: the name '.' is reserved
-  [255]
+  [10]
 
   $ hg bookmark null
   abort: the name 'null' is reserved
-  [255]
+  [10]
 
 
 bookmark with existing name
@@ -431,7 +431,7 @@
 
   $ hg bookmark 10
   abort: cannot use an integer as a name
-  [255]
+  [10]
 
 bookmark with a name that matches a node id
   $ hg bookmark 925d80f479bb db815d6d32e6 --config "$TESTHOOK"
@@ -476,15 +476,15 @@
 
   $ hg bookmark -m Y -d Z
   abort: cannot specify both --delete and --rename
-  [255]
+  [10]
 
   $ hg bookmark -r 1 -d Z
   abort: cannot specify both --delete and --rev
-  [255]
+  [10]
 
   $ hg bookmark -r 1 -m Z Y
   abort: cannot specify both --rename and --rev
-  [255]
+  [10]
 
 force bookmark with existing name
 
@@ -522,28 +522,28 @@
 
   $ hg bookmark -r .
   abort: bookmark name required
-  [255]
+  [10]
 
 bookmark name with whitespace only
 
   $ hg bookmark ' '
   abort: bookmark names cannot consist entirely of whitespace
-  [255]
+  [10]
 
   $ hg bookmark -m Y ' '
   abort: bookmark names cannot consist entirely of whitespace
-  [255]
+  [10]
 
 invalid bookmark
 
   $ hg bookmark 'foo:bar'
   abort: ':' cannot be used in a name
-  [255]
+  [10]
 
   $ hg bookmark 'foo
   > bar'
   abort: '\n' cannot be used in a name
-  [255]
+  [10]
 
 the bookmark extension should be ignored now that it is part of core
 
@@ -793,7 +793,7 @@
 
   $ hg -R ../cloned-bookmarks-manual-update update -d 1986
   abort: revision matching date not found
-  [255]
+  [10]
   $ hg -R ../cloned-bookmarks-manual-update update
   updating to active bookmark Y
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-branch-change.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-branch-change.t	Tue Jan 19 21:48:43 2021 +0530
@@ -34,38 +34,38 @@
 
   $ hg branch -r .
   abort: no branch name specified for the revisions
-  [255]
+  [10]
 
 Setting an invalid branch name
 
   $ hg branch -r . a:b
   abort: ':' cannot be used in a name
-  [255]
+  [10]
   $ hg branch -r . tip
   abort: the name 'tip' is reserved
-  [255]
+  [10]
   $ hg branch -r . 1234
   abort: cannot use an integer as a name
-  [255]
+  [10]
 
 Change on non-linear set of commits
 
   $ hg branch -r 2 -r 4 foo
   abort: cannot change branch of non-linear revisions
-  [255]
+  [10]
 
 Change in middle of the stack (linear commits)
 
   $ hg branch -r 1::3 foo
   abort: cannot change branch of changeset with children
-  [255]
+  [10]
 
 Change with dirty working directory
 
   $ echo bar > a
   $ hg branch -r . foo
   abort: uncommitted changes
-  [255]
+  [20]
 
   $ hg revert --all
   reverting a
@@ -74,7 +74,7 @@
 
   $ hg branch -r 'draft() - all()' foo
   abort: empty revision set
-  [255]
+  [10]
 
 Changing branch on linear set of commits from head
 
@@ -123,13 +123,13 @@
 
   $ hg branch -r . default
   abort: a branch of the same name already exists
-  [255]
+  [10]
 
 Changing on a branch head which is not topological head
 
   $ hg branch -r 2 stable
   abort: cannot change branch of changeset with children
-  [255]
+  [10]
 
 Enabling the allowunstable config and trying to change branch on a branch head
 which is not a topological head
@@ -143,13 +143,13 @@
 Changing branch of an obsoleted changeset
 
   $ hg branch -r 4 foobar
-  abort: hidden revision '4' was rewritten as: 7c1991464886!
+  abort: hidden revision '4' was rewritten as: 7c1991464886
   (use --hidden to access hidden revisions)
   [255]
 
   $ hg branch -r 4 --hidden foobar
   abort: cannot change branch of a obsolete changeset
-  [255]
+  [10]
 
 Make sure bookmark movement is correct
 
@@ -331,7 +331,7 @@
   (branch merge, don't forget to commit)
   $ hg branch -r . abcd
   abort: outstanding uncommitted merge
-  [255]
+  [20]
 
   $ hg ci -m "Merge commit"
   $ hg glog -r 'parents(.)::'
@@ -353,7 +353,7 @@
   changed branch on 1 changesets
   $ hg branch -r . stable
   abort: a branch of the same name already exists
-  [255]
+  [10]
 
   $ hg branch -r . stable --force
   changed branch on 1 changesets
@@ -368,7 +368,7 @@
   $ hg branch -r . def
   abort: cannot change branch of public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
 Merge commit with conflicts, with evolution and without
 
--- a/tests/test-branch-option.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-branch-option.t	Tue Jan 19 21:48:43 2021 +0530
@@ -57,12 +57,12 @@
 unknown branch and fallback
 
   $ hg in -qbz
-  abort: unknown branch 'z'!
+  abort: unknown branch 'z'
   [255]
   $ hg in -q ../branch#z
   2:f25d57ab0566
   $ hg out -qbz
-  abort: unknown branch 'z'!
+  abort: unknown branch 'z'
   [255]
 
 in rev c branch a
--- a/tests/test-branches.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-branches.t	Tue Jan 19 21:48:43 2021 +0530
@@ -51,36 +51,36 @@
 
   $ hg branch tip
   abort: the name 'tip' is reserved
-  [255]
+  [10]
   $ hg branch null
   abort: the name 'null' is reserved
-  [255]
+  [10]
   $ hg branch .
   abort: the name '.' is reserved
-  [255]
+  [10]
 
 invalid characters
 
   $ hg branch 'foo:bar'
   abort: ':' cannot be used in a name
-  [255]
+  [10]
 
   $ hg branch 'foo
   > bar'
   abort: '\n' cannot be used in a name
-  [255]
+  [10]
 
 trailing or leading spaces should be stripped before testing duplicates
 
   $ hg branch 'b '
   abort: a branch of the same name already exists
   (use 'hg update' to switch to it)
-  [255]
+  [10]
 
   $ hg branch ' b'
   abort: a branch of the same name already exists
   (use 'hg update' to switch to it)
-  [255]
+  [10]
 
 verify update will accept invalid legacy branch names
 
@@ -281,7 +281,7 @@
   $ hg commit -d '9 0' --close-branch -m 'close this part branch too'
   $ hg commit -d '9 0' --close-branch -m 're-closing this branch'
   abort: current revision is already a branch closing head
-  [255]
+  [10]
 
   $ hg log -r tip --debug
   changeset:   12:e3d49c0575d8fc2cb1cd6859c747c14f5f6d499f
@@ -959,7 +959,7 @@
   $ hg ci -m "closing branch" --close-branch
   abort: can only close branch heads
   (use --force-close-branch to close branch from a non-head changeset)
-  [255]
+  [10]
 
   $ hg up 0
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -984,7 +984,261 @@
   $ hg ci -m "branch closed" --close-branch
   abort: can only close branch heads
   (use --force-close-branch to close branch from a non-head changeset)
-  [255]
+  [10]
 
   $ hg ci -m "branch closed" --force-close-branch
   created new head
+  $ cd ..
+
+Test various special cases for the branchmap
+--------------------------------------------
+
+Basic fork of the same branch
+
+  $ hg init branchmap-testing1
+  $ cd branchmap-testing1
+  $ hg debugbuild '@A . :base . :p1 *base /p1'
+  $ hg log -G
+  o    changeset:   3:71ca9a6d524e
+  |\   branch:      A
+  | |  tag:         tip
+  | |  parent:      2:a3b807b3ff0b
+  | |  parent:      1:99ba08759bc7
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:03 1970 +0000
+  | |  summary:     r3
+  | |
+  | o  changeset:   2:a3b807b3ff0b
+  | |  branch:      A
+  | |  parent:      0:2ab8003a1750
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:02 1970 +0000
+  | |  summary:     r2
+  | |
+  o |  changeset:   1:99ba08759bc7
+  |/   branch:      A
+  |    tag:         p1
+  |    user:        debugbuilddag
+  |    date:        Thu Jan 01 00:00:01 1970 +0000
+  |    summary:     r1
+  |
+  o  changeset:   0:2ab8003a1750
+     branch:      A
+     tag:         base
+     user:        debugbuilddag
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     r0
+  
+  $ hg branches
+  A                              3:71ca9a6d524e
+  $ hg clone -r 1 -r 2 . ../branchmap-testing1-clone
+  adding changesets
+  adding manifests
+  adding file changes
+  added 3 changesets with 0 changes to 0 files (+1 heads)
+  new changesets 2ab8003a1750:a3b807b3ff0b
+  updating to branch A
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../branchmap-testing1-clone
+  $ hg pull ../branchmap-testing1
+  pulling from ../branchmap-testing1
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files (-1 heads)
+  new changesets 71ca9a6d524e
+  (run 'hg update' to get a working copy)
+  $ hg branches
+  A                              3:71ca9a6d524e
+  $ cd ..
+
+Switching to a different branch and back
+
+  $ hg init branchmap-testing2
+  $ cd branchmap-testing2
+  $ hg debugbuild '@A . @B . @A .'
+  $ hg log -G
+  o  changeset:   2:9699e9f260b5
+  |  branch:      A
+  |  tag:         tip
+  |  user:        debugbuilddag
+  |  date:        Thu Jan 01 00:00:02 1970 +0000
+  |  summary:     r2
+  |
+  o  changeset:   1:0bc7d348d965
+  |  branch:      B
+  |  user:        debugbuilddag
+  |  date:        Thu Jan 01 00:00:01 1970 +0000
+  |  summary:     r1
+  |
+  o  changeset:   0:2ab8003a1750
+     branch:      A
+     user:        debugbuilddag
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     r0
+  
+  $ hg branches
+  A                              2:9699e9f260b5
+  B                              1:0bc7d348d965 (inactive)
+  $ hg clone -r 1 . ../branchmap-testing2-clone
+  adding changesets
+  adding manifests
+  adding file changes
+  added 2 changesets with 0 changes to 0 files
+  new changesets 2ab8003a1750:0bc7d348d965
+  updating to branch B
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../branchmap-testing2-clone
+  $ hg pull ../branchmap-testing2
+  pulling from ../branchmap-testing2
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files
+  new changesets 9699e9f260b5
+  (run 'hg update' to get a working copy)
+  $ hg branches
+  A                              2:9699e9f260b5
+  B                              1:0bc7d348d965 (inactive)
+  $ cd ..
+
+A fork on a branch switching to a different branch and back
+is still collecting the fork.
+
+  $ hg init branchmap-testing3
+  $ cd branchmap-testing3
+  $ hg debugbuild '@A . :base . :p1 *base @B . @A /p1'
+  $ hg log -G
+  o    changeset:   4:3614a1711d23
+  |\   branch:      A
+  | |  tag:         tip
+  | |  parent:      3:e9c8abcf65aa
+  | |  parent:      1:99ba08759bc7
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:04 1970 +0000
+  | |  summary:     r4
+  | |
+  | o  changeset:   3:e9c8abcf65aa
+  | |  branch:      B
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:03 1970 +0000
+  | |  summary:     r3
+  | |
+  | o  changeset:   2:a3b807b3ff0b
+  | |  branch:      A
+  | |  parent:      0:2ab8003a1750
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:02 1970 +0000
+  | |  summary:     r2
+  | |
+  o |  changeset:   1:99ba08759bc7
+  |/   branch:      A
+  |    tag:         p1
+  |    user:        debugbuilddag
+  |    date:        Thu Jan 01 00:00:01 1970 +0000
+  |    summary:     r1
+  |
+  o  changeset:   0:2ab8003a1750
+     branch:      A
+     tag:         base
+     user:        debugbuilddag
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     r0
+  
+  $ hg branches
+  A                              4:3614a1711d23
+  B                              3:e9c8abcf65aa (inactive)
+  $ hg clone -r 1 -r 3 . ../branchmap-testing3-clone
+  adding changesets
+  adding manifests
+  adding file changes
+  added 4 changesets with 0 changes to 0 files (+1 heads)
+  new changesets 2ab8003a1750:e9c8abcf65aa
+  updating to branch A
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../branchmap-testing3-clone
+  $ hg pull ../branchmap-testing3
+  pulling from ../branchmap-testing3
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files (-1 heads)
+  new changesets 3614a1711d23
+  (run 'hg update' to get a working copy)
+  $ hg branches
+  A                              4:3614a1711d23
+  B                              3:e9c8abcf65aa (inactive)
+  $ cd ..
+
+Intermediary parents are on different branches.
+
+  $ hg init branchmap-testing4
+  $ cd branchmap-testing4
+  $ hg debugbuild '@A . @B :base . @A :p1 *base @C . @A /p1'
+  $ hg log -G
+  o    changeset:   4:4bf67499b70a
+  |\   branch:      A
+  | |  tag:         tip
+  | |  parent:      3:4a546028fa8f
+  | |  parent:      1:0bc7d348d965
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:04 1970 +0000
+  | |  summary:     r4
+  | |
+  | o  changeset:   3:4a546028fa8f
+  | |  branch:      C
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:03 1970 +0000
+  | |  summary:     r3
+  | |
+  | o  changeset:   2:a3b807b3ff0b
+  | |  branch:      A
+  | |  parent:      0:2ab8003a1750
+  | |  user:        debugbuilddag
+  | |  date:        Thu Jan 01 00:00:02 1970 +0000
+  | |  summary:     r2
+  | |
+  o |  changeset:   1:0bc7d348d965
+  |/   branch:      B
+  |    tag:         p1
+  |    user:        debugbuilddag
+  |    date:        Thu Jan 01 00:00:01 1970 +0000
+  |    summary:     r1
+  |
+  o  changeset:   0:2ab8003a1750
+     branch:      A
+     tag:         base
+     user:        debugbuilddag
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     r0
+  
+  $ hg branches
+  A                              4:4bf67499b70a
+  C                              3:4a546028fa8f (inactive)
+  B                              1:0bc7d348d965 (inactive)
+  $ hg clone -r 1 -r 3 . ../branchmap-testing4-clone
+  adding changesets
+  adding manifests
+  adding file changes
+  added 4 changesets with 0 changes to 0 files (+1 heads)
+  new changesets 2ab8003a1750:4a546028fa8f
+  updating to branch B
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ../branchmap-testing4-clone
+  $ hg pull ../branchmap-testing4
+  pulling from ../branchmap-testing4
+  searching for changes
+  adding changesets
+  adding manifests
+  adding file changes
+  added 1 changesets with 0 changes to 0 files (-1 heads)
+  new changesets 4bf67499b70a
+  (run 'hg update' to get a working copy)
+  $ hg branches
+  A                              4:4bf67499b70a
+  C                              3:4a546028fa8f (inactive)
+  B                              1:0bc7d348d965 (inactive)
+  $ cd ..
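
The branchmap tests added above build their graphs from compact DAG descriptions such as '@A . :base . :p1 *base /p1'. The reading below is an assumption: it follows the hg debugbuilddag notation and the graphs the transcripts actually produce (hg debugbuild itself looks like a test-local alias defined outside this excerpt), so treat it as an annotated guess rather than documentation:

    # Annotated reading of the first DAG spec used above (assumed semantics).
    dag = "@A . :base . :p1 *base /p1"
    meaning = {
        "@A": "put the changesets that follow on branch A",
        ".": "add one changeset on top of the current parent",
        ":base": "attach the local tag 'base' to the changeset just created",
        ":p1": "attach the local tag 'p1' to the changeset just created",
        "*base": "fork: add a changeset whose parent is the one tagged 'base'",
        "/p1": "merge the previous changeset with the one tagged 'p1'",
    }
    for token in dag.split():
        print("%-6s %s" % (token, meaning[token]))
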
--- a/tests/test-bundle-r.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle-r.t	Tue Jan 19 21:48:43 2021 +0530
@@ -172,12 +172,12 @@
 
   $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg test-3
   abort: --base is incompatible with specifying a destination
-  [255]
+  [10]
   $ hg -R test bundle -a -r tip test-bundle-branch1.hg test-3
   abort: --all is incompatible with specifying a destination
-  [255]
+  [10]
   $ hg -R test bundle -r tip test-bundle-branch1.hg
-  abort: repository default-push not found!
+  abort: repository default-push not found
   [255]
 
   $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg
@@ -223,8 +223,8 @@
   adding changesets
   transaction abort!
   rollback completed
-  abort: 00changelog.i@93ee6ab32777: unknown parent!
-  [255]
+  abort: 00changelog.i@93ee6ab32777: unknown parent
+  [50]
 
 revision 2
 
--- a/tests/test-bundle-type.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle-type.t	Tue Jan 19 21:48:43 2021 +0530
@@ -34,12 +34,12 @@
   $ hg bundle -a -t unknown out.hg
   abort: unknown is not a recognized bundle specification
   (see 'hg help bundlespec' for supported values for --type)
-  [255]
+  [10]
 
   $ hg bundle -a -t unknown-v2 out.hg
   abort: unknown compression is not supported
   (see 'hg help bundlespec' for supported values for --type)
-  [255]
+  [10]
 
   $ cd ..
 
@@ -199,7 +199,7 @@
   $ hg -R tzstd bundle -a -t zstd-v1 zstd-v1
   abort: compression engine zstd is not supported on v1 bundles
   (see 'hg help bundlespec' for supported values for --type)
-  [255]
+  [10]
 
 #else
 
@@ -228,5 +228,5 @@
   $ hg bundle -a -t garbage ../bgarbage
   abort: garbage is not a recognized bundle specification
   (see 'hg help bundlespec' for supported values for --type)
-  [255]
+  [10]
   $ cd ..
--- a/tests/test-bundle.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle.t	Tue Jan 19 21:48:43 2021 +0530
@@ -288,7 +288,7 @@
   $ hg -R test bundle -t packed1 packed.hg
   abort: packed bundles cannot be produced by "hg bundle"
   (use 'hg debugcreatestreamclonebundle')
-  [255]
+  [10]
 
 packed1 is produced properly
 
@@ -349,7 +349,7 @@
   $ hg -R packed unbundle packed.hg
   abort: packed bundles cannot be applied with "hg unbundle"
   (use "hg debugapplystreamclonebundle")
-  [255]
+  [10]
 
 packed1 can be consumed from debug command
 
@@ -601,7 +601,7 @@
 
   $ hg clone full.hg ''
   abort: empty destination path is not valid
-  [255]
+  [10]
 
 test for https://bz.mercurial-scm.org/216
 
@@ -697,13 +697,13 @@
   $ cd orig
   $ hg incoming '../test#bundle.hg'
   comparing with ../test
-  abort: unknown revision 'bundle.hg'!
+  abort: unknown revision 'bundle.hg'
   [255]
 
 note that percent encoding is not handled:
 
   $ hg incoming ../test%23bundle.hg
-  abort: repository ../test%23bundle.hg not found!
+  abort: repository ../test%23bundle.hg not found
   [255]
   $ cd ..
 
@@ -733,8 +733,8 @@
 partial history bundle, fails w/ unknown parent
 
   $ hg -R bundle.hg verify
-  abort: 00changelog.i@bbd179dfa0a7: unknown parent!
-  [255]
+  abort: 00changelog.i@bbd179dfa0a7: unknown parent
+  [50]
 
 full history bundle, refuses to verify non-local repo
 
@@ -844,7 +844,7 @@
 
   $ hg bundle -r 'public()' no-output.hg
   abort: no commits to bundle
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-bundle2-exchange.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle2-exchange.t	Tue Jan 19 21:48:43 2021 +0530
@@ -30,7 +30,8 @@
   > bundle2-output-capture=True
   > [ui]
   > ssh="$PYTHON" "$TESTDIR/dummyssh"
-  > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
+  > [command-templates]
+  > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
   > [web]
   > push_ssl = false
   > allow_push = *
@@ -986,7 +987,7 @@
   abort: remote error:
   incompatible Mercurial client; bundle2 required
   (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
-  [255]
+  [100]
   $ killdaemons.py
   $ cd ..
 
@@ -1031,7 +1032,7 @@
   abort: remote error:
   incompatible Mercurial client; bundle2 required
   (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
-  [255]
+  [100]
 
   $ killdaemons.py
 
@@ -1049,7 +1050,7 @@
   abort: remote error:
   incompatible Mercurial client; bundle2 required
   (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
-  [255]
+  [100]
   $ killdaemons.py
 
   $ hg --config devel.legacy.exchange=bundle1 clone ssh://user@dummy/bundle2onlyserver not-bundle2-ssh
@@ -1074,7 +1075,7 @@
   abort: remote error:
   incompatible Mercurial client; bundle2 required
   (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
-  [255]
+  [100]
 
   $ killdaemons.py
 
@@ -1131,7 +1132,7 @@
   abort: remote error:
   incompatible Mercurial client; bundle2 required
   (see https://www.mercurial-scm.org/wiki/IncompatibleClient)
-  [255]
+  [100]
 
 (also check with ssh)
 
--- a/tests/test-bundle2-format.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle2-format.t	Tue Jan 19 21:48:43 2021 +0530
@@ -235,7 +235,8 @@
   > evolution.createmarkers=True
   > [ui]
   > ssh="$PYTHON" "$TESTDIR/dummyssh"
-  > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
+  > [command-templates]
+  > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
   > [web]
   > push_ssl = false
   > allow_push = *
--- a/tests/test-bundle2-multiple-changegroups.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle2-multiple-changegroups.t	Tue Jan 19 21:48:43 2021 +0530
@@ -34,8 +34,8 @@
   > EOF
 
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
+  > [command-templates]
+  > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
   > EOF
 
 Start with a simple repository with a single commit
--- a/tests/test-bundle2-remote-changegroup.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-bundle2-remote-changegroup.t	Tue Jan 19 21:48:43 2021 +0530
@@ -96,7 +96,8 @@
   $ cat >> $HGRCPATH << EOF
   > [ui]
   > ssh="$PYTHON" "$TESTDIR/dummyssh"
-  > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
+  > [command-templates]
+  > log={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
   > EOF
 
   $ hg init repo
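
The ui/logtemplate hunks above (test-bookmarks-pushpull.t, test-bundle2-exchange.t, test-bundle2-format.t, test-bundle2-multiple-changegroups.t and this file) move the log template from "[ui] logtemplate" to the new "[command-templates] log" setting while keeping the template string itself unchanged. A minimal sketch of writing the new-style configuration from Python; the output path is made up for illustration:

    from pathlib import Path

    # hgrc fragment using the new section name, mirroring the template in the tests.
    Path("example-hgrc").write_text(
        "[command-templates]\n"
        "log = {rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}\n"
    )
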
--- a/tests/test-casecollision-merge.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-casecollision-merge.t	Tue Jan 19 21:48:43 2021 +0530
@@ -332,7 +332,7 @@
 
   $ hg update --check
   abort: uncommitted changes
-  [255]
+  [20]
 
   $ hg update --clean
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-cbor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-cbor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -690,7 +690,12 @@
 
         self.assertEqual(
             list(cborutil.streamencodearrayfromiter(source)),
-            [b'\x9f', b'\x43', b'foo', b'\xff',],
+            [
+                b'\x9f',
+                b'\x43',
+                b'foo',
+                b'\xff',
+            ],
         )
 
         dest = b''.join(cborutil.streamencodearrayfromiter(source))
@@ -799,7 +804,11 @@
 class SetTests(TestCase):
     def testempty(self):
         self.assertEqual(
-            list(cborutil.streamencode(set())), [b'\xd9\x01\x02', b'\x80',]
+            list(cborutil.streamencode(set())),
+            [
+                b'\xd9\x01\x02',
+                b'\x80',
+            ],
         )
 
         self.assertEqual(cborutil.decodeall(b'\xd9\x01\x02\x80'), [set()])
@@ -914,14 +923,26 @@
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\x80',])  # empty array
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',
+                b'\x81',
+                b'\x80',
+            ]
+        )  # empty array
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not allowed as set values'
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xd9\x01\x02', b'\x81', b'\xa0',])  # empty map
+        encoded = b''.join(
+            [
+                b'\xd9\x01\x02',
+                b'\x81',
+                b'\xa0',
+            ]
+        )  # empty map
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not allowed as set values'
@@ -1059,7 +1080,13 @@
         ):
             cborutil.decodeall(encoded)
 
-        encoded = b''.join([b'\xa1', b'\x80', b'\x43foo',])  # empty array
+        encoded = b''.join(
+            [
+                b'\xa1',
+                b'\x80',
+                b'\x43foo',
+            ]
+        )  # empty array
 
         with self.assertRaisesRegex(
             cborutil.CBORDecodeError, 'collections not supported as map keys'
@@ -1260,7 +1287,10 @@
 
     def testpartialinput(self):
         encoded = b''.join(
-            [b'\x82', b'\x01',]  # array of 2 elements  # integer 1
+            [
+                b'\x82',
+                b'\x01',
+            ]  # array of 2 elements  # integer 1
         )
 
         with self.assertRaisesRegex(
--- a/tests/test-check-code.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-check-code.t	Tue Jan 19 21:48:43 2021 +0530
@@ -90,6 +90,6 @@
 
 Keep Windows line endings in check
 
-  $ hg files 'set:eol(dos)'
+  $ testrepohg files 'set:eol(dos)'
   contrib/win32/hg.bat
   contrib/win32/mercurial.ini
--- a/tests/test-check-interfaces.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-check-interfaces.py	Tue Jan 19 21:48:43 2021 +0530
@@ -12,7 +12,7 @@
 
 # Only run if tests are run in a repo
 if subprocess.call(
-    ['python', '%s/hghave' % os.environ['TESTDIR'], 'test-repo']
+    [sys.executable, '%s/hghave' % os.environ['TESTDIR'], 'test-repo']
 ):
     sys.exit(80)
 
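
The test-check-interfaces.py hunk swaps a hard-coded `'python'` for `sys.executable`, so the hghave probe runs under the same interpreter as the test itself. Here is that guard pattern in isolation, as a sketch; it assumes it runs inside the test harness, where `TESTDIR` is set and exit code 80 means "skipped":

  import os
  import subprocess
  import sys

  # Probe for the required feature with the interpreter running this script;
  # a non-zero return means the feature is missing and the test should skip.
  if subprocess.call(
      [sys.executable, '%s/hghave' % os.environ['TESTDIR'], 'test-repo']
  ):
      sys.exit(80)
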
--- a/tests/test-check-pylint.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-check-pylint.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,6 +17,6 @@
   >   mercurial hgdemandimport hgext hgext3rd | sed 's/\r$//'
   Using config file *fakerc (glob) (?)
    (?)
-  ------------------------------------ (?)
-  Your code has been rated at 10.00/10 (?)
+  ------------------------------------* (glob) (?)
+  Your code has been rated at 10.00/10* (glob) (?)
    (?)
--- a/tests/test-check-rust-format.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-check-rust-format.t	Tue Jan 19 21:48:43 2021 +0530
@@ -3,7 +3,7 @@
   $ . "$TESTDIR/helpers-testrepo.sh"
 
   $ cd "$TESTDIR"/..
-  $ RUSTFMT=$(rustup which --toolchain nightly rustfmt)
+  $ RUSTFMT=$(rustup which --toolchain nightly-2020-10-04 rustfmt)
   $ for f in `testrepohg files 'glob:**/*.rs'` ; do
   >   $RUSTFMT --check --edition=2018 --unstable-features --color=never $f
   > done
--- a/tests/test-chg.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-chg.t	Tue Jan 19 21:48:43 2021 +0530
@@ -29,8 +29,8 @@
   $ chg status
   $ echo '=brokenconfig' >> $HGRCPATH
   $ chg status
-  hg: parse error at * (glob)
-  [255]
+  config error at * =brokenconfig (glob)
+  [30]
 
   $ cp $HGRCPATH.orig $HGRCPATH
 
--- a/tests/test-clone-uncompressed.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-clone-uncompressed.t	Tue Jan 19 21:48:43 2021 +0530
@@ -192,9 +192,17 @@
   transferred 96.5 KB in * seconds (* */sec) (glob)
 
   $ ls -1 clone1/.hg/cache
+  branch2-base
+  branch2-immutable
   branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
+  hgtagsfnodes1
   rbc-names-v1
   rbc-revs-v1
+  tags2
+  tags2-served
 #endif
 
 getbundle requests with stream=1 are uncompressed
@@ -265,6 +273,7 @@
   bundle2-input-part: total payload size 24
   bundle2-input-bundle: 2 parts total
   checking for updated bookmarks
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 #endif
 #if stream-bundle2
@@ -287,6 +296,7 @@
   bundle2-input-part: "listkeys" (params: 1 mandatory) supported
   bundle2-input-bundle: 2 parts total
   checking for updated bookmarks
+  updating the branch cache
   (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
 #endif
 
--- a/tests/test-clone-update-order.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-clone-update-order.t	Tue Jan 19 21:48:43 2021 +0530
@@ -22,7 +22,7 @@
 
   $ hg clone -U -u . .#other ../b -r 0 -r 1 -r 2 -b other
   abort: cannot specify both --noupdate and --updaterev
-  [255]
+  [10]
 
   $ hg clone -U .#other ../b -r 0 -r 1 -r 2 -b other
   adding changesets
--- a/tests/test-clone.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-clone.t	Tue Jan 19 21:48:43 2021 +0530
@@ -56,9 +56,17 @@
 Ensure branchcache got copied over:
 
   $ ls .hg/cache
+  branch2-base
+  branch2-immutable
   branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
+  hgtagsfnodes1
   rbc-names-v1
   rbc-revs-v1
+  tags2
+  tags2-served
 
   $ cat a
   a
@@ -73,7 +81,7 @@
 
   $ hg clone . ''
   abort: empty destination path is not valid
-  [255]
+  [10]
 
 No update, with debug option:
 
@@ -99,6 +107,7 @@
   linking: 17 files (reposimplestore !)
   linking: 18 files (reposimplestore !)
   linked 18 files (reposimplestore !)
+  updating the branch cache
 #else
   $ hg --debug clone -U . ../c --config progress.debug=true
   linking: 1 files
@@ -127,9 +136,17 @@
 Ensure branchcache got copied over:
 
   $ ls .hg/cache
+  branch2-base
+  branch2-immutable
   branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
+  hgtagsfnodes1
   rbc-names-v1
   rbc-revs-v1
+  tags2
+  tags2-served
 
   $ cat a 2>/dev/null || echo "a not present"
   a not present
@@ -189,7 +206,7 @@
 
   $ hg clone --pull a ''
   abort: empty destination path is not valid
-  [255]
+  [10]
 
 Clone to '.':
 
@@ -254,7 +271,7 @@
 
   $ hg clone --noupdate --updaterev 1 a ua
   abort: cannot specify both --noupdate and --updaterev
-  [255]
+  [10]
 
 
 Testing clone -u:
@@ -611,25 +628,25 @@
 No local source
 
   $ hg clone a b
-  abort: repository a not found!
+  abort: repository a not found
   [255]
 
 Invalid URL
 
   $ hg clone http://invalid:url/a b
   abort: error: nonnumeric port: 'url'
-  [255]
+  [100]
 
 No remote source
 
 #if windows
   $ hg clone http://$LOCALIP:3121/a b
   abort: error: * (glob)
-  [255]
+  [100]
 #else
   $ hg clone http://$LOCALIP:3121/a b
   abort: error: *refused* (glob)
-  [255]
+  [100]
 #endif
   $ rm -rf b # work around bug with http clone
 
@@ -676,7 +693,7 @@
   $ hg clone q
   destination directory: q
   abort: destination 'q' is not empty
-  [255]
+  [10]
 
 destination directory not empty
 
@@ -684,7 +701,7 @@
   $ echo stuff > a/a
   $ hg clone q a
   abort: destination 'a' is not empty
-  [255]
+  [10]
 
 
 #if unix-permissions no-root
@@ -1106,7 +1123,7 @@
   $ hg -R a id -r 0
   acb14030fe0a
   $ hg id -R remote -r 0
-  abort: repository remote not found!
+  abort: repository remote not found
   [255]
   $ hg --config share.pool=share -q clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote
   $ hg -R remote id -r 0
@@ -1176,10 +1193,10 @@
   abort: potentially unsafe url: 'ssh://-oProxyCommand=touch${IFS}owned/path'
   [255]
   $ hg clone 'ssh://fakehost|touch%20owned/path'
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg clone 'ssh://fakehost%7Ctouch%20owned/path'
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ hg clone 'ssh://-oProxyCommand=touch owned%20foo@example.com/nonexistent/path'
@@ -1192,14 +1209,14 @@
   sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
   sending hello command
   sending between command
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg clone "ssh://example.com:%26touch%20owned%20/" --debug
   running sh -c "read l; read l; read l" -p "&touch owned " example.com "hg -R . serve --stdio"
   sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
   sending hello command
   sending between command
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 #else
   $ hg clone "ssh://%3btouch%20owned%20/" --debug
@@ -1207,14 +1224,14 @@
   sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
   sending hello command
   sending between command
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg clone "ssh://example.com:%3btouch%20owned%20/" --debug
   running sh -c "read l; read l; read l" -p ';touch owned ' example.com 'hg -R . serve --stdio'
   sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
   sending hello command
   sending between command
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 #endif
 
@@ -1223,7 +1240,7 @@
   sending upgrade request: * proto=exp-ssh-v2-0003 (glob) (sshv2 !)
   sending hello command
   sending between command
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 We should not have created a file named owned - if it exists, the
--- a/tests/test-clonebundles.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-clonebundles.t	Tue Jan 19 21:48:43 2021 +0530
@@ -597,6 +597,7 @@
   added 2 changesets with 2 changes to 2 files
   new changesets 53245c60e682:aaff8d2ffbbf
   calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
+  updating the branch cache
   (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ hg clone -U --debug --config ui.available-memory=32MB http://localhost:$HGPORT gzip-too-large2
@@ -634,5 +635,6 @@
   checking for updated bookmarks
   2 local changesets published
   calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles
+  updating the branch cache
   (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
   $ killdaemons.py
--- a/tests/test-commandserver.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commandserver.t	Tue Jan 19 21:48:43 2021 +0530
@@ -90,7 +90,7 @@
   *** runcommand id
   000000000000 tip
   *** runcommand id -runknown
-  abort: unknown revision 'unknown'!
+  abort: unknown revision 'unknown'
    [255]
 
   >>> from hgclient import bprint, check, readchannel
@@ -148,7 +148,7 @@
   ...     runcommand(server, [b'log', b'-b', b'--config=alias.log=!echo pwned',
   ...                         b'default'])
   *** runcommand log -b --config=alias.log=!echo pwned default
-  abort: unknown revision '--config=alias.log=!echo pwned'!
+  abort: unknown revision '--config=alias.log=!echo pwned'
    [255]
 
 check that "histedit --commands=-" can read rules from the input channel:
@@ -211,8 +211,10 @@
   lfs.usercache=$TESTTMP/.cache/lfs
   ui.slash=True
   ui.interactive=False
+  ui.detailed-exit-code=True
   ui.merge=internal:merge
   ui.mergemarkers=detailed
+  ui.timeout.warn=15
   ui.foo=bar
   ui.nontty=true
   web.address=localhost
@@ -222,8 +224,10 @@
   *** runcommand -R foo showconfig ui defaults
   ui.slash=True
   ui.interactive=False
+  ui.detailed-exit-code=True
   ui.merge=internal:merge
   ui.mergemarkers=detailed
+  ui.timeout.warn=15
   ui.nontty=true
 #endif
 
@@ -712,7 +716,7 @@
   o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob)
   *** runcommand id
   abort: there is no Mercurial repository here (.hg not found)
-   [255]
+   [10]
 
   >>> from hgclient import check, readchannel, runcommand
   >>> @check
@@ -729,7 +733,7 @@
 
   $ cd repo
   $ hg serve --cmdserver pipe -R ../nonexistent
-  abort: repository ../nonexistent not found!
+  abort: repository ../nonexistent not found
   [255]
   $ cd ..
 
@@ -982,8 +986,8 @@
   $ cd repo3
 
   $ cat <<EOF >> $HGRCPATH
-  > [ui]
-  > logtemplate = {rev} {desc|firstline} ({files})\n
+  > [command-templates]
+  > log = {rev} {desc|firstline} ({files})\n
   > 
   > [extensions]
   > failafterfinalize = $TESTTMP/failafterfinalize.py
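
A large share of the hunks in this changeset replace the catch-all `[255]` with more specific exit codes, and the test-commandserver.t output above now lists `ui.detailed-exit-code=True` among the defaults injected by the test harness. The sketch below summarizes the codes as they appear in these test expectations; the one-line descriptions are inferred from the example messages here, not quoted from Mercurial's source:

  import subprocess

  # Exit codes as seen in the updated .t expectations of this changeset.
  DETAILED_EXIT_CODES = {
      10: "input/usage error ('empty commit message', bad option combinations)",
      20: "invalid state ('uncommitted changes', 'unresolved merge conflicts')",
      30: "configuration error ('config error at ...')",
      50: "storage-level problem ('empty username', missing revlog node)",
      100: "remote/connection error (failed http or ssh clones)",
      240: "operation stopped on unresolved merge conflicts (rebase tests)",
      250: "canceled by the user ('user quit')",
      255: "generic abort (still used where no detailed code applies)",
  }

  # Classify whatever a (deliberately failing) invocation returns.
  proc = subprocess.run(
      ["hg", "--config", "ui.detailed-exit-code=yes", "commit", "-m", ""],
      capture_output=True, text=True,
  )
  print(proc.returncode, "->",
        DETAILED_EXIT_CODES.get(proc.returncode, "unknown"))
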
--- a/tests/test-commit-amend.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commit-amend.t	Tue Jan 19 21:48:43 2021 +0530
@@ -12,7 +12,7 @@
   $ hg ci --amend
   abort: cannot amend public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
   $ hg phase -r . -f -d
 
   $ echo a >> a
@@ -83,7 +83,7 @@
   phases: 2 draft
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend
   abort: empty commit message
-  [255]
+  [10]
   $ hg summary
   parent: 1:43f1ba15f28a tip
    amend base1
@@ -350,7 +350,7 @@
   $ echo a >> a
   $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend
   abort: empty commit message
-  [255]
+  [10]
   $ hg book
      book1                     1:a3b65065808c
    * book2                     1:a3b65065808c
@@ -407,7 +407,7 @@
   (branch merge, don't forget to commit)
   $ hg ci --amend
   abort: cannot amend while merging
-  [255]
+  [20]
   $ hg ci -m 'merge'
 
 Refuse to amend if there is a merge conflict (issue5805):
@@ -423,7 +423,7 @@
 
   $ hg ci --amend
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
 
   $ hg up -qC .
 
@@ -884,7 +884,7 @@
   $ hg ci -m..
   $ hg ci --amend --close-branch -m 'closing'
   abort: can only close branch heads
-  [255]
+  [10]
 
 This silliness fails:
 
@@ -893,7 +893,7 @@
   $ echo b >> b
   $ hg ci --close-branch -m'open and close'
   abort: branch "silliness" has no heads to close
-  [255]
+  [10]
 
 Test that amend with --secret creates new secret changeset forcibly
 ---------------------------------------------------------------------
--- a/tests/test-commit-interactive-curses.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commit-interactive-curses.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#require tic
+#require curses
 
 Set up a repo
 
--- a/tests/test-commit-interactive.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commit-interactive.t	Tue Jan 19 21:48:43 2021 +0530
@@ -20,14 +20,14 @@
   [255]
   $ hg commit -i --config ui.interactive=false
   abort: running non-interactively
-  [255]
+  [10]
   $ hg commit -i empty-rw<<EOF
   > n
   > EOF
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
   abort: empty commit message
-  [255]
+  [10]
 
   $ hg tip -p
   changeset:   -1:000000000000
@@ -46,7 +46,7 @@
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
   abort: empty commit message
-  [255]
+  [10]
 
   $ hg tip -p
   changeset:   -1:000000000000
@@ -61,7 +61,7 @@
   $ touch untracked
   $ hg commit -i -m should-fail empty-rw untracked
   abort: untracked: file not tracked!
-  [255]
+  [10]
   $ rm untracked
 
 Record empty file
@@ -885,7 +885,7 @@
   (enter ? for help) [Ynesfdaq?] q
   
   abort: user quit
-  [255]
+  [250]
 
 Patterns
 
@@ -1014,7 +1014,7 @@
   (enter ? for help) [Ynesfdaq?] q
   
   abort: user quit
-  [255]
+  [250]
 
 s, all
 
@@ -1350,7 +1350,7 @@
 
   $ hg commit -i -m'will abort'
   abort: cannot partially commit a merge (use "hg commit" instead)
-  [255]
+  [10]
 
   $ hg up -C
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
@@ -1419,7 +1419,7 @@
   (enter ? for help) [Ynesfdaq?] q
   
   abort: user quit
-  [255]
+  [250]
   $ hg revert editedfile
 
 Removing changes from patch
@@ -1494,7 +1494,7 @@
   Hunk #1 FAILED at 0
   1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej
   abort: patch failed to apply
-  [255]
+  [10]
   $ cat editedfile
   This change will not be committed
   This is the second line
@@ -1540,7 +1540,7 @@
   (enter ? for help) [Ynesfdaq?] e
   
   abort: error parsing patch: unhandled transition: range -> range
-  [255]
+  [10]
 
 Exiting editor with status 1, ignores the edit but does not stop the recording
 session
@@ -1600,7 +1600,7 @@
   (enter ? for help) [Ynesfdaq?] e
   
   abort: error parsing patch: unhandled transition: file -> other
-  [255]
+  [10]
 
   $ hg up -C
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-commit-unresolved.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commit-unresolved.t	Tue Jan 19 21:48:43 2021 +0530
@@ -47,7 +47,7 @@
   $ hg abort
   abort: no merge in progress (abortflag !)
   abort: no operation in progress (abortcommand !)
-  [255]
+  [20]
 
   $ hg merge
   merging A
@@ -58,10 +58,10 @@
 
   $ hg merge --abort e4501
   abort: cannot specify a node with --abort
-  [255]
+  [10]
   $ hg merge --abort --rev e4501
   abort: cannot specify both --abort and --rev
-  [255]
+  [10]
 
 #if abortcommand
 when in dry-run mode
@@ -101,7 +101,7 @@
   $ echo "ABCD" > A
   $ hg commit -m "Merged"
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
 
 Mark the conflict as resolved and commit
 
@@ -123,7 +123,7 @@
   $ hg rm --force A
   $ hg commit -m merged
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
 
   $ hg resolve -ma
   (no more unresolved files)
@@ -145,7 +145,7 @@
 
   $ hg merge --preview --abort
   abort: cannot specify both --abort and --preview
-  [255]
+  [10]
 
   $ hg abort
   aborting the merge, updating back to 68352a18a7c4
--- a/tests/test-commit.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-commit.t	Tue Jan 19 21:48:43 2021 +0530
@@ -11,25 +11,25 @@
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m ""
   HGEDITFORM=commit.normal.normal
   abort: empty commit message
-  [255]
+  [10]
   $ hg commit -d '0 0' -m commit-1
   $ echo foo >> foo
   $ hg commit -d '1 4444444' -m commit-3
   hg: parse error: impossible time zone offset: 4444444
-  [255]
+  [10]
   $ hg commit -d '1	15.1' -m commit-4
   hg: parse error: invalid date: '1\t15.1'
-  [255]
+  [10]
   $ hg commit -d 'foo bar' -m commit-5
   hg: parse error: invalid date: 'foo bar'
-  [255]
+  [10]
   $ hg commit -d ' 1 4444' -m commit-6
   $ hg commit -d '111111111111 0' -m commit-7
   hg: parse error: date exceeds 32 bits: 111111111111
-  [255]
+  [10]
   $ hg commit -d '-111111111111 0' -m commit-7
   hg: parse error: date exceeds 32 bits: -111111111111
-  [255]
+  [10]
   $ echo foo >> foo
   $ hg commit -d '1901-12-13 20:45:52 +0000' -m commit-7-2
   $ echo foo >> foo
@@ -39,10 +39,10 @@
   2 1901-12-13 20:45:52 +0000
   $ hg commit -d '1901-12-13 20:45:51 +0000' -m commit-7
   hg: parse error: date exceeds 32 bits: -2147483649
-  [255]
+  [10]
   $ hg commit -d '-2147483649 0' -m commit-7
   hg: parse error: date exceeds 32 bits: -2147483649
-  [255]
+  [10]
 
 commit added file that has been deleted
 
@@ -54,7 +54,7 @@
   [1]
   $ hg commit -m commit-8-2 bar
   abort: bar: file not found!
-  [255]
+  [10]
 
   $ hg -q revert -a --no-backup
 
@@ -74,7 +74,7 @@
   adding dir.file
   $ hg commit -m commit-10 dir dir.file
   abort: dir: no match under directory!
-  [255]
+  [10]
 
   $ echo >> dir/file
   $ mkdir bleh
@@ -82,10 +82,10 @@
   $ cd bleh
   $ hg commit -m commit-11 .
   abort: bleh: no match under directory!
-  [255]
+  [10]
   $ hg commit -m commit-12 ../dir ../dir2
   abort: dir2: no match under directory!
-  [255]
+  [10]
   $ hg -v commit -m commit-13 ../dir
   committing files:
   dir/file
@@ -96,20 +96,20 @@
 
   $ hg commit -m commit-14 does-not-exist
   abort: does-not-exist: * (glob)
-  [255]
+  [10]
 
 #if symlink
   $ ln -s foo baz
   $ hg commit -m commit-15 baz
   abort: baz: file not tracked!
-  [255]
+  [10]
   $ rm baz
 #endif
 
   $ touch quux
   $ hg commit -m commit-16 quux
   abort: quux: file not tracked!
-  [255]
+  [10]
   $ echo >> dir/file
   $ hg -v commit -m commit-17 dir/file
   committing files:
@@ -155,7 +155,7 @@
   $ HGEDITOR=false hg ci --addremove
   adding newfile
   abort: edit failed: false exited with status 1
-  [255]
+  [250]
   $ hg status
   ? newfile
 
@@ -164,7 +164,7 @@
   $ echo foo >> foo
   $ echo fake >> .hg/requires
   $ hg commit -m bla
-  abort: repository requires features unknown to this Mercurial: fake!
+  abort: repository requires features unknown to this Mercurial: fake
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -370,7 +370,7 @@
   HG: changed changed
   HG: removed removed
   abort: empty commit message
-  [255]
+  [10]
 
 test saving last-message.txt
 
@@ -456,7 +456,7 @@
   HG: subrepo 'sub' is changed
   HG: subrepo 'sub2' is changed
   abort: empty commit message
-  [255]
+  [10]
 
   $ cat >> .hg/hgrc <<EOF
   > [committemplate]
@@ -471,7 +471,7 @@
   HG: Leave message empty to abort commit.
   HG: no bookmark is activated
   abort: empty commit message
-  [255]
+  [10]
 
   $ cat >> .hg/hgrc <<EOF
   > [committemplate]
@@ -484,7 +484,7 @@
   HG: Leave message empty to abort commit.
   HG: no bookmark is activated
   abort: empty commit message
-  [255]
+  [10]
 
   $ cat >> .hg/hgrc <<EOF
   > [committemplate]
@@ -734,7 +734,7 @@
   $ hg add foo2
   $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit
   abort: commit message unchanged
-  [255]
+  [10]
 
   $ cd ..
 
@@ -762,7 +762,7 @@
   $ hg add foo2
   $ HGEDITOR="sh $TESTTMP/notouching.sh" hg ci
   abort: commit message unchanged
-  [255]
+  [10]
   $ HGEDITOR="sh $TESTTMP/lowercaseline.sh" hg ci
   first line
   HG: this is customized commit template
--- a/tests/test-committer.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-committer.t	Tue Jan 19 21:48:43 2021 +0530
@@ -102,8 +102,8 @@
   $ hg commit -u ' ' -m commit-1
   transaction abort!
   rollback completed
-  abort: empty username!
-  [255]
+  abort: empty username
+  [50]
 
 # don't add tests here, previous test is unstable
 
--- a/tests/test-completion.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-completion.t	Tue Jan 19 21:48:43 2021 +0530
@@ -131,6 +131,7 @@
   debugsetparents
   debugsidedata
   debugssl
+  debugstrip
   debugsub
   debugsuccessorssets
   debugtagscache
@@ -241,7 +242,7 @@
   $ hg debugcomplete --options s
   hg: command 's' is ambiguous:
       serve shelve showconfig status summary
-  [255]
+  [10]
 
 Show all commands + options
   $ hg debugcommands
@@ -259,7 +260,7 @@
   cat: output, rev, decode, include, exclude, template
   clone: noupdate, updaterev, rev, branch, pull, uncompressed, stream, ssh, remotecmd, insecure
   commit: addremove, close-branch, amend, secret, edit, force-close-branch, interactive, include, exclude, message, logfile, date, user, subrepos
-  config: untrusted, edit, local, shared, global, template
+  config: untrusted, edit, local, shared, non-shared, global, template
   continue: dry-run
   copy: forget, after, at-rev, force, include, exclude, dry-run
   debugancestor: 
@@ -294,7 +295,7 @@
   debuginstall: template
   debugknown: 
   debuglabelcomplete: 
-  debuglocks: force-lock, force-wlock, set-lock, set-wlock
+  debuglocks: force-free-lock, force-free-wlock, set-lock, set-wlock
   debugmanifestfulltextcache: clear, add
   debugmergestate: style, template
   debugnamecomplete: 
@@ -319,6 +320,7 @@
   debugsetparents: 
   debugsidedata: changelog, manifest, dir
   debugssl: 
+  debugstrip: rev, force, no-backup, nobackup, , keep, bookmark, soft
   debugsub: rev
   debugsuccessorssets: closest
   debugtagscache: 
@@ -326,12 +328,12 @@
   debuguigetpass: prompt
   debuguiprompt: prompt
   debugupdatecaches: 
-  debugupgraderepo: optimize, run, backup, changelog, manifest
+  debugupgraderepo: optimize, run, backup, changelog, manifest, filelogs
   debugwalk: include, exclude
   debugwhyunstable: 
   debugwireargs: three, four, five, ssh, remotecmd, insecure
   debugwireproto: localssh, peer, noreadstderr, nologhandshake, ssh, remotecmd, insecure
-  diff: rev, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
+  diff: rev, from, to, change, text, git, binary, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, ignore-space-at-eol, unified, stat, root, include, exclude, subrepos
   export: bookmark, output, switch-parent, rev, text, git, binary, nodates, template
   files: rev, print0, include, exclude, template, subrepos
   forget: interactive, include, exclude, dry-run
@@ -344,7 +346,7 @@
   incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
   init: ssh, remotecmd, insecure
   locate: rev, print0, fullpath, include, exclude
-  log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
+  log: follow, follow-first, date, copies, keyword, rev, line-range, removed, only-merges, user, only-branch, branch, bookmark, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude
   manifest: rev, all, template
   merge: force, rev, preview, abort, tool
   outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos
@@ -352,7 +354,7 @@
   paths: template
   phase: public, draft, secret, force, rev
   pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
-  push: force, rev, bookmark, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
+  push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
   recover: verify
   remove: after, force, subrepos, include, exclude, dry-run
   rename: after, at-rev, force, include, exclude, dry-run
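
The test-completion.t listing above records the new `--from`/`--to` options for `hg diff` (exercised at the end of this section in test-diff-change.t). A small sketch of calling them from a script; the repository path and revision numbers are placeholders:

  import subprocess

  REPO = "/tmp/demo-repo"  # hypothetical checkout

  # Equivalent of `hg diff --from 0 --to 2`: compare two revisions directly,
  # mirroring the new flags listed in the completion output above.
  diff = subprocess.run(
      ["hg", "-R", REPO, "diff", "--from", "0", "--to", "2"],
      capture_output=True, text=True,
  )
  print(diff.stdout)
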
--- a/tests/test-config.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-config.t	Tue Jan 19 21:48:43 2021 +0530
@@ -7,8 +7,8 @@
   > novaluekey
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1: novaluekey
-  [255]
+  config error at $TESTTMP/.hg/hgrc:1: novaluekey
+  [30]
 
 Invalid syntax: no key
 
@@ -16,8 +16,8 @@
   > =nokeyvalue
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1: =nokeyvalue
-  [255]
+  config error at $TESTTMP/.hg/hgrc:1: =nokeyvalue
+  [30]
 
 Test hint about invalid syntax from leading white space
 
@@ -25,18 +25,16 @@
   >  key=value
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1:  key=value
-  unexpected leading whitespace
-  [255]
+  config error at $TESTTMP/.hg/hgrc:1: unexpected leading whitespace:  key=value
+  [30]
 
   $ cat > .hg/hgrc << EOF
   >  [section]
   > key=value
   > EOF
   $ hg showconfig
-  hg: parse error at $TESTTMP/.hg/hgrc:1:  [section]
-  unexpected leading whitespace
-  [255]
+  config error at $TESTTMP/.hg/hgrc:1: unexpected leading whitespace:  [section]
+  [30]
 
 Reset hgrc
 
@@ -333,7 +331,7 @@
 
   $ HGEDITOR=false hg config --edit
   abort: edit failed: false exited with status 1
-  [255]
+  [10]
 
 config affected by environment variables
 
--- a/tests/test-conflict.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-conflict.t	Tue Jan 19 21:48:43 2021 +0530
@@ -80,6 +80,8 @@
    }
   ]
 
+  $ hg status -0
+  M a\x00? a.orig\x00 (no-eol) (esc)
   $ cat a
   Small Mathematical Series.
   1
@@ -102,6 +104,35 @@
 
   $ hg up -q --clean .
   $ cat <<EOF >> .hg/hgrc
+  > [command-templates]
+  > mergemarker = '{author} {rev}'
+  > EOF
+
+  $ hg merge 1
+  merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+
+  $ cat a
+  Small Mathematical Series.
+  1
+  2
+  3
+  <<<<<<< working copy: test 2
+  6
+  8
+  =======
+  4
+  5
+  >>>>>>> merge rev:    test 1
+  Hop we are done.
+
+Verify custom conflict markers with legacy config name
+
+  $ hg up -q --clean .
+  $ cat <<EOF >> .hg/hgrc
   > [ui]
   > mergemarkertemplate = '{author} {rev}'
   > EOF
@@ -131,8 +162,8 @@
 
   $ hg up -q --clean .
   $ cat >> .hg/hgrc <<EOF
-  > [ui]
-  > mergemarkertemplate={author} {rev}\nfoo\nbar\nbaz
+  > [command-templates]
+  > mergemarker={author} {rev}\nfoo\nbar\nbaz
   > EOF
 
   $ hg -q merge 1
@@ -170,8 +201,8 @@
   $ hg --encoding utf-8 commit --logfile logfile
 
   $ cat >> .hg/hgrc <<EOF
-  > [ui]
-  > mergemarkertemplate={desc|firstline}
+  > [command-templates]
+  > mergemarker={desc|firstline}
   > EOF
 
   $ hg -q --encoding utf-8 merge 1
@@ -251,6 +282,80 @@
   >>>>>>> merge rev
   Hop we are done.
 
+internal:mergediff
+
+  $ hg co -C 1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat << EOF > a
+  > Small Mathematical Series.
+  > 1
+  > 2
+  > 3
+  > 4
+  > 4.5
+  > 5
+  > Hop we are done.
+  > EOF
+  $ hg co -m 2 -t internal:mergediff
+  merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+  $ cat a
+  Small Mathematical Series.
+  1
+  2
+  3
+  <<<<<<<
+  ------- base
+  +++++++ working copy
+   4
+  +4.5
+   5
+  ======= destination
+  6
+  8
+  >>>>>>>
+  Hop we are done.
+Test the same thing as above, but modify the file a bit more so that we instead
+get the working copy in full and the diff from base to destination.
+  $ hg co -C 1
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cat << EOF > a
+  > Small Mathematical Series.
+  > 1
+  > 2
+  > 3.5
+  > 4.5
+  > 5.5
+  > Hop we are done.
+  > EOF
+  $ hg co -m 2 -t internal:mergediff
+  merging a
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
+  $ cat a
+  Small Mathematical Series.
+  1
+  2
+  <<<<<<<
+  ======= working copy
+  3.5
+  4.5
+  5.5
+  ------- base
+  +++++++ destination
+   3
+  -4
+  -5
+  +6
+  +8
+  >>>>>>>
+  Hop we are done.
+
 Add some unconflicting changes on each head, to make sure we really
 are merging, unlike :local and :other
 
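
The new `internal:mergediff` blocks above show the marker layout this changeset starts testing: one side of the conflict is kept verbatim after `======= <label>`, while the other is rendered as a diff against the base under `------- base` / `+++++++ <label>`, with ` `, `-` and `+` line prefixes. The following is a small illustrative parser for one such region, written against the exact marker strings shown above; it is a reading aid, not Mercurial's implementation:

  def parse_mergediff_region(lines):
      """Split one internal:mergediff conflict region into labelled parts.

      `lines` are the lines strictly between the '<<<<<<<' and '>>>>>>>'
      markers.  The side introduced by '======= <label>' is stored verbatim;
      the side introduced by '+++++++ <label>' is a diff against the base,
      whose lines keep their ' ', '+' or '-' prefixes.
      """
      parts = {}
      label = None
      for line in lines:
          if line.startswith('------- base'):
              label = None  # header only; the diff side is named on the next line
          elif line.startswith('+++++++ '):
              label = line[len('+++++++ '):].strip()
              parts[label] = []
          elif line.startswith('======= '):
              label = line[len('======= '):].strip()
              parts[label] = []
          elif label is not None:
              parts[label].append(line)
      return parts

  # The first conflict region from the test above, without its outer markers.
  region = [
      '------- base',
      '+++++++ working copy',
      ' 4',
      '+4.5',
      ' 5',
      '======= destination',
      '6',
      '8',
  ]
  print(parse_mergediff_region(region))
  # {'working copy': [' 4', '+4.5', ' 5'], 'destination': ['6', '8']}
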
--- a/tests/test-confused-revert.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-confused-revert.t	Tue Jan 19 21:48:43 2021 +0530
@@ -61,7 +61,7 @@
   $ hg revert
   abort: uncommitted merge with no revision specified
   (use 'hg update' or see 'hg help revert')
-  [255]
+  [10]
 
 Revert should be ok now:
 
--- a/tests/test-convert-filemap.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-convert-filemap.t	Tue Jan 19 21:48:43 2021 +0530
@@ -292,9 +292,9 @@
   $ rm -rf source/.hg/store/data/dir/file4
 #endif
   $ hg -q convert --filemap renames.fmap --datesort source dummydest
-  abort: data/dir/file3.i@e96dce0bc6a2: no match found! (reporevlogstore !)
-  abort: data/dir/file3/index@e96dce0bc6a2: no node! (reposimplestore !)
-  [255]
+  abort: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !)
+  abort: data/dir/file3/index@e96dce0bc6a2: no node (reposimplestore !)
+  [50]
   $ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo
   ignoring: data/dir/file3.i@e96dce0bc6a2: no match found (reporevlogstore !)
   ignoring: data/dir/file4.i@6edd55f559cd: no match found (reporevlogstore !)
--- a/tests/test-convert-git.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-convert-git.t	Tue Jan 19 21:48:43 2021 +0530
@@ -332,8 +332,8 @@
 
 input validation
   $ hg convert --config convert.git.similarity=foo --datesort git-repo2 fullrepo
-  abort: convert.git.similarity is not a valid integer ('foo')
-  [255]
+  config error: convert.git.similarity is not a valid integer ('foo')
+  [30]
   $ hg convert --config convert.git.similarity=-1 --datesort git-repo2 fullrepo
   abort: similarity must be between 0 and 100
   [255]
--- a/tests/test-copies-chain-merge.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copies-chain-merge.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,8 +17,8 @@
   $ cat << EOF >> $HGRCPATH
   > [diff]
   > git=yes
-  > [ui]
-  > logtemplate={rev} {desc}\n
+  > [command-templates]
+  > log={rev} {desc}\n
   > EOF
 
 #if compatibility
@@ -585,6 +585,97 @@
 
   $ hg up null --quiet
 
+Merging a branch where a rename was deleted with a branch where the same file was renamed
+------------------------------------------------------------------------------------------
+
+Create a "conflicting" merge where `d` gets removed on one branch before its
+rename information actually conflicts with the other branch.
+
+(the copy information from the branch that was not deleted should win).
+
+  $ hg up 'desc("i-0")'
+  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg mv b d
+  $ hg ci -m "h-1: b -(move)-> d"
+  created new head
+
+  $ hg up 'desc("c-1")'
+  1 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ hg merge 'desc("h-1")'
+  1 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg ci -m "mCH-delete-before-conflict-m-0"
+
+  $ hg up 'desc("h-1")'
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg merge 'desc("c-1")'
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ hg ci -m "mHC-delete-before-conflict-m-0"
+  created new head
+  $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))'
+  @    36 mHC-delete-before-conflict-m-0
+  |\
+  +---o  35 mCH-delete-before-conflict-m-0
+  | |/
+  | o  34 h-1: b -(move)-> d
+  | |
+  o |  6 c-1 delete d
+  | |
+  o |  2 i-2: c -move-> d
+  | |
+  o |  1 i-1: a -move-> c
+  |/
+  o  0 i-0 initial commit: a b h
+  
+
+
+Summary of all created cases
+----------------------------
+
+  $ hg up --quiet null
+
+(This exists to help keep a compact list of the various cases we have built)
+
+  $ hg log -T '{desc|firstline}\n'| sort
+  a-1: d -move-> e
+  a-2: e -move-> f
+  b-1: b update
+  c-1 delete d
+  d-1 delete d
+  d-2 re-add d
+  e-1 b -move-> g
+  e-2 g -move-> f
+  f-1: rename h -> i
+  f-2: rename i -> d
+  g-1: update d
+  h-1: b -(move)-> d
+  i-0 initial commit: a b h
+  i-1: a -move-> c
+  i-2: c -move-> d
+  mABm-0 simple merge - the other way
+  mAEm-0 simple merge - one way
+  mBAm-0 simple merge - one way
+  mBC-revert-m-0
+  mBCm-0 simple merge - one way
+  mBCm-1 re-add d
+  mBDm-0 simple merge - one way
+  mBFm-0 simple merge - one way
+  mCB-revert-m-0
+  mCBm-0 simple merge - the other way
+  mCBm-1 re-add d
+  mCGm-0
+  mCH-delete-before-conflict-m-0
+  mDBm-0 simple merge - the other way
+  mDGm-0 simple merge - one way
+  mEAm-0 simple merge - the other way
+  mFBm-0 simple merge - the other way
+  mFGm-0 simple merge - one way
+  mGCm-0
+  mGDm-0 simple merge - the other way
+  mGFm-0 simple merge - the other way
+  mHC-delete-before-conflict-m-0
+
 
 Test that sidedata computations during upgrades are correct
 ===========================================================
@@ -605,6 +696,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no    yes      no
   persistent-nodemap:  no     no      no
@@ -619,6 +711,11 @@
      preserved: * (glob)
      added: exp-copies-sidedata-changeset, exp-sidedata-flag
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
 #endif
 
 
@@ -801,6 +898,20 @@
    entry-0014 size 14
     '\x00\x00\x00\x01\x10\x00\x00\x00\x01\x00\x00\x00\x00d'
   salvaged   : d, ;
+  ##### revision 34 #####
+  1 sidedata entries
+   entry-0014 size 24
+    '\x00\x00\x00\x02\x0c\x00\x00\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x02\x00\x00\x00\x00bd'
+  removed    : b, ;
+  added    p1: d, b;
+  ##### revision 35 #####
+  1 sidedata entries
+   entry-0014 size 4
+    '\x00\x00\x00\x00'
+  ##### revision 36 #####
+  1 sidedata entries
+   entry-0014 size 4
+    '\x00\x00\x00\x00'
 
 #endif
 
@@ -808,6 +919,19 @@
 Test copy information chaining
 ==============================
 
+Check that matching only affects the destination and not intermediate paths
+-------------------------------------------------------------------------
+
+The two status calls should give the same value for f
+
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")'
+  A f
+    a
+  R a
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("a-2")' f
+  A f
+    a (no-changeset no-compatibility !)
+
 merging with unrelated change does not interfere with the renames
 ---------------------------------------------------------------
 
@@ -1003,29 +1127,15 @@
 
 Log output should not include a merge commit as it did not happen
 
-#if no-changeset
-  $ hg log -Gfr 'desc("mBDm-0")' d
-  o  8 d-2 re-add d
-  |
-  ~
-#else
   $ hg log -Gfr 'desc("mBDm-0")' d
   o  8 d-2 re-add d
   |
   ~
-#endif
 
-#if no-changeset
   $ hg log -Gfr 'desc("mDBm-0")' d
   o  8 d-2 re-add d
   |
   ~
-#else
-  $ hg log -Gfr 'desc("mDBm-0")' d
-  o  8 d-2 re-add d
-  |
-  ~
-#endif
 
   $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mBDm-0")'
   M b
@@ -1228,6 +1338,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mBFm-0")' d
   o  22 f-2: rename i -> d
   |
@@ -1243,6 +1354,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mFBm-0")' d
   o  22 f-2: rename i -> d
   |
@@ -1312,6 +1424,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mDGm-0")' d
   o    26 mDGm-0 simple merge - one way
   |\
@@ -1340,6 +1453,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mDGm-0")' d
   o    26 mDGm-0 simple merge - one way
   |\
@@ -1360,11 +1474,6 @@
 This case is similar to BF/FB, but an actual merge happens, so both side of the
 history are relevant.
 
-Note:
-| In this case, the merge get conflicting information since on one side we have
-| "a -> c -> d". and one the other one we have "h -> i -> d".
-|
-| The current code arbitrarily pick one side
 
   $ hg log -G --rev '::(desc("mGFm")+desc("mFGm"))'
   o    29 mGFm-0 simple merge - the other way
@@ -1383,6 +1492,33 @@
   |
   o  0 i-0 initial commit: a b h
   
+
+Note:
+| In this case, the merge gets conflicting information since on one side we have
+| "a -> c -> d", and on the other one we have "h -> i -> d".
+|
+| The current code arbitrarily picks one side depending on the ordering of the merged hashes:
+
+In this case, the file hash from "f-2" is lower, so it will be `p1` of the resulting filenode and its copy tracing information will win (and trace back to "h"):
+
+Details on this hash ordering pick:
+
+  $ hg manifest --debug 'desc("g-1")' | egrep 'd$'
+  f2b277c39e0d2bbac99d8aae075c0d8b5304d266 644   d (no-changeset !)
+  4ff57b4e8dceedb487e70e6965ea188a7c042cca 644   d (changeset !)
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("g-1")' d
+  A d
+    a (no-changeset no-compatibility !)
+
+  $ hg manifest --debug 'desc("f-2")' | egrep 'd$'
+  4a067cf8965d1bfff130057ade26b44f580231be 644   d (no-changeset !)
+  fe6f8b4f507fe3eb524c527192a84920a4288dac 644   d (changeset !)
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("f-2")' d
+  A d
+    h (no-changeset no-compatibility !)
+
+Copy tracing data on the resulting merge:
+
   $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mFGm-0")'
   A d
     h
@@ -1432,6 +1568,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mFGm-0")' d
   o    28 mFGm-0 simple merge - one way
   |\
@@ -1461,6 +1598,7 @@
   o  0 i-0 initial commit: a b h
   
 #else
+BROKEN: `hg log --follow <file>` relies on filelog metadata to work
   $ hg log -Gfr 'desc("mGFm-0")' d
   o    29 mGFm-0 simple merge - the other way
   |\
@@ -1566,3 +1704,51 @@
   A d
   $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mCB-revert-m-0")'
   $ hg status --copies --rev 'desc("b-1")' --rev 'desc("mBC-revert-m-0")'
+
+
+Merging a branch where a rename was deleted with a branch where the same file was renamed
+------------------------------------------------------------------------------------------
+
+Create a "conflicting" merge where `d` gets removed on one branch before its
+rename information actually conflicts with the other branch.
+
+(the copy information from the branch that was not deleted should win).
+
+  $ hg log -G --rev '::(desc("mCH-delete-before-conflict-m")+desc("mHC-delete-before-conflict-m"))'
+  o    36 mHC-delete-before-conflict-m-0
+  |\
+  +---o  35 mCH-delete-before-conflict-m-0
+  | |/
+  | o  34 h-1: b -(move)-> d
+  | |
+  o |  6 c-1 delete d
+  | |
+  o |  2 i-2: c -move-> d
+  | |
+  o |  1 i-1: a -move-> c
+  |/
+  o  0 i-0 initial commit: a b h
+  
+
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mCH-delete-before-conflict-m")'
+  A d
+    b (no-compatibility no-changeset !)
+  R a
+  R b
+  $ hg status --copies --rev 'desc("i-0")' --rev 'desc("mHC-delete-before-conflict-m")'
+  A d
+    b
+  R a
+  R b
+  $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mCH-delete-before-conflict-m")'
+  A d
+    b
+  R b
+  $ hg status --copies --rev 'desc("c-1")' --rev 'desc("mHC-delete-before-conflict-m")'
+  A d
+    b
+  R b
+  $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mCH-delete-before-conflict-m")'
+  R a
+  $ hg status --copies --rev 'desc("h-1")' --rev 'desc("mHC-delete-before-conflict-m")'
+  R a
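
The restored note above explains that the merge arbitrarily picks a side based on the ordering of the merged hashes: the lower filenode becomes `p1`, and its copy trace wins. Using the two no-changeset manifest hashes printed by the test, a one-line comparison confirms why `f-2` wins and the resulting `d` traces back to `h` (this only illustrates the note; it is not the code that makes the decision):

  # Filenode hashes for 'd' as printed by `hg manifest --debug` above
  # (the no-changeset variant of the test).
  D_IN_G1 = "f2b277c39e0d2bbac99d8aae075c0d8b5304d266"
  D_IN_F2 = "4a067cf8965d1bfff130057ade26b44f580231be"

  # Comparing the lowercase hex strings orders them the same way as the raw
  # node bytes; the smaller one is taken as p1, so f-2's copy information
  # (d <- i <- h) is the one that survives.
  winner = "f-2" if D_IN_F2 < D_IN_G1 else "g-1"
  print("p1 side:", winner)  # -> f-2
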
--- a/tests/test-copies-in-changeset.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copies-in-changeset.t	Tue Jan 19 21:48:43 2021 +0530
@@ -37,6 +37,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes    yes      no
   persistent-nodemap:  no     no      no
@@ -50,6 +51,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -344,7 +346,7 @@
   $ hg mv a b
   $ hg ci -qm 'rename a to b'
   $ hg rebase -d 1 --config rebase.experimental.inmemory=yes
-  rebasing 2:* "rename a to b" (tip) (glob)
+  rebasing 2:* tip "rename a to b" (glob)
   merging a and b to b
   saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/*-*-rebase.hg (glob)
   $ hg st --change . --copies
@@ -417,6 +419,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes    yes      no
   persistent-nodemap:  no     no      no
@@ -442,6 +445,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes    yes      no
   persistent-nodemap:  no     no      no
@@ -469,6 +473,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes    yes      no
   persistent-nodemap:  no     no      no
--- a/tests/test-copies.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copies.t	Tue Jan 19 21:48:43 2021 +0530
@@ -462,6 +462,74 @@
   $ hg debugpathcopies 0 5
   x -> z
 
+Create x and y, then remove y and rename x to y on one side of the merge, and
+modify x on the other side. The modification to x from the second side
+should be propagated to y.
+  $ newrepo
+  $ echo original > x
+  $ hg add x
+  $ echo unrelated > y
+  $ hg add y
+  $ hg commit -m 'add x and y'
+  $ hg remove y
+  $ hg commit -m 'remove y'
+  $ hg rename x y
+  $ hg commit -m 'rename x to y'
+  $ hg checkout -q 0
+  $ echo modified > x
+  $ hg commit -m 'modify x'
+  created new head
+  $ hg l
+  @  3 modify x
+  |  x
+  | o  2 rename x to y
+  | |  x y
+  | o  1 remove y
+  |/   y
+  o  0 add x and y
+     x y
+#if filelog
+  $ hg merge 2
+  file 'x' was deleted in other [merge rev] but was modified in local [working copy].
+  You can use (c)hanged version, (d)elete, or leave (u)nresolved.
+  What do you want to do? u
+  1 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+This should ideally be "modified", but we will probably not be able to fix
+that in the filelog case.
+  $ cat y
+  original
+#else
+  $ hg merge 2
+  merging x and y to y
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ cat y
+  modified
+#endif
+Same as above, but in the opposite direction
+#if filelog
+  $ hg co -qC 2
+  $ hg merge 3
+  file 'x' was deleted in local [working copy] but was modified in other [merge rev].
+  You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
+  What do you want to do? u
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
+  [1]
+BROKEN: should be "modified"
+  $ cat y
+  original
+#else
+  $ hg co -qC 2
+  $ hg merge 3
+  merging y and x to y
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ cat y
+  modified
+#endif
 
 Create x and y, then rename x to z on one side of merge, and rename y to z and
 then delete z on the other side.
@@ -502,7 +570,7 @@
   $ hg debugpathcopies 2 4
   x -> z (no-filelog !)
   $ hg debugpathcopies 0 4
-  x -> z (filelog !)
+  x -> z (no-changeset no-compatibility !)
   $ hg debugpathcopies 1 5
   $ hg debugpathcopies 2 5
   x -> z (no-filelog !)
@@ -625,8 +693,8 @@
      a
 
   $ hg rebase -r . -d 2 -t :other
-  rebasing 5:5018b1509e94 "added willconflict and d" (tip) (no-changeset !)
-  rebasing 5:af8d273bf580 "added willconflict and d" (tip) (changeset !)
+  rebasing 5:5018b1509e94 tip "added willconflict and d" (no-changeset !)
+  rebasing 5:af8d273bf580 tip "added willconflict and d" (changeset !)
 
   $ hg up 3 -q
   $ hg l --hidden
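
The test-copies.t hunks above rely on `hg debugpathcopies` and `hg status --copies` to inspect how renames propagate between revisions. A short sketch of scripting the same queries; repository path and revisions are placeholders:

  import subprocess

  REPO = "/tmp/demo-repo"  # hypothetical checkout

  def hg(*args):
      return subprocess.run(["hg", "-R", REPO, *args],
                            capture_output=True, text=True).stdout

  # Copies recorded between two revisions, e.g. "x -> z" in the test output.
  print(hg("debugpathcopies", "0", "4"))

  # Status with copy sources shown as indented lines under added files.
  print(hg("status", "--copies", "--rev", "0", "--rev", "4"))
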
--- a/tests/test-copy-move-merge.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copy-move-merge.t	Tue Jan 19 21:48:43 2021 +0530
@@ -100,10 +100,10 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: b8bf91eeebbc, local: add3f11052fa+, remote: 17c05bb7fcb6
+  starting 4 threads for background file closing (?)
    preserving a for resolve of b
    preserving a for resolve of c
   removing a
-  starting 4 threads for background file closing (?)
    b: remote moved from a -> m (premerge)
   picked tool ':merge' for b (binary False symlink False changedelete False)
   merging a and b to b
@@ -137,7 +137,7 @@
 
   $ hg up -qC 2
   $ hg rebase --keep -d 1 -b 2 --config extensions.rebase=
-  rebasing 2:add3f11052fa "other" (tip)
+  rebasing 2:add3f11052fa tip "other"
   merging b and a to b
   merging c and a to c
 
@@ -156,7 +156,7 @@
   $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.copytrace=off --config ui.interactive=True << EOF
   > c
   > EOF
-  rebasing 2:add3f11052fa "other" (tip)
+  rebasing 2:add3f11052fa tip "other"
   file 'a' was deleted in local [dest] but was modified in other [source].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? c
@@ -238,7 +238,7 @@
 file is copied from `0:a`, so the file history of the `3:b` should trace directly to `0:a`.
 
   $ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.copytrace=off
-  rebasing 3:47e1a9e6273b "copy a->b (2)" (tip)
+  rebasing 3:47e1a9e6273b tip "copy a->b (2)"
   saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-rebase.hg
 
   $ hg log -G -f b
--- a/tests/test-copy.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copy.t	Tue Jan 19 21:48:43 2021 +0530
@@ -9,7 +9,7 @@
   $ hg copy a b
   $ hg --config ui.portablefilenames=abort copy a con.xml
   abort: filename contains 'con', which is reserved on Windows: con.xml
-  [255]
+  [10]
   $ hg status
   A b
   $ hg sum
@@ -115,7 +115,7 @@
   $ hg mv foo bar
   foo: not copying - file is not managed
   abort: no files to copy
-  [255]
+  [10]
   $ hg st -A
   ? foo
 respects ui.relative-paths
@@ -124,15 +124,15 @@
   $ hg mv ../foo ../bar
   ../foo: not copying - file is not managed
   abort: no files to copy
-  [255]
+  [10]
   $ hg mv ../foo ../bar --config ui.relative-paths=yes
   ../foo: not copying - file is not managed
   abort: no files to copy
-  [255]
+  [10]
   $ hg mv ../foo ../bar --config ui.relative-paths=no
   foo: not copying - file is not managed
   abort: no files to copy
-  [255]
+  [10]
   $ cd ..
   $ rmdir dir
   $ hg add foo
--- a/tests/test-copytrace-heuristics.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-copytrace-heuristics.t	Tue Jan 19 21:48:43 2021 +0530
@@ -54,7 +54,7 @@
      desc: initial
 
   $ hg rebase -s . -d 1
-  rebasing 2:557f403c0afd "mod a, mod dir/file.txt" (tip)
+  rebasing 2:557f403c0afd tip "mod a, mod dir/file.txt"
   merging b and a to b
   merging dir2/file.txt and dir/file.txt to dir2/file.txt
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/557f403c0afd-9926eeff-rebase.hg
@@ -87,12 +87,12 @@
      desc: initial
 
   $ hg rebase -s . -d 1
-  rebasing 2:d526312210b9 "mode a" (tip)
+  rebasing 2:d526312210b9 tip "mode a"
   file 'a' was deleted in local [dest] but was modified in other [source].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ cd ..
   $ rm -rf repo
@@ -126,7 +126,7 @@
      desc: initial
 
   $ hg rebase -s . -d 2
-  rebasing 3:9d5cf99c3d9f "mod a" (tip)
+  rebasing 3:9d5cf99c3d9f tip "mod a"
   merging b and a to b
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9d5cf99c3d9f-f02358cc-rebase.hg
   $ cd ..
@@ -161,7 +161,7 @@
      desc: initial
 
   $ hg rebase -s . -d 0
-  rebasing 3:fbe97126b396 "mod b" (tip)
+  rebasing 3:fbe97126b396 tip "mod b"
   merging a and b to a
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fbe97126b396-cf5452a1-rebase.hg
   $ cd ..
@@ -198,7 +198,7 @@
      desc: initial
 
   $ hg rebase -s . -d 2
-  rebasing 3:6b2f4cece40f "mod dir/a" (tip)
+  rebasing 3:6b2f4cece40f tip "mod dir/a"
   merging dir/b and dir/a to dir/b
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/6b2f4cece40f-503efe60-rebase.hg
   $ cd ..
@@ -243,13 +243,13 @@
 With small limit
 
   $ hg rebase -s 2 -d 1 --config experimental.copytrace.movecandidateslimit=0
-  rebasing 2:ef716627c70b "mod a" (tip)
+  rebasing 2:ef716627c70b tip "mod a"
   skipping copytracing for 'a', more candidates than the limit: 7
   file 'a' was deleted in local [dest] but was modified in other [source].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg rebase --abort
   rebase aborted
@@ -257,7 +257,7 @@
 With default limit which is 100
 
   $ hg rebase -s 2 -d 1
-  rebasing 2:ef716627c70b "mod a" (tip)
+  rebasing 2:ef716627c70b tip "mod a"
   merging foo and a to foo
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
 
@@ -323,7 +323,7 @@
      desc: initial
 
   $ hg rebase -s . -d 1
-  rebasing 2:a33d80b6e352 "mv dir/ dir2/" (tip)
+  rebasing 2:a33d80b6e352 tip "mv dir/ dir2/"
   merging dir/a and dir2/a to dir2/a
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a33d80b6e352-fecb9ada-rebase.hg
   $ cd ..
@@ -358,7 +358,7 @@
   o  rev: 0
      desc: initial
   $ hg rebase -s . -d 2
-  rebasing 3:d41316942216 "mod a" (tip)
+  rebasing 3:d41316942216 tip "mod a"
   merging c and a to c
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d41316942216-2b5949bc-rebase.hg
 
@@ -431,7 +431,7 @@
      desc: initial
 
   $ hg rebase -s . -d 2
-  rebasing 3:ef716627c70b "mod a" (tip)
+  rebasing 3:ef716627c70b tip "mod a"
   merging b and a to b
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
   $ ls -A
@@ -504,7 +504,7 @@
      desc: initial
 
   $ hg rebase -s . -d 1
-  rebasing 2:ef716627c70b "mod a" (tip)
+  rebasing 2:ef716627c70b tip "mod a"
   merging b and a to b
   merging c and a to c
   saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/ef716627c70b-24681561-rebase.hg
@@ -631,7 +631,7 @@
      desc: initial
 
   $ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
-  rebasing 2:6207d2d318e7 "mod a" (tip)
+  rebasing 2:6207d2d318e7 tip "mod a"
   merging dir2/b and dir1/a to dir2/b
   saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/6207d2d318e7-1c9779ad-rebase.hg
   $ cat dir2/b
@@ -669,7 +669,7 @@
      desc: initial
 
   $ hg rebase -s . -d 1 --config experimental.copytrace.sourcecommitlimit=100
-  rebasing 2:e8919e7df8d0 "mv dir1 dir2" (tip)
+  rebasing 2:e8919e7df8d0 tip "mv dir1 dir2"
   saved backup bundle to $TESTTMP/repo/repo/.hg/strip-backup/e8919e7df8d0-f62fab62-rebase.hg
   $ ls dir2
   a
@@ -711,7 +711,7 @@
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 But when we have "sourcecommitlimit > (no. of drafts from base to c1)", we do
 fullcopytracing
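
test-copytrace-heuristics.t above exercises the two knobs bounding heuristic copy tracing, `experimental.copytrace.movecandidateslimit` and `experimental.copytrace.sourcecommitlimit`, and now expects exit code 240 when a rebase is left with unresolved conflicts. A sketch of passing the limit on a rebase invocation, as the test does; repository path and revisions are placeholders:

  import subprocess

  REPO = "/tmp/demo-repo"  # hypothetical checkout

  # Mirror of the test invocation: rebase with an explicit candidate limit.
  # With limit 0 the test shows copytracing being skipped for the renamed file.
  proc = subprocess.run(
      [
          "hg", "-R", REPO,
          "--config", "extensions.rebase=",
          "--config", "experimental.copytrace.movecandidateslimit=0",
          "rebase", "-s", "2", "-d", "1",
      ],
      capture_output=True, text=True,
  )
  print(proc.returncode)  # 240 in the test when conflicts stay unresolved
  print(proc.stdout)
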
--- a/tests/test-default-push.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-default-push.t	Tue Jan 19 21:48:43 2021 +0530
@@ -18,9 +18,9 @@
 Push should provide a hint when both 'default' and 'default-push' not set:
   $ cd c
   $ hg push --config paths.default=
-  abort: default repository not configured!
+  config error: default repository not configured!
   (see 'hg help config.paths')
-  [255]
+  [30]
 
   $ cd ..
 
@@ -75,7 +75,7 @@
 Pushing to a path that isn't defined should not fall back to default
 
   $ hg --cwd b push doesnotexist
-  abort: repository doesnotexist does not exist!
+  abort: repository doesnotexist does not exist
   [255]
 
 :pushurl is used when defined
@@ -137,13 +137,13 @@
   $ hg --config 'paths.default:pushrev=notdefined()' push
   pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
   hg: parse error: unknown identifier: notdefined
-  [255]
+  [10]
 
   $ hg --config 'paths.default:pushrev=(' push
   pushing to file:/*/$TESTTMP/pushurlsource/../pushurldest (glob)
   hg: parse error at 1: not a prefix: end
   ((
     ^ here)
-  [255]
+  [10]
 
   $ cd ..
--- a/tests/test-demandimport.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-demandimport.py	Tue Jan 19 21:48:43 2021 +0530
@@ -14,7 +14,7 @@
 
 # Only run if demandimport is allowed
 if subprocess.call(
-    ['python', '%s/hghave' % os.environ['TESTDIR'], 'demandimport']
+    [os.environ['PYTHON'], '%s/hghave' % os.environ['TESTDIR'], 'demandimport']
 ):
     sys.exit(80)
 
--- a/tests/test-devel-warnings.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-devel-warnings.t	Tue Jan 19 21:48:43 2021 +0530
@@ -104,6 +104,7 @@
    */hg:* in <module> (glob) (?)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py* in callcatch (glob)
@@ -120,6 +121,7 @@
    */hg:* in <module> (glob) (?)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -142,6 +144,7 @@
    */mercurial/commandserver.py:* in runcommand (glob)
    */mercurial/commandserver.py:* in _dispatchcommand (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -184,6 +187,7 @@
    */hg:* in <module> (glob) (?)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py* in callcatch (glob)
@@ -201,6 +205,7 @@
    */hg:* in <module> (glob)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -223,6 +228,7 @@
    */mercurial/commandserver.py:* in runcommand (glob)
    */mercurial/commandserver.py:* in _dispatchcommand (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -247,6 +253,7 @@
    */hg:* in <module> (glob) (?)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py* in callcatch (glob)
@@ -271,6 +278,7 @@
    */hg:* in <module> (glob)
    */mercurial/dispatch.py:* in run (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -293,6 +301,7 @@
    */mercurial/commandserver.py:* in runcommand (glob)
    */mercurial/commandserver.py:* in _dispatchcommand (glob)
    */mercurial/dispatch.py:* in dispatch (glob)
+   */mercurial/dispatch.py:* in _rundispatch (glob)
    */mercurial/dispatch.py:* in _runcatch (glob)
    */mercurial/dispatch.py:* in _callcatch (glob)
    */mercurial/scmutil.py:* in callcatch (glob)
@@ -310,9 +319,9 @@
 Test programming error failure:
 
   $ hg buggytransaction 2>&1 | egrep -v '^  '
-  ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+  ** Unknown exception encountered with possibly-broken third-party extension "buggylocking" (version N/A)
   ** which supports versions unknown of Mercurial.
-  ** Please disable buggylocking and try your action again.
+  ** Please disable "buggylocking" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
@@ -322,9 +331,9 @@
   *ProgrammingError: transaction requires locking (glob)
 
   $ hg programmingerror 2>&1 | egrep -v '^  '
-  ** Unknown exception encountered with possibly-broken third-party extension buggylocking
+  ** Unknown exception encountered with possibly-broken third-party extension "buggylocking" (version N/A)
   ** which supports versions unknown of Mercurial.
-  ** Please disable buggylocking and try your action again.
+  ** Please disable "buggylocking" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
--- a/tests/test-diff-change.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-diff-change.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-Testing diff --change
+Testing diff --change, --from, --to
 
   $ hg init a
   $ cd a
@@ -29,6 +29,59 @@
   -first
   +second
 
+Test --from and --to
+
+  $ hg diff --from . --rev .
+  abort: cannot specify both --from and --rev
+  [10]
+  $ hg diff --to . --rev .
+  abort: cannot specify both --to and --rev
+  [10]
+  $ hg diff --from . --change .
+  abort: cannot specify both --from and --change
+  [10]
+  $ hg diff --to . --change .
+  abort: cannot specify both --to and --change
+  [10]
+  $ echo dirty > file.txt
+  $ hg diff --from .
+  diff -r bf5ff72eb7e0 file.txt
+  --- a/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,1 @@
+  -third
+  +dirty
+  $ hg diff --from . --reverse
+  diff -r bf5ff72eb7e0 file.txt
+  --- a/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,1 @@
+  -dirty
+  +third
+  $ hg diff --to .
+  diff -r bf5ff72eb7e0 file.txt
+  --- a/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,1 @@
+  -dirty
+  +third
+  $ hg diff --from 0 --to 2
+  diff -r 4bb65dda5db4 -r bf5ff72eb7e0 file.txt
+  --- a/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,1 @@
+  -first
+  +third
+  $ hg diff --from 2 --to 0
+  diff -r bf5ff72eb7e0 -r 4bb65dda5db4 file.txt
+  --- a/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  +++ b/file.txt	Thu Jan 01 00:00:00 1970 +0000
+  @@ -1,1 +1,1 @@
+  -third
+  +first
+  $ hg co -C .
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
   $ cd ..
 
 Test dumb revspecs: top-level "x:y", "x:", ":y" and ":" ranges should be handled
--- a/tests/test-diffdir.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-diffdir.t	Tue Jan 19 21:48:43 2021 +0530
@@ -34,10 +34,10 @@
 
   $ hg diff -r ""
   hg: parse error: empty query
-  [255]
+  [10]
   $ hg diff -r tip -r ""
   hg: parse error: empty query
-  [255]
+  [10]
 
 Remove a file that was added via merge. Since the file is not in parent 1,
 it should not be in the diff.
--- a/tests/test-directaccess.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-directaccess.t	Tue Jan 19 21:48:43 2021 +0530
@@ -40,7 +40,7 @@
 Testing with rev number
 
   $ hg exp 2 --config experimental.directaccess.revnums=False
-  abort: hidden revision '2' was rewritten as: 2443a0e66469!
+  abort: hidden revision '2' was rewritten as: 2443a0e66469
   (use --hidden to access hidden revisions)
   [255]
 
@@ -73,7 +73,7 @@
   A c
 
   $ hg status --change 2 --config experimental.directaccess.revnums=False
-  abort: hidden revision '2' was rewritten as: 2443a0e66469!
+  abort: hidden revision '2' was rewritten as: 2443a0e66469
   (use --hidden to access hidden revisions)
   [255]
 
@@ -195,12 +195,12 @@
 Commands with undefined intent should not work right now
 
   $ hg phase -r 28ad74
-  abort: hidden revision '28ad74' was rewritten as: 2443a0e66469!
+  abort: hidden revision '28ad74' was rewritten as: 2443a0e66469
   (use --hidden to access hidden revisions)
   [255]
 
   $ hg phase -r 2
-  abort: hidden revision '2' was rewritten as: 2443a0e66469!
+  abort: hidden revision '2' was rewritten as: 2443a0e66469
   (use --hidden to access hidden revisions)
   [255]
 
--- a/tests/test-dirstate-backup.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-dirstate-backup.t	Tue Jan 19 21:48:43 2021 +0530
@@ -9,7 +9,7 @@
   > EOF
   applying patch from stdin
   abort: stdin: no diffs found
-  [255]
+  [10]
 
 No dirstate backups are left behind
 
--- a/tests/test-dirstate-nonnormalset.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-dirstate-nonnormalset.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,6 +1,6 @@
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
+  > [command-templates]
+  > log="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n"
   > [extensions]
   > dirstateparanoidcheck = $TESTDIR/../contrib/dirstatenonnormalcheck.py
   > [experimental]
--- a/tests/test-dirstate-race.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-dirstate-race.t	Tue Jan 19 21:48:43 2021 +0530
@@ -225,7 +225,7 @@
   > EOF
 
   $ hg rebase -s . -d 3 --tool test
-  rebasing 4:b08445fd6b2a "c4" (tip)
+  rebasing 4:b08445fd6b2a tip "c4"
   merging a
   custom merge tool
   custom merge tool end
--- a/tests/test-dispatch.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-dispatch.t	Tue Jan 19 21:48:43 2021 +0530
@@ -29,7 +29,7 @@
    -T --template TEMPLATE   display with template
   
   (use 'hg cat -h' to show more help)
-  [255]
+  [10]
 
 Missing parameter for early option:
 
@@ -41,10 +41,10 @@
 "--" may be an option value:
 
   $ hg -R -- log
-  abort: repository -- not found!
+  abort: repository -- not found
   [255]
   $ hg log -R --
-  abort: repository -- not found!
+  abort: repository -- not found
   [255]
   $ hg log -T --
   -- (no-eol)
@@ -62,21 +62,21 @@
 Unparsable form of early options:
 
   $ hg cat --debugg
-  abort: option --debugger may not be abbreviated!
-  [255]
+  abort: option --debugger may not be abbreviated
+  [10]
 
 Parsing failure of early options should be detected before executing the
 command:
 
   $ hg log -b '--config=hooks.pre-log=false' default
-  abort: option --config may not be abbreviated!
-  [255]
+  abort: option --config may not be abbreviated
+  [10]
   $ hg log -b -R. default
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
   $ hg log --cwd .. -b --cwd=. default
-  abort: option --cwd may not be abbreviated!
-  [255]
+  abort: option --cwd may not be abbreviated
+  [10]
 
 However, we can't prevent it from loading extensions and configs:
 
@@ -85,14 +85,14 @@
   > EOF
   $ hg log -b '--config=extensions.bad=bad.py' default
   *** failed to import extension bad from bad.py: bad
-  abort: option --config may not be abbreviated!
-  [255]
+  abort: option --config may not be abbreviated
+  [10]
 
   $ mkdir -p badrepo/.hg
   $ echo 'invalid-syntax' > badrepo/.hg/hgrc
   $ hg log -b -Rbadrepo default
-  hg: parse error at badrepo/.hg/hgrc:1: invalid-syntax
-  [255]
+  config error at badrepo/.hg/hgrc:1: invalid-syntax
+  [30]
 
   $ hg log -b --cwd=inexistent default
   abort: $ENOENT$: 'inexistent'
@@ -110,11 +110,11 @@
   hg log: option -b not recognized
   error in definition for alias 'log': --config may only be given on the command
   line
-  [255]
+  [10]
 
   $ hg log -b '--config=defaults.log=--config=hooks.pre-log=false'
-  abort: option --config may not be abbreviated!
-  [255]
+  abort: option --config may not be abbreviated
+  [10]
 
 Shell aliases bypass any command parsing rules but for the early one:
 
@@ -126,31 +126,31 @@
 
 #if no-chg
   $ HGPLAIN=+strictflags hg log -b --config='hooks.pre-log=false' default
-  abort: unknown revision '--config=hooks.pre-log=false'!
+  abort: unknown revision '--config=hooks.pre-log=false'
   [255]
   $ HGPLAIN=+strictflags hg log -b -R. default
-  abort: unknown revision '-R.'!
+  abort: unknown revision '-R.'
   [255]
   $ HGPLAIN=+strictflags hg log -b --cwd=. default
-  abort: unknown revision '--cwd=.'!
+  abort: unknown revision '--cwd=.'
   [255]
 #endif
   $ HGPLAIN=+strictflags hg log -b --debugger default
-  abort: unknown revision '--debugger'!
+  abort: unknown revision '--debugger'
   [255]
   $ HGPLAIN=+strictflags hg log -b --config='alias.log=!echo pwned' default
-  abort: unknown revision '--config=alias.log=!echo pwned'!
+  abort: unknown revision '--config=alias.log=!echo pwned'
   [255]
 
   $ HGPLAIN=+strictflags hg log --config='hooks.pre-log=false' -b default
-  abort: option --config may not be abbreviated!
-  [255]
+  abort: option --config may not be abbreviated
+  [10]
   $ HGPLAIN=+strictflags hg log -q --cwd=.. -b default
-  abort: option --cwd may not be abbreviated!
-  [255]
+  abort: option --cwd may not be abbreviated
+  [10]
   $ HGPLAIN=+strictflags hg log -q -R . -b default
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
 
   $ HGPLAIN=+strictflags hg --config='hooks.pre-log=false' log -b default
   abort: pre-log hook exited with status 1
@@ -197,8 +197,8 @@
 No repo:
 
   $ hg cat
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
 
 #endif
 
--- a/tests/test-doctest.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-doctest.py	Tue Jan 19 21:48:43 2021 +0530
@@ -76,7 +76,9 @@
     sys.exit(0)
 
 files = subprocess.check_output(
-    "hg files --print0 \"%s\"" % fileset, shell=True, cwd=cwd,
+    "hg files --print0 \"%s\"" % fileset,
+    shell=True,
+    cwd=cwd,
 ).split(b'\0')
 
 if sys.version_info[0] >= 3:
--- a/tests/test-double-merge.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-double-merge.t	Tue Jan 19 21:48:43 2021 +0530
@@ -35,9 +35,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: e6dc8efe11cc, local: 6a0df1dad128+, remote: 484bf6903104
+  starting 4 threads for background file closing (?)
    preserving foo for resolve of bar
    preserving foo for resolve of foo
-  starting 4 threads for background file closing (?)
    bar: remote copied from foo -> m (premerge)
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging foo and bar to bar
--- a/tests/test-editor-filename.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-editor-filename.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,7 @@
   $ hg commit
   *.commit.hg.txt (glob)
   abort: edit failed: sh exited with status 1
-  [255]
+  [250]
 
 Verify that the path for a histedit editor has the expected suffix.
 
@@ -30,7 +30,7 @@
   $ hg histedit
   *.histedit.hg.txt (glob)
   abort: edit failed: sh exited with status 1
-  [255]
+  [250]
 
 Verify that when performing an action that has the side-effect of creating an
 editor for a diff, the file ends in .diff.
@@ -61,4 +61,4 @@
   (enter ? for help) [Ynesfdaq?] q
   
   abort: user quit
-  [255]
+  [250]
--- a/tests/test-empty.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-empty.t	Tue Jan 19 21:48:43 2021 +0530
@@ -44,6 +44,7 @@
   checked 0 changesets with 0 changes to 0 files
   $ ls .hg
   00changelog.i
+  cache
   hgrc
   requires
   store
--- a/tests/test-eol-clone.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-eol-clone.t	Tue Jan 19 21:48:43 2021 +0530
@@ -89,6 +89,7 @@
    a.txt: remote created -> g
   getting a.txt
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   $ cd repo-3
 
   $ cat a.txt
--- a/tests/test-eolfilename.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-eolfilename.t	Tue Jan 19 21:48:43 2021 +0530
@@ -12,12 +12,12 @@
   adding he\r (no-eol) (esc)
   llo
   abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
-  [255]
+  [10]
   $ hg ci -A -m m
   adding he\r (no-eol) (esc)
   llo
   abort: '\n' and '\r' disallowed in filenames: 'he\rllo'
-  [255]
+  [10]
   $ rm "$A"
   $ echo foo > "hell
   > o"
@@ -25,12 +25,12 @@
   adding hell
   o
   abort: '\n' and '\r' disallowed in filenames: 'hell\no'
-  [255]
+  [10]
   $ hg ci -A -m m
   adding hell
   o
   abort: '\n' and '\r' disallowed in filenames: 'hell\no'
-  [255]
+  [10]
   $ echo foo > "$A"
   $ hg debugwalk -v
   * matcher:
@@ -48,10 +48,10 @@
   $ A=`printf 'quick\rfox'`
   $ hg cp quickfox "$A"
   abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
-  [255]
+  [10]
   $ hg mv quickfox "$A"
   abort: '\n' and '\r' disallowed in filenames: 'quick\rfox'
-  [255]
+  [10]
 
 https://bz.mercurial-scm.org/2036
 
--- a/tests/test-exchange-obsmarkers-case-A3.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-exchange-obsmarkers-case-A3.t	Tue Jan 19 21:48:43 2021 +0530
@@ -199,9 +199,9 @@
   $ hg push -R main -r 'desc(A1)' pushdest
   pushing to pushdest
   searching for changes
-  abort: push creates new remote head e5ea8f9c7314!
+  abort: push creates new remote head e5ea8f9c7314
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ cd ..
 
 test obsmarkers exchange.
--- a/tests/test-export.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-export.t	Tue Jan 19 21:48:43 2021 +0530
@@ -337,7 +337,7 @@
   hg: parse error at 3: unterminated template expansion
   (%m{
       ^ here)
-  [255]
+  [10]
   $ hg export -o '%\' tip
   abort: invalid format spec '%\' in output filename
   [255]
@@ -367,13 +367,13 @@
 
   $ hg export ""
   hg: parse error: empty query
-  [255]
+  [10]
   $ hg export 999
-  abort: unknown revision '999'!
+  abort: unknown revision '999'
   [255]
   $ hg export "not all()"
   abort: export requires at least one changeset
-  [255]
+  [10]
 
 Check for color output
   $ cat <<EOF >> $HGRCPATH
--- a/tests/test-extdata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-extdata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -54,7 +54,7 @@
 
   $ hg log -qr "extdata()"
   hg: parse error: extdata takes at least 1 string argument
-  [255]
+  [10]
   $ hg log -qr "extdata(unknown)"
   abort: unknown extdata source 'unknown'
   [255]
@@ -73,7 +73,7 @@
   hg: parse error at 0: not a prefix: +
   (+---------------------------------------+
    ^ here)
-  [255]
+  [10]
 
 test template support:
 
@@ -95,17 +95,17 @@
 
   $ hg log -T "{extdata()}\n"
   hg: parse error: extdata expects one argument
-  [255]
+  [10]
   $ hg log -T "{extdata('unknown')}\n"
   abort: unknown extdata source 'unknown'
   [255]
   $ hg log -T "{extdata(unknown)}\n"
   hg: parse error: empty data source specified
   (did you mean extdata('unknown')?)
-  [255]
+  [10]
   $ hg log -T "{extdata('{unknown}')}\n"
   hg: parse error: empty data source specified
-  [255]
+  [10]
 
 we don't fix up relative file URLs, but we do run shell commands in repo root
 
@@ -113,7 +113,7 @@
   $ cd sub
   $ hg log -qr "extdata(filedata)"
   abort: error: $ENOENT$
-  [255]
+  [100]
   $ hg log -qr "extdata(shelldata)"
   2:f6ed99a58333
 
--- a/tests/test-extdiff.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-extdiff.t	Tue Jan 19 21:48:43 2021 +0530
@@ -11,7 +11,7 @@
 
 Should diff cloned directories:
 
-  $ hg extdiff -o -r $opt
+  $ hg extdiff -o -r
   Only in a: a
   Only in a: b
   [1]
@@ -50,7 +50,8 @@
   options ([+] can be repeated):
   
    -o --option OPT [+]      pass option to comparison program
-   -r --rev REV [+]         revision
+      --from REV1           revision to diff from
+      --to REV2             revision to diff to
    -c --change REV          change made by revision
       --per-file            compare each file instead of revision snapshots
       --confirm             prompt user before each external program invocation
@@ -68,11 +69,20 @@
 
 Should diff cloned files directly:
 
-  $ hg falabala -r 0:1
+  $ hg falabala --from 0 --to 1
   diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) (windows !)
   diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) (no-windows !)
   [1]
 
+Can show diff from working copy:
+  $ echo c >> a
+  $ hg falabala --to 1
+  diffing "*\\extdiff.*\\a" "a.34eed99112ab\\a" (glob) (windows !)
+  diffing */extdiff.*/a a.34eed99112ab/a (glob) (no-windows !)
+  [1]
+  $ hg revert a
+  $ rm a.orig
+
 Specifying an empty revision should abort.
 
   $ hg extdiff -p diff --patch --rev 'ancestor()' --rev 1
@@ -130,7 +140,7 @@
   $ hg ci -Sm "adding subrepo"
   $ echo > .hgsub
   $ hg ci -m "removing subrepo"
-  $ hg falabala -r 4 -r 5 -S
+  $ hg falabala --from 4 --to 5 -S
   diffing a.398e36faf9c6 a.5ab95fb166c4
   [1]
 
@@ -283,7 +293,7 @@
   > kdiff3.diffargs=--L1 \$plabel1 --L2 \$clabel \$parent \$child
   > EOF
 
-  $ hg --debug kdiff3 -r0 | grep '^running'
+  $ hg --debug kdiff3 --from 0 | grep '^running'
   running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob) (windows !)
   running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob) (no-windows !)
 
@@ -487,7 +497,7 @@
   $ echo a >> a
   $ ln -s missing linka
   $ hg add linka
-  $ hg falabala -r 0 --traceback
+  $ hg falabala --from 0 --traceback
   diffing testsymlinks.07f494440405 testsymlinks
   [1]
   $ cd ..
@@ -503,14 +513,14 @@
   $ HGPLAIN=1 hg --config hgext.extdiff= --config extdiff.cmd.td=hi help -k xyzzy
   abort: no matches
   (try 'hg help' for a list of topics)
-  [255]
+  [10]
 
   $ HGPLAIN=1 hg --config hgext.extdiff= --config extdiff.cmd.td=hi help td > /dev/null
 
   $ LC_MESSAGES=ja_JP.UTF-8 hg --config hgext.extdiff= --config extdiff.cmd.td=$U help -k xyzzy
   abort: no matches
   (try 'hg help' for a list of topics)
-  [255]
+  [10]
 
   $ LC_MESSAGES=ja_JP.UTF-8 hg --config hgext.extdiff= --config extdiff.cmd.td=$U help td \
   > | grep "^      '"
--- a/tests/test-extension.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-extension.t	Tue Jan 19 21:48:43 2021 +0530
@@ -154,7 +154,10 @@
   > from mercurial import exthelper
   > from mercurial.utils import procutil
   > 
-  > write = procutil.stdout.write
+  > def write(msg):
+  >     procutil.stdout.write(msg)
+  >     procutil.stdout.flush()
+  > 
   > name = os.path.basename(__file__).rsplit('.', 1)[0]
   > bytesname = name.encode('utf-8')
   > write(b"1) %s imported\n" % bytesname)
@@ -194,6 +197,9 @@
 
 Check normal command's load order of extensions and registration of functions
 
+ On chg server, extension should be first set up by the server. Then
+ object-level setup should follow in the worker process.
+
   $ hg log -r "foo() and bar()" -q
   1) foo imported
   1) bar imported
@@ -209,6 +215,18 @@
   4) bar uipopulate
   5) foo reposetup
   5) bar reposetup
+  4) foo uipopulate (chg !)
+  4) bar uipopulate (chg !)
+  4) foo uipopulate (chg !)
+  4) bar uipopulate (chg !)
+  4) foo uipopulate (chg !)
+  4) bar uipopulate (chg !)
+  4) foo uipopulate (chg !)
+  4) bar uipopulate (chg !)
+  4) foo uipopulate (chg !)
+  4) bar uipopulate (chg !)
+  5) foo reposetup (chg !)
+  5) bar reposetup (chg !)
   0:c24b9ac61126
 
 Check hgweb's load order of extensions and registration of functions
@@ -818,10 +836,8 @@
       program, use -o/--option. These will be passed before the names of the
       directories or files to compare.
   
-      When two revision arguments are given, then changes are shown between
-      those revisions. If only one revision is specified then that revision is
-      compared to the working directory, and, when no revisions are specified,
-      the working directory files are compared to its parent.
+      The --from, --to, and --change options work the same way they do for 'hg
+      diff'.
   
       The --per-file option runs the external program repeatedly on each file to
       diff, instead of once on two directories. By default, this happens one by
@@ -841,7 +857,8 @@
   
    -p --program CMD         comparison program to run
    -o --option OPT [+]      pass option to comparison program
-   -r --rev REV [+]         revision
+      --from REV1           revision to diff from
+      --to REV2             revision to diff to
    -c --change REV          change made by revision
       --per-file            compare each file instead of revision snapshots
       --confirm             prompt user before each external program invocation
@@ -1001,7 +1018,7 @@
   multirevs command
   
   (use 'hg multirevs -h' to show more help)
-  [255]
+  [10]
 
 
 
@@ -1415,37 +1432,39 @@
 
 No declared supported version, extension complains:
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** Unknown exception encountered with possibly-broken third-party extension "throw" 1.0.0
   ** which supports versions unknown of Mercurial.
-  ** Please disable throw and try your action again.
+  ** Please disable "throw" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM * (glob)
-  ** Extensions loaded: throw
+  ** Extensions loaded: throw 1.0.0
 
-empty declaration of supported version, extension complains:
+empty declaration of supported version, extension complains (but doesn't choke if
+the value is improperly a str instead of bytes):
   $ echo "testedwith = ''" >> throw.py
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** Unknown exception encountered with possibly-broken third-party extension "throw" 1.0.0
   ** which supports versions unknown of Mercurial.
-  ** Please disable throw and try your action again.
+  ** Please disable "throw" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
-  ** Extensions loaded: throw
+  ** Extensions loaded: throw 1.0.0
 
-If the extension specifies a buglink, show that:
+If the extension specifies a buglink, show that (but don't choke if the value is
+improperly a str instead of bytes):
   $ echo 'buglink = "http://example.com/bts"' >> throw.py
   $ rm -f throw.pyc throw.pyo
   $ rm -Rf __pycache__
   $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** Unknown exception encountered with possibly-broken third-party extension "throw" 1.0.0
   ** which supports versions unknown of Mercurial.
-  ** Please disable throw and try your action again.
+  ** Please disable "throw" and try your action again.
   ** If that fixes the bug please report it to http://example.com/bts
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
-  ** Extensions loaded: throw
+  ** Extensions loaded: throw 1.0.0
 
 If the extensions declare outdated versions, accuse the older extension first:
   $ echo "from mercurial import util" >> older.py
@@ -1456,13 +1475,13 @@
   $ rm -Rf __pycache__
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension older
+  ** Unknown exception encountered with possibly-broken third-party extension "older" (version N/A)
   ** which supports versions 1.9 of Mercurial.
-  ** Please disable older and try your action again.
+  ** Please disable "older" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (version 2.2)
-  ** Extensions loaded: throw, older
+  ** Extensions loaded: older, throw 1.0.0
 
 One extension only tested with older, one only with newer versions:
   $ echo "util.version = lambda:b'2.1'" >> older.py
@@ -1470,13 +1489,13 @@
   $ rm -Rf __pycache__
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension older
+  ** Unknown exception encountered with possibly-broken third-party extension "older" (version N/A)
   ** which supports versions 1.9 of Mercurial.
-  ** Please disable older and try your action again.
+  ** Please disable "older" and try your action again.
   ** If that fixes the bug please report it to the extension author.
   ** Python * (glob)
   ** Mercurial Distributed SCM (version 2.1)
-  ** Extensions loaded: throw, older
+  ** Extensions loaded: older, throw 1.0.0
 
 Older extension is tested with current version, the other only with newer:
   $ echo "util.version = lambda:b'1.9.3'" >> older.py
@@ -1484,13 +1503,13 @@
   $ rm -Rf __pycache__
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
   >   throw 2>&1 | egrep '^\*\*'
-  ** Unknown exception encountered with possibly-broken third-party extension throw
+  ** Unknown exception encountered with possibly-broken third-party extension "throw" 1.0.0
   ** which supports versions 2.1 of Mercurial.
-  ** Please disable throw and try your action again.
+  ** Please disable "throw" and try your action again.
   ** If that fixes the bug please report it to http://example.com/bts
   ** Python * (glob)
   ** Mercurial Distributed SCM (version 1.9.3)
-  ** Extensions loaded: throw, older
+  ** Extensions loaded: older, throw 1.0.0
 
 Ability to point to a different point
   $ hg --config extensions.throw=throw.py --config extensions.older=older.py \
@@ -1499,7 +1518,7 @@
   ** Your Local Goat Lenders
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
-  ** Extensions loaded: throw, older
+  ** Extensions loaded: older, throw 1.0.0
 
 Declare the version as supporting this hg version, show regular bts link:
   $ hgver=`hg debuginstall -T '{hgver}'`
@@ -1514,7 +1533,7 @@
   ** https://mercurial-scm.org/wiki/BugTracker
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
-  ** Extensions loaded: throw
+  ** Extensions loaded: throw 1.0.0
 
 Patch version is ignored during compatibility check
   $ echo "testedwith = b'3.2'" >> throw.py
@@ -1526,7 +1545,7 @@
   ** https://mercurial-scm.org/wiki/BugTracker
   ** Python * (glob)
   ** Mercurial Distributed SCM (*) (glob)
-  ** Extensions loaded: throw
+  ** Extensions loaded: throw 1.0.0
 
 Test version number support in 'hg version':
   $ echo '__version__ = (1, 2, 3)' >> throw.py
@@ -1829,7 +1848,7 @@
   *** (use @command decorator to register 'deprecatedcmd')
   hg: unknown command 'deprecatedcmd'
   (use 'hg help' for a list of commands)
-  [255]
+  [10]
 
  the extension shouldn't be loaded at all so the mq works:
 
@@ -1886,4 +1905,4 @@
   *** (use b'' to make it byte string)
   hg: unknown command 'dummy'
   (did you mean summary?)
-  [255]
+  [10]
--- a/tests/test-fastannotate.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fastannotate.t	Tue Jan 19 21:48:43 2021 +0530
@@ -123,7 +123,7 @@
   $ hg fastannotate --config fastannotate.modes=fctx -h -q
   hg: unknown command 'fastannotate'
   (did you mean *) (glob)
-  [255]
+  [10]
 
 rename
 
--- a/tests/test-fetch.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fetch.t	Tue Jan 19 21:48:43 2021 +0530
@@ -157,7 +157,7 @@
 
   $ hg --cwd i fetch ../h
   abort: uncommitted changes
-  [255]
+  [20]
 
 test fetch with named branches
 
--- a/tests/test-filecache.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-filecache.py	Tue Jan 19 21:48:43 2021 +0530
@@ -5,7 +5,7 @@
 import sys
 
 if subprocess.call(
-    ['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']
+    [sys.executable, '%s/hghave' % os.environ['TESTDIR'], 'cacheable']
 ):
     sys.exit(80)
 
--- a/tests/test-filelog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-filelog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """
 Tests the behavior of filelog w.r.t. data starting with '\1\n'
 """
--- a/tests/test-fileset.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fileset.t	Tue Jan 19 21:48:43 2021 +0530
@@ -81,7 +81,7 @@
   $ fileset 'a_b'
   $ fileset '"\xy"'
   hg: parse error: invalid \x escape* (glob)
-  [255]
+  [10]
 
 Test invalid syntax
 
@@ -90,25 +90,25 @@
     (string 'added')
     None)
   hg: parse error: not a symbol
-  [255]
+  [10]
   $ fileset -v '()()'
   (func
     (group
       None)
     None)
   hg: parse error: not a symbol
-  [255]
+  [10]
   $ fileset -v -- '-x'
   (negate
     (symbol 'x'))
   hg: parse error: can't use negate operator in this context
-  [255]
+  [10]
   $ fileset -v -- '-()'
   (negate
     (group
       None))
   hg: parse error: can't use negate operator in this context
-  [255]
+  [10]
   $ fileset -p parsed 'a, b, c'
   * parsed:
   (list
@@ -117,23 +117,23 @@
     (symbol 'c'))
   hg: parse error: can't use a list in this context
   (see 'hg help "filesets.x or y"')
-  [255]
+  [10]
 
   $ fileset '"path":.'
   hg: parse error: not a symbol
-  [255]
+  [10]
   $ fileset 'path:foo bar'
   hg: parse error at 9: invalid token
-  [255]
+  [10]
   $ fileset 'foo:bar:baz'
   hg: parse error: not a symbol
-  [255]
+  [10]
   $ fileset 'foo:bar()'
   hg: parse error: pattern must be a string
-  [255]
+  [10]
   $ fileset 'foo:bar'
   hg: parse error: invalid pattern kind: foo
-  [255]
+  [10]
 
 Show parsed tree at stages:
 
@@ -562,7 +562,7 @@
   c1
   $ fileset 'grep("missingparens(")'
   hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
-  [255]
+  [10]
 
 #if execbit
   $ chmod +x b2
@@ -589,11 +589,11 @@
   $ hg add 1k 2k
   $ fileset 'size("bar")'
   hg: parse error: couldn't parse size: bar
-  [255]
+  [10]
   $ fileset '(1k, 2k)'
   hg: parse error: can't use a list in this context
   (see 'hg help "filesets.x or y"')
-  [255]
+  [10]
   $ fileset 'size(1k)'
   1k
   $ fileset '(1k or 2k) and size("< 2k")'
@@ -1018,10 +1018,10 @@
 
   $ fileset "status('', '4', added())"
   hg: parse error: first argument to status must be a revision
-  [255]
+  [10]
   $ fileset "status('2', '', added())"
   hg: parse error: second argument to status must be a revision
-  [255]
+  [10]
 
 Empty revset will error at the revset layer
 
@@ -1029,9 +1029,9 @@
   hg: parse error at 1: not a prefix: end
   ( 
     ^ here)
-  [255]
+  [10]
   $ fileset "status('2', ' ', added())"
   hg: parse error at 1: not a prefix: end
   ( 
     ^ here)
-  [255]
+  [10]
--- a/tests/test-fix-topology.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fix-topology.t	Tue Jan 19 21:48:43 2021 +0530
@@ -22,6 +22,7 @@
   > [extensions]
   > fix =
   > strip =
+  > debugdrawdag=$TESTDIR/drawdag.py
   > [fix]
   > uppercase-whole-file:command="$PYTHON" $UPPERCASEPY
   > uppercase-whole-file:pattern=set:**
@@ -366,6 +367,37 @@
   XXXX
 
   $ cd ..
+
+  $ hg init exclude-obsolete
+  $ cd exclude-obsolete
+  $ hg debugdrawdag <<'EOS'
+  > E C # prune: C
+  > | |
+  > D B # prune: B, D
+  > |/
+  > A
+  > EOS
+  1 new orphan changesets
+  $ hg log --graph --template '{rev} {desc}\n'
+  *  4 E
+  |
+  | x  3 C
+  | |
+  x |  2 D
+  | |
+  | x  1 B
+  |/
+  o  0 A
+  
+  $ hg fix -s A
+  $ hg fix -s B
+  abort: no changesets specified
+  (use --source or --working-dir)
+  [255]
+  $ hg fix -s D
+  $ hg fix -s E
+  $ cd ..
+
 #endif
 
 The --all flag should fix anything that wouldn't cause a problem if you fixed
@@ -379,7 +411,7 @@
   $ cd fixall
   $ hg fix --all --working-dir
   abort: cannot specify both --working-dir and --all
-  [255]
+  [10]
 
 #if obsstore-on
   $ printf "one\n" > foo.whole
--- a/tests/test-fix.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fix.t	Tue Jan 19 21:48:43 2021 +0530
@@ -246,15 +246,15 @@
 
   $ hg fix
   abort: no changesets specified
-  (use --rev or --working-dir)
+  (use --source or --working-dir)
   [255]
   $ hg fix --whole
   abort: no changesets specified
-  (use --rev or --working-dir)
+  (use --source or --working-dir)
   [255]
   $ hg fix --base 0
   abort: no changesets specified
-  (use --rev or --working-dir)
+  (use --source or --working-dir)
   [255]
 
 Fixing a public revision isn't allowed. It should abort early enough that
@@ -266,11 +266,11 @@
   $ hg fix -r 0
   abort: cannot fix public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
   $ hg fix -r 0 --working-dir
   abort: cannot fix public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
   $ hg cat -r tip hello.whole
   hello
   $ cat hello.whole
@@ -865,11 +865,11 @@
   $ hg commit -Aqm "foo 2"
 
   $ hg --config extensions.rebase= rebase -r 1 -d 0
-  rebasing 1:c3b6dc0e177a "foo 2" (tip)
+  rebasing 1:c3b6dc0e177a tip "foo 2"
   merging foo.whole
   warning: conflicts while merging foo.whole! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg --config extensions.rebase= fix --working-dir
   abort: unresolved conflicts
@@ -879,7 +879,7 @@
   $ hg --config extensions.rebase= fix -r .
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
 
   $ cd ..
 
@@ -1174,7 +1174,7 @@
   $ hg commit -m "second"
   $ hg --config experimental.evolution.allowunstable=False fix -r '.^'
   abort: cannot fix changeset with children
-  [255]
+  [10]
   $ hg fix -r '.^'
   1 new orphan changesets
   $ hg cat -r 2 foo.whole
--- a/tests/test-flagprocessor.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-flagprocessor.t	Tue Jan 19 21:48:43 2021 +0530
@@ -189,8 +189,8 @@
 # TEST: ensure a missing processor is handled
   $ echo '[FAIL][BASE64][GZIP][NOOP]' > fail-base64-gzip-noop
   $ hg commit -Aqm 'fail+base64+gzip+noop'
-  abort: missing processor for flag '0x1'!
-  [255]
+  abort: missing processor for flag '0x1'
+  [50]
   $ rm fail-base64-gzip-noop
 
 # TEST: ensure we cannot register several flag processors on the same flag
@@ -205,7 +205,7 @@
       extsetup(ui)
     File "*/tests/flagprocessorext.py", line *, in extsetup (glob)
       flagutil.addflagprocessor( (py38 !)
-      REVIDX_NOOP, (noopdonothingread, noopdonothing, validatehash,) (no-py38 !)
+      validatehash, (no-py38 !)
     File "*/mercurial/revlogutils/flagutil.py", line *, in addflagprocessor (glob)
       insertflagprocessor(flag, processor, flagprocessors)
     File "*/mercurial/revlogutils/flagutil.py", line *, in insertflagprocessor (glob)
--- a/tests/test-fuzz-targets.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-fuzz-targets.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#require test-repo
+#require test-repo py3
 
   $ cd $TESTDIR/../contrib/fuzz
   $ OUT=$TESTTMP ; export OUT
@@ -26,13 +26,23 @@
   >        -fsanitize=fuzzer-no-link,address || return 1
   > }
 
+Try to find a python3-config that's next to our sys.executable. If
+that doesn't work, fall back to looking for a global python3-config
+and hope that works out for the best.
+  $ PYBIN=`$PYTHON -c 'import sys, os; print(os.path.dirname(sys.executable))'`
+  $ if [ -x "$PYBIN/python3-config" ] ; then
+  >   PYTHON_CONFIG="$PYBIN/python3-config"
+  > else
+  >   PYTHON_CONFIG="`which python3-config`"
+  > fi
+
 #if clang-libfuzzer
   $ CXX=clang++ havefuzz || exit 80
-  $ $MAKE -s clean all PYTHON_CONFIG=`which python-config`
+  $ $MAKE -s clean all PYTHON_CONFIG="$PYTHON_CONFIG"
 #endif
 #if no-clang-libfuzzer clang-6.0
   $ CXX=clang++-6.0 havefuzz || exit 80
-  $ $MAKE -s clean all CC=clang-6.0 CXX=clang++-6.0 PYTHON_CONFIG=`which python-config`
+  $ $MAKE -s clean all CC=clang-6.0 CXX=clang++-6.0 PYTHON_CONFIG="$PYTHON_CONFIG"
 #endif
 #if no-clang-libfuzzer no-clang-6.0
   $ exit 80
--- a/tests/test-git-interop.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-git-interop.t	Tue Jan 19 21:48:43 2021 +0530
@@ -16,6 +16,10 @@
   >  }
 
 
+  $ hg version -v --config extensions.git= | grep '^[E ]'
+  Enabled extensions:
+    git  internal  (pygit2 *) (glob)
+
 Test auto-loading extension works:
   $ mkdir nogit
   $ cd nogit
@@ -26,6 +30,7 @@
   [255]
   $ git init
   Initialized empty Git repository in $TESTTMP/nogit/.git/
+  $ git config --global init.defaultBranch master
 This status invocation shows some hg gunk because we didn't use
 `hg init --git`, which fixes up .git/info/exclude for us.
   $ hg status
@@ -67,8 +72,8 @@
 
 Without creating the .hg, hg status fails:
   $ hg status
-  abort: no repository found in '$TESTTMP/foo' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP/foo' (.hg not found)
+  [10]
 But if you run hg init --git, it works:
   $ hg init --git
   $ hg id --traceback
@@ -304,14 +309,10 @@
   $ hg status
   heads mismatch, rebuilding dagcache
   M beta
-  $ git status
+  $ git status | egrep -v '^$|^  \(use '
   On branch master
   Changes not staged for commit:
-    (use "git add <file>..." to update what will be committed)
-    (use "git checkout -- <file>..." to discard changes in working directory)
-  
   	modified:   beta
-  
   no changes added to commit (use "git add" and/or "git commit -a")
 
 Contents of each commit should be the same
--- a/tests/test-globalopts.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-globalopts.t	Tue Jan 19 21:48:43 2021 +0530
@@ -85,15 +85,15 @@
   $ hg ann a/a a/a
   0: a
   $ hg ann a/a b/b
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
   $ hg -R b ann a/a
   abort: a/a not under root '$TESTTMP/b'
   (consider using '--cwd b')
   [255]
   $ hg log
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
 
 #endif
 
@@ -134,23 +134,23 @@
 earlygetopt with illegal abbreviations:
 
   $ hg --confi "foo.bar=baz"
-  abort: option --config may not be abbreviated!
-  [255]
+  abort: option --config may not be abbreviated
+  [10]
   $ hg --cw a tip
-  abort: option --cwd may not be abbreviated!
-  [255]
+  abort: option --cwd may not be abbreviated
+  [10]
   $ hg --rep a tip
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
   $ hg --repositor a tip
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
   $ hg -qR a tip
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
   $ hg -qRa tip
-  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo!
-  [255]
+  abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo
+  [10]
 
 Testing --cwd:
 
@@ -217,19 +217,19 @@
   quuxfoo
   $ hg --cwd c --config '' tip -q
   abort: malformed --config option: '' (use --config section.name=value)
-  [255]
+  [10]
   $ hg --cwd c --config a.b tip -q
   abort: malformed --config option: 'a.b' (use --config section.name=value)
-  [255]
+  [10]
   $ hg --cwd c --config a tip -q
   abort: malformed --config option: 'a' (use --config section.name=value)
-  [255]
+  [10]
   $ hg --cwd c --config a.= tip -q
   abort: malformed --config option: 'a.=' (use --config section.name=value)
-  [255]
+  [10]
   $ hg --cwd c --config .b= tip -q
   abort: malformed --config option: '.b=' (use --config section.name=value)
-  [255]
+  [10]
 
 Testing --debug:
 
--- a/tests/test-glog-beautifygraph.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-glog-beautifygraph.t	Tue Jan 19 21:48:43 2021 +0530
@@ -286,7 +286,7 @@
 The rest of our tests will use the default narrow text UTF-8.
 
   $ hg log -G -q
-  \xe2\x97\x8d  34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  34:fea3ac5810e0 (esc)
   \xe2\x94\x82 (esc)
   \xe2\x94\x82 \xe2\x97\x8b  33:68608f5145f9 (esc)
   \xe2\x94\x82 \xe2\x94\x82 (esc)
@@ -358,7 +358,7 @@
   
 
   $ hg log -G
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  parent:      32:d06dffa21a31 (esc)
   \xe2\x94\x82  user:        test (esc)
@@ -594,7 +594,7 @@
   
 File glog:
   $ hg log -G a
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  parent:      32:d06dffa21a31 (esc)
   \xe2\x94\x82  user:        test (esc)
@@ -831,7 +831,7 @@
 File glog per revset:
 
   $ hg log -G -r 'file("a")'
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  parent:      32:d06dffa21a31 (esc)
   \xe2\x94\x82  user:        test (esc)
@@ -1268,7 +1268,7 @@
 
 From outer space:
   $ hg log -G -l1 repo
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x95\xa7  parent:      32:d06dffa21a31 (esc)
      user:        test
@@ -1276,7 +1276,7 @@
      summary:     (34) head
   
   $ hg log -G -l1 repo/a
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x95\xa7  parent:      32:d06dffa21a31 (esc)
      user:        test
@@ -1299,7 +1299,7 @@
   $ echo more >two
   $ hg commit -mmore
   $ hg log -G two
-  \xe2\x97\x8d  changeset:   2:12c28321755b (esc)
+  \xe2\x97\x89  changeset:   2:12c28321755b (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  user:        test (esc)
   \xe2\x94\x82  date:        Thu Jan 01 00:00:00 1970 +0000 (esc)
@@ -1375,7 +1375,7 @@
   $ hg -R repo outgoing --graph repo2
   comparing with repo2
   searching for changes
-  \xe2\x97\x8d  changeset:   34:fea3ac5810e0 (esc)
+  \xe2\x97\x89  changeset:   34:fea3ac5810e0 (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  parent:      32:d06dffa21a31 (esc)
   \xe2\x94\x82  user:        test (esc)
@@ -1518,7 +1518,7 @@
   (branches are permanent and global, did you want a bookmark?)
   $ commit 36 "buggy merge: identical parents" 35 35
   $ hg log -G -l5
-  \xe2\x97\x8d  changeset:   36:08a19a744424 (esc)
+  \xe2\x97\x89  changeset:   36:08a19a744424 (esc)
   \xe2\x94\x82  branch:      branch (esc)
   \xe2\x94\x82  tag:         tip (esc)
   \xe2\x94\x82  parent:      35:9159c3644c5e (esc)
@@ -1588,51 +1588,51 @@
     (list
       (func
         (symbol 'user')
-        (string 'test'))
+        (string 'literal:test'))
       (func
         (symbol 'user')
-        (string 'not-a-user'))))
+        (string 'literal:not-a-user'))))
   <filteredset
     <spanset- 0:37>,
     <addset
       <filteredset
         <fullreposet+ 0:37>,
-        <user 'test'>>,
+        <user 'literal:test'>>,
       <filteredset
         <fullreposet+ 0:37>,
-        <user 'not-a-user'>>>>
+        <user 'literal:not-a-user'>>>>
   $ testlog -b not-a-branch
-  abort: unknown revision 'not-a-branch'!
-  abort: unknown revision 'not-a-branch'!
-  abort: unknown revision 'not-a-branch'!
+  abort: unknown revision 'not-a-branch'
+  abort: unknown revision 'not-a-branch'
+  abort: unknown revision 'not-a-branch'
   $ testlog -b 35 -b 36 --only-branch branch
   []
   (or
     (list
       (func
         (symbol 'branch')
-        (string 'default'))
+        (string 'literal:default'))
       (or
         (list
           (func
             (symbol 'branch')
-            (string 'branch'))
+            (string 'literal:branch'))
           (func
             (symbol 'branch')
-            (string 'branch'))))))
+            (string 'literal:branch'))))))
   <filteredset
     <spanset- 0:37>,
     <addset
       <filteredset
         <fullreposet+ 0:37>,
-        <branch 'default'>>,
+        <branch 'literal:default'>>,
       <addset
         <filteredset
           <fullreposet+ 0:37>,
-          <branch 'branch'>>,
+          <branch 'literal:branch'>>,
         <filteredset
           <fullreposet+ 0:37>,
-          <branch 'branch'>>>>>
+          <branch 'literal:branch'>>>>>
   $ testlog -k expand -k merge
   []
   (or
@@ -1682,7 +1682,7 @@
     <date '2 0 to 4 0'>>
   $ hg log -G -d 'brace ) in a date'
   hg: parse error: invalid date: 'brace ) in a date'
-  [255]
+  [10]
   $ testlog --prune 31 --prune 32
   []
   (not
@@ -1739,7 +1739,7 @@
   $ hg mv dir/b e
   $ hg ci -m "mv dir/b e"
   $ hg log -G --template '({rev}) {desc|firstline}\n'
-  \xe2\x97\x8d  (4) mv dir/b e (esc)
+  \xe2\x97\x89  (4) mv dir/b e (esc)
   \xe2\x94\x82 (esc)
   \xe2\x97\x8b  (3) mv a b; add d (esc)
   \xe2\x94\x82 (esc)
@@ -1948,7 +1948,7 @@
   []
   <generatorsetdesc->
   $ hg log -G --follow-first e --template '{rev} {desc|firstline}\n'
-  \xe2\x97\x8d  6 merge 5 and 4 (esc)
+  \xe2\x97\x89  6 merge 5 and 4 (esc)
   \xe2\x94\x82\xe2\x95\xb2 (esc)
   \xe2\x94\x82 \xe2\x95\xa7 (esc)
   \xe2\x97\x8b  5 add another e (esc)
@@ -1959,7 +1959,7 @@
 
   $ hg log -G --copies --template "{rev} {desc|firstline} \
   >   copies: {file_copies_switch}\n"
-  \xe2\x97\x8d  6 merge 5 and 4   copies: (esc)
+  \xe2\x97\x89  6 merge 5 and 4   copies: (esc)
   \xe2\x94\x82\xe2\x95\xb2 (esc)
   \xe2\x94\x82 \xe2\x97\x8b  5 add another e   copies: (esc)
   \xe2\x94\x82 \xe2\x94\x82 (esc)
@@ -2101,7 +2101,7 @@
 
   $ hg up -q 6
   $ hg log -G --git --patch --follow-first e
-  \xe2\x97\x8d  changeset:   6:9feeac35a70a (esc)
+  \xe2\x97\x89  changeset:   6:9feeac35a70a (esc)
   \xe2\x94\x82\xe2\x95\xb2   tag:         tip (esc)
   \xe2\x94\x82 \xe2\x95\xa7  parent:      5:99b31f1c2782 (esc)
   \xe2\x94\x82    parent:      4:17d952250a9d (esc)
@@ -2158,7 +2158,7 @@
   \xe2\x94\x82\xe2\x95\xb1 (esc)
   \xe2\x97\x8b  3 mv a b; add d (esc)
   \xe2\x94\x82 (esc)
-  \xe2\x97\x8d  2 mv b dir/b (esc)
+  \xe2\x97\x89  2 mv b dir/b (esc)
   \xe2\x94\x82 (esc)
   \xe2\x97\x8b  1 copy a b (esc)
   \xe2\x94\x82 (esc)
@@ -2445,7 +2445,7 @@
   \xe2\x94\x82 \xe2\x94\x82 (esc)
   \xe2\x97\x8b \xe2\x94\x82  4 mv dir/b e (esc)
   \xe2\x94\x82\xe2\x95\xb1 (esc)
-  \xe2\x97\x8d  3 mv a b; add d (esc)
+  \xe2\x97\x89  3 mv a b; add d (esc)
   \xe2\x94\x82 (esc)
   \xe2\x97\x8b  2 mv b dir/b (esc)
   \xe2\x94\x82 (esc)
@@ -2528,13 +2528,13 @@
   $ hg log -G -qr '. + wdir()'
   \xe2\x97\x8b  2147483647:ffffffffffff (esc)
   \xe2\x94\x82 (esc)
-  \xe2\x97\x8d  3:5918b8d165d1 (esc)
+  \xe2\x97\x89  3:5918b8d165d1 (esc)
   \xe2\x94\x82 (esc)
   \xe2\x95\xa7 (esc)
 
 node template with changesetprinter:
 
-  $ hg log -Gqr 5:7 --config ui.graphnodetemplate='"{rev}"'
+  $ hg log -Gqr 5:7 --config command-templates.graphnode='"{rev}"'
   7  7:9febbb9c8b2e
   \xe2\x94\x82 (esc)
   6    6:9feeac35a70a
@@ -2547,7 +2547,7 @@
 node template with changesettemplater (shared cache variable):
 
   $ hg log -Gr 5:7 -T '{latesttag % "{rev} {tag}+{distance}"}\n' \
-  > --config ui.graphnodetemplate='{ifeq(latesttagdistance, 0, "#", graphnode)}'
+  > --config command-templates.graphnode='{ifeq(latesttagdistance, 0, "#", graphnode)}'
   \xe2\x97\x8b  7 foo-bar+1 (esc)
   \xe2\x94\x82 (esc)
   #    6 foo-bar+0
@@ -2560,7 +2560,7 @@
 label() should just work in node template:
 
   $ hg log -Gqr 7 --config extensions.color= --color=debug \
-  > --config ui.graphnodetemplate='{label("branch.{branch}", rev)}'
+  > --config command-templates.graphnode='{label("branch.{branch}", rev)}'
   [branch.default\xe2\x94\x827]  [log.node|7:9febbb9c8b2e] (esc)
   \xe2\x94\x82 (esc)
   \xe2\x95\xa7 (esc)
@@ -2776,7 +2776,7 @@
 .. unless HGPLAINEXCEPT=graph is set:
 
   $ HGPLAIN=1 HGPLAINEXCEPT=graph hg log -G -r 'file("a")' -m
-  \xe2\x97\x8d  changeset:   36:08a19a744424 (esc)
+  \xe2\x97\x89  changeset:   36:08a19a744424 (esc)
   \xe2\x94\x86  branch:      branch (esc)
   \xe2\x94\x86  tag:         tip (esc)
   \xe2\x94\x86  parent:      35:9159c3644c5e (esc)
@@ -3051,7 +3051,7 @@
   (branch merge, don't forget to commit)
   $ hg ci -qm 6
   $ hg log -G -r '0 | 1 | 2 | 6'
-  \xe2\x97\x8d  changeset:   6:851fe89689ad (esc)
+  \xe2\x97\x89  changeset:   6:851fe89689ad (esc)
   \xe2\x94\x86\xe2\x95\xb2   tag:         tip (esc)
   \xe2\x94\x86 \xe2\x94\x86  parent:      5:4f1e3cf15f5d (esc)
   \xe2\x94\x86 \xe2\x94\x86  parent:      3:b74ba7084d2d (esc)
@@ -3084,8 +3084,8 @@
   $ hg init multiroots
   $ cd multiroots
   $ cat <<EOF > .hg/hgrc
-  > [ui]
-  > logtemplate = '{rev} {desc}\n\n'
+  > [command-templates]
+  > log = '{rev} {desc}\n\n'
   > EOF
 
   $ touch foo
@@ -3095,7 +3095,7 @@
   $ hg ci -Aqm bar
 
   $ hg log -Gr null:
-  \xe2\x97\x8d  1 bar (esc)
+  \xe2\x97\x89  1 bar (esc)
   \xe2\x94\x82 (esc)
   \xe2\x94\x82 \xe2\x97\x8b  0 foo (esc)
   \xe2\x94\x82\xe2\x95\xb1 (esc)
@@ -3107,7 +3107,7 @@
   \xe2\x97\x8b  -1 (esc)
   
   $ hg log -Gr null+1
-  \xe2\x97\x8d  1 bar (esc)
+  \xe2\x97\x89  1 bar (esc)
   \xe2\x94\x82 (esc)
   \xe2\x97\x8b  -1 (esc)
   
--- a/tests/test-glog-topological.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-glog-topological.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,8 +1,8 @@
This test file aims at testing topological iteration and the various configurations it can have.
 
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate={rev}\n
+  > [command-templates]
+  > log={rev}\n
   > EOF
 
On this simple example, all topological branches are displayed in turn until we
--- a/tests/test-glog.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-glog.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1438,51 +1438,51 @@
     (list
       (func
         (symbol 'user')
-        (string 'test'))
+        (string 'literal:test'))
       (func
         (symbol 'user')
-        (string 'not-a-user'))))
+        (string 'literal:not-a-user'))))
   <filteredset
     <spanset- 0:37>,
     <addset
       <filteredset
         <fullreposet+ 0:37>,
-        <user 'test'>>,
+        <user 'literal:test'>>,
       <filteredset
         <fullreposet+ 0:37>,
-        <user 'not-a-user'>>>>
+        <user 'literal:not-a-user'>>>>
   $ testlog -b not-a-branch
-  abort: unknown revision 'not-a-branch'!
-  abort: unknown revision 'not-a-branch'!
-  abort: unknown revision 'not-a-branch'!
+  abort: unknown revision 'not-a-branch'
+  abort: unknown revision 'not-a-branch'
+  abort: unknown revision 'not-a-branch'
   $ testlog -b 35 -b 36 --only-branch branch
   []
   (or
     (list
       (func
         (symbol 'branch')
-        (string 'default'))
+        (string 'literal:default'))
       (or
         (list
           (func
             (symbol 'branch')
-            (string 'branch'))
+            (string 'literal:branch'))
           (func
             (symbol 'branch')
-            (string 'branch'))))))
+            (string 'literal:branch'))))))
   <filteredset
     <spanset- 0:37>,
     <addset
       <filteredset
         <fullreposet+ 0:37>,
-        <branch 'default'>>,
+        <branch 'literal:default'>>,
       <addset
         <filteredset
           <fullreposet+ 0:37>,
-          <branch 'branch'>>,
+          <branch 'literal:branch'>>,
         <filteredset
           <fullreposet+ 0:37>,
-          <branch 'branch'>>>>>
+          <branch 'literal:branch'>>>>>
   $ testlog -k expand -k merge
   []
   (or
@@ -1532,7 +1532,7 @@
     <date '2 0 to 4 0'>>
   $ hg log -G -d 'brace ) in a date'
   hg: parse error: invalid date: 'brace ) in a date'
-  [255]
+  [10]
   $ testlog --prune 31 --prune 32
   []
   (not
@@ -2384,6 +2384,18 @@
 
 node template with changesetprinter:
 
+  $ hg log -Gqr 5:7 --config command-templates.graphnode='"{rev}"'
+  7  7:9febbb9c8b2e
+  |
+  6    6:9feeac35a70a
+  |\
+  | ~
+  5  5:99b31f1c2782
+  |
+  ~
+
+node template with changesetprinter (legacy config):
+
   $ hg log -Gqr 5:7 --config ui.graphnodetemplate='"{rev}"'
   7  7:9febbb9c8b2e
   |
@@ -2397,7 +2409,7 @@
 node template with changesettemplater (shared cache variable):
 
   $ hg log -Gr 5:7 -T '{latesttag % "{rev} {tag}+{distance}"}\n' \
-  > --config ui.graphnodetemplate='{ifeq(latesttagdistance, 0, "#", graphnode)}'
+  > --config command-templates.graphnode='{ifeq(latesttagdistance, 0, "#", graphnode)}'
   o  7 foo-bar+1
   |
   #    6 foo-bar+0
@@ -2410,7 +2422,7 @@
 label() should just work in node template:
 
   $ hg log -Gqr 7 --config extensions.color= --color=debug \
-  > --config ui.graphnodetemplate='{label("branch.{branch}", rev)}'
+  > --config command-templates.graphnode='{label("branch.{branch}", rev)}'
   [branch.default|7]  [log.node|7:9febbb9c8b2e]
   |
   ~
@@ -3420,8 +3432,8 @@
   $ hg init multiroots
   $ cd multiroots
   $ cat <<EOF > .hg/hgrc
-  > [ui]
-  > logtemplate = '{rev} {desc}\n\n'
+  > [command-templates]
+  > log = '{rev} {desc}\n\n'
   > EOF
 
   $ touch foo
--- a/tests/test-graft-interrupted.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-graft-interrupted.t	Tue Jan 19 21:48:43 2021 +0530
@@ -238,7 +238,7 @@
 
   $ hg graft --stop
   abort: no interrupted graft found
-  [255]
+  [20]
 
   $ hg graft -r 3
   grafting 3:9150fe93bec6 "added d"
@@ -250,17 +250,17 @@
 
   $ hg graft --stop --continue
   abort: cannot specify both --stop and --continue
-  [255]
+  [10]
 
   $ hg graft --stop -U
   abort: cannot specify both --stop and --user
-  [255]
+  [10]
   $ hg graft --stop --rev 4
   abort: cannot specify both --stop and --rev
-  [255]
+  [10]
   $ hg graft --stop --log
   abort: cannot specify both --stop and --log
-  [255]
+  [10]
 
   $ hg graft --stop
   stopped the interrupted graft
@@ -342,7 +342,7 @@
   $ hg abort
   abort: no interrupted graft to abort (abortflag !)
   abort: no operation in progress (abortcommand !)
-  [255]
+  [20]
 
 when stripping is required
   $ hg graft -r 4 -r 5
@@ -356,19 +356,19 @@
 
   $ hg graft --continue --abort
   abort: cannot specify both --abort and --continue
-  [255]
+  [10]
 
   $ hg graft --abort --stop
   abort: cannot specify both --abort and --stop
-  [255]
+  [10]
 
   $ hg graft --abort --currentuser
   abort: cannot specify both --abort and --user
-  [255]
+  [10]
 
   $ hg graft --abort --edit
   abort: cannot specify both --abort and --edit
-  [255]
+  [10]
 
 #if abortcommand
 when in dry-run mode
@@ -554,15 +554,15 @@
 
   $ hg graft 1 --no-commit -e
   abort: cannot specify both --no-commit and --edit
-  [255]
+  [10]
 
   $ hg graft 1 --no-commit --log
   abort: cannot specify both --no-commit and --log
-  [255]
+  [10]
 
   $ hg graft 1 --no-commit -D
   abort: cannot specify both --no-commit and --currentdate
-  [255]
+  [10]
 
 Test --no-commit is working:
   $ hg graft 1 --no-commit
--- a/tests/test-graft.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-graft.t	Tue Jan 19 21:48:43 2021 +0530
@@ -85,14 +85,14 @@
   $ hg rm -q e
   $ hg graft --continue
   abort: no graft in progress
-  [255]
+  [20]
   $ hg revert -r . -q e
 
 Need to specify a rev:
 
   $ hg graft
   abort: no revisions specified
-  [255]
+  [10]
 
 Can't graft ancestor:
 
@@ -119,10 +119,10 @@
   $ hg up -q 0
   $ hg graft -U --user foo 2
   abort: cannot specify both --user and --currentuser
-  [255]
+  [10]
   $ hg graft -D --date '0 0' 2
   abort: cannot specify both --date and --currentdate
-  [255]
+  [10]
 
 Can't graft with dirty wd:
 
@@ -130,7 +130,7 @@
   $ echo foo > a
   $ hg graft 1
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg revert a
 
 Graft a rename:
@@ -210,8 +210,8 @@
   resolving manifests
    branchmerge: True, force: True, partial: False
    ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
-  starting 4 threads for background file closing (?)
    b: local copied/moved from a -> m (premerge)
   picked tool ':merge' for b (binary False symlink False changedelete False)
   merging b and a to b
@@ -292,7 +292,7 @@
   $ hg ci -m 'commit interrupted graft'
   abort: graft in progress
   (use 'hg graft --continue' or 'hg graft --stop' to stop)
-  [255]
+  [20]
 
 Abort the graft and try committing:
 
@@ -324,7 +324,7 @@
   $ hg graft -c
   grafting 4:9c233e8e184d "4"
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
 
 Fix up:
 
@@ -337,11 +337,11 @@
 
   $ hg graft -c 6
   abort: can't specify --continue and revisions
-  [255]
+  [10]
 
   $ hg graft -c -r 6
   abort: can't specify --continue and revisions
-  [255]
+  [10]
 
 Continue for real, clobber usernames
 
--- a/tests/test-grep.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-grep.t	Tue Jan 19 21:48:43 2021 +0530
@@ -25,13 +25,13 @@
 
   $ hg log -r 'diffcontains()'
   hg: parse error: diffcontains takes at least 1 argument
-  [255]
+  [10]
   $ hg log -r 'diffcontains(:)'
   hg: parse error: diffcontains requires a string pattern
-  [255]
+  [10]
   $ hg log -r 'diffcontains("re:**test**")'
   hg: parse error: invalid regular expression: nothing to repeat* (glob)
-  [255]
+  [10]
 
 simple
 
@@ -851,8 +851,8 @@
   $ cd follow
 
   $ cat <<'EOF' >> .hg/hgrc
-  > [ui]
-  > logtemplate = '{rev}: {join(files % "{status} {path}", ", ")}\n'
+  > [command-templates]
+  > log = '{rev}: {join(files % "{status} {path}", ", ")}\n'
   > EOF
 
   $ for f in add0 add0-mod1 add0-rm1 add0-mod2 add0-rm2 add0-mod3 add0-mod4 add0-rm4; do
@@ -1018,8 +1018,8 @@
   $ hg grep --diff -fr'wdir()' data
   add0-cp4-mod4:2147483647:+:data4
   add0-mod4:2147483647:+:data4
-  add0-rm4:2147483647:-:abort: add0-rm4@None: not found in manifest!
-  [255]
+  add0-rm4:2147483647:-:abort: add0-rm4@None: not found in manifest
+  [50]
 
   $ hg grep -fr'wdir()' data
   add0:2147483647:data0
--- a/tests/test-hardlinks.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-hardlinks.t	Tue Jan 19 21:48:43 2021 +0530
@@ -69,6 +69,7 @@
   linking: 6 files
   linking: 7 files
   linked 7 files
+  updating the branch cache
 
 Create non-hardlinked clone r3:
 
@@ -238,9 +239,16 @@
   2 r4/.hg/00changelog.i
   2 r4/.hg/branch
   2 r4/.hg/cache/branch2-base
+  2 r4/.hg/cache/branch2-immutable
   2 r4/.hg/cache/branch2-served
+  2 r4/.hg/cache/branch2-served.hidden
+  2 r4/.hg/cache/branch2-visible
+  2 r4/.hg/cache/branch2-visible-hidden
+  2 r4/.hg/cache/hgtagsfnodes1
   2 r4/.hg/cache/rbc-names-v1
   2 r4/.hg/cache/rbc-revs-v1
+  2 r4/.hg/cache/tags2
+  2 r4/.hg/cache/tags2-served
   2 r4/.hg/dirstate
   2 r4/.hg/fsmonitor.state (fsmonitor !)
   2 r4/.hg/hgrc
@@ -289,9 +297,16 @@
   2 r4/.hg/00changelog.i
   1 r4/.hg/branch
   2 r4/.hg/cache/branch2-base
+  2 r4/.hg/cache/branch2-immutable
   2 r4/.hg/cache/branch2-served
+  2 r4/.hg/cache/branch2-served.hidden
+  2 r4/.hg/cache/branch2-visible
+  2 r4/.hg/cache/branch2-visible-hidden
+  2 r4/.hg/cache/hgtagsfnodes1
   2 r4/.hg/cache/rbc-names-v1
   2 r4/.hg/cache/rbc-revs-v1
+  2 r4/.hg/cache/tags2
+  2 r4/.hg/cache/tags2-served
   1 r4/.hg/dirstate
   1 r4/.hg/fsmonitor.state (fsmonitor !)
   2 r4/.hg/hgrc
--- a/tests/test-help.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-help.t	Tue Jan 19 21:48:43 2021 +0530
@@ -380,7 +380,6 @@
        relink        recreates hardlinks between repository clones
        schemes       extend schemes with shortcuts to repository swarms
        share         share a common history between several working directories
-       strip         strip changesets and their descendants from history
        transplant    command to transplant changesets from another branch
        win32mbcs     allow the use of MBCS paths with problematic encodings
        zeroconf      discover and advertise repositories on the local network
@@ -594,7 +593,7 @@
    -n --dry-run             do not perform actions, just print output
   
   (use 'hg add -h' to show more help)
-  [255]
+  [10]
 
 Test ambiguous command help
 
@@ -630,7 +629,7 @@
   (some details hidden, use --verbose to show complete help)
 
   $ hg help diff
-  hg diff [OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...
+  hg diff [OPTION]... ([-c REV] | [--from REV1] [--to REV2]) [FILE]...
   
   diff repository (or selected files)
   
@@ -643,13 +642,17 @@
          default to comparing against the working directory's first parent
          changeset if no revisions are specified.
   
-      When two revision arguments are given, then changes are shown between
-      those revisions. If only one revision is specified then that revision is
-      compared to the working directory, and, when no revisions are specified,
-      the working directory files are compared to its first parent.
+      By default, the working directory files are compared to its first parent.
+      To see the differences from another revision, use --from. To see the
+      difference to another revision, use --to. For example, 'hg diff --from .^'
+      will show the differences from the working copy's grandparent to the
+      working copy, 'hg diff --to .' will show the diff from the working copy to
+      its parent (i.e. the reverse of the default), and 'hg diff --from 1.0 --to
+      1.2' will show the diff between those two revisions.
   
       Alternatively you can specify -c/--change with a revision to see the
-      changes in that changeset relative to its first parent.
+      changes in that changeset relative to its first parent (i.e. 'hg diff -c
+      42' is equivalent to 'hg diff --from 42^ --to 42')
   
       Without the -a/--text option, diff will avoid generating diffs of files it
       detects as binary. With -a, diff will generate a diff anyway, probably
@@ -662,7 +665,8 @@
   
   options ([+] can be repeated):
   
-   -r --rev REV [+]         revision
+      --from REV1           revision to diff from
+      --to REV2             revision to diff to
    -c --change REV          change made by revision
    -a --text                treat all files as text
    -g --git                 use git extended diff format
@@ -754,18 +758,18 @@
   $ hg help foo
   abort: no such help topic: foo
   (try 'hg help --keyword foo')
-  [255]
+  [10]
 
   $ hg skjdfks
   hg: unknown command 'skjdfks'
   (use 'hg help' for a list of commands)
-  [255]
+  [10]
 
 Typoed command gives suggestion
   $ hg puls
   hg: unknown command 'puls'
   (did you mean one of pull, push?)
-  [255]
+  [10]
 
 Not enabled extension gets suggested
 
@@ -776,7 +780,7 @@
       rebase        command to move sets of revisions to a different ancestor
   
   (use 'hg help extensions' for information on enabling extensions)
-  [255]
+  [10]
 
 Disabled extension gets suggested
   $ hg --config extensions.rebase=! rebase
@@ -786,7 +790,7 @@
       rebase        command to move sets of revisions to a different ancestor
   
   (use 'hg help extensions' for information on enabling extensions)
-  [255]
+  [10]
 
 Checking that help adapts based on the config:
 
@@ -800,16 +804,16 @@
   $ hg .log
   hg: unknown command '.log'
   (did you mean log?)
-  [255]
+  [10]
 
   $ hg log.
   hg: unknown command 'log.'
   (did you mean log?)
-  [255]
+  [10]
   $ hg pu.lh
   hg: unknown command 'pu.lh'
   (did you mean one of pull, push?)
-  [255]
+  [10]
 
   $ cat > helpext.py <<EOF
   > import os
@@ -1064,9 +1068,11 @@
    debugserve    run a server with advanced settings
    debugsetparents
                  manually set the parents of the current working directory
+                 (DANGEROUS)
    debugsidedata
                  dump the side data for a cl/manifest/file revision
    debugssl      test a secure connection to a server
+   debugstrip    strip changesets and all their descendants from the repository
    debugsub      (no help text available)
    debugsuccessorssets
                  show set of successors for revision
@@ -1339,7 +1345,7 @@
   $ hg help internals.foo
   abort: no such help topic: internals.foo
   (try 'hg help --keyword foo')
-  [255]
+  [10]
 
 test advanced, deprecated and experimental options are hidden in command help
   $ hg help debugoptADV
@@ -1444,7 +1450,7 @@
       - "<DATE" - at or before a given date/time
       - ">DATE" - on or after a given date/time
       - "DATE to DATE" - a date range, inclusive
-      - "-DAYS" - within a given number of days of today
+      - "-DAYS" - within a given number of days from today
 
 Test repeated config section name
 
@@ -1473,7 +1479,7 @@
 
   $ hg help config.annotate.git
   abort: help section not found: config.annotate.git
-  [255]
+  [10]
 
   $ hg help config.update.check
       "commands.update.check"
@@ -1503,7 +1509,7 @@
 
   $ hg help config.ommands.update.check
   abort: help section not found: config.ommands.update.check
-  [255]
+  [10]
 
 Unrelated trailing paragraphs shouldn't be included
 
@@ -1545,6 +1551,10 @@
   
       "usefncache"
   
+      "use-persistent-nodemap"
+  
+      "use-share-safe"
+  
       "usestore"
   
       "sparse-revlog"
@@ -1649,7 +1659,7 @@
   $ hg help -c schemes
   abort: no such help topic: schemes
   (try 'hg help --keyword schemes')
-  [255]
+  [10]
   $ hg help -e schemes |head -1
   schemes extension - extend schemes with shortcuts to repository swarms
   $ hg help -c -k dates |egrep '^(Topics|Extensions|Commands):'
@@ -1664,7 +1674,7 @@
   $ hg help -e commit
   abort: no such help topic: commit
   (try 'hg help --keyword commit')
-  [255]
+  [10]
 
 Test keyword search help
 
@@ -1709,14 +1719,14 @@
   $ hg help nonexistingtopicthatwillneverexisteverever
   abort: no such help topic: nonexistingtopicthatwillneverexisteverever
   (try 'hg help --keyword nonexistingtopicthatwillneverexisteverever')
-  [255]
+  [10]
 
 Test unfound keyword
 
   $ hg help --keyword nonexistingwordthatwillneverexisteverever
   abort: no matches
   (try 'hg help' for a list of topics)
-  [255]
+  [10]
 
 Test omit indicating for help
 
@@ -1863,11 +1873,11 @@
   
   $ hg help glossary.mcguffin
   abort: help section not found: glossary.mcguffin
-  [255]
+  [10]
 
   $ hg help glossary.mc.guffin
   abort: help section not found: glossary.mc.guffin
-  [255]
+  [10]
 
   $ hg help template.files
       files         List of strings. All files modified, added, or removed by
@@ -2052,6 +2062,13 @@
         partially merged file. Marker will have three sections, one from each
         side of the merge and one for the base content.
   
+      ":mergediff"
+        Uses the internal non-interactive simple merge algorithm for merging
+        files. It will fail if there are any conflicts and leave markers in the
+        partially merged file. The marker will have two sections, one with the
+        content from one side of the merge, and one with a diff from the base
+        content to the content on the other side. (experimental)
+  
       ":other"
         Uses the other 'p2()' version of files as the merged version.
   
@@ -3281,7 +3298,7 @@
    <li> &quot;&lt;DATE&quot; - at or before a given date/time
    <li> &quot;&gt;DATE&quot; - on or after a given date/time
    <li> &quot;DATE to DATE&quot; - a date range, inclusive
-   <li> &quot;-DAYS&quot; - within a given number of days of today
+   <li> &quot;-DAYS&quot; - within a given number of days from today
   </ul>
   
   </div>
--- a/tests/test-hgrc.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-hgrc.t	Tue Jan 19 21:48:43 2021 +0530
@@ -16,7 +16,7 @@
 
   $ echo "invalid" > $HGRC
   $ hg version
-  hg: parse error at $TESTTMP/hgrc:1: invalid
+  config error at $TESTTMP/hgrc:1: invalid
   [255]
   $ echo "" > $HGRC
 
@@ -59,7 +59,7 @@
 #if unix-permissions no-root
   $ chmod u-r $TESTTMP/included
   $ hg showconfig section
-  hg: parse error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied)
+  config error at $TESTTMP/hgrc:2: cannot include $TESTTMP/included (Permission denied)
   [255]
 #endif
 
@@ -68,8 +68,7 @@
   $ echo '[foo]' > $HGRC
   $ echo '  x = y' >> $HGRC
   $ hg version
-  hg: parse error at $TESTTMP/hgrc:2:   x = y
-  unexpected leading whitespace
+  config error at $TESTTMP/hgrc:2: unexpected leading whitespace:   x = y
   [255]
 
   $ "$PYTHON" -c "from __future__ import print_function; print('[foo]\nbar = a\n b\n c \n  de\n fg \nbaz = bif cb \n')" \
@@ -276,7 +275,7 @@
   > EOF
 
   $ hg path
-  hg: parse error at $TESTTMP/.hg/hgrc:3: [broken
+  config error at $TESTTMP/.hg/hgrc:3: [broken
   [255]
   $ HGRCSKIPREPO=1 hg path
   foo = $TESTTMP/bar
@@ -284,7 +283,7 @@
 Check that hgweb respect HGRCSKIPREPO=1
 
   $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
-  hg: parse error at $TESTTMP/.hg/hgrc:3: [broken
+  config error at $TESTTMP/.hg/hgrc:3: [broken
   [255]
   $ test -f hg.pid && (cat hg.pid >> $DAEMON_PIDS)
   [1]
@@ -303,7 +302,7 @@
 Check that zeroconf respect HGRCSKIPREPO=1
 
   $ hg paths --config extensions.zeroconf=
-  hg: parse error at $TESTTMP/.hg/hgrc:3: [broken
+  config error at $TESTTMP/.hg/hgrc:3: [broken
   [255]
   $ HGRCSKIPREPO=1 hg paths --config extensions.zeroconf=
   foo = $TESTTMP/bar
--- a/tests/test-histedit-arguments.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-arguments.t	Tue Jan 19 21:48:43 2021 +0530
@@ -55,11 +55,11 @@
 
   $ hg histedit --continue
   abort: no histedit in progress
-  [255]
+  [20]
   $ hg abort
   abort: no histedit in progress (abortflag !)
   abort: no operation in progress (abortcommand !)
-  [255]
+  [20]
 
 Run a dummy edit to make sure we get tip^^ correctly via revsingle.
 --------------------------------------------------------------------
@@ -77,7 +77,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -126,8 +126,8 @@
 ---------------------------
 
   $ hg histedit --config "histedit.defaultrev="
-  abort: config option histedit.defaultrev can't be empty
-  [255]
+  config error: config option histedit.defaultrev can't be empty
+  [30]
 
 Run on a revision not descendants of the initial parent
 --------------------------------------------------------------------
@@ -149,14 +149,14 @@
   > edit 08d98a8350f3 4 five
   > EOF
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
-  Editing (08d98a8350f3), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (08d98a8350f3), commit as needed now to split the change
+  (to edit 08d98a8350f3, `hg histedit --continue` after making changes)
+  [240]
 
   $ hg graft --continue
   abort: no graft in progress
   (continue: hg histedit --continue)
-  [255]
+  [20]
 
   $ mv .hg/histedit-state .hg/histedit-state.back
   $ hg update --quiet --clean 2
@@ -187,7 +187,7 @@
   > EOF
   hg: parse error: missing rules for changeset c8e68270e35a
   (use "drop c8e68270e35a" to discard, see also: 'hg help -e histedit.config')
-  [255]
+  [10]
 
 Test that extra revisions are detected
 ---------------------------------------
@@ -199,7 +199,7 @@
   > EOF
   hg: parse error: pick "6058cbb6cfd7" changeset was not a candidate
   (only use listed changesets)
-  [255]
+  [10]
 
 Test malformed line
 ---------------------------------------
@@ -210,7 +210,7 @@
   > pick 08d98a8350f3 4 five
   > EOF
   hg: parse error: malformed line "pickeb57da33312f2three"
-  [255]
+  [10]
 
 Test unknown changeset
 ---------------------------------------
@@ -221,7 +221,7 @@
   > pick 08d98a8350f3 4 five
   > EOF
   hg: parse error: unknown changeset 0123456789ab listed
-  [255]
+  [10]
 
 Test unknown command
 ---------------------------------------
@@ -232,7 +232,7 @@
   > pick 08d98a8350f3 4 five
   > EOF
   hg: parse error: unknown action "coin"
-  [255]
+  [10]
 
 Test duplicated changeset
 ---------------------------------------
@@ -245,7 +245,7 @@
   > pick 08d98a8350f3 4 five
   > EOF
   hg: parse error: duplicated command for changeset eb57da33312f
-  [255]
+  [10]
 
 Test bogus rev
 ---------------------------------------
@@ -256,7 +256,7 @@
   > pick 08d98a8350f3 4 five
   > EOF
   hg: parse error: invalid changeset 0u98
-  [255]
+  [10]
 
 Test short version of command
 ---------------------------------------
@@ -319,7 +319,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -335,9 +335,9 @@
   > edit eb57da33312f 2 three
   > pick f3cfcca30c44 4 x
   > EOF
-  Editing (eb57da33312f), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (eb57da33312f), commit as needed now to split the change
+  (to edit eb57da33312f, `hg histedit --continue` after making changes)
+  [240]
   $ echo edit >> alpha
   $ hg histedit -q --continue
   $ hg log -G -T '{rev}:{node|short} {desc}'
@@ -362,9 +362,9 @@
   $ hg histedit . -q --commands - << EOF
   > edit 8fda0c726bf2 6 x
   > EOF
-  Editing (8fda0c726bf2), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (8fda0c726bf2), commit as needed now to split the change
+  (to edit 8fda0c726bf2, `hg histedit --continue` after making changes)
+  [240]
 Corrupt histedit state file
   $ sed 's/8fda0c726bf2/123456789012/' .hg/histedit-state > ../corrupt-histedit
   $ mv ../corrupt-histedit .hg/histedit-state
@@ -489,23 +489,23 @@
   warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   Fix up the change (pick 8cde254db839)
   (hg histedit --continue to resume)
-  [1]
+  [240]
   $ hg resolve -m --all
   (no more unresolved files)
   continue: hg histedit --continue
   $ hg histedit --cont
   merging foo
   warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
-  Editing (6f2f0241f119), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (6f2f0241f119), commit as needed now to split the change
+  (to edit 6f2f0241f119, `hg histedit --continue` after making changes)
+  [240]
   $ hg resolve -m --all
   (no more unresolved files)
   continue: hg histedit --continue
   $ hg commit --amend -m 'reject this fold'
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
 
 With markers enabled, histedit does not get confused, and
 amend should not be blocked by the ongoing histedit.
@@ -557,7 +557,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
--- a/tests/test-histedit-bookmark-motion.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-bookmark-motion.t	Tue Jan 19 21:48:43 2021 +0530
@@ -73,7 +73,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -130,7 +130,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
--- a/tests/test-histedit-commute.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-commute.t	Tue Jan 19 21:48:43 2021 +0530
@@ -68,7 +68,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -94,7 +94,33 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
+  #  m, mess = edit commit message without changing commit content
+  #  p, pick = use commit
+  #  b, base = checkout changeset and apply further changesets from there
+  #  d, drop = remove commit from history
+  #  f, fold = use commit, but combine it with the one above
+  #  r, roll = like fold, but discard this commit's description and date
+  #
+
+
+colors in the custom template don't show up in the editor
+  $ HGEDITOR=cat hg histedit 177f92b77385 --color=debug \
+  >  --config histedit.summary-template='I am rev {label("rev", rev)}'
+  pick 177f92b77385 I am rev 2
+  pick 055a42cdd887 I am rev 3
+  pick e860deea161a I am rev 4
+  pick 652413bf663e I am rev 5
+  
+  # Edit history between 177f92b77385 and 652413bf663e
+  #
+  # Commits are listed from least to most recent
+  #
+  # You can reorder changesets by reordering the lines
+  #
+  # Commands:
+  #
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -116,8 +142,8 @@
   > EOF
   $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle
   0 files updated, 0 files merged, 4 files removed, 0 files unresolved
-  Editing (177f92b77385), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (177f92b77385), commit as needed now to split the change
+  (to edit 177f92b77385, `hg histedit --continue` after making changes)
 
 rules should end up in .hg/histedit-last-edit.txt:
   $ cat .hg/histedit-last-edit.txt
@@ -379,7 +405,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
--- a/tests/test-histedit-edit.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-edit.t	Tue Jan 19 21:48:43 2021 +0530
@@ -63,7 +63,7 @@
   $ hg histedit 177f92b77385 --commands - 2>&1 << EOF
   > EOF
   abort: uncommitted changes
-  [255]
+  [20]
   $ echo g > g
 
 edit the history
@@ -75,14 +75,14 @@
   > pick 3c6a8ed2ebe8 g
   > EOF
   0 files updated, 0 files merged, 3 files removed, 0 files unresolved
-  Editing (e860deea161a), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (e860deea161a), commit as needed now to split the change
+  (to edit e860deea161a, `hg histedit --continue` after making changes)
 
 try to update and get an error
   $ hg update tip
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
 
 edit the plan via the editor
   $ cat >> $TESTTMP/editplan.sh <<EOF
@@ -136,7 +136,7 @@
   $ hg up 0
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
 
 Try to delete necessary commit
   $ hg strip -r 652413b
@@ -153,7 +153,7 @@
   $ hg --config extensions.mq= qnew please-fail
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
   $ HGEDITOR='echo foobaz > ' hg histedit --continue 2>&1 | fixbundle
 
   $ hg log --graph
@@ -205,8 +205,8 @@
   > pick b5f70786f9b0 g
   > EOF
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  Editing (1a60820cd1f6), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (1a60820cd1f6), commit as needed now to split the change
+  (to edit 1a60820cd1f6, `hg histedit --continue` after making changes)
 
   $ mv .hg/histedit-state .hg/histedit-state.bak
   $ hg strip -q -r b5f70786f9b0
@@ -247,8 +247,8 @@
   > edit b5f70786f9b0 f
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  Editing (b5f70786f9b0), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (b5f70786f9b0), commit as needed now to split the change
+  (to edit b5f70786f9b0, `hg histedit --continue` after making changes)
   $ hg status
   A f
 
@@ -435,9 +435,9 @@
   > edit cb9a9f314b8b a > $EDITED
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  Editing (cb9a9f314b8b), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (cb9a9f314b8b), commit as needed now to split the change
+  (to edit cb9a9f314b8b, `hg histedit --continue` after making changes)
+  [240]
   $ HGEDITOR=true hg histedit --continue
   saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg
 
@@ -463,7 +463,7 @@
   $ HGEDITOR="sh ../edit.sh" hg histedit 2
   warning: histedit rules saved to: .hg/histedit-last-edit.txt
   hg: parse error: first changeset cannot use verb "fold"
-  [255]
+  [10]
   $ cat .hg/histedit-last-edit.txt
   fold 0012be4a27ea 2 extend a
   
@@ -475,7 +475,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, fold = use commit
   #  b, base = checkout changeset and apply further changesets from there
--- a/tests/test-histedit-fold.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-fold.t	Tue Jan 19 21:48:43 2021 +0530
@@ -291,7 +291,7 @@
   warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   Fix up the change (fold 251d831eeec5)
   (hg histedit --continue to resume)
-  [1]
+  [240]
 There were conflicts, we keep P1 content. This
 should effectively drop the changes from +6.
 
@@ -364,7 +364,7 @@
   warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   Fix up the change (fold 251d831eeec5)
   (hg histedit --continue to resume)
-  [1]
+  [240]
   $ cat > file << EOF
   > 1
   > 2
@@ -588,7 +588,7 @@
   > r 5db65b93a12b
   > EOF
   hg: parse error: first changeset cannot use verb "roll"
-  [255]
+  [10]
   $ hg log -G -T '{node|short} {desc}'
   @  5db65b93a12b cc
   |
--- a/tests/test-histedit-merge-tools.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-merge-tools.t	Tue Jan 19 21:48:43 2021 +0530
@@ -12,8 +12,6 @@
   > [extensions]
   > histedit=
   > mockmakedate = $TESTDIR/mockmakedate.py
-  > [ui]
-  > pre-merge-tool-output-template='pre-merge message for {node}\n'
   > EOF
 
 Merge conflict
@@ -21,6 +19,11 @@
 
   $ hg init r
   $ cd r
+  $ cat >> .hg/hgrc <<EOF
+  > [command-templates]
+  > pre-merge-tool-output='pre-merge message for {node}\n'
+  > EOF
+
   $ echo foo > file
   $ hg add file
   $ hg ci -m "First" -d "1 0"
@@ -55,3 +58,26 @@
   merging file
   pre-merge message for b90fa2e91a6d11013945a5f684be45b84a8ca6ec
   7181f42b8fca: skipping changeset (no changes)
+  $ hg histedit --abort
+  abort: no histedit in progress
+  [20]
+  $ cd ..
+
+Test legacy config name
+
+  $ hg init r2
+  $ cd r2
+  $ echo foo > file
+  $ hg add file
+  $ hg ci -m "First"
+  $ echo bar > file
+  $ hg ci -m "Second"
+  $ echo conflict > file
+  $ hg co -m 0 --config ui.merge=false \
+  > --config ui.pre-merge-tool-output-template='legacy config: {node}\n'
+  merging file
+  legacy config: 889c9c4d58bd4ce74815efd04a01e0f2bf6765a7
+  merging file failed!
+  0 files updated, 0 files merged, 0 files removed, 1 files unresolved
+  use 'hg resolve' to retry unresolved file merges
+  [1]
--- a/tests/test-histedit-no-backup.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-no-backup.t	Tue Jan 19 21:48:43 2021 +0530
@@ -50,9 +50,9 @@
   > edit 7d5187087c79 3 four
   > EOF
   merging file
-  Editing (7d5187087c79), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (7d5187087c79), commit as needed now to split the change
+  (to edit 7d5187087c79, `hg histedit --continue` after making changes)
+  [240]
   $ hg abort
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   saved backup bundle to $TESTTMP/foo/.hg/strip-backup/1d8f701c7b35-cf7be322-backup.hg
@@ -72,9 +72,9 @@
   > edit 7d5187087c79 3 four
   > EOF
   merging file
-  Editing (7d5187087c79), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (7d5187087c79), commit as needed now to split the change
+  (to edit 7d5187087c79, `hg histedit --continue` after making changes)
+  [240]
 
   $ hg abort
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-histedit-no-change.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-no-change.t	Tue Jan 19 21:48:43 2021 +0530
@@ -91,8 +91,8 @@
   | edit e860deea161a 4 e
   | pick 652413bf663e 5 f
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
-  Editing (e860deea161a), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (e860deea161a), commit as needed now to split the change
+  (to edit e860deea161a, `hg histedit --continue` after making changes)
   $ continueediting true "(leaving commit message unaltered)"
   % finalize changeset editing (leaving commit message unaltered)
 
@@ -142,12 +142,12 @@
   | edit e860deea161a 4 e
   | pick 652413bf663e 5 f
   0 files updated, 0 files merged, 3 files removed, 0 files unresolved
-  Editing (055a42cdd887), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (055a42cdd887), commit as needed now to split the change
+  (to edit 055a42cdd887, `hg histedit --continue` after making changes)
   $ continueediting true "(leaving commit message unaltered)"
   % finalize changeset editing (leaving commit message unaltered)
-  Editing (e860deea161a), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (e860deea161a), commit as needed now to split the change
+  (to edit e860deea161a, `hg histedit --continue` after making changes)
   $ graphlog "log after first edit"
   % log after first edit
   @  6 e5ae3ca2f1ffdbd89ec41ebc273a231f7c3022f2 "d"
@@ -169,7 +169,7 @@
   $ hg up 0
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
   $ mv .hg/histedit-state .hg/histedit-state-ignore
   $ hg up 0
   0 files updated, 0 files merged, 3 files removed, 0 files unresolved
@@ -211,8 +211,8 @@
   $ startediting 1 1 "(not changing anything)" # edit the 3rd of 3 changesets
   % start editing the history (not changing anything)
   | edit 292aec348d9e 6 closebranch
-  Editing (292aec348d9e), you may commit or record as needed now.
-  (hg histedit --continue to resume)
+  Editing (292aec348d9e), commit as needed now to split the change
+  (to edit 292aec348d9e, `hg histedit --continue` after making changes)
   $ hg histedit --abort
 
   $ cd ..
--- a/tests/test-histedit-obsolete.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-obsolete.t	Tue Jan 19 21:48:43 2021 +0530
@@ -43,9 +43,9 @@
   $ echo "pick `hg log -r 2 -T '{node|short}'`" >> plan
   $ echo "edit `hg log -r 1 -T '{node|short}'`" >> plan
   $ hg histedit -r 'all()' --commands plan
-  Editing (1b2d564fad96), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (1b2d564fad96), commit as needed now to split the change
+  (to edit 1b2d564fad96, `hg histedit --continue` after making changes)
+  [240]
   $ hg st
   A b
   A c
@@ -70,9 +70,9 @@
   $ echo "pick `hg log -r 5 -T '{node|short}'`" >> plan
   $ echo "edit `hg log -r 4 -T '{node|short}'`" >> plan
   $ hg histedit -r 'all()' --commands plan
-  Editing (49d44ab2be1b), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (49d44ab2be1b), commit as needed now to split the change
+  (to edit 49d44ab2be1b, `hg histedit --continue` after making changes)
+  [240]
   $ hg st
   A b
   A d
@@ -139,7 +139,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -225,9 +225,9 @@
   > edit b346ab9a313d 6 c
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  Editing (b346ab9a313d), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (b346ab9a313d), commit as needed now to split the change
+  (to edit b346ab9a313d, `hg histedit --continue` after making changes)
+  [240]
   $ echo c >> c
   $ hg histedit --continue
   1 new orphan changesets
@@ -293,8 +293,8 @@
 -------------------------------------------
 
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate= {rev}:{node|short} ({phase}) {desc|firstline}\n
+  > [command-templates]
+  > log = {rev}:{node|short} ({phase}) {desc|firstline}\n
   > EOF
 
   $ hg ph -pv '.^'
@@ -309,7 +309,7 @@
   $ hg histedit -r '.~2'
   abort: cannot edit public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
 
 Prepare further testing
@@ -359,9 +359,9 @@
   > pick ee118ab9fa44 16 k
   > EOF
   0 files updated, 0 files merged, 6 files removed, 0 files unresolved
-  Editing (b449568bf7fc), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (b449568bf7fc), commit as needed now to split the change
+  (to edit b449568bf7fc, `hg histedit --continue` after making changes)
+  [240]
   $ echo f >> f
   $ hg histedit --continue
   $ hg log -G
@@ -401,9 +401,9 @@
   > pick ee118ab9fa44 16 k
   > EOF
   0 files updated, 0 files merged, 6 files removed, 0 files unresolved
-  Editing (b449568bf7fc), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (b449568bf7fc), commit as needed now to split the change
+  (to edit b449568bf7fc, `hg histedit --continue` after making changes)
+  [240]
   $ echo f >> f
   $ hg histedit --continue
   $ hg log -G
@@ -527,9 +527,9 @@
   > roll 3a6c53ee7f3d 17 j
   > edit ee118ab9fa44 18 k
   > EOF
-  Editing (ee118ab9fa44), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (ee118ab9fa44), commit as needed now to split the change
+  (to edit ee118ab9fa44, `hg histedit --continue` after making changes)
+  [240]
 
 #if abortcommand
 when in dry-run mode
@@ -566,9 +566,9 @@
   > pick 3a6c53ee7f3d 17 j
   > edit ee118ab9fa44 18 k
   > EOF
-  Editing (ee118ab9fa44), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (ee118ab9fa44), commit as needed now to split the change
+  (to edit ee118ab9fa44, `hg histedit --continue` after making changes)
+  [240]
   $ hg histedit --continue --config experimental.evolution.track-operation=1
   $ hg log -G
   @  23:175d6b286a22 (secret) k
--- a/tests/test-histedit-outgoing.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-histedit-outgoing.t	Tue Jan 19 21:48:43 2021 +0530
@@ -49,7 +49,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -84,7 +84,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
@@ -111,7 +111,7 @@
   #
   # Commands:
   #
-  #  e, edit = use commit, but stop for amending
+  #  e, edit = use commit, but allow edits before making new commit
   #  m, mess = edit commit message without changing commit content
   #  p, pick = use commit
   #  b, base = checkout changeset and apply further changesets from there
--- a/tests/test-hook.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-hook.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1122,8 +1122,8 @@
 
   $ hg id
   loading pre-identify.npmd hook failed:
-  abort: No module named repo! (no-py3 !)
-  abort: No module named 'repo'! (py3 !)
+  abort: No module named repo (no-py3 !)
+  abort: No module named 'repo' (py3 !)
   [255]
 
   $ cd ../../b
--- a/tests/test-http-bad-server.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http-bad-server.t	Tue Jan 19 21:48:43 2021 +0530
@@ -41,7 +41,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: (\$ECONNRESET\$|\$EADDRNOTAVAIL\$) (re)
-  [255]
+  [100]
 
 (The server exits on its own, but there is a race between that and starting a new server.
 So ensure the process is dead.)
@@ -63,7 +63,7 @@
 --runs-per-test=20 on macOS 10.12 during the freeze for 4.2.
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -74,7 +74,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: bad HTTP status line: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -90,7 +90,7 @@
   $ cat hg.pid > $DAEMON_PIDS
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: bad HTTP status line: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -107,7 +107,7 @@
   $ cat hg.pid > $DAEMON_PIDS
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: bad HTTP status line: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -150,7 +150,7 @@
   $ hg clone http://localhost:$HGPORT/ clone
   requesting all changes
   abort: error: bad HTTP status line: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -217,7 +217,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: bad HTTP status line: * (glob)
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -277,7 +277,7 @@
 
   $ hg clone http://localhost:$HGPORT/ clone
   abort: error: bad HTTP status line: H
-  [255]
+  [100]
 
   $ killdaemons.py $DAEMON_PIDS
 
@@ -351,7 +351,7 @@
   ---%<--- (applicat)
   
   ---%<---
-  !
+  
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
@@ -474,7 +474,7 @@
   ---%<--- (application/mercuri)
   
   ---%<---
-  !
+  
   [255]
 
   $ killdaemons.py $DAEMON_PIDS
--- a/tests/test-http-bundle1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http-bundle1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -178,7 +178,7 @@
 
   $ hg clone http://localhost:$HGPORT/bad
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
 test http authentication
 + use the same server to test server side streaming preference
@@ -250,7 +250,7 @@
   [255]
   $ hg id http://user:pass2@localhost:$HGPORT2/
   abort: HTTP Error 403: no
-  [255]
+  [100]
 
   $ hg -R dest-pull tag -r tip top
   $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/
@@ -338,7 +338,7 @@
   updating to branch default
   cloning subrepo sub from http://localhost:$HGPORT/sub
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
   $ hg clone http://localhost:$HGPORT/ slash-clone
   requesting all changes
   adding changesets
@@ -349,7 +349,7 @@
   updating to branch default
   cloning subrepo sub from http://localhost:$HGPORT/sub
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
 check error log
 
@@ -364,7 +364,7 @@
   requesting all changes
   abort: remote error:
   this is an exercise
-  [255]
+  [100]
   $ cat error.log
 
 disable pull-based clones
@@ -375,7 +375,7 @@
   requesting all changes
   abort: remote error:
   server has pull-based clones disabled
-  [255]
+  [100]
 
 #if no-reposimplestore
 ... but keep stream clones working
--- a/tests/test-http-clone-r.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http-clone-r.t	Tue Jan 19 21:48:43 2021 +0530
@@ -217,6 +217,6 @@
 
   $ hg clone http://localhost:$HGPORT/
   abort: empty destination path is not valid
-  [255]
+  [10]
 
   $ cat error.log
--- a/tests/test-http-permissions.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http-permissions.t	Tue Jan 19 21:48:43 2021 +0530
@@ -765,7 +765,7 @@
   no bookmarks set
   $ hg bookmark -d bm
   abort: bookmark 'bm' does not exist
-  [255]
+  [10]
 
   $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=customwritenoperm'
   405 push requires POST request
@@ -806,7 +806,7 @@
   no bookmarks set
   $ hg bookmark -d bm
   abort: bookmark 'bm' does not exist
-  [255]
+  [10]
 
   $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=customwritenoperm'
   405 push requires POST request
@@ -874,13 +874,13 @@
   searching for changes
   no changes found
   abort: HTTP Error 403: ssl required
-  [255]
+  [100]
 
   $ hg --cwd ../test2 push http://localhost:$HGPORT/
   pushing to http://localhost:$HGPORT/
   searching for changes
   abort: HTTP Error 403: ssl required
-  [255]
+  [100]
 
   $ killdaemons.py
 
--- a/tests/test-http-proxy.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http-proxy.t	Tue Jan 19 21:48:43 2021 +0530
@@ -91,7 +91,7 @@
 
   $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f
   abort: error: (Connection refused|Protocol not supported|.* actively refused it|\$EADDRNOTAVAIL\$|No route to host) (re)
-  [255]
+  [100]
 
 do not use the proxy if it is in the no list
 
--- a/tests/test-http.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-http.t	Tue Jan 19 21:48:43 2021 +0530
@@ -167,7 +167,7 @@
 
   $ hg clone http://localhost:$HGPORT/bad
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
 test http authentication
 + use the same server to test server side streaming preference
@@ -283,7 +283,7 @@
   [255]
   $ hg id http://user:pass2@localhost:$HGPORT2/
   abort: HTTP Error 403: no
-  [255]
+  [100]
 
   $ hg -R dest-pull tag -r tip top
   $ hg -R dest-pull push http://user:pass@localhost:$HGPORT2/
@@ -475,7 +475,7 @@
   updating to branch default
   cloning subrepo sub from http://localhost:$HGPORT/sub
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
   $ hg clone http://localhost:$HGPORT/ slash-clone
   requesting all changes
   adding changesets
@@ -486,7 +486,7 @@
   updating to branch default
   cloning subrepo sub from http://localhost:$HGPORT/sub
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
 check error log
 
@@ -588,7 +588,7 @@
 
   $ hg id http://localhost:$HGPORT
   abort: HTTP Error 500: no-cookie
-  [255]
+  [100]
 
 Populate a cookies file
 
@@ -602,7 +602,7 @@
 
   $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
   abort: HTTP Error 500: no-cookie
-  [255]
+  [100]
 
 Add a cookie entry for our test server and verify it is sent
 
@@ -612,4 +612,4 @@
 
   $ hg --config auth.cookiefile=cookies.txt id http://localhost:$HGPORT/
   abort: HTTP Error 500: Cookie: hgkey=localhostvalue
-  [255]
+  [100]
--- a/tests/test-https.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-https.t	Tue Jan 19 21:48:43 2021 +0530
@@ -52,14 +52,14 @@
   $ hg clone https://localhost:$HGPORT/ copy-pull
   (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 #endif
 
 #if defaultcacertsloaded
   $ hg clone https://localhost:$HGPORT/ copy-pull
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 #endif
 
 Specifying a per-host certificate file that doesn't exist will abort.  The full
@@ -84,7 +84,7 @@
   (an attempt was made to load CA certificates but none were loaded; see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error)
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 
 A per-host certificate matching the server's cert will be accepted
 
@@ -125,7 +125,7 @@
   $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLECACERTS
   abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
   (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
-  [255]
+  [150]
 
   $ hg clone --insecure https://localhost:$HGPORT/ copy-pull
   warning: connection security to localhost is disabled per current settings; communication is susceptible to eavesdropping and tampering
@@ -160,7 +160,7 @@
   pulling from https://localhost:$HGPORT/
   abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
   (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
-  [255]
+  [150]
 
   $ hg pull --insecure
   pulling from https://localhost:$HGPORT/
@@ -227,7 +227,7 @@
   pulling from https://*:$HGPORT/ (glob)
   abort: $LOCALIP certificate error: certificate is for localhost (glob)
   (set hostsecurity.$LOCALIP:certfingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e config setting or use --insecure to connect insecurely)
-  [255]
+  [150]
   $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub.pem" \
   > https://$LOCALIP:$HGPORT/ --insecure
   pulling from https://*:$HGPORT/ (glob)
@@ -238,7 +238,7 @@
   pulling from https://localhost:$HGPORT/
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
   $ hg -R copy-pull pull --config web.cacerts="$CERTSDIR/pub-other.pem" \
   > --insecure
   pulling from https://localhost:$HGPORT/
@@ -255,7 +255,7 @@
   pulling from https://localhost:$HGPORT1/
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 
 Test server cert which no longer is valid
 
@@ -266,7 +266,7 @@
   pulling from https://localhost:$HGPORT2/
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 
 Setting ciphers to an invalid value aborts
   $ P="$CERTSDIR" hg --config hostsecurity.ciphers=invalid -R copy-pull id https://localhost:$HGPORT/
@@ -319,18 +319,18 @@
   $ hg --config 'hostfingerprints.localhost=deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/ --insecure
   abort: certificate for localhost has unexpected fingerprint ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
   (check hostfingerprint configuration)
-  [255]
+  [150]
 
   $ hg --config 'hostsecurity.localhost:fingerprints=sha1:deadbeefdeadbeefdeadbeefdeadbeefdeadbeef, sha1:aeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' -R copy-pull id https://localhost:$HGPORT/
   abort: certificate for localhost has unexpected fingerprint sha1:ec:d8:7c:d6:b3:86:d0:4f:c1:b8:b4:1c:9d:8f:5e:16:8e:ef:1c:03
   (check hostsecurity configuration)
-  [255]
+  [150]
 
 - fails when cert doesn't match hostname (port is ignored)
   $ hg -R copy-pull id https://localhost:$HGPORT1/ --config hostfingerprints.localhost=ecd87cd6b386d04fc1b8b41c9d8f5e168eef1c03
   abort: certificate for localhost has unexpected fingerprint f4:2f:5a:0c:3e:52:5b:db:e7:24:a8:32:1d:18:97:6d:69:b5:87:84
   (check hostfingerprint configuration)
-  [255]
+  [150]
 
 
 - ignores that certificate doesn't match hostname
@@ -375,26 +375,26 @@
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
 
   $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.1 id https://localhost:$HGPORT/
   (could not negotiate a common security protocol (tls1.1+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
   $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT/
   (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
   $ P="$CERTSDIR" hg --config hostsecurity.minimumprotocol=tls1.2 id https://localhost:$HGPORT1/
   (could not negotiate a common security protocol (tls1.2+) with localhost; the likely cause is Mercurial is configured to be more secure than the server can support)
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
 
 --insecure will allow TLS 1.0 connections and override configs
 
@@ -417,7 +417,7 @@
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
 
 .hg/hgrc file [hostsecurity] settings are applied to remote ui instances (issue5305)
 
@@ -430,7 +430,7 @@
   (consider contacting the operator of this server and ask them to support modern TLS protocol versions; or, set hostsecurity.localhost:minimumprotocol=tls1.0 to allow use of legacy, less secure protocols when communicating with this server)
   (see https://mercurial-scm.org/wiki/SecureConnections for more info)
   abort: error: .*(unsupported protocol|wrong ssl version).* (re)
-  [255]
+  [100]
 
   $ killdaemons.py hg0.pid
   $ killdaemons.py hg1.pid
@@ -482,13 +482,13 @@
   pulling from https://localhost:$HGPORT/
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
   $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull \
   > --config web.cacerts="$CERTSDIR/pub-expired.pem" https://localhost:$HGPORT2/
   pulling from https://localhost:$HGPORT2/
   (the full certificate chain may not be available locally; see "hg help debugssl") (windows !)
   abort: error: *certificate verify failed* (glob)
-  [255]
+  [100]
 
 
   $ killdaemons.py hg0.pid
@@ -518,7 +518,7 @@
 
   $ P="$CERTSDIR" hg id https://localhost:$HGPORT/
   abort: error: .*(\$ECONNRESET\$|certificate required|handshake failure).* (re)
-  [255]
+  [100]
 
 with client certificate:
 
@@ -539,7 +539,7 @@
 
   $ env P="$CERTSDIR" hg id https://localhost:$HGPORT/
   abort: error: * (glob)
-  [255]
+  [100]
 
 Missing certficate and key files result in error
 
--- a/tests/test-i18n.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-i18n.t	Tue Jan 19 21:48:43 2021 +0530
@@ -8,20 +8,20 @@
 using the "replace" error handler:
 
   $ LANGUAGE=pt_BR hg tip
-  abortado: n?o foi encontrado um reposit?rio em '$TESTTMP' (.hg n?o encontrado)!
-  [255]
+  abortado: n?o foi encontrado um reposit?rio em '$TESTTMP' (.hg n?o encontrado)
+  [10]
 
 Using a more accommodating encoding:
 
   $ HGENCODING=UTF-8 LANGUAGE=pt_BR hg tip
-  abortado: n\xc3\xa3o foi encontrado um reposit\xc3\xb3rio em '$TESTTMP' (.hg n\xc3\xa3o encontrado)! (esc)
-  [255]
+  abortado: n\xc3\xa3o foi encontrado um reposit\xc3\xb3rio em '$TESTTMP' (.hg n\xc3\xa3o encontrado) (esc)
+  [10]
 
 Different encoding:
 
   $ HGENCODING=Latin-1 LANGUAGE=pt_BR hg tip
-  abortado: n\xe3o foi encontrado um reposit\xf3rio em '$TESTTMP' (.hg n\xe3o encontrado)! (esc)
-  [255]
+  abortado: n\xe3o foi encontrado um reposit\xf3rio em '$TESTTMP' (.hg n\xe3o encontrado) (esc)
+  [10]
 
 #endif
 
--- a/tests/test-identify.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-identify.t	Tue Jan 19 21:48:43 2021 +0530
@@ -6,7 +6,7 @@
 
   $ hg id
   abort: there is no Mercurial repository here (.hg not found)
-  [255]
+  [10]
 
 #endif
 
@@ -115,19 +115,19 @@
 
   $ hg id -n http://localhost:$HGPORT1/
   abort: can't query remote revision number, branch, or tags
-  [255]
+  [10]
 
 remote with tags?
 
   $ hg id -t http://localhost:$HGPORT1/
   abort: can't query remote revision number, branch, or tags
-  [255]
+  [10]
 
 remote with branch?
 
   $ hg id -b http://localhost:$HGPORT1/
   abort: can't query remote revision number, branch, or tags
-  [255]
+  [10]
 
 test bookmark support
 
@@ -155,21 +155,21 @@
 test invalid lookup
 
   $ hg id -r noNoNO http://localhost:$HGPORT1/
-  abort: unknown revision 'noNoNO'!
+  abort: unknown revision 'noNoNO'
   [255]
 
 Make sure we do not obscure unknown requires file entries (issue2649)
 
   $ echo fake >> .hg/requires
   $ hg id
-  abort: repository requires features unknown to this Mercurial: fake!
+  abort: repository requires features unknown to this Mercurial: fake
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
   $ cd ..
 #if no-outer-repo
   $ hg id test
-  abort: repository requires features unknown to this Mercurial: fake!
+  abort: repository requires features unknown to this Mercurial: fake
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 #endif
--- a/tests/test-import-bypass.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-import-bypass.t	Tue Jan 19 21:48:43 2021 +0530
@@ -27,7 +27,7 @@
 
   $ hg import --bypass --exact --edit ../test.diff
   abort: cannot specify both --exact and --edit
-  [255]
+  [10]
   $ hg import --bypass --exact ../test.diff
   applying ../test.diff
   $ shortlog
@@ -189,13 +189,13 @@
 
   $ hg import --bypass --no-commit ../test.diff
   abort: cannot specify both --no-commit and --bypass
-  [255]
+  [10]
   $ hg import --bypass --similarity 50 ../test.diff
   abort: cannot use --similarity with --bypass
-  [255]
+  [10]
   $ hg import --exact --prefix dir/ ../test.diff
   abort: cannot specify both --exact and --prefix
-  [255]
+  [10]
 
 Test commit editor
 (this also tests that editor is invoked, if the patch doesn't contain
@@ -221,7 +221,7 @@
   HG: branch 'default'
   HG: changed a
   abort: empty commit message
-  [255]
+  [10]
 
 Test patch.eol is handled
 (this also tests that editor is not invoked for '--bypass', if the
--- a/tests/test-import.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-import.t	Tue Jan 19 21:48:43 2021 +0530
@@ -14,7 +14,7 @@
 
   $ hg --cwd a import
   abort: need at least one patch to import
-  [255]
+  [10]
 
 generate patches for the test
 
@@ -127,7 +127,7 @@
   HG: branch 'default'
   HG: changed a
   abort: empty commit message
-  [255]
+  [10]
 
 Test avoiding editor invocation at applying the patch with --exact,
 even if commit message is empty
@@ -374,7 +374,7 @@
   $ egrep -v '^(Subject|email)' msg.patch | hg --cwd b import -
   applying patch from stdin
   abort: empty commit message
-  [255]
+  [10]
   $ rm -r b
 
 
@@ -445,7 +445,7 @@
   $ hg clone -r0 a b -q
   $ hg --cwd b import --no-commit --secret ../exported-tip.patch
   abort: cannot specify both --no-commit and --secret
-  [255]
+  [10]
   $ hg --cwd b import --secret ../exported-tip.patch
   applying ../exported-tip.patch
   $ hg --cwd b diff -c . --nodates
@@ -1036,7 +1036,7 @@
   adding a
   $ hg import -p foo
   abort: invalid value 'foo' for option -p, expected int
-  [255]
+  [10]
   $ hg import -p0 - << EOF
   > foobar
   > --- a	Sat Apr 12 22:43:58 2008 -0400
@@ -1172,7 +1172,7 @@
   transaction abort!
   rollback completed
   abort: empty.patch: no diffs found
-  [255]
+  [10]
   $ hg tip --template '{rev}  {desc|firstline}\n'
   0  commit
   $ hg -q status
--- a/tests/test-infinitepush-ci.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-infinitepush-ci.t	Tue Jan 19 21:48:43 2021 +0530
@@ -10,7 +10,7 @@
   $ . "$TESTDIR/library-infinitepush.sh"
   $ cat >> $HGRCPATH <<EOF
   > [ui]
-  > ssh = python "$TESTDIR/dummyssh"
+  > ssh = $PYTHON "$TESTDIR/dummyssh"
   > [alias]
   > glog = log -GT "{rev}:{node|short} {desc}\n{phase}"
   > EOF
@@ -205,7 +205,7 @@
 
   $ hg pull -r b4e4bce660512ad3e71189e14588a70ac8e31fef
   pulling from $TESTTMP/repo
-  abort: unknown revision 'b4e4bce660512ad3e71189e14588a70ac8e31fef'!
+  abort: unknown revision 'b4e4bce660512ad3e71189e14588a70ac8e31fef'
   [255]
   $ hg glog
   o  1:6cb0989601f1 added a
@@ -242,7 +242,7 @@
 XXX: we should support this
   $ hg pull -r b4e4bce660512
   pulling from ssh://user@dummy/repo
-  abort: unknown revision 'b4e4bce660512'!
+  abort: unknown revision 'b4e4bce660512'
   [255]
 
 XXX: we should show better message when the pull is happening from bundlestore
@@ -356,7 +356,7 @@
 
   $ hg phase -r . --draft --force
   $ hg rebase -r 6 -d 3
-  rebasing 6:9b42578d4447 "added f" (tip)
+  rebasing 6:9b42578d4447 tip "added f"
 
   $ hg glog
   @  7:99949238d9ac added f
--- a/tests/test-init.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-init.t	Tue Jan 19 21:48:43 2021 +0530
@@ -32,11 +32,11 @@
 test custom revlog chunk cache sizes
 
   $ hg --config format.chunkcachesize=0 log -R local -pv
-  abort: revlog chunk cache size 0 is not greater than 0!
-  [255]
+  abort: revlog chunk cache size 0 is not greater than 0
+  [50]
   $ hg --config format.chunkcachesize=1023 log -R local -pv
-  abort: revlog chunk cache size 1023 is not a power of 2!
-  [255]
+  abort: revlog chunk cache size 1023 is not a power of 2
+  [50]
   $ hg --config format.chunkcachesize=1024 log -R local -pv
   changeset:   0:08b9e9f63b32
   tag:         tip
@@ -103,7 +103,7 @@
 test failure
 
   $ hg init local
-  abort: repository local already exists!
+  abort: repository local already exists
   [255]
 
 init+push to remote2
@@ -148,15 +148,15 @@
 init to existing repo
 
   $ hg init -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote1
-  abort: repository remote1 already exists!
-  abort: could not create remote repo!
+  abort: repository remote1 already exists
+  abort: could not create remote repo
   [255]
 
 clone to existing repo
 
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1
-  abort: repository remote1 already exists!
-  abort: could not create remote repo!
+  abort: repository remote1 already exists
+  abort: could not create remote repo
   [255]
 
 output of dummyssh
--- a/tests/test-install.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-install.t	Tue Jan 19 21:48:43 2021 +0530
@@ -222,13 +222,11 @@
   no problems detected
 #endif
 
-#if py2virtualenv
+#if virtualenv no-py3
 
-Note: --no-site-packages is deprecated, but some places have an
-ancient virtualenv from their linux distro or similar and it's not yet
-the default for them.
+Note: --no-site-packages is the default for all virtualenv versions that hghave enables
 
-  $ "$PYTHON" -m virtualenv --no-site-packages --never-download installenv >> pip.log
+  $ "$PYTHON" -m virtualenv --never-download installenv >> pip.log
   DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
   DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
 
@@ -237,6 +235,7 @@
   $ ./installenv/*/pip install --no-index $TESTDIR/.. >> pip.log
   DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. (?)
   DEPRECATION: Python 2.7 will reach the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 won't be maintained after that date. A future version of pip will drop support for Python 2.7. More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support (?)
+  DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. pip 21.0 will drop support for Python 2.7 in January 2021. More details about Python 2 support in pip can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support pip 21.0 will remove support for this functionality. (?)
   $ ./installenv/*/hg debuginstall || cat pip.log
   checking encoding (ascii)...
   checking Python executable (*) (glob)
--- a/tests/test-issue672.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-issue672.t	Tue Jan 19 21:48:43 2021 +0530
@@ -63,8 +63,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c64f439569a9, local: f4a9cff3cd0b+, remote: 746e9549ea96
+  starting 4 threads for background file closing (?)
    preserving 1a for resolve of 1a
-  starting 4 threads for background file closing (?)
    1a: local copied/moved from 1 -> m (premerge)
   picked tool ':merge' for 1a (binary False symlink False changedelete False)
   merging 1a and 1 to 1a
@@ -86,9 +86,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: c64f439569a9, local: 746e9549ea96+, remote: f4a9cff3cd0b
+  starting 4 threads for background file closing (?)
    preserving 1 for resolve of 1a
   removing 1
-  starting 4 threads for background file closing (?)
    1a: remote moved from 1 -> m (premerge)
   picked tool ':merge' for 1a (binary False symlink False changedelete False)
   merging 1 and 1a to 1a
--- a/tests/test-journal-exists.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-journal-exists.t	Tue Jan 19 21:48:43 2021 +0530
@@ -9,7 +9,7 @@
 
   $ echo foo > a
   $ hg ci -Am0
-  abort: abandoned transaction found!
+  abort: abandoned transaction found
   (run 'hg recover' to clean up transaction)
   [255]
 
@@ -21,7 +21,7 @@
 
   $ touch .hg/store/journal
   $ hg ci -Am0
-  abort: abandoned transaction found!
+  abort: abandoned transaction found
   (run 'hg recover' to clean up transaction)
   [255]
   $ hg recover --verify
@@ -36,7 +36,7 @@
 
   $ touch .hg/store/journal
   $ hg ci -Am0
-  abort: abandoned transaction found!
+  abort: abandoned transaction found
   (run 'hg recover' to clean up transaction)
   [255]
   $ hg recover --no-verify
--- a/tests/test-keyword.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-keyword.t	Tue Jan 19 21:48:43 2021 +0530
@@ -167,7 +167,7 @@
 
   $ hg --debug commit
   abort: empty commit message
-  [255]
+  [10]
   $ hg status
   A a
   A b
@@ -816,7 +816,7 @@
 
   $ hg commit
   abort: empty commit message
-  [255]
+  [10]
   $ hg status
   M a
   ? c
--- a/tests/test-largefiles-misc.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-largefiles-misc.t	Tue Jan 19 21:48:43 2021 +0530
@@ -23,7 +23,7 @@
   > EOF
 
   $ hg config extensions
-  abort: repository requires features unknown to this Mercurial: largefiles!
+  abort: repository requires features unknown to this Mercurial: largefiles
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -42,7 +42,7 @@
 
   $ hg config extensions
   \*\*\* failed to import extension largefiles from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
-  abort: repository requires features unknown to this Mercurial: largefiles!
+  abort: repository requires features unknown to this Mercurial: largefiles
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -125,7 +125,7 @@
   $ hg clone -q . ../fetch
   $ hg --config extensions.fetch= fetch ../fetch
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg up -qC
   $ cd ..
 
@@ -1009,7 +1009,7 @@
   $ hg -R enabledlocally root
   $TESTTMP/individualenabling/enabledlocally
   $ hg -R notenabledlocally root
-  abort: repository requires features unknown to this Mercurial: largefiles!
+  abort: repository requires features unknown to this Mercurial: largefiles
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -1026,7 +1026,7 @@
   [255]
 
   $ hg clone enabledlocally clone-dst
-  abort: repository requires features unknown to this Mercurial: largefiles!
+  abort: repository requires features unknown to this Mercurial: largefiles
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ test -d clone-dst
--- a/tests/test-largefiles-update.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-largefiles-update.t	Tue Jan 19 21:48:43 2021 +0530
@@ -307,7 +307,7 @@
 
   $ hg rebase -s 1 -d 2 --keep
   rebasing 1:72518492caa6 "#1"
-  rebasing 4:07d6153b5c04 "#4" (tip)
+  rebasing 4:07d6153b5c04 tip "#4"
 
   $ hg status -A large1
   large1: $ENOENT$
@@ -594,7 +594,7 @@
   merging normal1
   warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ cat .hglf/large1
   58e24f733a964da346e2407a2bee99d9001184f5
   $ cat large1
@@ -762,7 +762,7 @@
   > hgext.largefiles.lfutil.getlfilestoupdate = getlfilestoupdate
   > EOF
   $ hg up -Cr "8" --config extensions.crashupdatelfiles=../crashupdatelfiles.py
-  [7]
+  [254]
 Check large1 content and status ... and that update will undo modifications:
   $ cat large1
   large1 in #3
--- a/tests/test-largefiles-wireproto.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-largefiles-wireproto.t	Tue Jan 19 21:48:43 2021 +0530
@@ -117,7 +117,7 @@
   This repository uses the largefiles extension.
   
   Please enable it in your Mercurial config file.
-  [255]
+  [100]
 
 used all HGPORTs, kill all daemons
   $ killdaemons.py
@@ -133,7 +133,7 @@
   remote: -
   abort: remote error
   (check previous remote output)
-  [255]
+  [100]
 
 #if serve
 
--- a/tests/test-largefiles.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-largefiles.t	Tue Jan 19 21:48:43 2021 +0530
@@ -412,7 +412,7 @@
   $ hg revert --quiet z
   $ hg commit -m "Empty subdir" z
   abort: z: no match under directory!
-  [255]
+  [10]
   $ rm -rf z
   $ hg ci -m "standin" .hglf
   abort: file ".hglf" is a largefile standin
@@ -1070,14 +1070,14 @@
 Clone URL without path:
 
   $ hg clone file://
-  abort: repository / not found!
+  abort: repository / not found
   [255]
 
 Ensure base clone command argument validation
 
   $ hg clone -U -u 0 a a-clone-failure
   abort: cannot specify both --noupdate and --updaterev
-  [255]
+  [10]
 
   $ hg clone --all-largefiles a ssh://localhost/a
   abort: --all-largefiles is incompatible with non-local destination ssh://localhost/a
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-legacy-exit-code.t	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,49 @@
+Tests that the exit code is as expected when ui.detailed-exit-code is *not*
+enabled.
+
+  $ cat >> $HGRCPATH << EOF
+  > [ui]
+  > detailed-exit-code=no
+  > EOF
+
+  $ hg init
+  $ echo a > a
+Expect exit code 0 on success
+  $ hg ci -Aqm initial
+
+  $ hg co nonexistent
+  abort: unknown revision 'nonexistent'
+  [255]
+
+  $ hg co 'none()'
+  abort: empty revision set
+  [255]
+
+  $ hg co 'invalid('
+  hg: parse error at 8: not a prefix: end
+  (invalid(
+           ^ here)
+  [255]
+
+  $ hg co 'invalid('
+  hg: parse error at 8: not a prefix: end
+  (invalid(
+           ^ here)
+  [255]
+
+  $ hg continue
+  abort: no operation in progress
+  [255]
+
+  $ hg st --config a=b
+  abort: malformed --config option: 'a=b' (use --config section.name=value)
+  [255]
+
+  $ echo b > a
+  $ hg ci -m second
+  $ echo c > a
+  $ hg ci -m third
+  $ hg --config extensions.rebase= rebase -r . -d 0 -q
+  warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
+  unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
+  [1]
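
The legacy expectations above are the counterpart of the detailed exit codes that the rest of this changeset switches tests to. A minimal Python sketch of how a calling script might classify those codes, assuming the category mapping implied by the updated test expectations (the wrapper name and category labels are illustrative, not Mercurial API):

  import subprocess

  # Categories inferred from the expectations updated in this changeset;
  # this mapping is an assumption, not taken from hg documentation.
  DETAILED_EXIT_CODES = {
      10: 'input/usage error (bad arguments, parse errors)',
      20: 'repository state error (uncommitted changes, lock held)',
      30: 'config error',
      50: 'storage or LFS server error',
      100: 'remote error',
      240: 'unresolved merge conflicts',
  }

  def run_hg(args):
      # With ui.detailed-exit-code=no, as in the test above, most aborts
      # come back as 255 and merge conflicts as 1 instead of these codes.
      proc = subprocess.run(['hg'] + list(args))
      return proc.returncode, DETAILED_EXIT_CODES.get(proc.returncode, 'other')

For example, running 'hg import --bypass --exact --edit x.diff' through this wrapper would report code 10, matching the expectation in test-import-bypass.t earlier in this changeset.
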
--- a/tests/test-lfconvert.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lfconvert.t	Tue Jan 19 21:48:43 2021 +0530
@@ -212,16 +212,16 @@
   abort: ssh://localhost/foo is not a local Mercurial repo
   [255]
   $ hg lfconvert nosuchrepo foo
-  abort: repository nosuchrepo not found!
+  abort: repository nosuchrepo not found
   [255]
   $ hg share -q -U bigfile-repo shared
   $ printf 'bogus' > shared/.hg/sharedpath
   $ hg lfconvert shared foo
-  abort: .hg/sharedpath points to nonexistent directory $TESTTMP/bogus!
+  abort: .hg/sharedpath points to nonexistent directory $TESTTMP/bogus
   [255]
   $ hg lfconvert bigfile-repo largefiles-repo
   initializing destination largefiles-repo
-  abort: repository largefiles-repo already exists!
+  abort: repository largefiles-repo already exists
   [255]
 
 add another largefile to the new largefiles repo
--- a/tests/test-lfs-serve-access.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lfs-serve-access.t	Tue Jan 19 21:48:43 2021 +0530
@@ -30,9 +30,9 @@
   $ hg -R client push http://localhost:$HGPORT
   pushing to http://localhost:$HGPORT/
   searching for changes
-  abort: LFS HTTP error: HTTP Error 400: no such method: .git!
+  abort: LFS HTTP error: HTTP Error 400: no such method: .git
   (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "upload" is supported)
-  [255]
+  [50]
 
 ... so do a local push to make the data available.  Remove the blob from the
 default cache, so it attempts to download.
@@ -52,9 +52,9 @@
   added 1 changesets with 1 changes to 1 files
   new changesets 525251863cad
   updating to branch default
-  abort: LFS HTTP error: HTTP Error 400: no such method: .git!
+  abort: LFS HTTP error: HTTP Error 400: no such method: .git
   (check that lfs serving is enabled on http://localhost:$HGPORT/.git/info/lfs and "download" is supported)
-  [255]
+  [50]
 
   $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
 
@@ -78,16 +78,16 @@
 Reasonable hint for a misconfigured blob server
 
   $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT/missing
-  abort: LFS HTTP error: HTTP Error 404: Not Found!
+  abort: LFS HTTP error: HTTP Error 404: Not Found
   (the "lfs.url" config may be used to override http://localhost:$HGPORT/missing)
-  [255]
+  [50]
 
   $ hg -R httpclone update default --config lfs.url=http://localhost:$HGPORT2/missing
-  abort: LFS error: *onnection *refused*! (glob) (?)
-  abort: LFS error: $EADDRNOTAVAIL$! (glob) (?)
-  abort: LFS error: No route to host! (?)
+  abort: LFS error: *onnection *refused* (glob) (?)
+  abort: LFS error: $EADDRNOTAVAIL$ (glob) (?)
+  abort: LFS error: No route to host (?)
   (the "lfs.url" config may be used to override http://localhost:$HGPORT2/missing)
-  [255]
+  [50]
 
 Blob URIs are correct when --prefix is used
 
@@ -158,6 +158,7 @@
   getting lfs.bin
   lfs: found f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e in the local lfs store
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 3 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
@@ -266,8 +267,8 @@
   added 1 changesets with 1 changes to 1 files
   new changesets 525251863cad
   updating to branch default
-  abort: LFS server error for "lfs.bin": Internal server error!
-  [255]
+  abort: LFS server error for "lfs.bin": Internal server error
+  [50]
 
 Test an I/O error in localstore.verify() (Batch API) with PUT
 
@@ -276,8 +277,8 @@
   $ hg -R client push http://localhost:$HGPORT1
   pushing to http://localhost:$HGPORT1/
   searching for changes
-  abort: LFS server error for "unknown": Internal server error!
-  [255]
+  abort: LFS server error for "unknown": Internal server error
+  [50]
 TODO: figure out how to associate the file name in the error above
 
 Test a bad checksum sent by the client in the transfer API
@@ -285,8 +286,8 @@
   $ hg -R client push http://localhost:$HGPORT1
   pushing to http://localhost:$HGPORT1/
   searching for changes
-  abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c, action=upload)!
-  [255]
+  abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c, action=upload)
+  [50]
 
   $ echo 'test lfs file' > server/lfs3.bin
   $ hg --config experimental.lfs.disableusercache=True \
@@ -297,15 +298,15 @@
 
   $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
   >    -R client update -r tip
-  abort: LFS HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
-  [255]
+  abort: LFS HTTP error: HTTP Error 500: Internal Server Error (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)
+  [50]
 
 Test a checksum failure during the processing of the GET request
 
   $ hg --config lfs.url=http://localhost:$HGPORT1/.git/info/lfs \
   >    -R client update -r tip
-  abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)!
-  [255]
+  abort: LFS HTTP error: HTTP Error 422: corrupt blob (oid=276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d, action=download)
+  [50]
 
   $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
 
@@ -451,9 +452,9 @@
   $ hg -R auth_clone push --config extensions.x=use_digests.py
   pushing to http://localhost:$HGPORT1/
   searching for changes
-  abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication!
+  abort: LFS HTTP error: HTTP Error 401: the server must support Basic Authentication
   (api=http://localhost:$HGPORT1/.git/info/lfs/objects/batch, action=upload)
-  [255]
+  [50]
 
   $ hg -R auth_clone --debug push | egrep '^[{}]|  '
   {
--- a/tests/test-lfs-serve.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lfs-serve.t	Tue Jan 19 21:48:43 2021 +0530
@@ -128,11 +128,6 @@
   @@ -0,0 +1,1 @@
   +non-lfs
   *** runcommand debugupgraderepo -q --run
-  upgrade will perform the following actions:
-  
-  requirements
-     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
-  
 
   $ grep 'lfs' .hg/requires $SERVER_REQUIRES
   [1]
@@ -282,7 +277,7 @@
 
   $ hg clone http://localhost:$HGPORT $TESTTMP/client4_clone
   (remote is using large file support (lfs), but it is explicitly disabled in the local configuration)
-  abort: repository requires features unknown to this Mercurial: lfs!
+  abort: repository requires features unknown to this Mercurial: lfs
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ grep 'lfs' $TESTTMP/client4_clone/.hg/requires $SERVER_REQUIRES
@@ -510,7 +505,7 @@
 Misc: process dies early if a requirement exists and the extension is disabled
 
   $ hg --config extensions.lfs=! summary
-  abort: repository requires features unknown to this Mercurial: lfs!
+  abort: repository requires features unknown to this Mercurial: lfs
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
--- a/tests/test-lfs-test-server.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lfs-test-server.t	Tue Jan 19 21:48:43 2021 +0530
@@ -459,8 +459,8 @@
   Content-Type: text/plain; charset=utf-8 (git-server !)
   Date: $HTTP_DATE$ (git-server !)
   abort: corrupt remote lfs object: d11e1a642b60813aee592094109b406089b8dff4cb157157f753418ec7857998 (git-server !)
-  abort: LFS server error for "c": Validation error! (hg-server !)
-  [255]
+  abort: LFS server error for "c": Validation error (hg-server !)
+  [50]
 
 The corrupted blob is not added to the usercache or local store
 
@@ -826,8 +826,8 @@
     ]
     "transfer": "basic" (hg-server !)
   }
-  abort: LFS server error for "b": The object does not exist!
-  [255]
+  abort: LFS server error for "b": The object does not exist
+  [50]
 
 Check error message when object does not exist:
 
@@ -937,7 +937,7 @@
     ]
     "transfer": "basic" (hg-server !)
   }
-  abort: LFS server error for "a": The object does not exist!
-  [255]
+  abort: LFS server error for "a": The object does not exist
+  [50]
 
   $ "$PYTHON" $RUNTESTDIR/killdaemons.py $DAEMON_PIDS
--- a/tests/test-lfs.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lfs.t	Tue Jan 19 21:48:43 2021 +0530
@@ -22,7 +22,7 @@
   > EOF
 
   $ hg config extensions
-  abort: repository requires features unknown to this Mercurial: lfs!
+  abort: repository requires features unknown to this Mercurial: lfs
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -41,7 +41,7 @@
 
   $ hg config extensions
   \*\*\* failed to import extension lfs from missing.py: [Errno *] $ENOENT$: 'missing.py' (glob)
-  abort: repository requires features unknown to this Mercurial: lfs!
+  abort: repository requires features unknown to this Mercurial: lfs
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -70,7 +70,7 @@
   $ hg --config lfs.track='"size(\">1000B\")"' commit -Aqm "add small file"
   hg: parse error: unsupported file pattern: size(">1000B")
   (paths must be prefixed with "path:")
-  [255]
+  [10]
   $ hg --config lfs.track='size(">1000B")' commit -Aqm "add small file"
 
 # Commit large file
@@ -985,8 +985,8 @@
 Accessing a corrupt file will complain
 
   $ hg --cwd fromcorrupt2 cat -r 0 large
-  abort: integrity check failed on data/large.i:0!
-  [255]
+  abort: integrity check failed on data/large.i:0
+  [50]
 
 lfs -> normal -> lfs round trip conversions are possible.  The 'none()'
 predicate on the command line will override whatever is configured globally and
@@ -1118,8 +1118,8 @@
 
   $ echo x > file.txt
   $ hg ci -Aqm 'should fail'
-  hg: parse error at .hglfs:3: bad file ... no commit
-  [255]
+  config error at .hglfs:3: bad file ... no commit
+  [30]
 
   $ cat > .hglfs << EOF
   > [track]
--- a/tests/test-linelog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-linelog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -69,29 +69,60 @@
         ll.replacelines(1, 0, 0, 0, 3)
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         # Replace line 1 with a new line
         ll.replacelines(2, 1, 2, 1, 2)
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(2)],
-            [(1, 0), (2, 1), (1, 2),],
+            [
+                (1, 0),
+                (2, 1),
+                (1, 2),
+            ],
         )
         # delete a line out of 2
         ll.replacelines(3, 1, 2, 0, 0)
         self.assertEqual(
-            [(l.rev, l.linenum) for l in ll.annotate(3)], [(1, 0), (1, 2),]
+            [(l.rev, l.linenum) for l in ll.annotate(3)],
+            [
+                (1, 0),
+                (1, 2),
+            ],
         )
         # annotation of 1 is unchanged
         self.assertEqual(
             [(l.rev, l.linenum) for l in ll.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         ll.annotate(3)  # set internal state to revision 3
         start = ll.getoffset(0)
         end = ll.getoffset(1)
-        self.assertEqual(ll.getalllines(start, end), [(1, 0), (2, 1), (1, 1),])
-        self.assertEqual(ll.getalllines(), [(1, 0), (2, 1), (1, 1), (1, 2),])
+        self.assertEqual(
+            ll.getalllines(start, end),
+            [
+                (1, 0),
+                (2, 1),
+                (1, 1),
+            ],
+        )
+        self.assertEqual(
+            ll.getalllines(),
+            [
+                (1, 0),
+                (2, 1),
+                (1, 1),
+                (1, 2),
+            ],
+        )
 
     def testparseclinelogfile(self):
         # This data is what the replacements in testsimpleedits
@@ -116,14 +147,26 @@
         llc = linelog.linelog.fromdata(data)
         self.assertEqual(
             [(l.rev, l.linenum) for l in llc.annotate(1)],
-            [(1, 0), (1, 1), (1, 2),],
+            [
+                (1, 0),
+                (1, 1),
+                (1, 2),
+            ],
         )
         self.assertEqual(
             [(l.rev, l.linenum) for l in llc.annotate(2)],
-            [(1, 0), (2, 1), (1, 2),],
+            [
+                (1, 0),
+                (2, 1),
+                (1, 2),
+            ],
         )
         self.assertEqual(
-            [(l.rev, l.linenum) for l in llc.annotate(3)], [(1, 0), (1, 2),]
+            [(l.rev, l.linenum) for l in llc.annotate(3)],
+            [
+                (1, 0),
+                (1, 2),
+            ],
         )
         # Check we emit the same bytecode.
         ll = linelog.linelog()
--- a/tests/test-lock-badness.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lock-badness.t	Tue Jan 19 21:48:43 2021 +0530
@@ -56,7 +56,7 @@
   > EOF
   $ echo b > b/b
   $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout &
-  $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
+  $ hg -R b up -q --config ui.timeout.warn=0 --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" \
   > > preup-stdout 2>preup-stderr
   $ wait
   $ cat preup-stdout
@@ -136,6 +136,6 @@
   pushing to a
   searching for changes
   abort: could not lock repository a: Permission denied
-  [255]
+  [20]
 
   $ chmod 700 a/.hg/store
--- a/tests/test-lock.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-lock.py	Tue Jan 19 21:48:43 2021 +0530
@@ -73,7 +73,10 @@
             self._acquirecalled,
             called,
             'expected acquire to be %s but was actually %s'
-            % (self._tocalled(called), self._tocalled(self._acquirecalled),),
+            % (
+                self._tocalled(called),
+                self._tocalled(self._acquirecalled),
+            ),
         )
 
     def resetacquirefn(self):
@@ -84,7 +87,10 @@
             self._releasecalled,
             called,
             'expected release to be %s but was actually %s'
-            % (self._tocalled(called), self._tocalled(self._releasecalled),),
+            % (
+                self._tocalled(called),
+                self._tocalled(self._releasecalled),
+            ),
         )
 
     def assertpostreleasecalled(self, called):
@@ -104,7 +110,10 @@
             actual,
             exists,
             'expected lock to %s but actually did %s'
-            % (self._toexists(exists), self._toexists(actual),),
+            % (
+                self._toexists(exists),
+                self._toexists(actual),
+            ),
         )
 
     def _tocalled(self, called):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-log-bookmark.t	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,198 @@
+Test 'hg log' with a bookmark
+
+
+Create the repository
+
+  $ hg init Test-D8973
+  $ cd Test-D8973
+  $ echo "bar" > foo.txt
+  $ hg add foo.txt
+  $ hg commit -m "Add foo in 'default'"
+
+
+Add a bookmark for topic X
+
+  $ hg branch -f sebhtml
+  marked working directory as branch sebhtml
+  (branches are permanent and global, did you want a bookmark?)
+
+  $ hg bookmark sebhtml/99991-topic-X
+  $ hg up sebhtml/99991-topic-X
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ echo "X" > x.txt
+  $ hg add x.txt
+  $ hg commit -m "Add x.txt in 'sebhtml/99991-topic-X'"
+
+  $ hg log -B sebhtml/99991-topic-X
+  changeset:   1:29f39dea9bf9
+  branch:      sebhtml
+  bookmark:    sebhtml/99991-topic-X
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Add x.txt in 'sebhtml/99991-topic-X'
+  
+
+Add a bookmark for topic Y
+
+  $ hg update default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark sebhtml/99991-topic-X)
+
+  $ echo "Y" > y.txt
+  $ hg add y.txt
+  $ hg branch -f sebhtml
+  marked working directory as branch sebhtml
+  $ hg bookmark sebhtml/99992-topic-Y
+  $ hg up sebhtml/99992-topic-Y
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg commit -m "Add y.txt in 'sebhtml/99992-topic-Y'"
+  created new head
+
+  $ hg log -B sebhtml/99992-topic-Y
+  changeset:   2:11df7969cf8d
+  branch:      sebhtml
+  bookmark:    sebhtml/99992-topic-Y
+  tag:         tip
+  parent:      0:eaea25376a59
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Add y.txt in 'sebhtml/99992-topic-Y'
+  
+
+The log of topic Y does not interfere with the log of topic X
+
+  $ hg log -B sebhtml/99991-topic-X
+  changeset:   1:29f39dea9bf9
+  branch:      sebhtml
+  bookmark:    sebhtml/99991-topic-X
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Add x.txt in 'sebhtml/99991-topic-X'
+  
+
+Merge topics Y and X in the default branch
+
+  $ hg update default
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark sebhtml/99992-topic-Y)
+
+  $ hg bookmark
+     sebhtml/99991-topic-X     1:29f39dea9bf9
+     sebhtml/99992-topic-Y     2:11df7969cf8d
+
+  $ hg merge sebhtml/99992-topic-Y
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ hg commit -m "Merge branch 'sebhtml/99992-topic-Y' into 'default'"
+
+  $ hg update default
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+
+  $ hg merge sebhtml/99991-topic-X
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+
+  $ hg commit -m "Merge branch 'sebhtml/99991-topic-X' into 'default'"
+
+
+Check the log of topic X, topic Y, and default branch
+
+  $ hg log -B sebhtml/99992-topic-Y
+
+  $ hg log -B sebhtml/99991-topic-X
+
+  $ hg log -b default
+  changeset:   4:c26ba8c1e1cb
+  tag:         tip
+  parent:      3:2189f3fb90d6
+  parent:      1:29f39dea9bf9
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Merge branch 'sebhtml/99991-topic-X' into 'default'
+  
+  changeset:   3:2189f3fb90d6
+  parent:      0:eaea25376a59
+  parent:      2:11df7969cf8d
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Merge branch 'sebhtml/99992-topic-Y' into 'default'
+  
+  changeset:   0:eaea25376a59
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     Add foo in 'default'
+  
+
+Set up multiple bookmarked heads:
+
+  $ hg bookmark merged-head
+  $ hg up 1
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  (leaving bookmark merged-head)
+  $ echo "Z" > z.txt
+  $ hg ci -Am 'Add Z'
+  adding z.txt
+  $ hg bookmark topic-Z
+
+  $ hg log -GT '{rev}: {branch}, {bookmarks}\n'
+  @  5: sebhtml, topic-Z
+  |
+  | o  4: default, merged-head
+  |/|
+  | o    3: default,
+  | |\
+  | | o  2: sebhtml, sebhtml/99992-topic-Y
+  | |/
+  o |  1: sebhtml, sebhtml/99991-topic-X
+  |/
+  o  0: default,
+  
+
+Multiple revisions under bookmarked head:
+
+  $ hg log -GT '{rev}: {branch}, {bookmarks}\n' -B merged-head
+  o    4: default, merged-head
+  |\
+  | ~
+  o    3: default,
+  |\
+  ~ ~
+
+Follows multiple bookmarks:
+
+  $ hg log -GT '{rev}: {branch}, {bookmarks}\n' -B merged-head -B topic-Z
+  @  5: sebhtml, topic-Z
+  |
+  ~
+  o    4: default, merged-head
+  |\
+  | ~
+  o    3: default,
+  |\
+  ~ ~
+
+Filter by bookmark and branch:
+
+  $ hg log -GT '{rev}: {branch}, {bookmarks}\n' -B merged-head -B topic-Z -b default
+  o    4: default, merged-head
+  |\
+  | ~
+  o    3: default,
+  |\
+  ~ ~
+
+
+Unknown bookmark:
+
+  $ hg log -B unknown
+  abort: bookmark 'unknown' does not exist
+  [255]
+
+Shouldn't accept string-matcher syntax:
+
+  $ hg log -B 're:.*'
+  abort: bookmark 're:.*' does not exist
+  [255]
--- a/tests/test-log-linerange.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-log-linerange.t	Tue Jan 19 21:48:43 2021 +0530
@@ -363,7 +363,7 @@
 
   $ hg log -f -L foo,5:7 -p bar
   abort: FILE arguments are not compatible with --line-range option
-  [255]
+  [10]
 
 Option --rev acts as a restriction.
 
@@ -1135,14 +1135,14 @@
 
   $ hg log -L foo,5:7
   abort: --line-range requires --follow
-  [255]
+  [10]
 
 Non-exact pattern kinds are not allowed.
 
   $ cd ..
   $ hg log -f -L glob:*a*,1:2
   hg: parse error: line range pattern 'glob:*a*' must match exactly one file
-  [255]
+  [10]
 
 We get an error for removed files.
 
--- a/tests/test-log.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-log.t	Tue Jan 19 21:48:43 2021 +0530
@@ -4,13 +4,13 @@
   $ cd empty
   $ hg log
   $ hg log -r 1
-  abort: unknown revision '1'!
+  abort: unknown revision '1'
   [255]
   $ hg log -r -1:0
-  abort: unknown revision '-1'!
+  abort: unknown revision '-1'
   [255]
   $ hg log -r 'branch(name)'
-  abort: unknown revision 'name'!
+  abort: unknown revision 'name'
   [255]
   $ hg log -r null -q
   -1:000000000000
@@ -450,6 +450,16 @@
   a
   
   
+Respects ui.logtemplate and command-templates.log configs (the latter takes
+precedence)
+
+  $ hg log -r 0 --config ui.logtemplate="foo {rev}\n"
+  foo 0
+  $ hg log -r 0 --config command-templates.log="bar {rev}\n"
+  bar 0
+  $ hg log -r 0 --config ui.logtemplate="foo {rev}\n" \
+  > --config command-templates.log="bar {rev}\n"
+  bar 0
 
 
 -f and multiple filelog heads
@@ -1053,12 +1063,12 @@
 
   $ hg log -r ''
   hg: parse error: empty query
-  [255]
+  [10]
 
 log -r <some unknown node id>
 
   $ hg log -r 1000000000000000000000000000000000000000
-  abort: unknown revision '1000000000000000000000000000000000000000'!
+  abort: unknown revision '1000000000000000000000000000000000000000'
   [255]
 
 log -k r1
@@ -1122,8 +1132,8 @@
   $ hg init follow-dup
   $ cd follow-dup
   $ cat <<EOF >> .hg/hgrc
-  > [ui]
-  > logtemplate = '=== {rev}: {desc}\n'
+  > [command-templates]
+  > log = '=== {rev}: {desc}\n'
   > [diff]
   > nodates = True
   > EOF
@@ -1368,6 +1378,14 @@
   1 k1
   0 k0
 
+ log -b/-u/-k shouldn't accept string-matcher syntax:
+
+  $ hg log -b 're:.*'
+  abort: unknown revision 're:.*'
+  [255]
+  $ hg log -k 're:.*'
+  $ hg log -u 're:.*'
+
  log FILE in ascending order, against dagrange:
 
   $ hg log -r1:: -T '{rev} {files}\n' f1 f2
@@ -1490,7 +1508,7 @@
 log -b dummy
 
   $ hg log -b dummy
-  abort: unknown revision 'dummy'!
+  abort: unknown revision 'dummy'
   [255]
 
 
@@ -1986,7 +2004,7 @@
   1:a765632148dc55d38c35c4f247c618701886cb2f
   0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05
   $ hg log -r a
-  abort: hidden revision 'a' is pruned!
+  abort: hidden revision 'a' is pruned
   (use --hidden to access hidden revisions)
   [255]
 
@@ -2050,7 +2068,7 @@
   2:94375ec45bddd2a824535fc04855bd058c926ec0
   3:d7d28b288a6b83d5d2cf49f10c5974deed3a1d2e
   $ hg log -T'{rev}:{node}\n' -r:0
-  abort: hidden revision '0' is pruned!
+  abort: hidden revision '0' is pruned
   (use --hidden to access hidden revisions)
   [255]
   $ hg log -T'{rev}:{node}\n' -f
@@ -2425,7 +2443,7 @@
   hg: parse error at 14: unexpected token: end
   ({shortest(node}
                  ^ here)
-  [255]
+  [10]
 
 multi-line template with error
   $ hg log -r . -T 'line 1
@@ -2435,7 +2453,7 @@
   hg: parse error at 27: unexpected token: end
   (line 1\nline2\n{shortest(node}\nline4\nline5
                                 ^ here)
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-logexchange.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-logexchange.t	Tue Jan 19 21:48:43 2021 +0530
@@ -481,15 +481,15 @@
 Testing for a literal name which does not exists, which should fail.
 
   $ hg log -r 'remotebranches(def)' -GT "{rev}:{node|short} {remotenames}\n"
-  abort: remote name 'def' does not exist!
+  abort: remote name 'def' does not exist
   [255]
 
   $ hg log -r 'remotebookmarks("server3")' -GT "{rev}:{node|short} {remotenames}\n"
-  abort: remote name 'server3' does not exist!
+  abort: remote name 'server3' does not exist
   [255]
 
   $ hg log -r 'remotenames("server3")' -GT "{rev}:{node|short} {remotenames}\n"
-  abort: remote name 'server3' does not exist!
+  abort: remote name 'server3' does not exist
   [255]
 
 Testing for a pattern which does not match anything, which shouldn't fail.
@@ -500,15 +500,15 @@
 
   $ hg log -r 'remotenames("re:default", "re:server2")' -GT "{rev}:{node|short} {remotenames}\n"
   hg: parse error: only one argument accepted
-  [255]
+  [10]
 
   $ hg log -r 'remotebranches("default/wat", "server2/wat")' -GT "{rev}:{node|short} {remotebranches}\n"
   hg: parse error: only one argument accepted
-  [255]
+  [10]
 
   $ hg log -r 'remotebookmarks("default/foo", "server2/foo")' -GT "{rev}:{node|short} {remotebookmarks}\n"
   hg: parse error: only one argument accepted
-  [255]
+  [10]
 
 Testing pattern matching
 
--- a/tests/test-manifest.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-manifest.py	Tue Jan 19 21:48:43 2021 +0530
@@ -22,7 +22,12 @@
 BIN_HASH_3 = binascii.unhexlify(HASH_3)
 A_SHORT_MANIFEST = (
     b'bar/baz/qux.py\0%(hash2)s%(flag2)s\n' b'foo\0%(hash1)s%(flag1)s\n'
-) % {b'hash1': HASH_1, b'flag1': b'', b'hash2': HASH_2, b'flag2': b'l',}
+) % {
+    b'hash1': HASH_1,
+    b'flag1': b'',
+    b'hash2': HASH_2,
+    b'flag2': b'l',
+}
 
 A_DEEPER_MANIFEST = (
     b'a/b/c/bar.py\0%(hash3)s%(flag1)s\n'
@@ -265,9 +270,9 @@
         self.assertEqual(len(m), len(list(m)))
 
     def testMatchesMetadata(self):
-        '''Tests matches() for a few specific files to make sure that both
+        """Tests matches() for a few specific files to make sure that both
         the set of files as well as their flags and nodeids are correct in
-        the resulting manifest.'''
+        the resulting manifest."""
         m = self.parsemanifest(A_HUGE_MANIFEST)
 
         match = matchmod.exact([b'file1', b'file200', b'file300'])
@@ -281,9 +286,9 @@
         self.assertEqual(w, m2.text())
 
     def testMatchesNonexistentFile(self):
-        '''Tests matches() for a small set of specific files, including one
+        """Tests matches() for a small set of specific files, including one
         nonexistent file to make sure it only matches against existing files.
-        '''
+        """
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.exact(
@@ -296,8 +301,8 @@
         )
 
     def testMatchesNonexistentDirectory(self):
-        '''Tests matches() for a relpath match on a directory that doesn't
-        actually exist.'''
+        """Tests matches() for a relpath match on a directory that doesn't
+        actually exist."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -308,8 +313,7 @@
         self.assertEqual([], m2.keys())
 
     def testMatchesExactLarge(self):
-        '''Tests matches() for files matching a large list of exact files.
-        '''
+        """Tests matches() for files matching a large list of exact files."""
         m = self.parsemanifest(A_HUGE_MANIFEST)
 
         flist = m.keys()[80:300]
@@ -328,8 +332,8 @@
         self.assertEqual(m.keys(), m2.keys())
 
     def testMatchesDirectory(self):
-        '''Tests matches() on a relpath match on a directory, which should
-        match against all files within said directory.'''
+        """Tests matches() on a relpath match on a directory, which should
+        match against all files within said directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -353,9 +357,9 @@
         )
 
     def testMatchesExactPath(self):
-        '''Tests matches() on an exact match on a directory, which should
+        """Tests matches() on an exact match on a directory, which should
         result in an empty manifest because you can't perform an exact match
-        against a directory.'''
+        against a directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.exact([b'a/b'])
@@ -364,8 +368,8 @@
         self.assertEqual([], m2.keys())
 
     def testMatchesCwd(self):
-        '''Tests matches() on a relpath match with the current directory ('.')
-        when not in the root directory.'''
+        """Tests matches() on a relpath match with the current directory ('.')
+        when not in the root directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(
@@ -389,8 +393,8 @@
         )
 
     def testMatchesWithPattern(self):
-        '''Tests matches() for files matching a pattern that reside
-        deeper than the specified directory.'''
+        """Tests matches() for files matching a pattern that reside
+        deeper than the specified directory."""
         m = self.parsemanifest(A_DEEPER_MANIFEST)
 
         match = matchmod.match(util.localpath(b'/repo'), b'', [b'a/b/*/*.txt'])
--- a/tests/test-manifest.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-manifest.t	Tue Jan 19 21:48:43 2021 +0530
@@ -87,12 +87,12 @@
 The next two calls are expected to abort:
 
   $ hg manifest -r 2
-  abort: unknown revision '2'!
+  abort: unknown revision '2'
   [255]
 
   $ hg manifest -r tip tip
   abort: please specify just one revision
-  [255]
+  [10]
 
 Testing the manifest full text cache utility
 --------------------------------------------
--- a/tests/test-match.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-match.py	Tue Jan 19 21:48:43 2021 +0530
@@ -91,7 +91,9 @@
 
     def testVisitdirRootfilesin(self):
         m = matchmod.match(
-            util.localpath(b'/repo'), b'', patterns=[b'rootfilesin:dir/subdir'],
+            util.localpath(b'/repo'),
+            b'',
+            patterns=[b'rootfilesin:dir/subdir'],
         )
         assert isinstance(m, matchmod.patternmatcher)
         self.assertFalse(m.visitdir(b'dir/subdir/x'))
@@ -103,7 +105,9 @@
 
     def testVisitchildrensetRootfilesin(self):
         m = matchmod.match(
-            util.localpath(b'/repo'), b'', patterns=[b'rootfilesin:dir/subdir'],
+            util.localpath(b'/repo'),
+            b'',
+            patterns=[b'rootfilesin:dir/subdir'],
         )
         assert isinstance(m, matchmod.patternmatcher)
         self.assertEqual(m.visitchildrenset(b'dir/subdir/x'), set())
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-combination-exec-bytes.t	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,63 @@
+Testing recorded "modified" files for merge commit
+==================================================
+
+#require execbit
+
+This file shows what hg says are "modified" files for a merge commit
+(hg log -T {files}), somewhat exhaustively.
+
+This test file shows merges that involve the executable bit changing
+
+For merges that involve file contents changing, check test-merge-combination-file-content.t
+
+For testing of multiple corner cases, check test-merge-combination-misc.t
+
+Cases with multiple or zero merge ancestors, copies/renames, and identical file contents
+with different filelog revisions are not currently covered.
+
+  $ . $TESTDIR/testlib/merge-combination-util.sh
+
+All the merges of executable bit.
+
+  $ range () {
+  >   max=a
+  >   for i in $@; do
+  >     if [ $i = - ]; then continue; fi
+  >     if [ $i > $max ]; then max=$i; fi
+  >   done
+  >   if [ $max = a ]; then echo f; else echo f x; fi
+  > }
+  $ isgood () { case $line in *f*x*) true;; *) false;; esac; }
+  $ createfile () {
+  >   if [ -f a ] && (([ -x a ] && [ $v = x ]) || (! [ -x a ] && [ $v != x ]))
+  >   then touch $file
+  >   else touch a; if [ $v = x ]; then chmod +x a; else chmod -x a; fi
+  >   fi
+  > }
+
+  $ genmerges
+  fffx  : agree on "a"
+  ffxf  : agree on "a"
+  ffxx  : agree on ""
+  ffx-  : agree on "a"
+  ff-x  : hg said "", expected "a"
+  fxff  : hg said "", expected "a"
+  fxfx  : hg said "a", expected ""
+  fxf-  : agree on "a"
+  fxxf  : agree on "a"
+  fxxx  : agree on ""
+  fxx-  : agree on "a"
+  fx-f  : hg said "", expected "a"
+  fx-x  : hg said "", expected "a"
+  fx--  : hg said "", expected "a"
+  f-fx  : agree on "a"
+  f-xf  : agree on "a"
+  f-xx  : hg said "", expected "a"
+  f-x-  : agree on "a"
+  f--x  : agree on "a"
+  -ffx  : agree on "a"
+  -fxf C: agree on "a"
+  -fxx C: hg said "", expected "a"
+  -fx- C: agree on "a"
+  -f-x  : hg said "", expected "a"
+  --fx  : agree on "a"
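
Each genmerges line above encodes the state of file "a" at the base, p2, p1, and merge revisions, with a trailing "C" when the merge had conflicts, as documented in the removed tests/test-merge-combination.t further down in this changeset. A small illustrative Python sketch of reading such a line (the function name is hypothetical, not part of the test library):

  def parse_genmerges_line(line):
      # 'fxf- C: agree on "a"' -> states of file "a" at base, p2, p1, merge,
      # an optional conflict marker, and the verdict printed by genmerges.
      prefix, _, verdict = line.partition(':')
      base, p2, p1, merge = prefix[:4]
      return {
          'base': base,
          'p2': p2,
          'p1': p1,
          'merge': merge,
          'conflicts': prefix.rstrip().endswith('C'),
          'verdict': verdict.strip(),
      }

Parsing 'fxfx  : hg said "a", expected ""' this way yields conflicts=False and a verdict recording the disagreement between hg's reported file list and the expected one.
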
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-combination-file-content.t	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,88 @@
+Testing recorded "modified" files for merge commit
+==================================================
+
+This file shows what hg says are "modified" files for a merge commit
+(hg log -T {files}), somewhat exhaustively.
+
+This test file shows merges that involve file contents changing
+
+For merges that involve the executable bit changing, check test-merge-combination-exec-bytes.t
+
+For testing of multiple corner cases, check test-merge-combination-misc.t
+
+Cases with multiple or zero merge ancestors, copies/renames, and identical file contents
+with different filelog revisions are not currently covered.
+
+  $ . $TESTDIR/testlib/merge-combination-util.sh
+
+All the merges of various file contents.
+
+  $ range () {
+  >   max=0
+  >   for i in $@; do
+  >     if [ $i = - ]; then continue; fi
+  >     if [ $i -gt $max ]; then max=$i; fi
+  >   done
+  >   $TESTDIR/seq.py `expr $max + 1`
+  > }
+  $ isgood () { true; }
+  $ createfile () {
+  >   if [ -f a ] && [ "`cat a`" = $1 ]
+  >   then touch $file
+  >   else echo $v > a
+  >   fi
+  > }
+
+  $ genmerges
+  1111  : agree on ""
+  1112  : agree on "a"
+  111-  : agree on "a"
+  1121  : agree on "a"
+  1122  : agree on ""
+  1123  : agree on "a"
+  112-  : agree on "a"
+  11-1  : hg said "", expected "a"
+  11-2  : agree on "a"
+  11--  : agree on ""
+  1211  : agree on "a"
+  1212  : agree on ""
+  1213  : agree on "a"
+  121-  : agree on "a"
+  1221  : agree on "a"
+  1222  : agree on ""
+  1223  : agree on "a"
+  122-  : agree on "a"
+  1231 C: agree on "a"
+  1232 C: agree on "a"
+  1233 C: agree on "a"
+  1234 C: agree on "a"
+  123- C: agree on "a"
+  12-1 C: agree on "a"
+  12-2 C: hg said "", expected "a"
+  12-3 C: agree on "a"
+  12-- C: agree on "a"
+  1-11  : hg said "", expected "a"
+  1-12  : agree on "a"
+  1-1-  : agree on ""
+  1-21 C: agree on "a"
+  1-22 C: hg said "", expected "a"
+  1-23 C: agree on "a"
+  1-2- C: agree on "a"
+  1--1  : agree on "a"
+  1--2  : agree on "a"
+  1---  : agree on ""
+  -111  : agree on ""
+  -112  : agree on "a"
+  -11-  : agree on "a"
+  -121 C: agree on "a"
+  -122 C: agree on "a"
+  -123 C: agree on "a"
+  -12- C: agree on "a"
+  -1-1  : agree on ""
+  -1-2  : agree on "a"
+  -1--  : agree on "a"
+  --11  : agree on ""
+  --12  : agree on "a"
+  --1-  : agree on "a"
+  ---1  : agree on "a"
+  ----  : agree on ""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-merge-combination-misc.t	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,47 @@
+Testing recorded "modified" files for merge commit
+==================================================
+
+This file shows what hg says are "modified" files for a merge commit
+(hg log -T {files}), somewhat exhaustively.
+
+This file tests multiple corner cases.
+
+For merges that involve file contents changing, check test-merge-combination-file-content.t
+
+For merges that involve the executable bit changing, check test-merge-combination-exec-bytes.t
+
+
+Cases with multiple or zero merge ancestors, copies/renames, and identical file contents
+with different filelog revisions are not currently covered.
+
+  $ . $TESTDIR/testlib/merge-combination-util.sh
+
+Files modified or cleanly merged, with no greatest common ancestors:
+
+  $ hg init repo; cd repo
+  $ touch a0 b0; hg commit -qAm 0
+  $ hg up -qr null; touch a1 b1; hg commit -qAm 1
+  $ hg merge -qr 0; rm b*; hg commit -qAm 2
+  $ hg log -r . -T '{files}\n'
+  b0 b1
+  $ cd ../
+  $ rm -rf repo
+
+A few cases of criss-cross merges involving deletions (listing all
+such merges is probably too much). Both gcas contain $files, so we
+expect the final merge to behave like a merge with a single gca
+containing $files.
+
+  $ hg init repo; cd repo
+  $ files="c1 u1 c2 u2"
+  $ touch $files; hg commit -qAm '0 root'
+  $ for f in $files; do echo f > $f; done; hg commit -qAm '1 gca1'
+  $ hg up -qr0; hg revert -qr 1 --all; hg commit -qAm '2 gca2'
+  $ hg up -qr 1; hg merge -qr 2; rm *1; hg commit -qAm '3 p1'
+  $ hg up -qr 2; hg merge -qr 1; rm *2; hg commit -qAm '4 p2'
+  $ hg merge -qr 3; echo f > u1; echo f > u2; rm -f c1 c2
+  $ hg commit -qAm '5 merge with two gcas'
+  $ hg log -r . -T '{files}\n' # expecting u1 u2
+  
+  $ cd ../
+  $ rm -rf repo
--- a/tests/test-merge-combination.t	Thu Dec 24 15:58:08 2020 +0900
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,220 +0,0 @@
-This file shows what hg says are "modified" files for a merge commit
-(hg log -T {files}), somewhat exhaustively.
-It shows merges that involves files contents changing, and merges that
-involve executable bit changing, but not merges with multiple or zero
-merge ancestors, nor copies/renames, and nor identical file contents
-with different filelog revisions.
-
-genmerges is the workhorse. Given:
-- a range function describing the possible values for file a
-- a isgood function to filter out uninteresting combination
-- a createfile function to actually write the values for file a on the
-filesystem
-it print a series of lines that look like: abcd C: output of -T {files}
-describing the file a at respectively the base, p2, p1, merge
-revision. "C" indicates that hg merge had conflicts.
-  $ genmerges () {
-  >   for base in `range` -; do
-  >     for r1 in `range $base` -; do
-  >       for r2 in `range $base $r1` -; do
-  >         for m in `range $base $r1 $r2` -; do
-  >           line="$base$r1$r2$m"
-  >           isgood $line || continue
-  >           hg init repo
-  >           cd repo
-  >           make_commit () {
-  >             v=$1; msg=$2; file=$3;
-  >             if [ $v != - ]; then
-  >               createfile $v
-  >             else
-  >               if [ -f a ]
-  >               then rm a
-  >               else touch $file
-  >               fi
-  >             fi
-  >             hg commit -q -Am $msg || exit 123
-  >           }
-  >           echo foo > foo
-  >           make_commit $base base b
-  >           make_commit $r1 r1 c
-  >           hg up -r 0 -q
-  >           make_commit $r2 r2 d
-  >           hg merge -q -r 1 > ../output 2>&1
-  >           if [ $? -ne 0 ]; then rm -f *.orig; hg resolve -m --all -q; fi
-  >           if [ -s ../output ]; then conflicts=" C"; else conflicts="  "; fi
-  >           make_commit $m m e
-  >           if [ $m = $r1 ] && [ $m = $r2 ]
-  >           then expected=
-  >           elif [ $m = $r1 ]
-  >           then if [ $base = $r2 ]
-  >                then expected=
-  >                else expected=a
-  >                fi
-  >           elif [ $m = $r2 ]
-  >           then if [ $base = $r1 ]
-  >                then expected=
-  >                else expected=a
-  >                fi
-  >           else expected=a
-  >           fi
-  >           got=`hg log -r 3 --template '{files}\n' | tr -d 'e '`
-  >           if [ "$got" = "$expected" ]
-  >           then echo "$line$conflicts: agree on \"$got\""
-  >           else echo "$line$conflicts: hg said \"$got\", expected \"$expected\""
-  >           fi
-  >           cd ../
-  >           rm -rf repo
-  >         done
-  >       done
-  >     done
-  >   done
-  > }
-
-All the merges of various file contents.
-
-  $ range () {
-  >   max=0
-  >   for i in $@; do
-  >     if [ $i = - ]; then continue; fi
-  >     if [ $i -gt $max ]; then max=$i; fi
-  >   done
-  >   $TESTDIR/seq.py `expr $max + 1`
-  > }
-  $ isgood () { true; }
-  $ createfile () {
-  >   if [ -f a ] && [ "`cat a`" = $1 ]
-  >   then touch $file
-  >   else echo $v > a
-  >   fi
-  > }
-
-  $ genmerges
-  1111  : agree on ""
-  1112  : agree on "a"
-  111-  : agree on "a"
-  1121  : agree on "a"
-  1122  : agree on ""
-  1123  : agree on "a"
-  112-  : agree on "a"
-  11-1  : hg said "", expected "a"
-  11-2  : agree on "a"
-  11--  : agree on ""
-  1211  : agree on "a"
-  1212  : agree on ""
-  1213  : agree on "a"
-  121-  : agree on "a"
-  1221  : agree on "a"
-  1222  : agree on ""
-  1223  : agree on "a"
-  122-  : agree on "a"
-  1231 C: agree on "a"
-  1232 C: agree on "a"
-  1233 C: agree on "a"
-  1234 C: agree on "a"
-  123- C: agree on "a"
-  12-1 C: agree on "a"
-  12-2 C: hg said "", expected "a"
-  12-3 C: agree on "a"
-  12-- C: agree on "a"
-  1-11  : hg said "", expected "a"
-  1-12  : agree on "a"
-  1-1-  : agree on ""
-  1-21 C: agree on "a"
-  1-22 C: hg said "", expected "a"
-  1-23 C: agree on "a"
-  1-2- C: agree on "a"
-  1--1  : agree on "a"
-  1--2  : agree on "a"
-  1---  : agree on ""
-  -111  : agree on ""
-  -112  : agree on "a"
-  -11-  : agree on "a"
-  -121 C: agree on "a"
-  -122 C: agree on "a"
-  -123 C: agree on "a"
-  -12- C: agree on "a"
-  -1-1  : agree on ""
-  -1-2  : agree on "a"
-  -1--  : agree on "a"
-  --11  : agree on ""
-  --12  : agree on "a"
-  --1-  : agree on "a"
-  ---1  : agree on "a"
-  ----  : agree on ""
-
-All the merges of executable bit.
-
-  $ range () {
-  >   max=a
-  >   for i in $@; do
-  >     if [ $i = - ]; then continue; fi
-  >     if [ $i > $max ]; then max=$i; fi
-  >   done
-  >   if [ $max = a ]; then echo f; else echo f x; fi
-  > }
-  $ isgood () { case $line in *f*x*) true;; *) false;; esac; }
-  $ createfile () {
-  >   if [ -f a ] && (([ -x a ] && [ $v = x ]) || (! [ -x a ] && [ $v != x ]))
-  >   then touch $file
-  >   else touch a; if [ $v = x ]; then chmod +x a; else chmod -x a; fi
-  >   fi
-  > }
-
-#if execbit
-  $ genmerges
-  fffx  : agree on "a"
-  ffxf  : agree on "a"
-  ffxx  : agree on ""
-  ffx-  : agree on "a"
-  ff-x  : hg said "", expected "a"
-  fxff  : hg said "", expected "a"
-  fxfx  : hg said "a", expected ""
-  fxf-  : agree on "a"
-  fxxf  : agree on "a"
-  fxxx  : agree on ""
-  fxx-  : agree on "a"
-  fx-f  : hg said "", expected "a"
-  fx-x  : hg said "", expected "a"
-  fx--  : hg said "", expected "a"
-  f-fx  : agree on "a"
-  f-xf  : agree on "a"
-  f-xx  : hg said "", expected "a"
-  f-x-  : agree on "a"
-  f--x  : agree on "a"
-  -ffx  : agree on "a"
-  -fxf C: agree on "a"
-  -fxx C: hg said "", expected "a"
-  -fx- C: agree on "a"
-  -f-x  : hg said "", expected "a"
-  --fx  : agree on "a"
-#endif
-
-Files modified or cleanly merged, with no greatest common ancestors:
-
-  $ hg init repo; cd repo
-  $ touch a0 b0; hg commit -qAm 0
-  $ hg up -qr null; touch a1 b1; hg commit -qAm 1
-  $ hg merge -qr 0; rm b*; hg commit -qAm 2
-  $ hg log -r . -T '{files}\n'
-  b0 b1
-  $ cd ../
-  $ rm -rf repo
-
-A few cases of criss-cross merges involving deletions (listing all
-such merges is probably too much). Both gcas contain $files, so we
-expect the final merge to behave like a merge with a single gca
-containing $files.
-
-  $ hg init repo; cd repo
-  $ files="c1 u1 c2 u2"
-  $ touch $files; hg commit -qAm '0 root'
-  $ for f in $files; do echo f > $f; done; hg commit -qAm '1 gca1'
-  $ hg up -qr0; hg revert -qr 1 --all; hg commit -qAm '2 gca2'
-  $ hg up -qr 1; hg merge -qr 2; rm *1; hg commit -qAm '3 p1'
-  $ hg up -qr 2; hg merge -qr 1; rm *2; hg commit -qAm '4 p2'
-  $ hg merge -qr 3; echo f > u1; echo f > u2; rm -f c1 c2
-  $ hg commit -qAm '5 merge with two gcas'
-  $ hg log -r . -T '{files}\n' # expecting u1 u2
-  
-  $ cd ../
-  $ rm -rf repo
--- a/tests/test-merge-commit.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge-commit.t	Tue Jan 19 21:48:43 2021 +0530
@@ -70,8 +70,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28
+  starting 4 threads for background file closing (?)
    preserving bar for resolve of bar
-  starting 4 threads for background file closing (?)
    bar: versions differ -> m (premerge)
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging bar
@@ -157,8 +157,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0
+  starting 4 threads for background file closing (?)
    preserving bar for resolve of bar
-  starting 4 threads for background file closing (?)
    bar: versions differ -> m (premerge)
   picked tool ':merge' for bar (binary False symlink False changedelete False)
   merging bar
--- a/tests/test-merge-default.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge-default.t	Tue Jan 19 21:48:43 2021 +0530
@@ -69,7 +69,7 @@
 
   $ hg merge --config commands.merge.require-rev=True
   abort: configuration requires specifying revision to merge with
-  [255]
+  [10]
 
 Should succeed - 2 heads:
 
@@ -101,7 +101,7 @@
 
   $ hg merge --config commands.merge.require-rev=True
   abort: configuration requires specifying revision to merge with
-  [255]
+  [10]
 
 Should fail because at tip:
 
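The bracketed lines these hunks keep rewriting are the .t harness's exit-status assertions: a trailing `[N]` checks that the previous command exited with status N. This changeset moves many tests from the catch-all 255 to Mercurial's detailed exit codes; going by the groupings visible here, 10 covers input/usage errors, 20 invalid repository state, 30 configuration errors, and 240 a merge halted on unresolved conflicts (a reading of the changeset as a whole, not something any single hunk states). Re-using the command from the hunk above:

  $ hg merge --config commands.merge.require-rev=True
  abort: configuration requires specifying revision to merge with
  [10]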
--- a/tests/test-merge-halt.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge-halt.t	Tue Jan 19 21:48:43 2021 +0530
@@ -28,7 +28,7 @@
   merging a failed!
   merging b failed!
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg resolve --list
   U a
@@ -45,7 +45,7 @@
   merging b
   merging a failed!
   merge halted after failed merge (see hg resolve)
-  [1]
+  [240]
 
   $ hg resolve --list
   U a
@@ -73,7 +73,7 @@
   merging b failed!
   continue merge operation (yn)? n
   merge halted after failed merge (see hg resolve)
-  [1]
+  [240]
 
   $ hg resolve --list
   U a
@@ -102,7 +102,7 @@
   merging b failed!
   continue merge operation (yn)? n
   merge halted after failed merge (see hg resolve)
-  [1]
+  [240]
 
   $ hg resolve --list
   R a
@@ -125,7 +125,7 @@
   merging b
   merging a failed!
   merge halted after failed merge (see hg resolve)
-  [1]
+  [240]
 
   $ hg resolve --list
   U a
@@ -146,7 +146,7 @@
   was merge of 'b' successful (yn)? n
   merging b failed!
   merge halted after failed merge (see hg resolve)
-  [1]
+  [240]
 
   $ hg resolve --list
   R a
--- a/tests/test-merge-subrepos.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge-subrepos.t	Tue Jan 19 21:48:43 2021 +0530
@@ -46,7 +46,7 @@
 
   $ hg up --check -r '.^'
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg st -S
   ! a
   $ hg up -Cq .
--- a/tests/test-merge-tools.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge-tools.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1255,6 +1255,46 @@
   # hg resolve --list
   R f
 
+premerge=keep-mergediff keeps conflict markers with base content:
+
+  $ beforemerge
+  [merge-tools]
+  false.whatever=
+  true.priority=1
+  true.executable=cat
+  # hg update -C 1
+  $ hg merge -r 4 --config merge-tools.true.premerge=keep-mergediff
+  merging f
+  <<<<<<<
+  ------- base
+  +++++++ working copy: ef83787e2614 - test: revision 1
+  -revision 0
+  +revision 1
+   space
+  ======= merge rev:    81448d39c9a0 - test: revision 4
+  revision 4
+  >>>>>>>
+  revision 0
+  space
+  revision 4
+  0 files updated, 1 files merged, 0 files removed, 0 files unresolved
+  (branch merge, don't forget to commit)
+  $ aftermerge
+  # cat f
+  <<<<<<<
+  ------- base
+  +++++++ working copy: ef83787e2614 - test: revision 1
+  -revision 0
+  +revision 1
+   space
+  ======= merge rev:    81448d39c9a0 - test: revision 4
+  revision 4
+  >>>>>>>
+  # hg stat
+  M f
+  # hg resolve --list
+  R f
+
 premerge=keep respects ui.mergemarkers=basic:
 
   $ beforemerge
@@ -1868,6 +1908,7 @@
   $ hg update -q -C 1
   $ hg mv f f.txt
   $ hg ci -qm "f.txt"
+  warning: commit already existed in the repository!
   $ hg update -q -C 2
   $ hg merge -y -r tip --tool echo \
   >    --config merge-tools.echo.args='$base $local $other $output' \
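The `keep-mergediff` premerge mode exercised in the new block above keeps the conflict markers together with the base content. A minimal way to try it outside the harness is to set it on whichever merge tool the configuration selects; the sketch below reuses the `true`/`cat` tool and revision 4 from the test, both of which are assumptions about the local setup:

  $ cat >> .hg/hgrc <<EOF
  > [merge-tools]
  > true.priority = 1
  > true.executable = cat
  > true.premerge = keep-mergediff
  > EOF
  $ hg merge -r 4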
--- a/tests/test-merge1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -36,7 +36,7 @@
   $ hg ci
   abort: last update was interrupted
   (use 'hg update' to get a consistent checkout)
-  [255]
+  [20]
   $ hg sum
   parent: 0:538afb845929 
    commit #0
@@ -138,8 +138,8 @@
 
 bad config
   $ hg merge 1 --config merge.checkunknown=x
-  abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
-  [255]
+  config error: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
+  [30]
 this merge should fail
   $ hg merge 1 --config merge.checkunknown=abort
   b: untracked file differs
@@ -282,7 +282,7 @@
   $ hg merge 2
   abort: uncommitted changes
   (use 'hg status' to list changes)
-  [255]
+  [20]
 merge expected!
   $ hg merge -f 2
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -319,7 +319,7 @@
   $ hg merge 2
   abort: uncommitted changes
   (use 'hg status' to list changes)
-  [255]
+  [20]
 merge of b expected
   $ hg merge -f 2
   merging b
--- a/tests/test-merge2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -52,6 +52,6 @@
 
   $ hg merge 'wdir()'
   abort: merging with the working copy has no effect
-  [255]
+  [10]
 
   $ cd ..
--- a/tests/test-merge4.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge4.t	Tue Jan 19 21:48:43 2021 +0530
@@ -29,10 +29,10 @@
 # Bad usage
   $ hg log -r 'conflictlocal(foo)'
   hg: parse error: conflictlocal takes no arguments
-  [255]
+  [10]
   $ hg log -r 'conflictother(foo)'
   hg: parse error: conflictother takes no arguments
-  [255]
+  [10]
   $ hg co -C .
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 # No merge parents when not merging
--- a/tests/test-merge5.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge5.t	Tue Jan 19 21:48:43 2021 +0530
@@ -16,7 +16,7 @@
   $ rm b
   $ hg update -c 2
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg revert b
   $ hg update -c 2
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
--- a/tests/test-merge7.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge7.t	Tue Jan 19 21:48:43 2021 +0530
@@ -84,8 +84,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 96b70246a118, local: 50c3a7e29886+, remote: 40d11a4173a8
+  starting 4 threads for background file closing (?)
    preserving test.txt for resolve of test.txt
-  starting 4 threads for background file closing (?)
    test.txt: versions differ -> m (premerge)
   picked tool ':merge' for test.txt (binary False symlink False changedelete False)
   merging test.txt
--- a/tests/test-merge9.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-merge9.t	Tue Jan 19 21:48:43 2021 +0530
@@ -78,7 +78,7 @@
   $ hg resolve
   abort: no files or directories specified
   (use --all to re-merge all unresolved files)
-  [255]
+  [10]
 
 resolve all
   $ hg resolve -a
--- a/tests/test-missing-capability.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-missing-capability.t	Tue Jan 19 21:48:43 2021 +0530
@@ -43,4 +43,4 @@
   $ hg pull ssh://user@dummy/repo1 -r tip -B a
   pulling from ssh://user@dummy/repo1
   abort: remote bookmark a not found!
-  [255]
+  [10]
--- a/tests/test-mq-qdelete.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-mq-qdelete.t	Tue Jan 19 21:48:43 2021 +0530
@@ -114,7 +114,7 @@
   now at: pb
 
   $ hg qfinish -a pc
-  abort: unknown revision 'pc'!
+  abort: unknown revision 'pc'
   [255]
 
   $ hg qpush
--- a/tests/test-mq-qpush-fail.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-mq-qpush-fail.t	Tue Jan 19 21:48:43 2021 +0530
@@ -45,7 +45,7 @@
   >     # Touching files truncated at "transaction.abort" causes
   >     # forcible re-loading invalidated filecache properties
   >     # (including repo.changelog)
-  >     for f, o, _ignore in entries:
+  >     for f, o in entries:
   >         if o or not unlink:
   >             os.utime(opener.join(f), (0.0, 0.0))
   > def extsetup(ui):
--- a/tests/test-mq-qrefresh-interactive.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-mq-qrefresh-interactive.t	Tue Jan 19 21:48:43 2021 +0530
@@ -125,10 +125,10 @@
   $ hg commit -m aaa
   $ hg qrecord --config ui.interactive=false patch
   abort: running non-interactively, use qnew instead
-  [255]
+  [10]
   $ hg qnew -i --config ui.interactive=false patch
   abort: running non-interactively
-  [255]
+  [10]
   $ hg qnew -d '0 0' patch
 
 Changing files
@@ -179,7 +179,7 @@
 
   $ hg qrefresh -i --config ui.interactive=false
   abort: running non-interactively
-  [255]
+  [10]
   $ hg qrefresh -i -d '0 0' <<EOF
   > y
   > y
--- a/tests/test-mq-qrefresh.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-mq-qrefresh.t	Tue Jan 19 21:48:43 2021 +0530
@@ -504,8 +504,8 @@
   transaction abort!
   rollback completed
   qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
-  abort: username 'foo\nbar' contains a newline!
-  [255]
+  abort: username 'foo\nbar' contains a newline
+  [50]
   $ rm a
   $ cat .hg/patches/a
   # HG changeset patch
@@ -520,8 +520,8 @@
   transaction abort!
   rollback completed
   qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
-  abort: empty username!
-  [255]
+  abort: empty username
+  [50]
   $ cat .hg/patches/a
   # HG changeset patch
   # Parent  0000000000000000000000000000000000000000
--- a/tests/test-mq.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-mq.t	Tue Jan 19 21:48:43 2021 +0530
@@ -171,7 +171,7 @@
   guards
   $ cat .hg/patches/series
   $ hg qinit -c
-  abort: repository $TESTTMP/d/.hg/patches already exists!
+  abort: repository $TESTTMP/d/.hg/patches already exists
   [255]
   $ cd ..
 
@@ -253,7 +253,7 @@
 init --mq with nonexistent directory
 
   $ hg init --mq nonexistentdir
-  abort: repository nonexistentdir not found!
+  abort: repository nonexistentdir not found
   [255]
 
 
@@ -812,7 +812,7 @@
   $ hg add y
   $ hg strip tip
   abort: uncommitted changes
-  [255]
+  [20]
 
 --force strip with local changes
 
--- a/tests/test-narrow-clone-no-ellipsis.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-clone-no-ellipsis.t	Tue Jan 19 21:48:43 2021 +0530
@@ -136,8 +136,8 @@
 
   $ hg clone ssh://user@dummy/master specfile --narrowspec narrowspecs
   reading narrowspec from '$TESTTMP/narrowspecs'
-  abort: cannot specify other files using '%include' in narrowspec
-  [255]
+  config error: cannot specify other files using '%include' in narrowspec
+  [30]
 
   $ cat > narrowspecs <<EOF
   > [include]
--- a/tests/test-narrow-clone.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-clone.t	Tue Jan 19 21:48:43 2021 +0530
@@ -69,7 +69,7 @@
   abort: server does not support narrow clones
   [255]
   $ hg tracked -R narrow-via-localpeer
-  abort: repository narrow-via-localpeer not found!
+  abort: repository narrow-via-localpeer not found
   [255]
   $ rm -Rf narrow-via-localpeer
 
@@ -254,8 +254,8 @@
 
   $ hg clone ssh://user@dummy/master specfile --narrowspec narrowspecs
   reading narrowspec from '$TESTTMP/narrowspecs'
-  abort: cannot specify other files using '%include' in narrowspec
-  [255]
+  config error: cannot specify other files using '%include' in narrowspec
+  [30]
 
   $ cat > narrowspecs <<EOF
   > [include]
--- a/tests/test-narrow-copies.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-copies.t	Tue Jan 19 21:48:43 2021 +0530
@@ -70,5 +70,5 @@
   $ echo new > inside/f5
   $ hg ci -Aqm 'add inside/f5'
   $ hg --config extensions.rebase= rebase -d 'public()' -r .
-  rebasing 6:610b60178c28 "add inside/f5" (tip)
+  rebasing 6:610b60178c28 tip "add inside/f5"
   saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/610b60178c28-65716a78-rebase.hg
--- a/tests/test-narrow-exchange.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-exchange.t	Tue Jan 19 21:48:43 2021 +0530
@@ -105,8 +105,8 @@
   remote: adding file changes
   remote: transaction abort!
   remote: rollback completed
-  remote: abort: data/inside2/f.i@4a1aa07735e6: unknown parent! (reporevlogstore !)
-  remote: abort: data/inside2/f/index@4a1aa07735e6: no node! (reposimplestore !)
+  remote: abort: data/inside2/f.i@4a1aa07735e6: unknown parent (reporevlogstore !)
+  remote: abort: data/inside2/f/index@4a1aa07735e6: no node (reposimplestore !)
   abort: stream ended unexpectedly (got 0 bytes, expected 4)
   [255]
 
@@ -221,5 +221,5 @@
   remote: error: pretxnchangegroup.lfs hook raised an exception: data/inside2/f.i@f59b4e021835: no match found (lfs-on !)
   remote: transaction abort! (lfs-on !)
   remote: rollback completed (lfs-on !)
-  remote: abort: data/inside2/f.i@f59b4e021835: no match found! (lfs-on !)
+  remote: abort: data/inside2/f.i@f59b4e021835: no match found (lfs-on !)
   abort: stream ended unexpectedly (got 0 bytes, expected 4) (lfs-on !)
--- a/tests/test-narrow-rebase.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-rebase.t	Tue Jan 19 21:48:43 2021 +0530
@@ -61,7 +61,7 @@
   $ echo modified > inside/f2
   $ hg ci -qm 'modify inside/f2'
   $ hg rebase -d 'desc("modify inside/f1")'
-  rebasing 5:c2f36d04e05d "modify inside/f2" (tip)
+  rebasing 5:c2f36d04e05d tip "modify inside/f2"
   saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-rebase.hg (glob)
 
 Can rebase onto conflicting changes inside narrow spec
@@ -70,14 +70,14 @@
   $ echo conflicting > inside/f1
   $ hg ci -qm 'conflicting inside/f1'
   $ hg rebase -d 'desc("modify inside/f1")' 2>&1 | egrep -v '(warning:|incomplete!)'
-  rebasing 6:cdce97fbf653 "conflicting inside/f1" (tip)
+  rebasing 6:cdce97fbf653 tip "conflicting inside/f1"
   merging inside/f1
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
   $ echo modified3 > inside/f1
   $ hg resolve -m 2>&1 | grep -v continue:
   (no more unresolved files)
   $ hg continue
-  rebasing 6:cdce97fbf653 "conflicting inside/f1" (tip)
+  rebasing 6:cdce97fbf653 tip "conflicting inside/f1"
   saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-rebase.hg (glob)
 
 Can rebase onto non-conflicting changes outside narrow spec
@@ -86,7 +86,7 @@
   $ echo modified > inside/f2
   $ hg ci -qm 'modify inside/f2'
   $ hg rebase -d 'desc("modify outside/f1")'
-  rebasing 7:c2f36d04e05d "modify inside/f2" (tip)
+  rebasing 7:c2f36d04e05d tip "modify inside/f2"
   saved backup bundle to $TESTTMP/narrow/.hg/strip-backup/*-rebase.hg (glob)
 
 Rebase interrupts on conflicting changes outside narrow spec
--- a/tests/test-narrow-shallow.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-shallow.t	Tue Jan 19 21:48:43 2021 +0530
@@ -91,15 +91,29 @@
   2: Commit rev2 of f9, d1/f9, d2/f9
   1: Commit rev2 of f8, d1/f8, d2/f8
   0...: Commit rev2 of f7, d1/f7, d2/f7
+
+XXX flaky output (see issue6150)
+XXX
+XXX The filectx implementation is buggy and returns wrong data during status,
+XXX leading to more files being "merged". The right output is the one with just
+XXX 10 files updated.
+
   $ hg update 4
-  merging d2/f1
-  merging d2/f2
-  merging d2/f3
-  merging d2/f4
-  merging d2/f5
-  merging d2/f6
-  merging d2/f7
-  3 files updated, 7 files merged, 0 files removed, 0 files unresolved
+  merging d2/f1 (?)
+  merging d2/f2 (?)
+  merging d2/f3 (?)
+  merging d2/f4 (?)
+  merging d2/f5 (?)
+  merging d2/f6 (?)
+  merging d2/f7 (?)
+  3 files updated, 7 files merged, 0 files removed, 0 files unresolved (?)
+  4 files updated, 6 files merged, 0 files removed, 0 files unresolved (?)
+  5 files updated, 5 files merged, 0 files removed, 0 files unresolved (?)
+  6 files updated, 4 files merged, 0 files removed, 0 files unresolved (?)
+  7 files updated, 3 files merged, 0 files removed, 0 files unresolved (?)
+  8 files updated, 2 files merged, 0 files removed, 0 files unresolved (?)
+  9 files updated, 1 files merged, 0 files removed, 0 files unresolved (?)
+  10 files updated, 0 files merged, 0 files removed, 0 files unresolved (?)
   $ cat d2/f7 d2/f8
   d2/f7 rev3
   d2/f8 rev2
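A note on the harness syntax the hunk above leans on: a line ending in `(?)` marks optional output, so every combination of the flaky merge/update counts is accepted until issue6150 is fixed. Schematically (command and file names taken from the test above):

  $ hg update 4
  merging d2/f1 (?)
  10 files updated, 0 files merged, 0 files removed, 0 files unresolved (?)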
--- a/tests/test-narrow-trackedcmd.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-trackedcmd.t	Tue Jan 19 21:48:43 2021 +0530
@@ -118,10 +118,10 @@
                                    web.cacerts config)
   
   (use 'hg tracked -h' to show more help)
-  [255]
+  [10]
   $ hg tracked --import-rules doesnotexist
   abort: cannot read narrowspecs from '$TESTTMP/narrow/doesnotexist': $ENOENT$
-  [255]
+  [50]
 
   $ cat > specs <<EOF
   > %include foo
@@ -133,7 +133,7 @@
 
   $ hg tracked --import-rules specs
   abort: including other spec files using '%include' is not supported in narrowspec
-  [255]
+  [10]
 
   $ cat > specs <<EOF
   > [include]
@@ -198,7 +198,7 @@
 
   $ hg tracked --import-rules specs --clear
   abort: the --clear option is not yet supported
-  [255]
+  [10]
 
 Testing with passing an out-of-wdir file
 
@@ -224,4 +224,4 @@
   $ cd non-narrow
   $ hg tracked --addinclude foobar
   abort: the tracked command is only supported on repositories cloned with --narrow
-  [255]
+  [10]
--- a/tests/test-narrow-widen-no-ellipsis.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow-widen-no-ellipsis.t	Tue Jan 19 21:48:43 2021 +0530
@@ -107,7 +107,7 @@
 
   $ hg tracked --addinclude widest/f --debug
   comparing with ssh://user@dummy/master
-  running python "*dummyssh" *user@dummy* *hg -R master serve --stdio* (glob)
+  running * "*dummyssh" *user@dummy* *hg -R master serve --stdio* (glob)
   sending hello command
   sending between command
   remote: * (glob)
--- a/tests/test-narrow.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-narrow.t	Tue Jan 19 21:48:43 2021 +0530
@@ -98,7 +98,7 @@
   * (glob)
   abort: local changes found
   (use --force-delete-local-changes to ignore)
-  [255]
+  [20]
 Check that nothing was removed by the failed attempts
   $ hg tracked
   I path:d0
@@ -422,7 +422,7 @@
   ...and 5 more, use --verbose to list all
   abort: local changes found
   (use --force-delete-local-changes to ignore)
-  [255]
+  [20]
 Now test it *with* verbose.
   $ hg tracked --removeinclude d9 --verbose
   comparing with ssh://user@dummy/master
@@ -446,7 +446,7 @@
   ^[0-9a-f]{12}$ (re)
   abort: local changes found
   (use --force-delete-local-changes to ignore)
-  [255]
+  [20]
   $ cd ..
 
 Test --auto-remove-includes
--- a/tests/test-newbranch.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-newbranch.t	Tue Jan 19 21:48:43 2021 +0530
@@ -32,7 +32,7 @@
   $ hg branch default
   abort: a branch of the same name already exists
   (use 'hg update' to switch to it)
-  [255]
+  [10]
 
   $ hg branch -f default
   marked working directory as branch default
@@ -61,7 +61,7 @@
   $ hg branch bar
   abort: a branch of the same name already exists
   (use 'hg update' to switch to it)
-  [255]
+  [10]
 
   $ hg branch -f bar
   marked working directory as branch bar
@@ -84,7 +84,7 @@
   $ hg branch bar
   abort: a branch of the same name already exists
   (use 'hg update' to switch to it)
-  [255]
+  [10]
 
  set (other) parent branch as branch name
 
--- a/tests/test-nointerrupt.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-nointerrupt.t	Tue Jan 19 21:48:43 2021 +0530
@@ -59,7 +59,7 @@
   > EOF
 
 Test ctrl-c
-  $ python $TESTTMP/timeout.py -s INT 1 hg sleep 2
+  $ $PYTHON $TESTTMP/timeout.py -s INT 1 hg sleep 2
   interrupted!
   [124]
 
@@ -68,7 +68,7 @@
   > nointerrupt = yes
   > EOF
 
-  $ python $TESTTMP/timeout.py -s INT 1 hg sleep 2
+  $ $PYTHON $TESTTMP/timeout.py -s INT 1 hg sleep 2
   interrupted!
   [124]
 
@@ -77,7 +77,7 @@
   > nointerrupt-interactiveonly = False
   > EOF
 
-  $ python $TESTTMP/timeout.py -s INT 1 hg sleep 2
+  $ $PYTHON $TESTTMP/timeout.py -s INT 1 hg sleep 2
   shutting down cleanly
   press ^C again to terminate immediately (dangerous)
   end of unsafe operation
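Like the test-parseindex2.py hunk further down, which reads the same value from `os.environ['PYTHON']`, these hunks stop hard-coding `python` and defer to the interpreter the test runner exports, so inside a .t test the invocation is simply:

  $ $PYTHON $TESTTMP/timeout.py -s INT 1 hg sleep 2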
--- a/tests/test-obshistory.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obshistory.t	Tue Jan 19 21:48:43 2021 +0530
@@ -51,7 +51,7 @@
 Actual test
 -----------
   $ hg update 471f378eab4c
-  abort: hidden revision '471f378eab4c' was rewritten as: 4ae3a4151de9!
+  abort: hidden revision '471f378eab4c' was rewritten as: 4ae3a4151de9
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden "desc(A0)"
@@ -115,7 +115,7 @@
   $ hg up 1
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg up 0dec01379d3b
-  abort: hidden revision '0dec01379d3b' is pruned!
+  abort: hidden revision '0dec01379d3b' is pruned
   (use --hidden to access hidden revisions)
   [255]
   $ hg up --hidden -r 'desc(B0)'
@@ -193,7 +193,7 @@
 Actual test
 -----------
   $ hg update 471597cad322
-  abort: hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a!
+  abort: hidden revision '471597cad322' was split as: 337fec4d2edc, f257fde29c7a
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'min(desc(A0))'
@@ -293,7 +293,7 @@
 Actual test
 -----------
   $ hg update de7290d8b885
-  abort: hidden revision 'de7290d8b885' was split as: 337fec4d2edc, f257fde29c7a and 2 more!
+  abort: hidden revision 'de7290d8b885' was split as: 337fec4d2edc, f257fde29c7a and 2 more
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'min(desc(A0))'
@@ -374,7 +374,7 @@
  Actual test
  -----------
   $ hg update 471f378eab4c
-  abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192!
+  abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'desc(A0)'
@@ -382,7 +382,7 @@
   updated to hidden changeset 471f378eab4c
   (hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192)
   $ hg update 0dec01379d3b
-  abort: hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192!
+  abort: hidden revision '0dec01379d3b' was rewritten as: eb5a0daa2192
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'desc(B0)'
@@ -457,7 +457,7 @@
 Actual test
 -----------
   $ hg update 471f378eab4c
-  abort: hidden revision '471f378eab4c' has diverged!
+  abort: hidden revision '471f378eab4c' has diverged
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'desc(A0)'
@@ -554,7 +554,7 @@
  Actual test
  -----------
   $ hg update 471f378eab4c
-  abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192!
+  abort: hidden revision '471f378eab4c' was rewritten as: eb5a0daa2192
   (use --hidden to access hidden revisions)
   [255]
   $ hg update --hidden 'desc(A0)'
--- a/tests/test-obsmarker-template.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsmarker-template.t	Tue Jan 19 21:48:43 2021 +0530
@@ -122,7 +122,7 @@
   o  ea207398892e
   
 
-  $ hg log -G --config ui.logtemplate=
+  $ hg log -G --config command-templates.log=
   o  changeset:   3:d004c8f274b9
   |  tag:         tip
   |  parent:      0:ea207398892e
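The template hunks here and in the test-obsolete* files below follow the renaming of the log template option: what the tests used to set as `ui.logtemplate` is now configured as `command-templates.log`, e.g.:

  $ cat >> $HGRCPATH <<EOF
  > [command-templates]
  > log = "{node|short}: {desc}\n"
  > EOF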
--- a/tests/test-obsmarkers-effectflag.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsmarkers-effectflag.t	Tue Jan 19 21:48:43 2021 +0530
@@ -76,7 +76,7 @@
   $ mkcommit C0
   $ mkcommit D0
   $ hg rebase -r . -d 'desc(B0)'
-  rebasing 10:c85eff83a034 "D0" (tip)
+  rebasing 10:c85eff83a034 tip "D0"
 
 check result
 
--- a/tests/test-obsolete-bundle-strip.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsolete-bundle-strip.t	Tue Jan 19 21:48:43 2021 +0530
@@ -9,9 +9,9 @@
 ------------
 
   $ cat >> $HGRCPATH <<EOF
-  > [ui]
+  > [command-templates]
   > # simpler log output
-  > logtemplate = "{node|short}: {desc}\n"
+  > log = "{node|short}: {desc}\n"
   > 
   > [experimental]
   > # enable evolution
@@ -1444,3 +1444,35 @@
   # unbundling: new changesets 9ac430e15fca (1 drafts)
   # unbundling: (1 other changesets obsolete on arrival)
   # unbundling: (run 'hg update' to get a working copy)
+
+Test that advisory obsolescence markers in bundles are ignored if unsupported
+
+  $ hg init repo-with-obs
+  $ cd repo-with-obs
+  $ hg debugbuilddag +1
+  $ hg debugobsolete `getid 0`
+  1 new obsolescence markers
+  obsoleted 1 changesets
+  $ hg bundle --config experimental.evolution.bundle-obsmarker=true --config experimental.evolution.bundle-obsmarker:mandatory=false --all --hidden bundle-with-obs
+  1 changesets found
+  $ cd ..
+  $ hg init repo-without-obs
+  $ cd repo-without-obs
+  $ hg --config experimental.evolution=False unbundle ../repo-with-obs/bundle-with-obs --debug
+  bundle2-input-bundle: 1 params with-transaction
+  bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
+  adding changesets
+  add changeset 1ea73414a91b
+  adding manifests
+  adding file changes
+  bundle2-input-part: total payload size 190
+  bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
+  bundle2-input-part: total payload size 39
+  bundle2-input-part: "obsmarkers" (advisory) supported
+  bundle2-input-part: total payload size 50
+  ignoring obsolescence markers, feature not enabled
+  bundle2-input-bundle: 3 parts total
+  updating the branch cache
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b (1 drafts)
+  (run 'hg update' to get a working copy)
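The test added above demonstrates the two options that decide whether obsolescence markers are shipped in a bundle and whether a receiver is required to understand them; condensed to its essentials (same options and commands as in the test, run in a repository that already carries markers):

  $ hg bundle --all --hidden \
  >   --config experimental.evolution.bundle-obsmarker=true \
  >   --config experimental.evolution.bundle-obsmarker:mandatory=false \
  >   bundle-with-obs
  $ hg debugbundle bundle-with-obs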
--- a/tests/test-obsolete-checkheads.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsolete-checkheads.t	Tue Jan 19 21:48:43 2021 +0530
@@ -3,8 +3,8 @@
   > [phases]
   > # public changeset are not obsolete
   > publish=false
-  > [ui]
-  > logtemplate='{node|short} ({phase}) {desc|firstline}\n'
+  > [command-templates]
+  > log='{node|short} ({phase}) {desc|firstline}\n'
   > [experimental]
   > evolution.createmarkers=True
   > EOF
@@ -95,9 +95,9 @@
   $ hg push
   pushing to $TESTTMP/remote
   searching for changes
-  abort: push creates new remote head 71e3228bffe1!
+  abort: push creates new remote head 71e3228bffe1
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 old head is now public (public remote version)
 ==============================================
@@ -155,9 +155,9 @@
   $ hg push -r 'desc("other")'
   pushing to $TESTTMP/remote
   searching for changes
-  abort: push creates new remote head d7d41ccbd4de!
+  abort: push creates new remote head d7d41ccbd4de
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 
 
@@ -267,9 +267,9 @@
   pushing to $TESTTMP/remote
   searching for changes
   remote has heads on branch 'default' that are not known locally: c70b08862e08
-  abort: push creates new remote head 71e3228bffe1!
+  abort: push creates new remote head 71e3228bffe1
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 Pulling the missing data makes it work
 
@@ -317,6 +317,6 @@
   $ hg push
   pushing to $TESTTMP/remote
   searching for changes
-  abort: push creates new remote head 350a93b716be!
+  abort: push creates new remote head 350a93b716be
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
--- a/tests/test-obsolete-distributed.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsolete-distributed.t	Tue Jan 19 21:48:43 2021 +0530
@@ -16,8 +16,8 @@
   > evolution = all
   > [phases]
   > publish = False
-  > [ui]
-  > logtemplate= {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
+  > [command-templates]
+  > log = {rev}:{node|short} {desc}{if(obsfate, " [{join(obsfate, "; ")}]")}\n
   > EOF
 
 Check distributed chain building
@@ -138,14 +138,42 @@
 
   $ hg up 'desc("ROOT")'
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  $ hg pull --confirm --config ui.interactive=True << EOF
+  $ hg pull --debug --confirm --config ui.interactive=True << EOF
   > n
   > EOF
   pulling from $TESTTMP/distributed-chain-building/server
+  query 1; heads
   searching for changes
+  taking quick initial sample
+  query 2; still undecided: 1, sample size is: 1
+  2 total queries in *.*s (glob)
+  1 changesets found
+  list of changesets:
+  391a2bf12b1b8b05a72400ae36b26d50a091dc22
+  listing keys for "bookmarks"
+  bundle2-output-bundle: "HG20", 5 parts total
+  bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload
+  bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload
+  bundle2-output-part: "obsmarkers" streamed payload
+  bundle2-output-part: "phase-heads" 48 bytes payload
+  bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
+  bundle2-input-bundle: with-transaction
+  bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
   adding changesets
+  add changeset 391a2bf12b1b
   adding manifests
   adding file changes
+  adding c_B1 revisions
+  bundle2-input-part: total payload size 485
+  bundle2-input-part: "listkeys" (params: 1 mandatory) supported
+  bundle2-input-part: "obsmarkers" supported
+  bundle2-input-part: total payload size 143
+  bundle2-input-part: "phase-heads" supported
+  bundle2-input-part: total payload size 48
+  bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
+  bundle2-input-part: total payload size 39
+  bundle2-input-bundle: 5 parts total
+  checking for updated bookmarks
   adding 1 changesets with 1 changes to 1 files (+1 heads)
   1 new obsolescence markers
   obsoleting 1 changesets
@@ -544,7 +572,7 @@
   adding file changes
   added 2 changesets with 0 changes to 2 files (+1 heads)
   (2 other changesets obsolete on arrival)
-  abort: cannot update to target: filtered revision '6'!
+  abort: cannot update to target: filtered revision '6'
   [255]
 
   $ cd ..
--- a/tests/test-obsolete.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-obsolete.t	Tue Jan 19 21:48:43 2021 +0530
@@ -47,10 +47,10 @@
 
   $ hg debugobsolete 0
   abort: changeset references must be full hexadecimal node identifiers
-  [255]
+  [10]
   $ hg debugobsolete '00'
   abort: changeset references must be full hexadecimal node identifiers
-  [255]
+  [10]
   $ hg debugobsolete -d '0 0' `getid kill_me` -u babar
   1 new obsolescence markers
   obsoleted 1 changesets
@@ -200,10 +200,10 @@
   $ hg tip
   5:5601fb93a350 (draft) [tip ] add new_3_c
   $ hg log -r 6
-  abort: unknown revision '6'!
+  abort: unknown revision '6'
   [255]
   $ hg log -r 4
-  abort: hidden revision '4' was rewritten as: 5601fb93a350!
+  abort: hidden revision '4' was rewritten as: 5601fb93a350
   (use --hidden to access hidden revisions)
   [255]
   $ hg debugrevspec 'rev(6)'
@@ -1541,7 +1541,7 @@
   4:13bedc178fce (draft *obsolete*) [ bookb] add b [rewritten using amend as 5:a9b1f8652753]
   $ hg book -d bookb
   $ hg log -r 13bedc178fce
-  abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753!
+  abort: hidden revision '13bedc178fce' was rewritten as: a9b1f8652753
   (use --hidden to access hidden revisions)
   [255]
 
@@ -1682,6 +1682,24 @@
   |
   @  0:a78f55e5508c (draft) [ ] 0
   
+
+
+Test that bundles can ship the markers without making them mandatory
+for non-obsmarker enabled clients:
+
+  $ hg --config experimental.evolution.bundle-obsmarker=1 --config experimental.evolution.bundle-obsmarker:mandatory=0 bundle --base 0 -r 1:: obslog-bundle.hg
+  2 changesets found
+  $ hg debugbundle obslog-bundle.hg
+  Stream params: {Compression: BZ}
+  changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
+      e016b03fd86fcccc54817d120b90b751aaf367d6
+      b0551702f918510f01ae838ab03a463054c67b46
+  cache:rev-branch-cache -- {} (mandatory: False)
+  obsmarkers -- {} (mandatory: False)
+      version: 1 (92 bytes)
+      e008cf2834908e5d6b0f792a9d4b0e2272260fb8 b0551702f918510f01ae838ab03a463054c67b46 0 (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '8', 'operation': 'amend', 'user': 'test'}
+
+
 Test that 'hg debugobsolete --index --rev' can show indices of obsmarkers when
 only a subset of those are displayed (because of --rev option)
   $ hg init doindexrev
--- a/tests/test-parents.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-parents.t	Tue Jan 19 21:48:43 2021 +0530
@@ -53,8 +53,8 @@
   
 
   $ hg parents -r 3 c
-  abort: 'c' not found in manifest!
-  [255]
+  abort: 'c' not found in manifest
+  [10]
 
   $ hg parents -r 2
   changeset:   1:d786049f033a
@@ -95,7 +95,7 @@
 
   $ hg parents -r 2 glob:a
   abort: can only specify an explicit filename
-  [255]
+  [10]
 
 
 merge working dir with 2 parents, hg parents c
--- a/tests/test-parse-date.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-parse-date.t	Tue Jan 19 21:48:43 2021 +0530
@@ -18,13 +18,13 @@
   $ echo "fail" >> a
   $ hg ci -d "should fail" -m "fail"
   hg: parse error: invalid date: 'should fail'
-  [255]
+  [10]
   $ hg ci -d "100000000000000000 1400" -m "fail"
   hg: parse error: date exceeds 32 bits: 100000000000000000
-  [255]
+  [10]
   $ hg ci -d "100000 1400000" -m "fail"
   hg: parse error: impossible time zone offset: 1400000
-  [255]
+  [10]
 
 Check with local timezone other than GMT and with DST
 
--- a/tests/test-parseindex2.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-parseindex2.py	Tue Jan 19 21:48:43 2021 +0530
@@ -5,17 +5,19 @@
 
 from __future__ import absolute_import, print_function
 
+import os
 import struct
 import subprocess
 import sys
 import unittest
 
 from mercurial.node import (
+    bin,
+    hex,
     nullid,
     nullrev,
 )
 from mercurial import (
-    node as nodemod,
     policy,
     pycompat,
 )
@@ -129,7 +131,7 @@
         "import sys; sys.hexversion=%s; "
         "import mercurial.cext.parsers" % hexversion
     )
-    cmd = "python -c \"%s\"" % code
+    cmd = "\"%s\" -c \"%s\"" % (os.environ['PYTHON'], code)
     # We need to do these tests inside a subprocess because parser.c's
     # version-checking code happens inside the module init function, and
     # when using reload() to reimport an extension module, "The init function
@@ -231,7 +233,7 @@
                 self.assertEqual(
                     ix[r[7]],
                     i,
-                    'Reverse lookup inconsistent for %r' % nodemod.hex(r[7]),
+                    'Reverse lookup inconsistent for %r' % hex(r[7]),
                 )
             except TypeError:
                 # pure version doesn't support this
@@ -254,7 +256,7 @@
             if rev == nullrev:
                 return b'\xff\xff\xff\xff'
             else:
-                return nodemod.bin('%08x' % rev)
+                return bin('%08x' % rev)
 
         def appendrev(p1, p2=nullrev):
             # node won't matter for this test, let's just make sure
--- a/tests/test-patchbomb-tls.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-patchbomb-tls.t	Tue Jan 19 21:48:43 2021 +0530
@@ -73,7 +73,7 @@
   (verifying remote certificate)
   abort: unable to verify security of localhost (no loaded CA certificates); refusing to connect
   (see https://mercurial-scm.org/wiki/SecureConnections for how to configure Mercurial to avoid this error or set hostsecurity.localhost:fingerprints=sha256:20:de:b3:ad:b4:cd:a5:42:f0:74:41:1c:a2:70:1e:da:6e:c0:5c:16:9e:e7:22:0f:f1:b7:e5:6e:e4:92:af:7e to trust this server)
-  [255]
+  [150]
 
 With global certificates:
 
--- a/tests/test-patchbomb.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-patchbomb.t	Tue Jan 19 21:48:43 2021 +0530
@@ -3100,7 +3100,7 @@
   $ echo 'publicurl=$TESTTMP/missing' >> $HGRCPATH
   $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10'
   unable to access public repo: $TESTTMP/missing
-  abort: repository $TESTTMP/missing not found!
+  abort: repository $TESTTMP/missing not found
   [255]
 
 node missing at public
--- a/tests/test-pathconflicts-update.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pathconflicts-update.t	Tue Jan 19 21:48:43 2021 +0530
@@ -129,7 +129,7 @@
   R base
   $ hg up --check dir
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg up dir
   a: path conflict - a file or link has the same name as a directory
   the local file has been renamed to a~d20a80d4def3
@@ -154,7 +154,7 @@
   $ echo 9 > a/b/c
   $ hg up file2 --check --config merge.checkunknown=warn
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg up file2 --clean
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   (activating bookmark file2)
--- a/tests/test-pathencode.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pathencode.py	Tue Jan 19 21:48:43 2021 +0530
@@ -54,8 +54,8 @@
 
 
 def buildprobtable(fp, cmd='hg manifest tip'):
-    '''Construct and print a table of probabilities for path name
-    components.  The numbers are percentages.'''
+    """Construct and print a table of probabilities for path name
+    components.  The numbers are percentages."""
 
     counts = collections.defaultdict(lambda: 0)
     for line in os.popen(cmd).read().splitlines():
--- a/tests/test-persistent-nodemap.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-persistent-nodemap.t	Tue Jan 19 21:48:43 2021 +0530
@@ -8,13 +8,36 @@
   > [devel]
   > persistent-nodemap=yes
   > EOF
-  $ hg init test-repo
+
+  $ hg init test-repo --config storage.revlog.persistent-nodemap.slow-path=allow
   $ cd test-repo
+
+Check handling of the default slow-path value
+
+#if no-pure no-rust
+
+  $ hg id
+  abort: accessing `persistent-nodemap` repository without associated fast implementation.
+  (check `hg help config.format.use-persistent-nodemap` for details)
+  [255]
+
+Unlock further checks (we are here to test the feature)
+
+  $ cat << EOF >> $HGRCPATH
+  > [storage]
+  > # to avoid spamming the test
+  > revlog.persistent-nodemap.slow-path=allow
+  > EOF
+
+#endif
+
+
   $ hg debugformat
   format-variant     repo
   fncache:            yes
   dotencode:          yes
   generaldelta:       yes
+  share-safe:          no
   sparserevlog:       yes
   sidedata:            no
   persistent-nodemap: yes
@@ -22,9 +45,8 @@
   plain-cl-delta:     yes
   compression:        zlib
   compression-level:  default
-  $ hg debugbuilddag .+5000 --new-file --config "storage.revlog.nodemap.mode=warn"
-  persistent nodemap in strict mode without efficient method (no-rust no-pure !)
-  persistent nodemap in strict mode without efficient method (no-rust no-pure !)
+  $ hg debugbuilddag .+5000 --new-file
+
   $ hg debugnodemap --metadata
   uid: ???????????????? (glob)
   tip-rev: 5000
@@ -109,14 +131,40 @@
   $ echo foo > foo
   $ hg add foo
 
+
+Check slow-path config value handling
+-------------------------------------
+
 #if no-pure no-rust
 
-  $ hg ci -m 'foo' --config "storage.revlog.nodemap.mode=strict"
-  transaction abort!
-  rollback completed
-  abort: persistent nodemap in strict mode without efficient method
+  $ hg id --config "storage.revlog.persistent-nodemap.slow-path=invalid-value"
+  unknown value for config "storage.revlog.persistent-nodemap.slow-path": "invalid-value"
+  falling back to default value: abort
+  abort: accessing `persistent-nodemap` repository without associated fast implementation.
+  (check `hg help config.format.use-persistent-nodemap` for details)
   [255]
 
+  $ hg log -r . --config "storage.revlog.persistent-nodemap.slow-path=warn"
+  warning: accessing `persistent-nodemap` repository without associated fast implementation.
+  (check `hg help config.format.use-persistent-nodemap` for details)
+  changeset:   5000:6b02b8c7b966
+  tag:         tip
+  user:        debugbuilddag
+  date:        Thu Jan 01 01:23:20 1970 +0000
+  summary:     r5000
+  
+  $ hg ci -m 'foo' --config "storage.revlog.persistent-nodemap.slow-path=abort"
+  abort: accessing `persistent-nodemap` repository without associated fast implementation.
+  (check `hg help config.format.use-persistent-nodemap` for details)
+  [255]
+
+#else
+
+  $ hg id --config "storage.revlog.persistent-nodemap.slow-path=invalid-value"
+  unknown value for config "storage.revlog.persistent-nodemap.slow-path": "invalid-value"
+  falling back to default value: abort
+  6b02b8c7b966+ tip
+
 #endif
 
   $ hg ci -m 'foo'
@@ -168,12 +216,12 @@
 
   $ echo bar > bar
   $ hg add bar
-  $ hg ci -m 'bar' --config storage.revlog.nodemap.mmap=no
+  $ hg ci -m 'bar' --config storage.revlog.persistent-nodemap.mmap=no
 
-  $ hg debugnodemap --check --config storage.revlog.nodemap.mmap=yes
+  $ hg debugnodemap --check --config storage.revlog.persistent-nodemap.mmap=yes
   revision in index:   5003
   revision in nodemap: 5003
-  $ hg debugnodemap --check --config storage.revlog.nodemap.mmap=no
+  $ hg debugnodemap --check --config storage.revlog.persistent-nodemap.mmap=no
   revision in index:   5003
   revision in nodemap: 5003
 
@@ -326,6 +374,38 @@
   $ hg log -r "$OTHERNODE" -T '{rev}\n'
   5002
 
+missing data file
+-----------------
+
+  $ UUID=`hg debugnodemap --metadata| grep 'uid:' | \
+  > sed 's/uid: //'`
+  $ FILE=.hg/store/00changelog-"${UUID}".nd
+  $ mv $FILE ../tmp-data-file
+  $ cp .hg/store/00changelog.n ../tmp-docket
+
+mercurial does not crash
+
+  $ hg log -r .
+  changeset:   5002:b355ef8adce0
+  tag:         tip
+  parent:      4998:d918ad6d18d3
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     babar
+  
+  $ hg debugnodemap --metadata
+
+  $ hg debugupdatecache
+  $ hg debugnodemap --metadata
+  uid: * (glob)
+  tip-rev: 5002
+  tip-node: b355ef8adce0949b8bdf6afc72ca853740d65944
+  data-length: 121088
+  data-unused: 0
+  data-unused: 0.000%
+  $ mv ../tmp-data-file $FILE
+  $ mv ../tmp-docket .hg/store/00changelog.n
+
 Check transaction related property
 ==================================
 
@@ -476,6 +556,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap: yes     no      no
@@ -490,6 +571,11 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
      removed: persistent-nodemap
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
   [1]
   $ hg debugnodemap --metadata
@@ -506,6 +592,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no    yes      no
@@ -520,6 +607,11 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
      added: persistent-nodemap
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
   00changelog-*.nd (glob)
   00changelog.n
@@ -544,6 +636,11 @@
   
   optimisations: re-delta-all
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ ls -1 .hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
   00changelog-*.nd (glob)
   00changelog.n
@@ -557,3 +654,74 @@
   data-length: 121088
   data-unused: 0
   data-unused: 0.000%
+
+Persistent nodemap and local/streaming clone
+============================================
+
+  $ cd ..
+
+standard clone
+--------------
+
+The persistent nodemap should exist after a standard (pull) clone
+
+  $ hg clone --pull --quiet -U test-repo standard-clone
+  $ ls -1 standard-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
+  00changelog-*.nd (glob)
+  00changelog.n
+  00manifest-*.nd (glob)
+  00manifest.n
+  $ hg -R standard-clone debugnodemap --metadata
+  uid: * (glob)
+  tip-rev: 5005
+  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
+  data-length: 121088
+  data-unused: 0
+  data-unused: 0.000%
+
+
+local clone
+------------
+
+The persistent nodemap should exist after a local clone
+
+  $ hg clone -U test-repo local-clone
+  $ ls -1 local-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
+  00changelog-*.nd (glob)
+  00changelog.n
+  00manifest-*.nd (glob)
+  00manifest.n
+  $ hg -R local-clone debugnodemap --metadata
+  uid: * (glob)
+  tip-rev: 5005
+  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
+  data-length: 121088
+  data-unused: 0
+  data-unused: 0.000%
+
+stream clone
+------------
+
+The persistent nodemap should exist after a streaming clone
+
+  $ hg clone -U --stream --config ui.ssh="\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/test-repo stream-clone --debug | egrep '00(changelog|manifest)'
+  adding [s] 00manifest.n (70 bytes)
+  adding [s] 00manifest.i (313 KB)
+  adding [s] 00manifest.d (452 KB)
+  adding [s] 00manifest-*.nd (118 KB) (glob)
+  adding [s] 00changelog.n (70 bytes)
+  adding [s] 00changelog.i (313 KB)
+  adding [s] 00changelog.d (360 KB)
+  adding [s] 00changelog-*.nd (118 KB) (glob)
+  $ ls -1 stream-clone/.hg/store/ | egrep '00(changelog|manifest)(\.n|-.*\.nd)'
+  00changelog-*.nd (glob)
+  00changelog.n
+  00manifest-*.nd (glob)
+  00manifest.n
+  $ hg -R stream-clone debugnodemap --metadata
+  uid: * (glob)
+  tip-rev: 5005
+  tip-node: 90d5d3ba2fc47db50f712570487cb261a68c8ffe
+  data-length: 121088
+  data-unused: 0
+  data-unused: 0.000%
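All the slow-path checks added above turn on one storage option, which only matters when no fast (Rust or C) nodemap implementation is available: `allow` silences the check, `warn` prints a warning and continues, and `abort` (also the fallback for unrecognised values) makes the command abort. The persistent form the test appends to its hgrc is:

  $ cat >> $HGRCPATH <<EOF
  > [storage]
  > revlog.persistent-nodemap.slow-path = allow
  > EOF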
--- a/tests/test-phabricator.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-phabricator.t	Tue Jan 19 21:48:43 2021 +0530
@@ -101,7 +101,7 @@
   $ hg ci --addremove -m 'create alpha for phabricator test €'
   adding alpha
   $ hg phabsend -r . --test-vcr "$VCR/phabsend-create-alpha.json"
-  D7915 - created - d386117f30e6: create alpha for phabricator test \xe2\x82\xac (esc)
+  D7915 - created - 0:d386117f30e6 tip "create alpha for phabricator test \xe2\x82\xac" (esc)
   new commits: ['347bf67801e5']
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d386117f30e6-24ffe649-phabsend.hg
   $ echo more >> alpha
@@ -112,8 +112,8 @@
   adding beta
   $ hg phabsend -r ".^::" --test-vcr "$VCR/phabsend-update-alpha-create-beta.json"
   c44b38f24a45 mapped to old nodes []
-  D7915 - updated - c44b38f24a45: create alpha for phabricator test \xe2\x82\xac (esc)
-  D7916 - created - 9e6901f21d5b: create beta for phabricator test
+  D7915 - updated - 0:c44b38f24a45 "create alpha for phabricator test \xe2\x82\xac" (esc)
+  D7916 - created - 1:9e6901f21d5b tip "create beta for phabricator test"
   new commits: ['a692622e6937']
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9e6901f21d5b-1fcd4f0e-phabsend.hg
   $ unset HGENCODING
@@ -127,8 +127,8 @@
   $ echo 'draft change' > alpha
   $ hg ci -m 'create draft change for phabricator testing'
   $ hg phabsend --amend -r '.^::' --test-vcr "$VCR/phabsend-create-public.json"
-  D7917 - created - 7b4185ab5d16: create public change for phabricator testing
-  D7918 - created - 251c1c333fc6: create draft change for phabricator testing
+  D7917 - created - 2:7b4185ab5d16 "create public change for phabricator testing"
+  D7918 - created - 3:251c1c333fc6 tip "create draft change for phabricator testing"
   warning: not updating public commit 2:7b4185ab5d16
   new commits: ['3244dc4a3334']
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/251c1c333fc6-41cb7c3b-phabsend.hg
@@ -175,7 +175,7 @@
   $ hg ci --addremove -m "create comment for phabricator test"
   adding comment
   $ hg phabsend -r . -m "For default branch" --test-vcr "$VCR/phabsend-comment-created.json"
-  D7919 - created - d5dddca9023d: create comment for phabricator test
+  D7919 - created - 4:d5dddca9023d tip "create comment for phabricator test"
   new commits: ['f7db812bbe1d']
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/d5dddca9023d-adf673ba-phabsend.hg
   $ echo comment2 >> comment
@@ -183,12 +183,12 @@
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f7db812bbe1d-8fcded77-amend.hg
   $ hg phabsend -r . -m "Address review comments" --test-vcr "$VCR/phabsend-comment-updated.json"
   1849d7828727 mapped to old nodes []
-  D7919 - updated - 1849d7828727: create comment for phabricator test
+  D7919 - updated - 4:1849d7828727 tip "create comment for phabricator test"
 
 Phabsending a skipped commit:
   $ hg phabsend --no-amend -r . --test-vcr "$VCR/phabsend-skipped.json"
   1849d7828727 mapped to old nodes ['1849d7828727']
-  D7919 - skipped - 1849d7828727: create comment for phabricator test
+  D7919 - skipped - 4:1849d7828727 tip "create comment for phabricator test"
 
 Phabsend doesn't create an instability when restacking existing revisions on top
 of new revisions.
@@ -210,7 +210,7 @@
   $ echo "mod2" > file1.txt
   $ hg ci -m 'modified 2'
   $ hg phabsend -r . --test-vcr "$VCR/phabsend-add-parent-setup.json"
-  D8433 - created - 5d3959e20d1d: modified 2
+  D8433 - created - 2:5d3959e20d1d tip "modified 2"
   new commits: ['2b4aa8a88d61']
   $ hg log -G -T compact
   @  3[tip]:1   2b4aa8a88d61   1970-01-01 00:00 +0000   test
@@ -231,8 +231,8 @@
   $ hg up -q 3
   $ hg phabsend -r ".^ + ." --test-vcr "$VCR/phabsend-add-parent.json"
   2b4aa8a88d61 mapped to old nodes ['2b4aa8a88d61']
-  D8434 - created - d549263bcb2d: modified 1
-  D8433 - updated - 2b4aa8a88d61: modified 2
+  D8434 - created - 1:d549263bcb2d "modified 1"
+  D8433 - updated - 3:2b4aa8a88d61 "modified 2"
   new commits: ['876a60d024de']
   new commits: ['0c6523cb1d0f']
   restabilizing 1eda4bf55021 as d2c78c3a3e01
@@ -313,9 +313,9 @@
   $ hg phabsend -r 5::tip --test-vcr "$VCR/phabsend-no-restack-orphan.json"
   876a60d024de mapped to old nodes ['876a60d024de']
   0c6523cb1d0f mapped to old nodes ['0c6523cb1d0f']
-  D8434 - updated - 876a60d024de: modified 1
-  D8433 - updated - 0c6523cb1d0f: modified 2
-  D8435 - created - 082be6c94150: modified A
+  D8434 - updated - 5:876a60d024de "modified 1"
+  D8433 - updated - 6:0c6523cb1d0f "modified 2"
+  D8435 - created - 10:082be6c94150 tip "modified A"
   new commits: ['b5913193c805']
   not restabilizing unchanged d2c78c3a3e01
   $ hg log -G
@@ -374,10 +374,10 @@
   $ hg ci -m 'remove binary'
   $ hg phabsend -r .~2:: --test-vcr "$VCR/phabsend-binary.json"
   uploading bin@aa24a81f55de
-  D8007 - created - aa24a81f55de: add binary
+  D8007 - created - 5:aa24a81f55de "add binary"
   uploading bin@d8d62a881b54
-  D8008 - created - d8d62a881b54: modify binary
-  D8009 - created - af55645b2e29: remove binary
+  D8008 - created - 6:d8d62a881b54 "modify binary"
+  D8009 - created - 7:af55645b2e29 tip "remove binary"
   new commits: ['b8139fbb4a57']
   new commits: ['c88ce4c2d2ad']
   new commits: ['75dbbc901145']
@@ -421,14 +421,14 @@
 
   $ hg phabsend -r .~4:: --test-vcr "$VCR/phabsend-binary-renames.json"
   uploading bin2@f42f9195e00c
-  D8128 - created - f42f9195e00c: add another binary
-  D8129 - created - 834ab31d80ae: moved binary
-  D8130 - created - 494b750e5194: copied binary
+  D8128 - created - 8:f42f9195e00c "add another binary"
+  D8129 - created - 9:834ab31d80ae "moved binary"
+  D8130 - created - 10:494b750e5194 "copied binary"
   uploading bin2_moved_again@25f766b50cc2
-  D8131 - created - 25f766b50cc2: move+mod copied binary
+  D8131 - created - 11:25f766b50cc2 "move+mod copied binary"
   uploading bin2_moved_copied@1b87b363a5e4
   uploading bin2_moved@1b87b363a5e4
-  D8132 - created - 1b87b363a5e4: copy+mod moved binary
+  D8132 - created - 12:1b87b363a5e4 tip "copy+mod moved binary"
   new commits: ['90437c20312a']
   new commits: ['f391f4da4c61']
   new commits: ['da86a9f3268c']
@@ -590,6 +590,14 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     create beta for phabricator test
   
+phabupdate can convert from local revisions
+
+  $ hg phabupdate --reclaim D7917 -r '.: and not public()'
+  abort: cannot specify both DREVSPEC and --rev
+  [10]
+
+  $ hg phabupdate --reclaim -r '.: and not public()' --test-vcr "$VCR/phabupdate-revs.json"
+
 Phabimport accepts multiple DREVSPECs
 
   $ hg rollback --config ui.rollback=True
@@ -662,9 +670,9 @@
 
   $ hg phase -r tip --public
   $ hg phabsend --fold -r 1:: --test-vcr "$VCR/phabsend-fold-immutable.json"
-  D8386 - created - a959a3f69d8d: one: first commit to review
-  D8386 - created - 24a4438154ba: two: second commit to review
-  D8386 - created - d235829e802c: 3: a commit with no detailed message
+  D8386 - created - 1:a959a3f69d8d "one: first commit to review"
+  D8386 - created - 2:24a4438154ba "two: second commit to review"
+  D8386 - created - 3:d235829e802c tip "3: a commit with no detailed message"
   warning: not updating public commit 1:a959a3f69d8d
   warning: not updating public commit 2:24a4438154ba
   warning: not updating public commit 3:d235829e802c
@@ -676,13 +684,13 @@
 
   $ echo y | hg phabsend --fold --confirm -r 1:: \
   >          --test-vcr "$VCR/phabsend-fold-initial.json"
-  NEW - a959a3f69d8d: one: first commit to review
-  NEW - 24a4438154ba: two: second commit to review
-  NEW - d235829e802c: 3: a commit with no detailed message
+  NEW - 1:a959a3f69d8d "one: first commit to review"
+  NEW - 2:24a4438154ba "two: second commit to review"
+  NEW - 3:d235829e802c tip "3: a commit with no detailed message"
   Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
-  D8387 - created - a959a3f69d8d: one: first commit to review
-  D8387 - created - 24a4438154ba: two: second commit to review
-  D8387 - created - d235829e802c: 3: a commit with no detailed message
+  D8387 - created - 1:a959a3f69d8d "one: first commit to review"
+  D8387 - created - 2:24a4438154ba "two: second commit to review"
+  D8387 - created - 3:d235829e802c tip "3: a commit with no detailed message"
   updating local commit list for D8387
   new commits: ['602c4e738243', '832553266fe8', '921f8265efbd']
   saved backup bundle to $TESTTMP/folded/.hg/strip-backup/a959a3f69d8d-a4a24136-phabsend.hg
@@ -728,7 +736,7 @@
   obsolete feature not enabled but 1 markers found!
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ hg rebase --config experimental.evolution=all --config extensions.rebase=
-  note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 "two: second commit to review" (tip)
+  note: not rebasing 2:832553266fe8 "two: second commit to review", already in destination as 4:0124e5474c88 tip "two: second commit to review"
   rebasing 3:921f8265efbd "3: a commit with no detailed message"
 
 When commits have changed locally, the local commit list on Phabricator is
@@ -740,13 +748,13 @@
   602c4e738243 mapped to old nodes ['602c4e738243']
   0124e5474c88 mapped to old nodes ['832553266fe8']
   e4edb1fe3565 mapped to old nodes ['921f8265efbd']
-  D8387 - 602c4e738243: one: first commit to review
-  D8387 - 0124e5474c88: two: second commit to review
-  D8387 - e4edb1fe3565: 3: a commit with no detailed message
+  D8387 - 1:602c4e738243 "one: first commit to review"
+  D8387 - 4:0124e5474c88 "two: second commit to review"
+  D8387 - 5:e4edb1fe3565 tip "3: a commit with no detailed message"
   Send the above changes to https://phab.mercurial-scm.org/ (Y/n)? y
-  D8387 - updated - 602c4e738243: one: first commit to review
-  D8387 - updated - 0124e5474c88: two: second commit to review
-  D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
+  D8387 - updated - 1:602c4e738243 "one: first commit to review"
+  D8387 - updated - 4:0124e5474c88 "two: second commit to review"
+  D8387 - updated - 5:e4edb1fe3565 tip "3: a commit with no detailed message"
   obsolete feature not enabled but 2 markers found! (?)
   updating local commit list for D8387
   new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565']
@@ -772,9 +780,9 @@
   602c4e738243 mapped to old nodes ['602c4e738243']
   0124e5474c88 mapped to old nodes ['0124e5474c88']
   e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
-  D8387 - updated - 602c4e738243: one: first commit to review
-  D8387 - updated - 0124e5474c88: two: second commit to review
-  D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
+  D8387 - updated - 1:602c4e738243 "one: first commit to review"
+  D8387 - updated - 4:0124e5474c88 "two: second commit to review"
+  D8387 - updated - 5:e4edb1fe3565 tip "3: a commit with no detailed message"
   obsolete feature not enabled but 2 markers found! (?)
   local commit list for D8387 is already up-to-date
   $ hg log -Tcompact
@@ -801,10 +809,10 @@
   602c4e738243 mapped to old nodes ['602c4e738243']
   0124e5474c88 mapped to old nodes ['0124e5474c88']
   e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
-  D8387 - updated - 602c4e738243: one: first commit to review
-  D8387 - updated - 0124e5474c88: two: second commit to review
-  D8387 - updated - e4edb1fe3565: 3: a commit with no detailed message
-  D8387 - created - 94aaae213b23: four: extend the fold range
+  D8387 - updated - 1:602c4e738243 "one: first commit to review"
+  D8387 - updated - 4:0124e5474c88 "two: second commit to review"
+  D8387 - updated - 5:e4edb1fe3565 "3: a commit with no detailed message"
+  D8387 - created - 6:94aaae213b23 tip "four: extend the fold range"
   updating local commit list for D8387
   new commits: ['602c4e738243', '0124e5474c88', 'e4edb1fe3565', '51a04fea8707']
   $ hg log -r . -T '{desc}\n'
@@ -832,11 +840,11 @@
   0124e5474c88 mapped to old nodes ['0124e5474c88']
   e4edb1fe3565 mapped to old nodes ['e4edb1fe3565']
   51a04fea8707 mapped to old nodes ['51a04fea8707']
-  D8388 - created - 98d480e0d494: added file
-  D8388 - updated - 602c4e738243: one: first commit to review
-  D8388 - updated - 0124e5474c88: two: second commit to review
-  D8388 - updated - e4edb1fe3565: 3: a commit with no detailed message
-  D8388 - updated - 51a04fea8707: four: extend the fold range
+  D8388 - created - 0:98d480e0d494 "added file"
+  D8388 - updated - 1:602c4e738243 "one: first commit to review"
+  D8388 - updated - 4:0124e5474c88 "two: second commit to review"
+  D8388 - updated - 5:e4edb1fe3565 "3: a commit with no detailed message"
+  D8388 - updated - 7:51a04fea8707 tip "four: extend the fold range"
   updating local commit list for D8388
   new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'ac7db67f0991']
 
@@ -925,12 +933,12 @@
   30682b960804 mapped to old nodes ['30682b960804']
   6bc15dc99efd mapped to old nodes ['ac7db67f0991']
   b50946d5e490 mapped to old nodes ['ac7db67f0991']
-  D8388 - updated - 15e9b14b4b4c: added file
-  D8388 - updated - 6320b7d714cf: one: first commit to review
-  D8388 - updated - 3ee132d41dbc: two: second commit to review
-  D8388 - updated - 30682b960804: 3: a commit with no detailed message
-  D8388 - updated - 6bc15dc99efd: four: extend the fold range
-  D8388 - updated - b50946d5e490: four: extend the fold range
+  D8388 - updated - 8:15e9b14b4b4c "added file"
+  D8388 - updated - 9:6320b7d714cf "one: first commit to review"
+  D8388 - updated - 10:3ee132d41dbc "two: second commit to review"
+  D8388 - updated - 11:30682b960804 "3: a commit with no detailed message"
+  D8388 - updated - 14:6bc15dc99efd "four: extend the fold range"
+  D8388 - updated - 15:b50946d5e490 tip "four: extend the fold range"
   updating local commit list for D8388
   new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', '6bc15dc99efd', 'b50946d5e490']
 
@@ -939,7 +947,7 @@
   $ hg --config experimental.evolution=all --config extensions.rebase= \
   >    rebase -r '.^' -r . -d '.^^' --collapse -l log.txt
   rebasing 14:6bc15dc99efd "four: extend the fold range"
-  rebasing 15:b50946d5e490 "four: extend the fold range" (tip)
+  rebasing 15:b50946d5e490 tip "four: extend the fold range"
 
   $ hg phabsend --fold -r 8:: --test-vcr "$VCR/phabsend-fold-fold-end.json" \
   >             --config experimental.evolution=all
@@ -948,11 +956,11 @@
   3ee132d41dbc mapped to old nodes ['3ee132d41dbc']
   30682b960804 mapped to old nodes ['30682b960804']
   e919cdf3d4fe mapped to old nodes ['6bc15dc99efd', 'b50946d5e490']
-  D8388 - updated - 15e9b14b4b4c: added file
-  D8388 - updated - 6320b7d714cf: one: first commit to review
-  D8388 - updated - 3ee132d41dbc: two: second commit to review
-  D8388 - updated - 30682b960804: 3: a commit with no detailed message
-  D8388 - updated - e919cdf3d4fe: four: extend the fold range
+  D8388 - updated - 8:15e9b14b4b4c "added file"
+  D8388 - updated - 9:6320b7d714cf "one: first commit to review"
+  D8388 - updated - 10:3ee132d41dbc "two: second commit to review"
+  D8388 - updated - 11:30682b960804 "3: a commit with no detailed message"
+  D8388 - updated - 16:e919cdf3d4fe tip "four: extend the fold range"
   updating local commit list for D8388
   new commits: ['15e9b14b4b4c', '6320b7d714cf', '3ee132d41dbc', '30682b960804', 'e919cdf3d4fe']
 
@@ -987,8 +995,8 @@
   $ hg ci -m 'c2874a398f7e is my parent (generate test for phabsend)'
 
   $ hg phabsend -r 17::18  --test-vcr "$VCR/phabsend-hash-fixes.json"
-  D8945 - created - 133c1c6c6449: base review (generate test for phabsend)
-  D8946 - created - c2874a398f7e: 133c1c6c6449 is my parent (generate test for phabsend)
+  D8945 - created - 17:133c1c6c6449 "base review (generate test for phabsend)"
+  D8946 - created - 18:c2874a398f7e "133c1c6c6449 is my parent (generate test for phabsend)"
   new commits: ['f444f060f4d6']
   new commits: ['9c9290f945b1']
   restabilizing 1528c12fa2e4 as b28b20212bd4
--- a/tests/test-phases-exchange.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-phases-exchange.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1015,9 +1015,9 @@
   $ hg push -r 435b5d83910c ../mu
   pushing to ../mu
   searching for changes
-  abort: push creates new remote head 435b5d83910c!
+  abort: push creates new remote head 435b5d83910c
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head
   pushing to ../mu
   searching for changes
@@ -1322,12 +1322,12 @@
 error, but EEXIST)
 
   $ touch .hg/store/lock
-  $ hg push ../Phi --config ui.timeout=1
+  $ hg push ../Phi --config ui.timeout=1 --config ui.timeout.warn=0
   pushing to ../Phi
   waiting for lock on repository $TESTTMP/Upsilon held by ''
   abort: repository $TESTTMP/Upsilon: timed out waiting for lock held by ''
   (lock might be very busy)
-  [255]
+  [20]
   $ rm .hg/store/lock
 
   $ cd ..
--- a/tests/test-phases.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-phases.t	Tue Jan 19 21:48:43 2021 +0530
@@ -512,8 +512,8 @@
   $ mkcommit I --config phases.new-commit='babar'
   transaction abort!
   rollback completed
-  abort: phases.new-commit: not a valid phase name ('babar')
-  [255]
+  config error: phases.new-commit: not a valid phase name ('babar')
+  [30]
 Test phase command
 ===================
 
@@ -1010,7 +1010,7 @@
   $ hg up -C 1
   0 files updated, 0 files merged, 4 files removed, 0 files unresolved
   $ mkcommit C
-  created new head
+  warning: commit already existed in the repository!
   $ hg phase -r 2
   2: public
 
@@ -1027,6 +1027,7 @@
   7: draft
   $ mkcommit F
   test-debug-phase: new rev 8:  x -> 2
+  warning: commit already existed in the repository!
   test-hook-close-phase: de414268ec5ce2330c590b942fbb5ff0b0ca1a0a:   -> secret
   $ hg phase -r tip
   8: secret
@@ -1037,7 +1038,7 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ mkcommit H
   test-debug-phase: new rev 5:  x -> 2
-  created new head
+  warning: commit already existed in the repository!
   test-hook-close-phase: a030c6be5127abc010fcbff1851536552e6951a8:   -> secret
   $ hg phase -r 5
   5: secret
--- a/tests/test-profile.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-profile.t	Tue Jan 19 21:48:43 2021 +0530
@@ -64,19 +64,19 @@
 
 Install an extension that can sleep and guarantee a profiler has time to run
 
-  $ cat >> sleepext.py << EOF
+  $ cat >> sleepext_with_a_long_filename.py << EOF
   > import time
   > from mercurial import registrar
   > cmdtable = {}
   > command = registrar.command(cmdtable)
   > @command(b'sleep', [], b'hg sleep')
-  > def sleep(ui, *args, **kwargs):
+  > def sleep_for_at_least_one_stat_cycle(ui, *args, **kwargs):
   >     time.sleep(0.1)
   > EOF
 
   $ cat >> $HGRCPATH << EOF
   > [extensions]
-  > sleep = `pwd`/sleepext.py
+  > sleep = `pwd`/sleepext_with_a_long_filename.py
   > EOF
 
 statistical profiler works
@@ -90,7 +90,7 @@
   $ grep -v _path_stat ../out | head -n 3
     %   cumulative      self          
    time    seconds   seconds  name    
-  * sleepext.py:*:sleep (glob)
+  * sleepext_with_a_long_filename.py:*:sleep_for_at_least_one_stat_cycle (glob)
   $ cat ../out | statprofran
 
   $ hg --profile --config profiling.statformat=bymethod sleep 2>../out || cat ../out
@@ -100,6 +100,8 @@
 
   $ hg --profile --config profiling.statformat=hotpath sleep 2>../out || cat ../out
   $ cat ../out | statprofran
+  $ grep sleepext_with_a_long_filename.py ../out
+  .* [0-9.]+%  [0-9.]+s  sleepext_with_a_long_filename.py:\s*sleep_for_at_least_one_stat_cycle, line 7:    time\.sleep.* (re)
 
   $ hg --profile --config profiling.statformat=json sleep 2>../out || cat ../out
   $ cat ../out
--- a/tests/test-progress.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-progress.t	Tue Jan 19 21:48:43 2021 +0530
@@ -203,17 +203,23 @@
 
 test interaction with ui.timestamp-output
 
+XXX: The timestamp on Windows with py2 hg is in 1970, and py3 hg is now.  But
+the py2/py3 checks here test the test runner, not the binary.  The Windows lines
+can be dropped when switching to py3-only.
+
   $ hg loop --warn --config ui.timestamp-output=true 6
   \r (no-eol) (esc)
   loop [                                                ] 0/6\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
-  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] reached step 0 (re)
+  [*T*] reached step 0 (glob) (windows !)
+  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] reached step 0 (re) (no-windows !)
   \r (no-eol) (esc)
   loop [=======>                                        ] 1/6\r (no-eol) (esc)
   loop [===============>                                ] 2/6\r (no-eol) (esc)
   loop [=======================>                        ] 3/6\r (no-eol) (esc)
                                                               \r (no-eol) (esc)
-  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] reached step 3 (re)
+  [*T*] reached step 3 (glob) (windows !)
+  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] reached step 3 (re) (no-windows !)
   \r (no-eol) (esc)
   loop [===============================>                ] 4/6\r (no-eol) (esc)
   loop [=======================================>        ] 5/6\r (no-eol) (esc)
--- a/tests/test-pull-bundle.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pull-bundle.t	Tue Jan 19 21:48:43 2021 +0530
@@ -2,31 +2,30 @@
 
   $ hg init repo
   $ cd repo
-  $ echo foo > foo
-  $ hg ci -qAm 'add foo'
-  $ echo >> foo
-  $ hg ci -m 'change foo'
-  $ hg up -qC 0
-  $ echo bar > bar
-  $ hg ci -qAm 'add bar'
+  $ hg debugbuilddag '+3<3+1'
 
   $ hg log
-  changeset:   2:effea6de0384
+  changeset:   3:6100d3090acf
   tag:         tip
-  parent:      0:bbd179dfa0a7
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     add bar
+  parent:      0:1ea73414a91b
+  user:        debugbuilddag
+  date:        Thu Jan 01 00:00:03 1970 +0000
+  summary:     r3
   
-  changeset:   1:ed1b79f46b9a
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     change foo
+  changeset:   2:01241442b3c2
+  user:        debugbuilddag
+  date:        Thu Jan 01 00:00:02 1970 +0000
+  summary:     r2
   
-  changeset:   0:bbd179dfa0a7
-  user:        test
+  changeset:   1:66f7d451a68b
+  user:        debugbuilddag
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     r1
+  
+  changeset:   0:1ea73414a91b
+  user:        debugbuilddag
   date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     add foo
+  summary:     r0
   
   $ cd ..
 
@@ -47,10 +46,13 @@
   1 changesets found
   $ hg bundle --base 1 -r 2 .hg/2.hg
   1 changesets found
+  $ hg bundle --base 1 -r 3 .hg/3.hg
+  1 changesets found
   $ cat <<EOF > .hg/pullbundles.manifest
-  > 2.hg BUNDLESPEC=none-v2 heads=effea6de0384e684f44435651cb7bd70b8735bd4 bases=bbd179dfa0a71671c253b3ae0aa1513b60d199fa
-  > 1.hg BUNDLESPEC=bzip2-v2 heads=ed1b79f46b9a29f5a6efa59cf12fcfca43bead5a bases=bbd179dfa0a71671c253b3ae0aa1513b60d199fa
-  > 0.hg BUNDLESPEC=gzip-v2 heads=bbd179dfa0a71671c253b3ae0aa1513b60d199fa
+  > 3.hg BUNDLESPEC=none-v2 heads=6100d3090acf50ed11ec23196cec20f5bd7323aa bases=1ea73414a91b0920940797d8fc6a11e447f8ea1e
+  > 2.hg BUNDLESPEC=none-v2 heads=01241442b3c2bf3211e593b549c655ea65b295e3 bases=66f7d451a68b85ed82ff5fcc254daf50c74144bd
+  > 1.hg BUNDLESPEC=bzip2-v2 heads=66f7d451a68b85ed82ff5fcc254daf50c74144bd bases=1ea73414a91b0920940797d8fc6a11e447f8ea1e
+  > 0.hg BUNDLESPEC=gzip-v2 heads=1ea73414a91b0920940797d8fc6a11e447f8ea1e
   > EOF
   $ hg --config blackbox.track=debug --debug serve -p $HGPORT2 -d --pid-file=../repo.pid -E ../error.txt
   listening at http://*:$HGPORT2/ (bound to $LOCALIP:$HGPORT2) (glob) (?)
@@ -60,10 +62,10 @@
   adding changesets
   adding manifests
   adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets bbd179dfa0a7 (1 drafts)
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b (1 drafts)
   updating to branch default
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cat error.txt
   $ cd repo.pullbundle
   $ hg pull -r 1
@@ -72,24 +74,24 @@
   adding changesets
   adding manifests
   adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets ed1b79f46b9a (1 drafts)
+  added 1 changesets with 0 changes to 0 files
+  new changesets 66f7d451a68b (1 drafts)
   (run 'hg update' to get a working copy)
-  $ hg pull -r 2
+  $ hg pull -r 3
   pulling from http://localhost:$HGPORT2/
   searching for changes
   adding changesets
   adding manifests
   adding file changes
-  added 1 changesets with 1 changes to 1 files (+1 heads)
-  new changesets effea6de0384 (1 drafts)
+  added 1 changesets with 0 changes to 0 files (+1 heads)
+  new changesets 6100d3090acf (1 drafts)
   (run 'hg heads' to see heads, 'hg merge' to merge)
   $ cd ..
   $ killdaemons.py
   $ grep 'sending pullbundle ' repo/.hg/blackbox.log
   * sending pullbundle "0.hg" (glob)
   * sending pullbundle "1.hg" (glob)
-  * sending pullbundle "2.hg" (glob)
+  * sending pullbundle "3.hg" (glob)
   $ rm repo/.hg/blackbox.log
 
 Test pullbundle functionality for incremental pulls
@@ -110,15 +112,19 @@
   adding changesets
   adding manifests
   adding file changes
-  added 3 changesets with 3 changes to 3 files (+1 heads)
-  new changesets bbd179dfa0a7:ed1b79f46b9a (3 drafts)
+  adding changesets
+  adding manifests
+  adding file changes
+  added 4 changesets with 0 changes to 0 files (+1 heads)
+  new changesets 1ea73414a91b:01241442b3c2 (4 drafts)
   updating to branch default
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ killdaemons.py
   $ grep 'sending pullbundle ' repo/.hg/blackbox.log
   * sending pullbundle "0.hg" (glob)
+  * sending pullbundle "3.hg" (glob)
+  * sending pullbundle "1.hg" (glob)
   * sending pullbundle "2.hg" (glob)
-  * sending pullbundle "1.hg" (glob)
   $ rm repo/.hg/blackbox.log
 
 Test pullbundle functionality for incoming
@@ -132,19 +138,19 @@
   adding changesets
   adding manifests
   adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets bbd179dfa0a7 (1 drafts)
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b (1 drafts)
   updating to branch default
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd repo.pullbundle2a
-  $ hg incoming -r ed1b79f46b9a
+  $ hg incoming -r 66f7d451a68b
   comparing with http://localhost:$HGPORT2/
   searching for changes
-  changeset:   1:ed1b79f46b9a
+  changeset:   1:66f7d451a68b
   tag:         tip
-  user:        test
-  date:        Thu Jan 01 00:00:00 1970 +0000
-  summary:     change foo
+  user:        debugbuilddag
+  date:        Thu Jan 01 00:00:01 1970 +0000
+  summary:     r1
   
   $ cd ..
   $ killdaemons.py
@@ -157,8 +163,8 @@
 
   $ cd repo
   $ cat <<EOF > .hg/pullbundles.manifest
-  > 0.hg heads=ed1b79f46b9a29f5a6efa59cf12fcfca43bead5a bases=bbd179dfa0a71671c253b3ae0aa1513b60d199fa
-  > 0.hg heads=bbd179dfa0a71671c253b3ae0aa1513b60d199fa
+  > 0.hg heads=66f7d451a68b85ed82ff5fcc254daf50c74144bd bases=1ea73414a91b0920940797d8fc6a11e447f8ea1e
+  > 0.hg heads=1ea73414a91b0920940797d8fc6a11e447f8ea1e
   > EOF
   $ hg --config blackbox.track=debug --debug serve -p $HGPORT2 -d --pid-file=../repo.pid
   listening at http://*:$HGPORT2/ (bound to $LOCALIP:$HGPORT2) (glob) (?)
@@ -168,10 +174,10 @@
   adding changesets
   adding manifests
   adding file changes
-  added 1 changesets with 1 changes to 1 files
-  new changesets bbd179dfa0a7 (1 drafts)
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b (1 drafts)
   updating to branch default
-  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd repo.pullbundle3
   $ hg pull -r 1
   pulling from http://localhost:$HGPORT2/
@@ -179,9 +185,8 @@
   adding changesets
   adding manifests
   adding file changes
-  added 0 changesets with 0 changes to 1 files
-  abort: 00changelog.i@ed1b79f46b9a: no node!
-  [255]
+  abort: 00changelog.i@66f7d451a68b: no node
+  [50]
   $ cd ..
   $ killdaemons.py
   $ grep 'sending pullbundle ' repo/.hg/blackbox.log
@@ -193,7 +198,6 @@
 
   $ hg --repo repo debugobsolete ed1b79f46b9a29f5a6efa59cf12fcfca43bead5a
   1 new obsolescence markers
-  obsoleted 1 changesets
   $ hg serve --repo repo --config server.view=visible -p $HGPORT -d --pid-file=hg.pid -E errors.log
   $ cat hg.pid >> $DAEMON_PIDS
   $ hg clone http://localhost:$HGPORT repo-obs
@@ -204,8 +208,8 @@
   adding changesets
   adding manifests
   adding file changes
-  added 2 changesets with 2 changes to 2 files
-  new changesets bbd179dfa0a7:effea6de0384
+  added 1 changesets with 0 changes to 0 files
+  new changesets 1ea73414a91b (1 drafts)
   updating to branch default
-  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ killdaemons.py
--- a/tests/test-pull-r.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pull-r.t	Tue Jan 19 21:48:43 2021 +0530
@@ -111,7 +111,7 @@
 Pull a missing revision:
 
   $ hg pull -qr missing ../repo
-  abort: unknown revision 'missing'!
+  abort: unknown revision 'missing'
   [255]
 
 Pull multiple revisions with update:
--- a/tests/test-pull-update.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pull-update.t	Tue Jan 19 21:48:43 2021 +0530
@@ -29,7 +29,7 @@
   new changesets 107cefe13e42
   1 local changesets published
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg --config extensions.strip= strip --no-backup tip
   $ hg co -qC tip
 
--- a/tests/test-pull.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-pull.t	Tue Jan 19 21:48:43 2021 +0530
@@ -68,17 +68,17 @@
 
   $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy'
   pulling from http://foo@localhost:$HGPORT/
-  abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'!
+  abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'
   [255]
   $ hg pull -r 'xxxxxxxxxxxxxxxxxx y'
   pulling from http://foo@localhost:$HGPORT/
-  abort: unknown revision 'xxxxxxxxxxxxxxxxxx y'!
+  abort: unknown revision 'xxxxxxxxxxxxxxxxxx y'
   [255]
 
 Test pull of working copy revision
   $ hg pull -r 'ffffffffffff'
   pulling from http://foo@localhost:$HGPORT/
-  abort: unknown revision 'ffffffffffff'!
+  abort: unknown revision 'ffffffffffff'
   [255]
 
 Issue622: hg init && hg pull -u URL doesn't checkout default branch
@@ -140,11 +140,11 @@
   [255]
   $ hg pull 'ssh://fakehost|touch${IFS}owned/path'
   pulling from ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg --config ui.timestamp-output=true pull 'ssh://fakehost%7Ctouch%20owned/path'
   \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] pulling from ssh://fakehost%7Ctouch%20owned/path (re)
-  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] abort: no suitable response from remote hg! (re)
+  \[20[2-9][0-9]-[01][0-9]-[0-3][0-9]T[0-5][0-9]:[0-5][0-9]:[0-5][0-9]\.[0-9][0-9][0-9][0-9][0-9][0-9]\] abort: no suitable response from remote hg (re)
   [255]
 
   $ [ ! -f owned ] || echo 'you got owned'
--- a/tests/test-push-checkheads-multibranches-E3.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-multibranches-E3.t	Tue Jan 19 21:48:43 2021 +0530
@@ -87,8 +87,8 @@
   $ hg push -r 'desc("C1")'
   pushing to $TESTTMP/E1/server
   searching for changes
-  abort: push creates new remote head dc44c53142f0!
+  abort: push creates new remote head dc44c53142f0
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-partial-C1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-partial-C1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -78,8 +78,8 @@
   $ hg push
   pushing to $TESTTMP/C1/server
   searching for changes
-  abort: push creates new remote head 25c56d33e4c4!
+  abort: push creates new remote head 25c56d33e4c4
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-partial-C2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-partial-C2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -79,8 +79,8 @@
   $ hg push --rev 'desc(A1)'
   pushing to $TESTTMP/C2/server
   searching for changes
-  abort: push creates new remote head f6082bc4ffef!
+  abort: push creates new remote head f6082bc4ffef
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-partial-C3.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-partial-C3.t	Tue Jan 19 21:48:43 2021 +0530
@@ -78,8 +78,8 @@
   $ hg push
   pushing to $TESTTMP/C3/server
   searching for changes
-  abort: push creates new remote head 0f88766e02d6!
+  abort: push creates new remote head 0f88766e02d6
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-partial-C4.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-partial-C4.t	Tue Jan 19 21:48:43 2021 +0530
@@ -79,8 +79,8 @@
   $ hg push --rev 'desc(C0)'
   pushing to $TESTTMP/C4/server
   searching for changes
-  abort: push creates new remote head 0f88766e02d6!
+  abort: push creates new remote head 0f88766e02d6
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-unpushed-D1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -71,9 +71,9 @@
   $ hg push -r 'desc(B0)'
   pushing to $TESTTMP/D1/server
   searching for changes
-  abort: push creates new remote head 74ff5441d343!
+  abort: push creates new remote head 74ff5441d343
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
 
--- a/tests/test-push-checkheads-unpushed-D2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -92,8 +92,8 @@
   $ hg push --rev 'desc(C0)'
   pushing to $TESTTMP/D2/server
   searching for changes
-  abort: push creates new remote head 0f88766e02d6!
+  abort: push creates new remote head 0f88766e02d6
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-unpushed-D3.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D3.t	Tue Jan 19 21:48:43 2021 +0530
@@ -91,15 +91,15 @@
   $ hg push --rev 'desc(A1)'
   pushing to $TESTTMP/D3/server
   searching for changes
-  abort: push creates new remote head f6082bc4ffef!
+  abort: push creates new remote head f6082bc4ffef
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg push --rev 'desc(B1)'
   pushing to $TESTTMP/D3/server
   searching for changes
-  abort: push creates new remote head 25c56d33e4c4!
+  abort: push creates new remote head 25c56d33e4c4
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 Extra testing
 -------------
@@ -109,8 +109,8 @@
   $ hg push
   pushing to $TESTTMP/D3/server
   searching for changes
-  abort: push creates new remote head 25c56d33e4c4!
+  abort: push creates new remote head 25c56d33e4c4
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-checkheads-unpushed-D4.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D4.t	Tue Jan 19 21:48:43 2021 +0530
@@ -109,9 +109,9 @@
   $ hg push --rev 'desc(A1)'
   pushing to $TESTTMP/D4/server
   searching for changes
-  abort: push creates new remote head f6082bc4ffef!
+  abort: push creates new remote head f6082bc4ffef
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 Actual testing (existing branch only)
 ------------------------------------
--- a/tests/test-push-checkheads-unpushed-D5.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D5.t	Tue Jan 19 21:48:43 2021 +0530
@@ -98,9 +98,9 @@
   $ hg push --rev 'desc(B1)'
   pushing to $TESTTMP/D5/server
   searching for changes
-  abort: push creates new remote head 25c56d33e4c4!
+  abort: push creates new remote head 25c56d33e4c4
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg push --rev 'desc(A1)'
   pushing to $TESTTMP/D5/server
   searching for changes
--- a/tests/test-push-checkheads-unpushed-D6.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-checkheads-unpushed-D6.t	Tue Jan 19 21:48:43 2021 +0530
@@ -79,8 +79,8 @@
   $ hg push --rev 'desc(C0)'
   pushing to $TESTTMP/D6/server
   searching for changes
-  abort: push creates new remote head 0f88766e02d6!
+  abort: push creates new remote head 0f88766e02d6
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ../..
--- a/tests/test-push-http.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-http.t	Tue Jan 19 21:48:43 2021 +0530
@@ -41,7 +41,7 @@
   searching for changes
   abort: HTTP Error 403: ssl required
   % serve errors
-  [255]
+  [100]
 
 expect authorization error
 
--- a/tests/test-push-warn.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push-warn.t	Tue Jan 19 21:48:43 2021 +0530
@@ -25,15 +25,15 @@
   pushing to ../a
   abort: specified revisions evaluate to an empty set
   (use different revision arguments)
-  [255]
+  [10]
 
   $ hg push ../a
   pushing to ../a
   searching for changes
   remote has heads on branch 'default' that are not known locally: 1c9246a22a0a
-  abort: push creates new remote head 1e108cc5548c!
+  abort: push creates new remote head 1e108cc5548c
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg push --debug ../a
   pushing to ../a
@@ -49,9 +49,9 @@
   remote has heads on branch 'default' that are not known locally: 1c9246a22a0a
   new remote heads on branch 'default':
    1e108cc5548c
-  abort: push creates new remote head 1e108cc5548c!
+  abort: push creates new remote head 1e108cc5548c
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg pull ../a
   pulling from ../a
@@ -66,9 +66,9 @@
   $ hg push ../a
   pushing to ../a
   searching for changes
-  abort: push creates new remote head 1e108cc5548c!
+  abort: push creates new remote head 1e108cc5548c
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg merge
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -119,9 +119,9 @@
   $ hg push ../c
   pushing to ../c
   searching for changes
-  abort: push creates new remote head 6346d66eb9f5!
+  abort: push creates new remote head 6346d66eb9f5
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg push -r 2 ../c
   pushing to ../c
@@ -132,9 +132,9 @@
   $ hg push -r 3 ../c
   pushing to ../c
   searching for changes
-  abort: push creates new remote head a5dda829a167!
+  abort: push creates new remote head a5dda829a167
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg push -v -r 3 -r 4 ../c
   pushing to ../c
@@ -142,9 +142,9 @@
   new remote heads on branch 'default':
    a5dda829a167
    ee8fbc7a0295
-  abort: push creates new remote head a5dda829a167!
+  abort: push creates new remote head a5dda829a167
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg push -v -f -r 3 -r 4 ../c
   pushing to ../c
@@ -232,16 +232,16 @@
   $ hg push ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote branches: c!
+  abort: push creates new remote branches: c
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
   $ hg push -r 4 -r 5 ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote branches: c!
+  abort: push creates new remote branches: c
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
 
 Multiple new branches:
@@ -253,16 +253,16 @@
   $ hg push ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote branches: c, d!
+  abort: push creates new remote branches: c, d
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
   $ hg push -r 4 -r 6 ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote branches: c, d!
+  abort: push creates new remote branches: c, d
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
   $ cd ../g
 
@@ -276,9 +276,9 @@
   $ hg push -r 4 -r 7 ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote head 0b715ef6ff8f on branch 'a'!
+  abort: push creates new remote head 0b715ef6ff8f on branch 'a'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 Push replacement head on existing branches:
 
@@ -353,9 +353,9 @@
   $ hg push -r 12 -r 13 ../f
   pushing to ../f
   searching for changes
-  abort: push creates new remote branches: e!
+  abort: push creates new remote branches: e
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
 
 Using --new-branch to push new named branch:
@@ -383,7 +383,7 @@
   searching for changes
   abort: push creates new branch 'f' with multiple heads
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg push --branch f --new-branch --force ../f
   pushing to ../f
   searching for changes
@@ -425,26 +425,26 @@
   pushing to h
   searching for changes
   remote has heads on branch 'default' that are not known locally: 534543e22c29 764f8ec07b96 afe7cc7679f5 ce4212fc8847
-  abort: push creates new remote head 97bd0c84d346!
+  abort: push creates new remote head 97bd0c84d346
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg -R h up -q 0; echo x > h/b; hg -R h ci -qAmx
   $ hg -R i push h
   pushing to h
   searching for changes
   remote has heads on branch 'default' that are not known locally: 18ddb72c4590 534543e22c29 764f8ec07b96 afe7cc7679f5 and 1 others
-  abort: push creates new remote head 97bd0c84d346!
+  abort: push creates new remote head 97bd0c84d346
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
   $ hg -R i push h -v
   pushing to h
   searching for changes
   remote has heads on branch 'default' that are not known locally: 18ddb72c4590 534543e22c29 764f8ec07b96 afe7cc7679f5 ce4212fc8847
   new remote heads on branch 'default':
    97bd0c84d346
-  abort: push creates new remote head 97bd0c84d346!
+  abort: push creates new remote head 97bd0c84d346
   (pull and merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
 
 Check prepush logic with merged branches:
@@ -477,9 +477,9 @@
   $ hg -R k push -r a j
   pushing to j
   searching for changes
-  abort: push creates new remote branches: b!
+  abort: push creates new remote branches: b
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
 
 Prepush -r should not allow you to sneak in new heads:
@@ -513,9 +513,9 @@
   $ hg push ../l -b b
   pushing to ../l
   searching for changes
-  abort: push creates new remote head 451211cc22b0 on branch 'a'!
+  abort: push creates new remote head 451211cc22b0 on branch 'a'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ..
 
@@ -763,16 +763,16 @@
   $ hg push inner
   pushing to inner
   searching for changes
-  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'!
+  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg push inner -r4 -r5
   pushing to inner
   searching for changes
-  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'!
+  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ hg in inner
   comparing with inner
@@ -786,9 +786,9 @@
   pushing to inner
   searching for changes
   running fail-push hook
-  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'!
+  abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 
   $ cd ..
 
@@ -815,9 +815,9 @@
   $ hg push ../x
   pushing to ../x
   searching for changes
-  abort: push creates new remote branches: foo (1 closed)!
+  abort: push creates new remote branches: foo (1 closed)
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
 When there is more than one closed branches
   $ hg -q branch bar
@@ -828,9 +828,9 @@
   $ hg push ../x
   pushing to ../x
   searching for changes
-  abort: push creates new remote branches: bar, foo (2 closed)!
+  abort: push creates new remote branches: bar, foo (2 closed)
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
 When there are more than one new branches and not all are closed
   $ hg -q branch bar1
@@ -840,8 +840,8 @@
   $ hg push ../x
   pushing to ../x
   searching for changes
-  abort: push creates new remote branches: bar, bar1, foo (2 closed)!
+  abort: push creates new remote branches: bar, bar1, foo (2 closed)
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
 
   $ cd ..
--- a/tests/test-push.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-push.t	Tue Jan 19 21:48:43 2021 +0530
@@ -340,11 +340,11 @@
   [255]
   $ hg -R test-revflag push 'ssh://fakehost|touch${IFS}owned/path'
   pushing to ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg -R test-revflag push 'ssh://fakehost%7Ctouch%20owned/path'
   pushing to ssh://fakehost%7Ctouch%20owned/path
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ [ ! -f owned ] || echo 'you got owned'
@@ -365,7 +365,7 @@
   pushing to $TESTTMP/test-require-revs-dest
   abort: no revisions specified to push
   (did you mean "hg push -r ."?)
-  [255]
+  [10]
   $ hg push -r 0
   pushing to $TESTTMP/test-require-revs-dest
   searching for changes
@@ -385,16 +385,16 @@
   $ hg push -b default
   pushing to $TESTTMP/test-require-revs-dest
   searching for changes
-  abort: push creates new remote head [0-9a-f]+! (re)
+  abort: push creates new remote head [0-9a-f]+ (re)
   (merge or see 'hg help push' for details about pushing new heads)
-  [255]
+  [20]
 (demonstrate that even though we don't have anything to exchange, we're still
 showing the error)
   $ hg push
   pushing to $TESTTMP/test-require-revs-dest
   abort: no revisions specified to push
   (did you mean "hg push -r ."?)
-  [255]
+  [10]
   $ hg push --config paths.default:pushrev=0
   pushing to $TESTTMP/test-require-revs-dest
   searching for changes
--- a/tests/test-qrecord.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-qrecord.t	Tue Jan 19 21:48:43 2021 +0530
@@ -105,7 +105,7 @@
   interactively record a new patch
   
   (use 'hg qrecord -h' to show more help)
-  [255]
+  [10]
 
 qrecord patch (mq not present)
 
@@ -456,9 +456,9 @@
   > edit ea55e2ae468f foo bar
   > EOF
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
-  Editing (ea55e2ae468f), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (ea55e2ae468f), commit as needed now to split the change
+  (to edit ea55e2ae468f, `hg histedit --continue` after making changes)
+  [240]
   $ echo 'foo bar' > a
   $ hg qrecord -d '0 0' -m aaa a.patch <<EOF
   > y
@@ -470,4 +470,4 @@
   > EOF
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
--- a/tests/test-rebase-abort.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-abort.t	Tue Jan 19 21:48:43 2021 +0530
@@ -78,11 +78,11 @@
 
   $ hg rebase -s 3 -d 2
   rebasing 3:3163e20567cc "L1"
-  rebasing 4:46f0b057b5c0 "L2" (tip)
+  rebasing 4:46f0b057b5c0 tip "L2"
   merging common
   warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Insert unsupported advisory merge record:
 
@@ -145,11 +145,11 @@
 
   $ hg rebase -s 3 -d 2
   rebasing 3:3163e20567cc "L1"
-  rebasing 4:46f0b057b5c0 "L2" (tip)
+  rebasing 4:46f0b057b5c0 tip "L2"
   merging common
   warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ mv .hg/rebasestate .hg/rebasestate.back
   $ hg update --quiet --clean 2
@@ -215,11 +215,11 @@
   $ hg rebase -b 4 -d 2
   rebasing 3:a6484957d6b9 "B bis"
   note: not rebasing 3:a6484957d6b9 "B bis", its destination already has all its changes
-  rebasing 4:145842775fec "C1" (tip)
+  rebasing 4:145842775fec tip "C1"
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg tglog
   %  4:draft 'C1'
@@ -274,11 +274,11 @@
   
 
   $ hg rebase -d master -r foo
-  rebasing 3:6c0f977a22d8 "C" (foo tip)
+  rebasing 3:6c0f977a22d8 foo tip "C"
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg abort
   rebase aborted
   $ hg log -G --template "{rev} {desc} {bookmarks}"
@@ -314,9 +314,9 @@
   created new head
 
   $ hg rebase -d @ -b foo --tool=internal:fail
-  rebasing 2:070cf4580bb5 "b2" (foo tip)
+  rebasing 2:070cf4580bb5 foo tip "b2"
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ mv .hg/rebasestate ./ # so we're allowed to hg up like in mercurial <2.6.3
   $ hg up -C 0            # user does other stuff in the repo
@@ -328,7 +328,7 @@
   $ hg up 1               # user gets an error saying to run hg rebase --abort
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
 
   $ cat a
   new
@@ -398,20 +398,20 @@
   $ hg rebase -s 3 -d tip
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
   $ hg up .
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
   $ hg up -C .
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
 
   $ hg graft 3
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
 
   $ hg abort
   saved backup bundle to $TESTTMP/interrupted/.hg/strip-backup/3d8812cf300d-93041a90-backup.hg
@@ -463,7 +463,7 @@
   $ hg rebase -d 1 --tool 'internal:fail'
   rebasing 2:e4ea5cdc9789 "conflicting 1"
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg abort
   rebase aborted
   $ hg summary
@@ -504,7 +504,7 @@
   note: not rebasing 3:0682fd3dabf5 "disappear draft", its destination already has all its changes
   warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg abort
   rebase aborted
   $ cd ..
--- a/tests/test-rebase-backup.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-backup.t	Tue Jan 19 21:48:43 2021 +0530
@@ -127,7 +127,7 @@
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --abort
   saved backup bundle to $TESTTMP/repo1/.hg/strip-backup/818c1a43c916-2b644d96-backup.hg
   rebase aborted
@@ -143,7 +143,7 @@
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --abort
   rebase aborted
   $ cd ..
--- a/tests/test-rebase-base-flag.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-base-flag.t	Tue Jan 19 21:48:43 2021 +0530
@@ -36,9 +36,9 @@
   >   |
   >   R
   > EOS
-  rebasing 3:d6003a550c2c "C" (C)
-  rebasing 5:4526cf523425 "D" (D)
-  rebasing 6:b296604d9846 "E" (E tip)
+  rebasing 3:d6003a550c2c C "C"
+  rebasing 5:4526cf523425 D "D"
+  rebasing 6:b296604d9846 E tip "E"
   o  6: 4870f5e7df37 E
   |
   | o  5: dc999528138a D
@@ -66,9 +66,9 @@
   >   |
   >   R
   > EOS
-  rebasing 2:c1e6b162678d "B" (B)
-  rebasing 3:d6003a550c2c "C" (C)
-  rebasing 6:54c8f00cb91c "E" (E tip)
+  rebasing 2:c1e6b162678d B "B"
+  rebasing 3:d6003a550c2c C "C"
+  rebasing 6:54c8f00cb91c E tip "E"
   o    6: 00598421b616 E
   |\
   | o  5: 6b3e11729672 C
@@ -94,8 +94,8 @@
   >   |
   >   R
   > EOS
-  rebasing 2:c1e6b162678d "B" (B)
-  rebasing 5:54c8f00cb91c "E" (E tip)
+  rebasing 2:c1e6b162678d B "B"
+  rebasing 5:54c8f00cb91c E tip "E"
   o    5: e583bf3ff54c E
   |\
   | o  4: 85260910e847 B
@@ -119,9 +119,9 @@
   >   |
   >   R
   > EOS
-  rebasing 2:c1e6b162678d "B" (B)
-  rebasing 3:d6003a550c2c "C" (C)
-  rebasing 5:54c8f00cb91c "E" (E tip)
+  rebasing 2:c1e6b162678d B "B"
+  rebasing 3:d6003a550c2c C "C"
+  rebasing 5:54c8f00cb91c E tip "E"
   o    5: 00598421b616 E
   |\
   | o  4: 6b3e11729672 C
@@ -167,12 +167,12 @@
   >   |
   >   R
   > EOS
-  rebasing 3:a113dbaa660a "B1" (B1)
-  rebasing 5:06ce7b1cc8c2 "B2" (B2)
-  rebasing 6:0ac98cce32d3 "C1" (C1)
-  rebasing 8:781512f5e33d "C2" (C2)
-  rebasing 9:428d8c18f641 "E1" (E1)
-  rebasing 11:e1bf82f6b6df "E2" (E2)
+  rebasing 3:a113dbaa660a B1 "B1"
+  rebasing 5:06ce7b1cc8c2 B2 "B2"
+  rebasing 6:0ac98cce32d3 C1 "C1"
+  rebasing 8:781512f5e33d C2 "C2"
+  rebasing 9:428d8c18f641 E1 "E1"
+  rebasing 11:e1bf82f6b6df E2 "E2"
   o  12: e4a37b6fdbd2 E2
   |
   o  11: 9675bea983df E1
@@ -210,19 +210,19 @@
   >  \|/  |/  |/
   >   A   A   A
   > EOS
-  rebasing 2:dc0947a82db8 "C" (C)
-  rebasing 8:4e4f9194f9f1 "D" (D)
-  rebasing 9:03ca77807e91 "E" (E)
-  rebasing 10:afc707c82df0 "F" (F)
-  rebasing 13:690dfff91e9e "G" (G)
-  rebasing 14:2893b886bb10 "H" (H)
-  rebasing 3:08ebfeb61bac "I" (I)
-  rebasing 4:a0a5005cec67 "J" (J)
-  rebasing 5:83780307a7e8 "K" (K)
-  rebasing 6:e131637a1cb6 "L" (L)
-  rebasing 11:d1f6d0c3c7e4 "M" (M)
-  rebasing 12:7aaec6f81888 "N" (N)
-  rebasing 15:325bc8f1760d "P" (P tip)
+  rebasing 2:dc0947a82db8 C "C"
+  rebasing 8:4e4f9194f9f1 D "D"
+  rebasing 9:03ca77807e91 E "E"
+  rebasing 10:afc707c82df0 F "F"
+  rebasing 13:690dfff91e9e G "G"
+  rebasing 14:2893b886bb10 H "H"
+  rebasing 3:08ebfeb61bac I "I"
+  rebasing 4:a0a5005cec67 J "J"
+  rebasing 5:83780307a7e8 K "K"
+  rebasing 6:e131637a1cb6 L "L"
+  rebasing 11:d1f6d0c3c7e4 M "M"
+  rebasing 12:7aaec6f81888 N "N"
+  rebasing 15:325bc8f1760d P tip "P"
   o    15: 6ef6a0ea3b18 P
   |\
   | o    14: 20ba3610a7e5 N
@@ -268,13 +268,13 @@
   > |
   > M0
   > EOF
-  rebasing 4:8817fae53c94 "C0" (C0)
-  rebasing 6:06ca5dfe3b5b "B2" (B2)
-  rebasing 7:73508237b032 "C1" (C1)
-  rebasing 9:fdb955e2faed "A2" (A2)
-  rebasing 11:4e449bd1a643 "A3" (A3)
-  rebasing 10:0a33b0519128 "B1" (B1)
-  rebasing 12:209327807c3a "B3" (B3 tip)
+  rebasing 4:8817fae53c94 C0 "C0"
+  rebasing 6:06ca5dfe3b5b B2 "B2"
+  rebasing 7:73508237b032 C1 "C1"
+  rebasing 9:fdb955e2faed A2 "A2"
+  rebasing 11:4e449bd1a643 A3 "A3"
+  rebasing 10:0a33b0519128 B1 "B1"
+  rebasing 12:209327807c3a B3 tip "B3"
   o    12: ceb984566332 B3
   |\
   | o  11: 19d93caac497 B1
@@ -318,8 +318,8 @@
   >  \|\|
   >   C A
   > EOF
-  rebasing 2:112478962961 "B" (B)
-  rebasing 3:b70f76719894 "D" (D)
+  rebasing 2:112478962961 B "B"
+  rebasing 3:b70f76719894 D "D"
   o  4: 511efad7bf13 D
   |
   | o  3: 25c4e279af62 B
@@ -349,8 +349,8 @@
   >  \|\|
   >   A C
   > EOF
-  rebasing 2:f675d5a1c6a4 "B" (B)
-  rebasing 5:f68696fe6af8 "E" (E tip)
+  rebasing 2:f675d5a1c6a4 B "B"
+  rebasing 5:f68696fe6af8 E tip "E"
   o    5: f6e6f5081554 E
   |\
   | o    4: 30cabcba27be B
@@ -370,8 +370,8 @@
   >  \|\|\
   >   A C A
   > EOF
-  rebasing 2:f675d5a1c6a4 "B" (B)
-  rebasing 3:c2a779e13b56 "D" (D)
+  rebasing 2:f675d5a1c6a4 B "B"
+  rebasing 3:c2a779e13b56 D "D"
   o    4: 5eecd056b5f8 D
   |\
   +---o  3: 30cabcba27be B
--- a/tests/test-rebase-bookmarks.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-bookmarks.t	Tue Jan 19 21:48:43 2021 +0530
@@ -75,7 +75,7 @@
   o  0: 1994f17a630e 'A' bookmarks: Y@diverge
   
   $ hg rebase -s Y -d 3
-  rebasing 2:49cb3485fa0c "C" (Y Z)
+  rebasing 2:49cb3485fa0c Y Z "C"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-rebase.hg
 
   $ hg tglog
@@ -97,7 +97,7 @@
   $ hg book W@diverge
 
   $ hg rebase -s W -d .
-  rebasing 3:41acb9dca9eb "D" (W tip)
+  rebasing 3:41acb9dca9eb W tip "D"
   saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-rebase.hg
 
   $ hg bookmarks
@@ -115,8 +115,8 @@
   $ hg up -q Z
 
   $ hg rebase -s 1 -d 3
-  rebasing 1:6c81ed0049f8 "B" (X)
-  rebasing 2:49cb3485fa0c "C" (Y Z)
+  rebasing 1:6c81ed0049f8 X "B"
+  rebasing 2:49cb3485fa0c Y Z "C"
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg
 
   $ hg tglog
@@ -138,8 +138,8 @@
   $ hg up -q X
 
   $ hg rebase -d W
-  rebasing 1:6c81ed0049f8 "B" (X)
-  rebasing 2:49cb3485fa0c "C" (Y Z)
+  rebasing 1:6c81ed0049f8 X "B"
+  rebasing 2:49cb3485fa0c Y Z "C"
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-rebase.hg
 
   $ hg tglog
@@ -169,17 +169,17 @@
   $ hg up 3
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg rebase --dest 4
-  rebasing 3:3d5fa227f4b5 "C" (Y Z)
+  rebasing 3:3d5fa227f4b5 Y Z "C"
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ echo 'c' > c
   $ hg resolve --mark c
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 3:3d5fa227f4b5 "C" (Y Z)
+  rebasing 3:3d5fa227f4b5 Y Z "C"
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-rebase.hg
   $ hg tglog
   @  4: 45c0f0ec1203 'C' bookmarks: Y Z
@@ -209,7 +209,7 @@
   $ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z
   rebasing 5:345c90f326a4 "bisect"
   rebasing 6:f677a2907404 "bisect2"
-  rebasing 7:325c16001345 "bisect3" (bisect tip)
+  rebasing 7:325c16001345 bisect tip "bisect3"
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-rebase.hg
 
 Bookmark and working parent get moved even if --keep is set (issue5682)
@@ -232,7 +232,7 @@
   o  0: 426bada5c675 'A' bookmarks: A
   
   $ hg rebase -r B -d C --keep
-  rebasing 1:112478962961 "B" (B)
+  rebasing 1:112478962961 B "B"
   $ hg tglog
   @  3: 9769fc65c4c5 'B' bookmarks: B
   |
--- a/tests/test-rebase-cache.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-cache.t	Tue Jan 19 21:48:43 2021 +0530
@@ -165,7 +165,7 @@
   o  0: 'A'
   
   $ hg rebase -s 8 -d 6
-  rebasing 8:4666b71e8e32 "F" (tip)
+  rebasing 8:4666b71e8e32 tip "F"
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-rebase.hg
 
   $ hg branches
@@ -232,7 +232,7 @@
   $ hg rebase -s 7 -d 6
   rebasing 7:653b9feb4616 "branch3"
   note: not rebasing 7:653b9feb4616 "branch3", its destination already has all its changes
-  rebasing 8:4666b71e8e32 "F" (tip)
+  rebasing 8:4666b71e8e32 tip "F"
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-rebase.hg
 
   $ hg branches
@@ -478,6 +478,6 @@
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --dest 7 --source 5 -e
   rebasing 5:361a99976cc9 "F"
   HGEDITFORM=rebase.merge
-  rebasing 8:326cfedc031c "I" (tip)
+  rebasing 8:326cfedc031c tip "I"
   HGEDITFORM=rebase.normal
   saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-rebase.hg
--- a/tests/test-rebase-check-restore.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-check-restore.t	Tue Jan 19 21:48:43 2021 +0530
@@ -70,7 +70,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Solve the conflict and go on:
 
@@ -123,11 +123,11 @@
   o  0:draft 'A'
   
   $ hg rebase -s 5 -d 4 --keepbranches
-  rebasing 5:01e6ebbd8272 "F" (tip)
+  rebasing 5:01e6ebbd8272 tip "F"
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Solve the conflict and go on:
 
@@ -137,7 +137,7 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 5:01e6ebbd8272 "F" (tip)
+  rebasing 5:01e6ebbd8272 tip "F"
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-rebase.hg
 
   $ hg tglog
--- a/tests/test-rebase-collapse.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-collapse.t	Tue Jan 19 21:48:43 2021 +0530
@@ -35,9 +35,9 @@
   > echo "edited manually" >> \$1
   > EOF
   $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e --source B --dest F
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:26805aba1e60 "C" (C)
-  rebasing 5:f585351a92f8 "D" (D tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:26805aba1e60 C "C"
+  rebasing 5:f585351a92f8 D tip "D"
   ==== before editing
   Collapsed revision
   * B
@@ -96,8 +96,8 @@
 
   $ hg phase --force --secret D
   $ hg rebase --source B --collapse --dest F
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:4e4f9194f9f1 "D" (D)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:4e4f9194f9f1 D "D"
   saved backup bundle to $TESTTMP/linearized-merge/.hg/strip-backup/112478962961-e389075b-rebase.hg
 
   $ hg tglog
@@ -141,8 +141,8 @@
   > true
   > EOF
   $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source B --collapse -m 'custom message' -e --dest D
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:26805aba1e60 "C" (C tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:26805aba1e60 C tip "C"
   HGEDITFORM=rebase.collapse
   saved backup bundle to $TESTTMP/message/.hg/strip-backup/112478962961-f4131707-rebase.hg
 
@@ -185,9 +185,9 @@
 Rebase and collapse - E onto H:
 
   $ hg rebase -s E --dest H --collapse # root (E) is not a merge
-  rebasing 5:49cb92066bfd "E" (E)
-  rebasing 6:11abe3fb10b8 "F" (F)
-  rebasing 7:64e264db77f0 "G" (G tip)
+  rebasing 5:49cb92066bfd E "E"
+  rebasing 6:11abe3fb10b8 F "F"
+  rebasing 7:64e264db77f0 G tip "G"
   saved backup bundle to $TESTTMP/multiple-external-parents/.hg/strip-backup/49cb92066bfd-ee8a8a79-rebase.hg
 
   $ hg tglog
@@ -287,21 +287,21 @@
   > EOF
 
   $ hg rebase -s F --dest I --collapse # root (F) is not a merge
-  rebasing 6:c82b08f646f1 "F" (F)
+  rebasing 6:c82b08f646f1 F "F"
   file 'E' was deleted in local [dest] but was modified in other [source].
   You can use (c)hanged version, leave (d)eleted, or leave (u)nresolved.
   What do you want to do? u
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ echo F > E
   $ hg resolve -m
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase -c
-  rebasing 6:c82b08f646f1 "F" (F)
-  rebasing 7:a6db7fa104e1 "G" (G)
-  rebasing 8:e1d201b72d91 "H" (H tip)
+  rebasing 6:c82b08f646f1 F "F"
+  rebasing 7:a6db7fa104e1 G "G"
+  rebasing 8:e1d201b72d91 H tip "H"
   saved backup bundle to $TESTTMP/external-parent/.hg/strip-backup/c82b08f646f1-f2721fbf-rebase.hg
 
   $ hg tglog
@@ -347,8 +347,8 @@
   > A
   > EOF
   $ hg rebase --collapse -r 'B+C' -d D
-  rebasing 1:fc2b737bb2e5 "B" (B)
-  rebasing 2:dc0947a82db8 "C" (C)
+  rebasing 1:fc2b737bb2e5 B "B"
+  rebasing 2:dc0947a82db8 C "C"
   saved backup bundle to $TESTTMP/multiple-bases/.hg/strip-backup/dc0947a82db8-b0c1a7ea-rebase.hg
   $ hg tglog
   o  2: 2127ae44d291 'Collapsed revision
@@ -424,10 +424,10 @@
 
 
   $ hg rebase -s B --collapse --dest F
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:26805aba1e60 "C" (C)
-  rebasing 4:be0ef73c17ad "D" (D)
-  rebasing 5:02c4367d6973 "E" (E tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:26805aba1e60 C "C"
+  rebasing 4:be0ef73c17ad D "D"
+  rebasing 5:02c4367d6973 E tip "E"
   saved backup bundle to $TESTTMP/internal-merge/.hg/strip-backup/112478962961-1dfb057b-rebase.hg
 
   $ hg tglog
@@ -514,7 +514,7 @@
   merging a and d to d
   merging b and e to e
   merging c and f to f
-  rebasing 3:338e84e2e558 "move2" (tip)
+  rebasing 3:338e84e2e558 tip "move2"
   merging f and c to c
   merging e and g to g
   saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-rebase.hg
@@ -551,13 +551,13 @@
   $ hg rebase --collapse -r 1 -d 0
   abort: cannot rebase changeset with children
   (use --keep to keep original changesets)
-  [255]
+  [10]
 
 Test collapsing in place
 
   $ hg rebase --collapse -b . -d 0
   rebasing 1:1352765a01d4 "change"
-  rebasing 2:64b456429f67 "Collapsed revision" (tip)
+  rebasing 2:64b456429f67 tip "Collapsed revision"
   saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-rebase.hg
   $ hg st --change tip --copies
   M a
@@ -631,7 +631,7 @@
   $ hg book foo
   $ hg rebase -d 0 -r "1::2" --collapse -m collapsed
   rebasing 1:6d8d9f24eec3 "a"
-  rebasing 2:1cc73eca5ecc "b" (foo tip)
+  rebasing 2:1cc73eca5ecc foo tip "b"
   saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-rebase.hg
   $ hg log -G --template "{rev}: '{desc}' {bookmarks}"
   @  1: 'collapsed' foo
@@ -655,17 +655,17 @@
   > A
   > EOF
   $ hg rebase --collapse -m "new message" -b B -d C
-  rebasing 1:81e5401e4d37 "B" (B)
+  rebasing 1:81e5401e4d37 B "B"
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ rm A.orig
   $ hg resolve --mark A
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 1:81e5401e4d37 "B" (B)
+  rebasing 1:81e5401e4d37 B "B"
   saved backup bundle to $TESTTMP/collapse_remember_message/.hg/strip-backup/81e5401e4d37-96c3dd30-rebase.hg
   $ hg log
   changeset:   2:17186933e123
@@ -702,11 +702,11 @@
   > A
   > EOF
   $ hg rebase --collapse -t internal:merge3 -s B -d D
-  rebasing 1:f899f3910ce7 "B" (B)
+  rebasing 1:f899f3910ce7 B "B"
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg tglog
   o  3: 63668d570d21 'C'
   |
@@ -729,12 +729,12 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 1:f899f3910ce7 "B" (B)
-  rebasing 3:63668d570d21 "C" (C tip)
+  rebasing 1:f899f3910ce7 B "B"
+  rebasing 3:63668d570d21 C tip "C"
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg tglog
   %  3: 63668d570d21 'C'
   |
@@ -757,10 +757,10 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ HGEDITOR=false hg rebase --continue --config ui.interactive=1
-  already rebased 1:f899f3910ce7 "B" (B) as 82b8abf9c185
-  rebasing 3:63668d570d21 "C" (C tip)
+  already rebased 1:f899f3910ce7 B "B" as 82b8abf9c185
+  rebasing 3:63668d570d21 C tip "C"
   abort: edit failed: false exited with status 1
-  [255]
+  [250]
   $ hg tglog
   o  3: 63668d570d21 'C'
   |
@@ -771,6 +771,6 @@
   o  0: 4a2df7238c3b 'A'
   
   $ hg rebase --continue
-  already rebased 1:f899f3910ce7 "B" (B) as 82b8abf9c185
-  already rebased 3:63668d570d21 "C" (C tip) as 82b8abf9c185
+  already rebased 1:f899f3910ce7 B "B" as 82b8abf9c185
+  already rebased 3:63668d570d21 C tip "C" as 82b8abf9c185
   saved backup bundle to $TESTTMP/aborted-editor/.hg/strip-backup/f899f3910ce7-7cab5e15-rebase.hg
--- a/tests/test-rebase-conflicts.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-conflicts.t	Tue Jan 19 21:48:43 2021 +0530
@@ -57,7 +57,7 @@
 
   $ hg rebase --continue
   abort: no rebase in progress
-  [255]
+  [20]
 
 Conflicting rebase:
 
@@ -67,7 +67,7 @@
   merging common
   warning: conflicts while merging common! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg status --config commands.status.verbose=1
   M common
@@ -89,7 +89,7 @@
 
   $ hg rebase --continue
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
 
 Conclude rebase:
 
@@ -100,7 +100,7 @@
   $ hg rebase --continue
   already rebased 3:3163e20567cc "L1" as 3e046f2ecedb
   rebasing 4:46f0b057b5c0 "L2"
-  rebasing 5:8029388f38dc "L3" (mybook)
+  rebasing 5:8029388f38dc mybook "L3"
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-rebase.hg
 
   $ hg tglog
@@ -261,7 +261,7 @@
   updating the branch cache
   rebased as 19c888675e13
   rebase status stored
-  rebasing 10:2f2496ddf49d "merge" (tip)
+  rebasing 10:2f2496ddf49d tip "merge"
    future parents are 11 and 7
    already in destination
    merge against 10:2f2496ddf49d
@@ -341,11 +341,11 @@
   $ echo c >> a
   $ hg commit -q -m 'abc'
   $ hg rebase -s 7bc217434fc1 -d ab --keep
-  rebasing 13:7bc217434fc1 "abc" (tip)
+  rebasing 13:7bc217434fc1 tip "abc"
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg diff
   diff -r 328e4ab1f7cc a
   --- a/a	Thu Jan 01 00:00:00 1970 +0000
@@ -361,11 +361,11 @@
   rebase aborted
   $ hg up -q -C 7bc217434fc1
   $ hg rebase -s . -d ab --keep -t internal:merge3
-  rebasing 13:7bc217434fc1 "abc" (tip)
+  rebasing 13:7bc217434fc1 tip "abc"
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg diff
   diff -r 328e4ab1f7cc a
   --- a/a	Thu Jan 01 00:00:00 1970 +0000
@@ -399,19 +399,19 @@
   $ echo 3 > B
   $ hg commit --amend -m E -A B -q
   $ hg rebase -r B+D -d . --config experimental.evolution=true
-  rebasing 1:112478962961 "B" (B)
+  rebasing 1:112478962961 B "B"
   merging B
   warning: conflicts while merging B! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ echo 4 > B
   $ hg resolve -m
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue --config experimental.evolution=none
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:f585351a92f8 "D" (D)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:f585351a92f8 D "D"
   warning: orphaned descendants detected, not stripping 112478962961
   saved backup bundle to $TESTTMP/b/.hg/strip-backup/f585351a92f8-e536a9e4-rebase.hg
 
@@ -448,14 +448,14 @@
   $ hg co F
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg rebase -d B
-  rebasing 2:dc0947a82db8 "C" (C)
-  rebasing 3:e7b3f00ed42e "D" (D)
-  rebasing 4:03ca77807e91 "E" (E)
-  rebasing 5:9a6b91dc2044 "F" (F tip)
+  rebasing 2:dc0947a82db8 C "C"
+  rebasing 3:e7b3f00ed42e D "D"
+  rebasing 4:03ca77807e91 E "E"
+  rebasing 5:9a6b91dc2044 F tip "F"
   merging conflict
   warning: conflicts while merging conflict! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg tglog
   @  8:draft 'E'
   |
@@ -480,10 +480,10 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase -c
-  already rebased 2:dc0947a82db8 "C" (C) as 0199610c343e
-  already rebased 3:e7b3f00ed42e "D" (D) as f0dd538aaa63
-  already rebased 4:03ca77807e91 "E" (E) as cbf25af8347d
-  rebasing 5:9a6b91dc2044 "F" (F)
+  already rebased 2:dc0947a82db8 C "C" as 0199610c343e
+  already rebased 3:e7b3f00ed42e D "D" as f0dd538aaa63
+  already rebased 4:03ca77807e91 E "E" as cbf25af8347d
+  rebasing 5:9a6b91dc2044 F "F"
   saved backup bundle to $TESTTMP/conflict-in-merge/.hg/strip-backup/dc0947a82db8-ca7e7d5b-rebase.hg
   $ hg tglog
   @    5:draft 'F'
--- a/tests/test-rebase-dest.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-dest.t	Tue Jan 19 21:48:43 2021 +0530
@@ -20,15 +20,15 @@
   (use: hg rebase -d REV)
   [255]
   $ hg rebase -d 1
-  rebasing 2:5db65b93a12b "cc" (tip)
+  rebasing 2:5db65b93a12b tip "cc"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/5db65b93a12b-4fb789ec-rebase.hg
   $ hg rebase -d 0 -r . -q
   $ HGPLAIN=1 hg rebase
-  rebasing 2:889b0bc6a730 "cc" (tip)
+  rebasing 2:889b0bc6a730 tip "cc"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/889b0bc6a730-41ec4f81-rebase.hg
   $ hg rebase -d 0 -r . -q
   $ hg --config commands.rebase.requiredest=False rebase
-  rebasing 2:279de9495438 "cc" (tip)
+  rebasing 2:279de9495438 tip "cc"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/279de9495438-ab0a5128-rebase.hg
 
 Requiring dest should not break continue or other rebase options
@@ -45,17 +45,17 @@
   o  0 aa
   
   $ hg rebase -d 2
-  rebasing 3:0537f6b50def "dc" (tip)
+  rebasing 3:0537f6b50def tip "dc"
   merging c
   warning: conflicts while merging c! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ echo d > c
   $ hg resolve --mark --all
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 3:0537f6b50def "dc" (tip)
+  rebasing 3:0537f6b50def tip "dc"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0537f6b50def-be4c7386-rebase.hg
 
   $ cd ..
@@ -161,7 +161,7 @@
   > | |
   > A D
   > EOS
-  abort: unknown revision 'SRC'!
+  abort: unknown revision 'SRC'
   [255]
 
 Rebase to null should work:
@@ -171,9 +171,9 @@
   > | |
   > A B
   > EOS
-  already rebased 0:426bada5c675 "A" (A)
-  already rebased 2:dc0947a82db8 "C" (C)
-  rebasing 3:004dc1679908 "D" (D tip)
+  already rebased 0:426bada5c675 A "A"
+  already rebased 2:dc0947a82db8 C "C"
+  rebasing 3:004dc1679908 D tip "D"
   o  4: d8d8601abd5e D
   
   o  2: dc0947a82db8 C
@@ -227,10 +227,10 @@
   > |  |
   > A  D
   > EOS
-  rebasing 2:112478962961 "B" (B)
-  rebasing 4:26805aba1e60 "C" (C)
-  rebasing 3:cd488e83d208 "E" (E)
-  rebasing 5:0069ba24938a "F" (F tip)
+  rebasing 2:112478962961 B "B"
+  rebasing 4:26805aba1e60 C "C"
+  rebasing 3:cd488e83d208 E "E"
+  rebasing 5:0069ba24938a F tip "F"
   o  9: d150ff263fc8 F
   |
   o  8: 66f30a1a2eab E
@@ -254,8 +254,8 @@
   > |\|
   > A D
   > EOS
-  rebasing 3:a4256619d830 "B" (B)
-  rebasing 6:8e139e245220 "C" (C tip)
+  rebasing 3:a4256619d830 B "B"
+  rebasing 6:8e139e245220 C tip "C"
   o    8: d7d1169e9b1c C
   |\
   | o    7: 2ed0c8546285 B
@@ -283,9 +283,9 @@
   >   |/
   >   A
   > EOS
-  rebasing 4:33441538d4aa "F" (F)
-  rebasing 6:cf43ad9da869 "G" (G)
-  rebasing 7:eef94f3b5f03 "H" (H tip)
+  rebasing 4:33441538d4aa F "F"
+  rebasing 6:cf43ad9da869 G "G"
+  rebasing 7:eef94f3b5f03 H tip "H"
   o  10: b3d84c6666cf H
   |
   | o  5: f585351a92f8 D
@@ -309,8 +309,8 @@
   >  \|/
   >   A
   > EOS
-  rebasing 2:dc0947a82db8 "C" (C)
-  rebasing 1:112478962961 "B" (B)
+  rebasing 2:dc0947a82db8 C "C"
+  rebasing 1:112478962961 B "B"
   o  5: 5fe9935d5222 B
   |
   o  4: 12d20731b9e0 C
@@ -352,11 +352,11 @@
   >  \|/
   >   A
   > EOS
-  already rebased 1:112478962961 "B" (B)
-  already rebased 2:dc0947a82db8 "C" (C)
-  already rebased 3:b18e25de2cf5 "D" (D)
-  already rebased 4:312782b8f06e "E" (E)
-  already rebased 5:ad6717a6a58e "F" (F tip)
+  already rebased 1:112478962961 B "B"
+  already rebased 2:dc0947a82db8 C "C"
+  already rebased 3:b18e25de2cf5 D "D"
+  already rebased 4:312782b8f06e E "E"
+  already rebased 5:ad6717a6a58e F tip "F"
   o  5: ad6717a6a58e F
   |
   o  3: b18e25de2cf5 D
@@ -380,17 +380,17 @@
   >  \| |
   >   A H
   > EOS
-  rebasing 4:701514e1408d "I" (I)
-  rebasing 0:426bada5c675 "A" (A)
-  rebasing 1:e7050b6e5048 "H" (H)
-  rebasing 5:26805aba1e60 "C" (C)
-  rebasing 7:cf89f86b485b "J" (J)
-  rebasing 2:112478962961 "B" (B)
-  rebasing 3:7fb047a69f22 "E" (E)
-  rebasing 8:f585351a92f8 "D" (D)
-  rebasing 10:ae41898d7875 "K" (K tip)
-  rebasing 9:711f53bbef0b "G" (G)
-  rebasing 6:64a8289d2492 "F" (F)
+  rebasing 4:701514e1408d I "I"
+  rebasing 0:426bada5c675 A "A"
+  rebasing 1:e7050b6e5048 H "H"
+  rebasing 5:26805aba1e60 C "C"
+  rebasing 7:cf89f86b485b J "J"
+  rebasing 2:112478962961 B "B"
+  rebasing 3:7fb047a69f22 E "E"
+  rebasing 8:f585351a92f8 D "D"
+  rebasing 10:ae41898d7875 K tip "K"
+  rebasing 9:711f53bbef0b G "G"
+  rebasing 6:64a8289d2492 F "F"
   o  21: 3735afb3713a F
   |
   o  20: 07698142d7a7 G
@@ -433,12 +433,12 @@
   >      A
   > EOF
   6 new orphan changesets
-  rebasing 16:5c432343bf59 "J" (J tip)
-  rebasing 3:26805aba1e60 "C" (C)
-  rebasing 6:f585351a92f8 "D" (D)
-  rebasing 10:ffebc37c5d0b "E3" (E3)
-  rebasing 13:fb184bcfeee8 "F2" (F2)
-  rebasing 11:dc838ab4c0da "G" (G)
+  rebasing 16:5c432343bf59 J tip "J"
+  rebasing 3:26805aba1e60 C "C"
+  rebasing 6:f585351a92f8 D "D"
+  rebasing 10:ffebc37c5d0b E3 "E3"
+  rebasing 13:fb184bcfeee8 F2 "F2"
+  rebasing 11:dc838ab4c0da G "G"
   o  22: 174f63d574a8 G
   |
   o  21: c9d9fbe76705 F2
--- a/tests/test-rebase-detach.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-detach.t	Tue Jan 19 21:48:43 2021 +0530
@@ -24,7 +24,7 @@
   $ hg phase --force --secret D
 
   $ hg rebase -s D -d B
-  rebasing 3:e7b3f00ed42e "D" (D tip)
+  rebasing 3:e7b3f00ed42e D tip "D"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/e7b3f00ed42e-6f368371-rebase.hg
 
   $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n"
@@ -60,8 +60,8 @@
   > EOF
 
   $ hg rebase -s D -d B
-  rebasing 3:e7b3f00ed42e "D" (D)
-  rebasing 4:69a34c08022a "E" (E tip)
+  rebasing 3:e7b3f00ed42e D "D"
+  rebasing 4:69a34c08022a E tip "E"
   saved backup bundle to $TESTTMP/a2/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg
 
   $ hg tglog
@@ -97,8 +97,8 @@
   > EOF
 
   $ hg rebase -s C -d B
-  rebasing 2:dc0947a82db8 "C" (C)
-  rebasing 3:e7b3f00ed42e "D" (D tip)
+  rebasing 2:dc0947a82db8 C "C"
+  rebasing 3:e7b3f00ed42e D tip "D"
   saved backup bundle to $TESTTMP/a3/.hg/strip-backup/dc0947a82db8-b8481714-rebase.hg
 
   $ hg tglog
@@ -136,8 +136,8 @@
   $ hg phase --force --secret E
 
   $ hg rebase --collapse -s D -d B
-  rebasing 3:e7b3f00ed42e "D" (D)
-  rebasing 4:69a34c08022a "E" (E tip)
+  rebasing 3:e7b3f00ed42e D "D"
+  rebasing 4:69a34c08022a E tip "E"
   saved backup bundle to $TESTTMP/a4/.hg/strip-backup/e7b3f00ed42e-a2ec7cea-rebase.hg
 
   $ hg  log -G --template "{rev}:{phase} '{desc}' {branches}\n"
@@ -173,9 +173,9 @@
   > EOF
 
   $ hg rebase -s C -d B
-  rebasing 2:dc0947a82db8 "C" (C)
-  rebasing 3:e7b3f00ed42e "D" (D)
-  rebasing 4:69a34c08022a "E" (E tip)
+  rebasing 2:dc0947a82db8 C "C"
+  rebasing 3:e7b3f00ed42e D "D"
+  rebasing 4:69a34c08022a E tip "E"
   saved backup bundle to $TESTTMP/a5/.hg/strip-backup/dc0947a82db8-3eefec98-rebase.hg
 
   $ hg tglog
@@ -191,7 +191,7 @@
   
   $ hg rebase -d 1 -s 3
   rebasing 3:e9153d36a1af "D"
-  rebasing 4:e3d0c70d606d "E" (tip)
+  rebasing 4:e3d0c70d606d tip "E"
   saved backup bundle to $TESTTMP/a5/.hg/strip-backup/e9153d36a1af-db7388ed-rebase.hg
   $ hg tglog
   o  4: 2c24e540eccd 'E'
@@ -248,9 +248,9 @@
   o  0: 426bada5c675 'A'
   
   $ hg rebase -s I -d H --collapse --config ui.merge=internal:other
-  rebasing 5:b92d164ad3cb "I" (I)
+  rebasing 5:b92d164ad3cb I "I"
   rebasing 6:0cfbc7e8faaf "Merge"
-  rebasing 7:c6aaf0d259c0 "J" (tip)
+  rebasing 7:c6aaf0d259c0 tip "J"
   saved backup bundle to $TESTTMP/a6/.hg/strip-backup/b92d164ad3cb-88fd7ab7-rebase.hg
 
   $ hg tglog
@@ -294,17 +294,17 @@
   adding B
   $ hg phase --force --secret .
   $ hg rebase -s . -d B --config ui.merge=internal:merge
-  rebasing 3:17b4880d2402 "B2" (tip)
+  rebasing 3:17b4880d2402 tip "B2"
   merging B
   warning: conflicts while merging B! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg resolve --all -t internal:local
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase -c
-  rebasing 3:17b4880d2402 "B2" (tip)
-  note: not rebasing 3:17b4880d2402 "B2" (tip), its destination already has all its changes
+  rebasing 3:17b4880d2402 tip "B2"
+  note: not rebasing 3:17b4880d2402 tip "B2", its destination already has all its changes
   saved backup bundle to $TESTTMP/a7/.hg/strip-backup/17b4880d2402-1ae1f6cc-rebase.hg
   $ hg  log -G --template "{rev}:{phase} '{desc}' {branches}\n"
   o  2:draft 'C'
--- a/tests/test-rebase-empty-successor.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-empty-successor.t	Tue Jan 19 21:48:43 2021 +0530
@@ -24,15 +24,15 @@
 
   $ hg rebase -s 2 -d 1 --config rewrite.empty-successor=skip --dry-run
   starting dry-run rebase; repository will not be changed
-  rebasing 2:6e2aad5e0f3c "b2" (tip)
-  note: not rebasing 2:6e2aad5e0f3c "b2" (tip), its destination already has all its changes
+  rebasing 2:6e2aad5e0f3c tip "b2"
+  note: not rebasing 2:6e2aad5e0f3c tip "b2", its destination already has all its changes
   dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
 
 With rewrite.empty-successor=keep, b2 will be recreated although it became empty.
 
   $ hg rebase -s 2 -d 1 --config rewrite.empty-successor=keep
-  rebasing 2:6e2aad5e0f3c "b2" (tip)
-  note: created empty successor for 2:6e2aad5e0f3c "b2" (tip), its destination already has all its changes
+  rebasing 2:6e2aad5e0f3c tip "b2"
+  note: created empty successor for 2:6e2aad5e0f3c tip "b2", its destination already has all its changes
   saved backup bundle to $TESTTMP/.hg/strip-backup/6e2aad5e0f3c-7d7c8801-rebase.hg
 
   $ hg tglog
--- a/tests/test-rebase-emptycommit.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-emptycommit.t	Tue Jan 19 21:48:43 2021 +0530
@@ -50,10 +50,10 @@
 With --keep, bookmark should move
 
   $ hg rebase -r 3+4 -d E --keep
-  rebasing 3:e7b3f00ed42e "D" (BOOK-D)
-  note: not rebasing 3:e7b3f00ed42e "D" (BOOK-D), its destination already has all its changes
-  rebasing 4:69a34c08022a "E" (BOOK-E)
-  note: not rebasing 4:69a34c08022a "E" (BOOK-E), its destination already has all its changes
+  rebasing 3:e7b3f00ed42e BOOK-D "D"
+  note: not rebasing 3:e7b3f00ed42e BOOK-D "D", its destination already has all its changes
+  rebasing 4:69a34c08022a BOOK-E "E"
+  note: not rebasing 4:69a34c08022a BOOK-E "E", its destination already has all its changes
   $ hg log -G -T '{rev} {desc} {bookmarks}'
   o  7 E BOOK-D BOOK-E
   |
@@ -82,12 +82,12 @@
 "BOOK-D", and "BOOK-E" include changes introduced by "C".
 
   $ hg rebase -s 2 -d E
-  rebasing 2:dc0947a82db8 "C" (BOOK-C C)
-  rebasing 3:e7b3f00ed42e "D" (BOOK-D)
-  note: not rebasing 3:e7b3f00ed42e "D" (BOOK-D), its destination already has all its changes
-  rebasing 4:69a34c08022a "E" (BOOK-E)
-  note: not rebasing 4:69a34c08022a "E" (BOOK-E), its destination already has all its changes
-  rebasing 5:6b2aeab91270 "F" (BOOK-F F)
+  rebasing 2:dc0947a82db8 BOOK-C C "C"
+  rebasing 3:e7b3f00ed42e BOOK-D "D"
+  note: not rebasing 3:e7b3f00ed42e BOOK-D "D", its destination already has all its changes
+  rebasing 4:69a34c08022a BOOK-E "E"
+  note: not rebasing 4:69a34c08022a BOOK-E "E", its destination already has all its changes
+  rebasing 5:6b2aeab91270 BOOK-F F "F"
   saved backup bundle to $TESTTMP/non-merge/.hg/strip-backup/dc0947a82db8-52bb4973-rebase.hg
   $ hg log -G -T '{rev} {desc} {bookmarks}'
   o  5 F BOOK-F
@@ -139,12 +139,12 @@
   $ hg branch foo -q
 
   $ hg rebase -r '(A::)-(B::)-A' -d H --keepbranches
-  rebasing 2:dc0947a82db8 "C" (BOOK-C)
-  note: not rebasing 2:dc0947a82db8 "C" (BOOK-C), its destination already has all its changes
-  rebasing 3:b18e25de2cf5 "D" (BOOK-D)
-  note: not rebasing 3:b18e25de2cf5 "D" (BOOK-D), its destination already has all its changes
-  rebasing 4:86a1f6686812 "E" (BOOK-E E)
-  note: not rebasing 4:86a1f6686812 "E" (BOOK-E E), its destination already has all its changes
+  rebasing 2:dc0947a82db8 BOOK-C "C"
+  note: not rebasing 2:dc0947a82db8 BOOK-C "C", its destination already has all its changes
+  rebasing 3:b18e25de2cf5 BOOK-D "D"
+  note: not rebasing 3:b18e25de2cf5 BOOK-D "D", its destination already has all its changes
+  rebasing 4:86a1f6686812 BOOK-E E "E"
+  note: not rebasing 4:86a1f6686812 BOOK-E E "E", its destination already has all its changes
   saved backup bundle to $TESTTMP/merge1/.hg/strip-backup/b18e25de2cf5-1fd0a4ba-rebase.hg
   $ hg update null -q
 
@@ -189,13 +189,13 @@
   > EOS
 
   $ hg rebase -r '(A::)-(B::)-A' -d H
-  rebasing 2:dc0947a82db8 "C" (BOOK-C)
-  note: not rebasing 2:dc0947a82db8 "C" (BOOK-C), its destination already has all its changes
-  rebasing 3:b18e25de2cf5 "D" (BOOK-D D)
-  rebasing 4:03ca77807e91 "E" (BOOK-E E)
-  rebasing 5:ad6717a6a58e "F" (BOOK-F)
-  note: not rebasing 5:ad6717a6a58e "F" (BOOK-F), its destination already has all its changes
-  rebasing 6:c58e8bdac1f4 "G" (BOOK-G G)
+  rebasing 2:dc0947a82db8 BOOK-C "C"
+  note: not rebasing 2:dc0947a82db8 BOOK-C "C", its destination already has all its changes
+  rebasing 3:b18e25de2cf5 BOOK-D D "D"
+  rebasing 4:03ca77807e91 BOOK-E E "E"
+  rebasing 5:ad6717a6a58e BOOK-F "F"
+  note: not rebasing 5:ad6717a6a58e BOOK-F "F", its destination already has all its changes
+  rebasing 6:c58e8bdac1f4 BOOK-G G "G"
   saved backup bundle to $TESTTMP/merge2/.hg/strip-backup/b18e25de2cf5-2d487005-rebase.hg
 
   $ hg log -G -T '{rev} {desc} {bookmarks}'
--- a/tests/test-rebase-inmemory.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-inmemory.t	Tue Jan 19 21:48:43 2021 +0530
@@ -42,7 +42,7 @@
   b (no-eol)
   $ hg rebase --debug -r b -d c | grep rebasing
   rebasing in memory
-  rebasing 2:db0e82a16a62 "b" (b)
+  rebasing 2:db0e82a16a62 b "b"
   $ hg tglog
   o  3: ca58782ad1e4 'b'
   |
@@ -102,7 +102,7 @@
   somefile (no-eol)
   $ hg rebase --debug -s b -d a | grep rebasing
   rebasing in memory
-  rebasing 2:db0e82a16a62 "b" (b)
+  rebasing 2:db0e82a16a62 b "b"
   $ hg tglog
   o  3: fc055c3b4d33 'b'
   |
@@ -118,7 +118,7 @@
   b (no-eol)
   $ hg rebase --debug -s 1 -d 3 | grep rebasing
   rebasing in memory
-  rebasing 1:02952614a83d "d" (d)
+  rebasing 1:02952614a83d d "d"
   rebasing 2:f56b71190a8f "c"
   $ hg tglog
   o  3: 753feb6fd12a 'c'
@@ -149,7 +149,7 @@
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ hg rebase -r 3 -d 0 --debug | grep rebasing
   rebasing in memory
-  rebasing 3:753feb6fd12a "c" (tip)
+  rebasing 3:753feb6fd12a tip "c"
   $ hg tglog
   @  3: 844a7de3e617 'c'
   |
@@ -179,7 +179,7 @@
   o  0: b173517d0057 'a'
   
   $ hg rebase -r . -d 2
-  rebasing 4:daf7dfc139cb "a/a" (tip)
+  rebasing 4:daf7dfc139cb tip "a/a"
   saved backup bundle to $TESTTMP/repo2/.hg/strip-backup/daf7dfc139cb-fdbfcf4f-rebase.hg
 
   $ hg tglog
@@ -220,7 +220,7 @@
   o  0: b173517d0057 'a'
   
   $ hg rebase -r . -d 5
-  rebasing 7:855e9797387e "added a back!" (tip)
+  rebasing 7:855e9797387e tip "added a back!"
   saved backup bundle to $TESTTMP/repo2/.hg/strip-backup/855e9797387e-81ee4c5d-rebase.hg
 
   $ hg tglog
@@ -246,7 +246,7 @@
   $ hg ci -m 'c/subdir/file.txt'
   $ hg rebase -r . -d 3 -n
   starting dry-run rebase; repository will not be changed
-  rebasing 8:e147e6e3c490 "c/subdir/file.txt" (tip)
+  rebasing 8:e147e6e3c490 tip "c/subdir/file.txt"
   abort: error: 'c/subdir/file.txt' conflicts with file 'c' in 3.
   [255]
 FIXME: shouldn't need this, but when we hit path conflicts in dryrun mode, we
@@ -333,10 +333,16 @@
 Make sure it throws error while passing --continue or --abort with --dry-run
   $ hg rebase -s 2 -d 6 -n --continue
   abort: cannot specify both --continue and --dry-run
-  [255]
+  [10]
   $ hg rebase -s 2 -d 6 -n --abort
   abort: cannot specify both --abort and --dry-run
-  [255]
+  [10]
+
+When nothing to rebase
+  $ hg reb -r . -d '.^' -n
+  starting dry-run rebase; repository will not be changed
+  nothing to rebase
+  [1]
 
 Check dryrun gives correct results when there is no conflict in rebasing
   $ hg rebase -s 2 -d 6 -n
@@ -465,7 +471,7 @@
   merging e
   warning: conflicts while merging e! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --abort
   saved backup bundle to $TESTTMP/repo3/.hg/strip-backup/c1e524d4287c-f91f82e1-backup.hg
   rebase aborted
@@ -484,7 +490,7 @@
   transaction abort!
   rollback completed
   abort: uncommitted changes
-  [255]
+  [20]
   $ cat a
   dirty
   $ hg status -v
@@ -505,7 +511,7 @@
   $ hg rebase -s 2 -d 7
   abort: outstanding uncommitted merge
   (use 'hg commit' or 'hg merge --abort')
-  [255]
+  [20]
   $ hg resolve -l
   U e
 
@@ -541,13 +547,13 @@
 Check it gives error when both --dryrun and --confirm is used:
   $ hg rebase -s 2 -d . --confirm --dry-run
   abort: cannot specify both --confirm and --dry-run
-  [255]
+  [10]
   $ hg rebase -s 2 -d . --confirm --abort
   abort: cannot specify both --abort and --confirm
-  [255]
+  [10]
   $ hg rebase -s 2 -d . --confirm --continue
   abort: cannot specify both --continue and --confirm
-  [255]
+  [10]
 
 Test --confirm option when there are no conflicts:
   $ hg rebase -s 2 -d . --keep --config ui.interactive=True --confirm << EOF
@@ -711,7 +717,7 @@
   $ hg ci -qAm 'add +x to foo.txt'
 issue5960: this was raising an AttributeError exception
   $ hg rebase -r . -d 1
-  rebasing 2:539b93e77479 "add +x to foo.txt" (tip)
+  rebasing 2:539b93e77479 tip "add +x to foo.txt"
   saved backup bundle to $TESTTMP/no_exception/.hg/strip-backup/*.hg (glob)
   $ hg diff -c tip
   diff --git a/foo.txt b/foo.txt
@@ -774,7 +780,7 @@
   $ hg mv a b
   $ hg ci -qm 'rename a to b'
   $ hg rebase -d 1
-  rebasing 2:b977edf6f839 "rename a to b" (tip)
+  rebasing 2:b977edf6f839 tip "rename a to b"
   merging a and b to b
   saved backup bundle to $TESTTMP/rebase-rename/.hg/strip-backup/b977edf6f839-0864f570-rebase.hg
   $ hg st --copies --change .
@@ -795,7 +801,7 @@
   $ hg mv a b
   $ hg ci -qm 'rename a to b'
   $ hg rebase -d 1
-  rebasing 2:b977edf6f839 "rename a to b" (tip)
+  rebasing 2:b977edf6f839 tip "rename a to b"
   merging a and b to b
   saved backup bundle to $TESTTMP/rebase-rename-empty/.hg/strip-backup/b977edf6f839-0864f570-rebase.hg
   $ hg st --copies --change .
@@ -815,7 +821,7 @@
   $ echo a2 > a
   $ hg ci -qm 'modify a'
   $ hg rebase -r . -d 1 --collapse
-  rebasing 2:41c4ea50d4cf "modify a" (tip)
+  rebasing 2:41c4ea50d4cf tip "modify a"
   merging b and a to b
   saved backup bundle to $TESTTMP/rebase-rename-collapse/.hg/strip-backup/41c4ea50d4cf-b90b7994-rebase.hg
   $ cd ..
@@ -852,13 +858,13 @@
   created new head
 
   $ hg rebase -r . -d 1 --config ui.merge=internal:merge3
-  rebasing 2:fb62b706688e "add b to foo" (tip)
+  rebasing 2:fb62b706688e tip "add b to foo"
   merging foo
   hit merge conflicts; rebasing that commit again in the working copy
   merging foo
   warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ cd $TESTTMP
 
@@ -891,11 +897,11 @@
   merging foo
   warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase -r 3 -d 1 -t:merge3
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
   $ hg resolve --list
   U foo
   $ hg resolve --all --re-merge -t:other
@@ -945,7 +951,7 @@
   $ hg rebase -s 2 -d 3
   rebasing 2:0194f1db184a "b"
   note: not rebasing 2:0194f1db184a "b", its destination already has all its changes
-  rebasing 4:59c8292117b1 "merge" (tip)
+  rebasing 4:59c8292117b1 tip "merge"
   saved backup bundle to $TESTTMP/keep_merge/.hg/strip-backup/0194f1db184a-aee31d03-rebase.hg
   $ hg tglog
   o    3: 506e2454484b 'merge'
@@ -971,6 +977,6 @@
   $ echo bar > test; hg add test; hg ci -m c
   created new head
   $ hg rebase -d 2 -d 1 --tool :local
-  rebasing 2:ca2749322ee5 "c" (tip)
-  note: not rebasing 2:ca2749322ee5 "c" (tip), its destination already has all its changes
+  rebasing 2:ca2749322ee5 tip "c"
+  note: not rebasing 2:ca2749322ee5 tip "c", its destination already has all its changes
   saved backup bundle to $TESTTMP/nofilechanges/.hg/strip-backup/ca2749322ee5-6dc7e94b-rebase.hg
--- a/tests/test-rebase-interruptions.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-interruptions.t	Tue Jan 19 21:48:43 2021 +0530
@@ -62,7 +62,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Force a commit on C during the interruption:
 
@@ -99,7 +99,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Solve the conflict and go on:
 
@@ -158,7 +158,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Force a commit on B' during the interruption:
 
@@ -230,7 +230,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Change phase on B and B'
 
@@ -303,7 +303,7 @@
   merging A
   warning: conflicts while merging A! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg tglog
   @  5: 45396c49d53b 'B'
   |
@@ -348,7 +348,7 @@
   $ hg rebase --source 2 --dest 5 --tool internal:other --config 'hooks.precommit=hg status | grep "M A"'
   rebasing 2:965c486023db "C"
   M A
-  rebasing 6:a0b2430ebfb8 "F" (tip)
+  rebasing 6:a0b2430ebfb8 tip "F"
   abort: precommit hook exited with status 1
   [255]
   $ hg tglogp
@@ -397,7 +397,7 @@
   >     --config 'hooks.tonative.pretxncommit=True' --config 'hooks.pretxncommit=hg log -r $HG_NODE | grep "summary:     C"'
   rebasing 2:965c486023db "C"
   summary:     C
-  rebasing 6:a0b2430ebfb8 "F" (tip)
+  rebasing 6:a0b2430ebfb8 tip "F"
   transaction abort!
   rollback completed
   abort: pretxncommit hook exited with status 1
@@ -447,7 +447,7 @@
   $ hg rebase --source 2 --dest 5 --tool internal:other --config 'hooks.pretxnclose=hg log -r tip | grep "summary:     C"'
   rebasing 2:965c486023db "C"
   summary:     C
-  rebasing 6:a0b2430ebfb8 "F" (tip)
+  rebasing 6:a0b2430ebfb8 tip "F"
   transaction abort!
   rollback completed
   abort: pretxnclose hook exited with status 1
@@ -506,7 +506,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ echo a > a
   $ echo c >> a
   $ hg resolve --mark a
@@ -522,18 +522,18 @@
 Now try again with --collapse
   $ hg unbundle -q .hg/strip-backup/fdaca8533b86-7fd70513-rebase.hg
   $ hg rebase -s 2 -d 1 --noninteractive --collapse
-  rebasing 2:fdaca8533b86 "b" (tip)
+  rebasing 2:fdaca8533b86 tip "b"
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ echo a > a
   $ echo c >> a
   $ hg resolve --mark a
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase --continue
-  rebasing 2:fdaca8533b86 "b" (tip)
+  rebasing 2:fdaca8533b86 tip "b"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/fdaca8533b86-7fd70513-rebase.hg
   $ hg resolve --list
   $ test -d .hg/merge
--- a/tests/test-rebase-legacy.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-legacy.t	Tue Jan 19 21:48:43 2021 +0530
@@ -58,11 +58,11 @@
 #endif
 
   $ hg continue
-  rebasing 4:c1e6b162678d "B" (B)
-  rebasing 8:6f7a236de685 "D" (D)
-  rebasing 2:de008c61a447 "E" (E)
-  rebasing 7:d2fa1c02b240 "G" (G)
-  rebasing 9:6582e6951a9c "H" (H tip)
+  rebasing 4:c1e6b162678d B "B"
+  rebasing 8:6f7a236de685 D "D"
+  rebasing 2:de008c61a447 E "E"
+  rebasing 7:d2fa1c02b240 G "G"
+  rebasing 9:6582e6951a9c H tip "H"
   warning: orphaned descendants detected, not stripping c1e6b162678d, de008c61a447
   saved backup bundle to $TESTTMP/.hg/strip-backup/6f7a236de685-9880a3dc-rebase.hg
 
--- a/tests/test-rebase-mq-skip.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-mq-skip.t	Tue Jan 19 21:48:43 2021 +0530
@@ -57,7 +57,7 @@
   $ hg up -q -C qtip
 
   $ hg rebase -v
-  rebasing 2:13a46ce44f60 "P0" (p0.patch qbase)
+  rebasing 2:13a46ce44f60 p0.patch qbase "P0"
   resolving manifests
   removing p0
   getting r1
@@ -67,9 +67,9 @@
   p0
   committing manifest
   committing changelog
-  rebasing 3:148775c71080 "P1" (p1.patch qtip)
+  rebasing 3:148775c71080 p1.patch qtip "P1"
   resolving manifests
-  note: not rebasing 3:148775c71080 "P1" (p1.patch qtip), its destination already has all its changes
+  note: not rebasing 3:148775c71080 p1.patch qtip "P1", its destination already has all its changes
   rebase merging completed
   updating mq patch p0.patch to 5:9ecc820b1737
   $TESTTMP/a/.hg/patches/p0.patch
@@ -149,28 +149,28 @@
   $ hg up -q qtip
 
   $ HGMERGE=internal:fail hg rebase
-  rebasing 1:b4bffa6e4776 "r1" (qbase r1)
-  note: not rebasing 1:b4bffa6e4776 "r1" (qbase r1), its destination already has all its changes
-  rebasing 2:c0fd129beb01 "r2" (r2)
-  rebasing 3:6ff5b8feed8e "r3" (r3)
-  note: not rebasing 3:6ff5b8feed8e "r3" (r3), its destination already has all its changes
-  rebasing 4:094320fec554 "r4" (r4)
+  rebasing 1:b4bffa6e4776 qbase r1 "r1"
+  note: not rebasing 1:b4bffa6e4776 qbase r1 "r1", its destination already has all its changes
+  rebasing 2:c0fd129beb01 r2 "r2"
+  rebasing 3:6ff5b8feed8e r3 "r3"
+  note: not rebasing 3:6ff5b8feed8e r3 "r3", its destination already has all its changes
+  rebasing 4:094320fec554 r4 "r4"
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ HGMERGE=internal:local hg resolve --all
   (no more unresolved files)
   continue: hg rebase --continue
 
   $ hg continue
-  already rebased 1:b4bffa6e4776 "r1" (qbase r1) as 057f55ff8f44
-  already rebased 2:c0fd129beb01 "r2" (r2) as 1660ab13ce9a
-  already rebased 3:6ff5b8feed8e "r3" (r3) as 1660ab13ce9a
-  rebasing 4:094320fec554 "r4" (r4)
-  note: not rebasing 4:094320fec554 "r4" (r4), its destination already has all its changes
-  rebasing 5:681a378595ba "r5" (r5)
-  rebasing 6:512a1f24768b "r6" (qtip r6)
-  note: not rebasing 6:512a1f24768b "r6" (qtip r6), its destination already has all its changes
+  already rebased 1:b4bffa6e4776 qbase r1 "r1" as 057f55ff8f44
+  already rebased 2:c0fd129beb01 r2 "r2" as 1660ab13ce9a
+  already rebased 3:6ff5b8feed8e r3 "r3" as 1660ab13ce9a
+  rebasing 4:094320fec554 r4 "r4"
+  note: not rebasing 4:094320fec554 r4 "r4", its destination already has all its changes
+  rebasing 5:681a378595ba r5 "r5"
+  rebasing 6:512a1f24768b qtip r6 "r6"
+  note: not rebasing 6:512a1f24768b qtip r6 "r6", its destination already has all its changes
   saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-rebase.hg
 
   $ hg tglog
--- a/tests/test-rebase-mq.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-mq.t	Tue Jan 19 21:48:43 2021 +0530
@@ -59,11 +59,11 @@
 Rebase - generate a conflict:
 
   $ hg rebase -s 2 -d 1
-  rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
+  rebasing 2:3504f44bffc0 f.patch qbase "P0"
   merging f
   warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Fix the 1st conflict:
 
@@ -72,12 +72,12 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase -c
-  rebasing 2:3504f44bffc0 "P0" (f.patch qbase)
-  rebasing 3:929394423cd3 "P1" (f2.patch qtip tip)
+  rebasing 2:3504f44bffc0 f.patch qbase "P0"
+  rebasing 3:929394423cd3 f2.patch qtip tip "P1"
   merging f
   warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Fix the 2nd conflict:
 
@@ -86,8 +86,8 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg rebase -c
-  already rebased 2:3504f44bffc0 "P0" (f.patch qbase) as ebe9914c0d1c
-  rebasing 3:929394423cd3 "P1" (f2.patch qtip)
+  already rebased 2:3504f44bffc0 f.patch qbase "P0" as ebe9914c0d1c
+  rebasing 3:929394423cd3 f2.patch qtip "P1"
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-rebase.hg
 
   $ hg tglog
@@ -203,8 +203,8 @@
 Rebase the applied mq patches:
 
   $ hg rebase -s 2 -d 1
-  rebasing 2:0c587ffcb480 "P0 (git)" (f_git.patch qbase)
-  rebasing 3:c7f18665e4bc "P1" (f.patch qtip tip)
+  rebasing 2:0c587ffcb480 f_git.patch qbase "P0 (git)"
+  rebasing 3:c7f18665e4bc f.patch qtip tip "P1"
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-rebase.hg
 
   $ hg qci -m 'save patch state'
@@ -337,7 +337,7 @@
   foo
   $ [ -f .hg/patches/empty-important ]
   $ hg -q rebase -d 2
-  note: not rebasing 1:0aaf4c3af7eb "important commit message" (empty-important qbase), its destination already has all its changes
+  note: not rebasing 1:0aaf4c3af7eb empty-important qbase "important commit message", its destination already has all its changes
   $ hg qseries
   guarded
   bar
--- a/tests/test-rebase-named-branches.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-named-branches.t	Tue Jan 19 21:48:43 2021 +0530
@@ -72,7 +72,7 @@
   rebasing 5:24b6387c8c8c "F"
   rebasing 6:eea13746799a "G"
   rebasing 7:02de42196ebe "H"
-  rebasing 9:cb039b7cae8e "dev-two named branch" (tip)
+  rebasing 9:cb039b7cae8e tip "dev-two named branch"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-rebase.hg
 
   $ hg tglog
@@ -101,7 +101,7 @@
   rebasing 6:24de4aff8e28 "F"
   rebasing 7:4b988a958030 "G"
   rebasing 8:31d0e4ba75e6 "H"
-  rebasing 9:9e70cd31750f "dev-two named branch" (tip)
+  rebasing 9:9e70cd31750f tip "dev-two named branch"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-rebase.hg
 
   $ hg tglog
@@ -190,7 +190,7 @@
   rebasing 6:679f28760620 "F"
   rebasing 7:549f007a9f5f "G"
   rebasing 8:12b2bc666e20 "H"
-  rebasing 9:71325f8bc082 "dev-two named branch" (tip)
+  rebasing 9:71325f8bc082 tip "dev-two named branch"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-rebase.hg
 
   $ hg tglog
@@ -253,7 +253,7 @@
   rebasing 6:3944801ae4ea "dev-two named branch"
   rebasing 7:3bdb949809d9 "B"
   rebasing 8:a0d543090fa4 "C"
-  rebasing 9:e9f862ce8bad "D" (tip)
+  rebasing 9:e9f862ce8bad tip "D"
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-rebase.hg
 
   $ hg tglog
@@ -344,7 +344,7 @@
 
   $ hg rebase -s tip -d 4 --dry-run
   starting dry-run rebase; repository will not be changed
-  rebasing 11:be1dea60f2a6 "D" (tip)
+  rebasing 11:be1dea60f2a6 tip "D"
   dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
   $ hg diff
   diff -r 2b586e70108d A
@@ -360,7 +360,7 @@
   $ echo A-mod > A
   $ echo n | hg rebase -s tip -d 4 --confirm --config ui.interactive=True
   starting in-memory rebase
-  rebasing 11:be1dea60f2a6 "D" (tip)
+  rebasing 11:be1dea60f2a6 tip "D"
   rebase completed successfully
   apply changes (yn)? n
   $ hg diff
@@ -374,7 +374,7 @@
   $ echo A-mod > A
   $ hg rebase -s tip -d 4 --confirm
   starting in-memory rebase
-  rebasing 11:be1dea60f2a6 "D" (tip)
+  rebasing 11:be1dea60f2a6 tip "D"
   rebase completed successfully
   apply changes (yn)? y
   saved backup bundle to $TESTTMP/a1/.hg/strip-backup/be1dea60f2a6-ca6d2dac-rebase.hg
@@ -392,7 +392,7 @@
   $ hg rebase -s 5 -d 4 --dry-run
   starting dry-run rebase; repository will not be changed
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg diff
   diff -r 2b586e70108d A
   --- a/A	Thu Jan 01 00:00:00 1970 +0000
@@ -406,7 +406,7 @@
   $ echo n | hg rebase -s 5 -d 4 --confirm --config ui.interactive=True
   starting in-memory rebase
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg diff
   diff -r 2b586e70108d A
   --- a/A	Thu Jan 01 00:00:00 1970 +0000
@@ -417,7 +417,7 @@
   $ hg rebase -s 5 -d 4 --confirm
   starting in-memory rebase
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg diff
   diff -r 2b586e70108d A
   --- a/A	Thu Jan 01 00:00:00 1970 +0000
@@ -527,8 +527,8 @@
   marked working directory as branch x
   $ hg rebase -r 3:: -d .
   rebasing 3:76abc1c6f8c7 "b1"
-  rebasing 4:8427af5d86f2 "c2 closed" (tip)
-  note: not rebasing 4:8427af5d86f2 "c2 closed" (tip), its destination already has all its changes
+  rebasing 4:8427af5d86f2 tip "c2 closed"
+  note: not rebasing 4:8427af5d86f2 tip "c2 closed", its destination already has all its changes
   saved backup bundle to $TESTTMP/case2/.hg/strip-backup/76abc1c6f8c7-cd698d13-rebase.hg
   $ hg tglog
   o  3: 117b0ed08075 'b1' x
--- a/tests/test-rebase-newancestor.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-newancestor.t	Tue Jan 19 21:48:43 2021 +0530
@@ -251,7 +251,7 @@
   $ echo 'other change while merging future "rebase ancestors"' > other
   $ hg ci -Aqm 'merge rebase ancestors'
   $ hg rebase -d 5 -v
-  rebasing 6:4c5f12f25ebe "merge rebase ancestors" (tip)
+  rebasing 6:4c5f12f25ebe tip "merge rebase ancestors"
   resolving manifests
   removing other
   resolving manifests
@@ -310,9 +310,9 @@
   > R
   > EOS
   $ hg rebase -r D+E+F -d Z
-  rebasing 5:5f2c926dfecf "D" (D)
-  rebasing 6:b296604d9846 "E" (E)
-  rebasing 7:caa9781e507d "F" (F tip)
+  rebasing 5:5f2c926dfecf D "D"
+  rebasing 6:b296604d9846 E "E"
+  rebasing 7:caa9781e507d F tip "F"
   abort: rebasing 7:caa9781e507d will include unwanted changes from 4:d6003a550c2c or 3:c1e6b162678d
   [255]
 
@@ -330,9 +330,9 @@
   > R
   > EOS
   $ hg rebase -r B+C+D -d Z
-  rebasing 3:c1e6b162678d "B" (B)
-  rebasing 4:d6003a550c2c "C" (C)
-  rebasing 5:c8f78076273e "D" (D tip)
+  rebasing 3:c1e6b162678d B "B"
+  rebasing 4:d6003a550c2c C "C"
+  rebasing 5:c8f78076273e D tip "D"
   saved backup bundle to $TESTTMP/dual-merge-base2/.hg/strip-backup/d6003a550c2c-6f1424b6-rebase.hg
   $ hg manifest -r 'desc(D)'
   B
@@ -352,8 +352,8 @@
   > B C Z
   > EOS
   $ hg rebase -r D+F -d Z
-  rebasing 3:004dc1679908 "D" (D)
-  rebasing 5:4be4cbf6f206 "F" (F tip)
+  rebasing 3:004dc1679908 D "D"
+  rebasing 5:4be4cbf6f206 F tip "F"
   saved backup bundle to $TESTTMP/chosen-merge-base1/.hg/strip-backup/004dc1679908-06a66a3c-rebase.hg
   $ hg manifest -r 'desc(F)'
   C
@@ -373,8 +373,8 @@
   > B C Z
   > EOS
   $ hg rebase -r E+F -d Z
-  rebasing 4:974e4943c210 "E" (E)
-  rebasing 5:4be4cbf6f206 "F" (F tip)
+  rebasing 4:974e4943c210 E "E"
+  rebasing 5:4be4cbf6f206 F tip "F"
   saved backup bundle to $TESTTMP/chosen-merge-base2/.hg/strip-backup/974e4943c210-b2874da5-rebase.hg
   $ hg manifest -r 'desc(F)'
   B
--- a/tests/test-rebase-obsolete.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-obsolete.t	Tue Jan 19 21:48:43 2021 +0530
@@ -5,8 +5,8 @@
 Enable obsolete
 
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate= {rev}:{node|short} {desc|firstline}{if(obsolete,' ({obsfate})')}
+  > [command-templates]
+  > log= {rev}:{node|short} {desc|firstline}{if(obsolete,' ({obsfate})')}
   > [experimental]
   > evolution.createmarkers=True
   > evolution.allowunstable=True
@@ -210,7 +210,7 @@
   
   $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True
   rebasing 8:8877864f1edb "B"
-  note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D" (tip)
+  note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 tip "D"
   rebasing 10:5ae4c968c6ac "C"
   $ hg debugobsolete
   42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (Thu Jan 01 00:00:00 1970 +0000) {'ef1': '0', 'operation': 'rebase', 'user': 'test'}
@@ -258,7 +258,7 @@
   grafting 11:4596109a6a43 "D"
   $ hg up -qr 'desc(E)'
   $ hg rebase -s tip -d .
-  rebasing 14:9e36056a46e3 "D" (tip)
+  rebasing 14:9e36056a46e3 tip "D"
   $ hg log --style default --debug -r tip
   changeset:   15:627d4614809036ba22b9e7cb31638ddc06ab99ab
   tag:         tip
@@ -488,7 +488,7 @@
   $ cd stabilize
   $ hg rebase --auto-orphans '0::' -d 10
   abort: cannot specify both --auto-orphans and --dest
-  [255]
+  [10]
   $ hg rebase --auto-orphans '0::'
   rebasing 9:cf44d2f5a9f4 "D"
   $ hg log -G
@@ -518,7 +518,7 @@
   $ hg rebase --dest 4 --rev '7+11+9'
   rebasing 9:cf44d2f5a9f4 "D"
   rebasing 7:02de42196ebe "H"
-  rebasing 11:0d8f238b634c "C" (tip)
+  rebasing 11:0d8f238b634c tip "C"
   $ hg log -G
   o  14:1e8370e38cca C
   |
@@ -556,9 +556,9 @@
   > EOF
 
   $ hg rebase -d G -r 'B + D + F'
-  rebasing 1:112478962961 "B" (B)
-  rebasing 2:b18e25de2cf5 "D" (D)
-  rebasing 6:f15c3adaf214 "F" (F tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 2:b18e25de2cf5 D "D"
+  rebasing 6:f15c3adaf214 F tip "F"
   abort: cannot rebase 6:f15c3adaf214 without moving at least one of its parents
   [255]
 
@@ -612,7 +612,7 @@
   $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)'
   rebasing 3:32af7686d403 "D"
   rebasing 7:02de42196ebe "H"
-  rebasing 9:4bde274eefcf "I" (tip)
+  rebasing 9:4bde274eefcf tip "I"
   1 new orphan changesets
   $ hg log -G
   @  12:acd174b7ab39 I
@@ -656,7 +656,7 @@
 
   $ hg rebase --rev .~1::. --dest 'max(desc(D))' --traceback --config experimental.rebaseskipobsolete=off
   rebasing 9:4bde274eefcf "I"
-  rebasing 13:06edfc82198f "J" (tip)
+  rebasing 13:06edfc82198f tip "J"
   2 new content-divergent changesets
   $ hg log -G
   @  15:5ae8a643467b J
@@ -787,7 +787,7 @@
 
   $ hg rebase -d 'desc(B2)'
   note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2"
-  rebasing 4:212cb178bcbb "C" (tip)
+  rebasing 4:212cb178bcbb tip "C"
 
 Even when the chain include missing node
 
@@ -816,7 +816,7 @@
 
   $ hg rebase -d 'desc(B2)'
   note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2"
-  rebasing 5:1a79b7535141 "D" (tip)
+  rebasing 5:1a79b7535141 tip "D"
   $ hg up 4
   1 files updated, 0 files merged, 1 files removed, 0 files unresolved
   $ echo "O" > O
@@ -845,7 +845,7 @@
   $ hg rebase -d 6 -r "4::"
   rebasing 4:ff2c4d47b71d "C"
   note: not rebasing 7:360bbaa7d3ce "O", it has no successor
-  rebasing 8:8d47583e023f "P" (tip)
+  rebasing 8:8d47583e023f tip "P"
 
 If all the changeset to be rebased are obsolete and present in the destination, we
 should display a friendly error message
@@ -885,7 +885,7 @@
   o  0:4a2df7238c3b A
   
   $ hg rebase -r . -d 10
-  note: not rebasing 11:f44da1f4954c "nonrelevant" (tip), it has no successor
+  note: not rebasing 11:f44da1f4954c tip "nonrelevant", it has no successor
 
 If a rebase is going to create divergence, it should abort
 
@@ -970,7 +970,7 @@
 
   $ hg rebase -s 10 -d 12 --config experimental.evolution.allowdivergence=True
   rebasing 10:121d9e3bc4c6 "P"
-  rebasing 14:73568ab6879d "bar foo" (tip)
+  rebasing 14:73568ab6879d tip "bar foo"
   2 new content-divergent changesets
   $ hg summary
   parent: 16:61bd55f69bc4 tip
@@ -1033,7 +1033,7 @@
   merging willconflict
   warning: conflicts while merging willconflict! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg resolve --mark willconflict
   (no more unresolved files)
@@ -1041,7 +1041,7 @@
   $ hg rebase --continue
   rebasing 19:b82fb57ea638 "willconflict second version"
   note: not rebasing 20:8b31da3c4919 "dummy change", already in destination as 18:601db7a18f51 "dummy change successor"
-  rebasing 21:7bdc8a87673d "dummy change" (tip)
+  rebasing 21:7bdc8a87673d tip "dummy change"
   $ cd ..
 
 Divergence cases due to obsolete changesets
@@ -1096,11 +1096,11 @@
 consequence f (descendant of d) is left behind.
 
   $ hg rebase -b 'e' -d 'x'
-  rebasing 1:488e1b7e7341 "b" (b)
-  rebasing 3:a82ac2b38757 "c" (c)
-  rebasing 5:027ad6c5830d "d'" (d')
-  rebasing 6:d60ebfa0f1cb "e" (e)
-  note: not rebasing 4:76be324c128b "d" (d) and its descendants as this would cause divergence
+  rebasing 1:488e1b7e7341 b "b"
+  rebasing 3:a82ac2b38757 c "c"
+  rebasing 5:027ad6c5830d d' "d'"
+  rebasing 6:d60ebfa0f1cb e "e"
+  note: not rebasing 4:76be324c128b d "d" and its descendants as this would cause divergence
   $ hg log -G -r 'a'::
   o  11:eb6d63fc4ed5 e
   |
@@ -1154,9 +1154,9 @@
   (to force the rebase please set experimental.evolution.allowdivergence=True)
   [255]
   $ hg rebase --config experimental.evolution.allowdivergence=true -r 'c'::'f' -d 'x'
-  rebasing 3:a82ac2b38757 "c" (c)
-  rebasing 4:76be324c128b "d" (d)
-  rebasing 7:1143e9adc121 "f" (f tip)
+  rebasing 3:a82ac2b38757 c "c"
+  rebasing 4:76be324c128b d "d"
+  rebasing 7:1143e9adc121 f tip "f"
   1 new orphan changesets
   2 new content-divergent changesets
   $ hg log -G -r 'a':: -T instabilities
@@ -1187,9 +1187,9 @@
 (Not skipping obsoletes means that divergence is allowed.)
 
   $ hg rebase --config experimental.rebaseskipobsolete=false -r 'c'::'f' -d 'x'
-  rebasing 3:a82ac2b38757 "c" (c)
-  rebasing 4:76be324c128b "d" (d)
-  rebasing 7:1143e9adc121 "f" (f tip)
+  rebasing 3:a82ac2b38757 c "c"
+  rebasing 4:76be324c128b d "d"
+  rebasing 7:1143e9adc121 f tip "f"
   1 new orphan changesets
   2 new content-divergent changesets
 
@@ -1231,12 +1231,12 @@
   o  0:b173517d0057 a
   
   $ hg rebase -b 'f' -d 'x'
-  rebasing 1:488e1b7e7341 "b" (b)
-  rebasing 3:a82ac2b38757 "c" (c)
-  rebasing 5:63324dc512ea "e'" (e')
-  rebasing 7:3ffec603ab53 "f" (f)
-  rebasing 4:76be324c128b "d" (d)
-  note: not rebasing 6:e36fae928aec "e" (e) and its descendants as this would cause divergence
+  rebasing 1:488e1b7e7341 b "b"
+  rebasing 3:a82ac2b38757 c "c"
+  rebasing 5:63324dc512ea e' "e'"
+  rebasing 7:3ffec603ab53 f "f"
+  rebasing 4:76be324c128b d "d"
+  note: not rebasing 6:e36fae928aec e "e" and its descendants as this would cause divergence
   $ hg log -G -r 'a':
   o  13:a1707a5b7c2c d
   |
@@ -1294,7 +1294,7 @@
   o  0:b173517d0057 a
   
   $ hg rebase -d 0 -r 2
-  rebasing 2:a82ac2b38757 "c" (c)
+  rebasing 2:a82ac2b38757 c "c"
   $ hg log -G -r 'a': --hidden
   o  5:69ad416a4a26 c
   |
@@ -1325,8 +1325,8 @@
   1 new orphan changesets
 
   $ hg rebase -d B -s D
-  note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
-  rebasing 4:66f1a38021c9 "F" (F tip)
+  note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B"
+  rebasing 4:66f1a38021c9 F tip "F"
   $ hg log -G
   o    5:50e9d60b99c6 F
   |\
@@ -1357,8 +1357,8 @@
   1 new orphan changesets
 
   $ hg rebase -d B -s E
-  note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
-  rebasing 4:66f1a38021c9 "F" (F tip)
+  note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B"
+  rebasing 4:66f1a38021c9 F tip "F"
   $ hg log -G
   o    5:aae1787dacee F
   |\
@@ -1389,8 +1389,8 @@
   1 new orphan changesets
 
   $ hg rebase -d C -s D
-  note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
-  rebasing 5:66f1a38021c9 "F" (F tip)
+  note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B"
+  rebasing 5:66f1a38021c9 F tip "F"
 
   $ hg log -G
   o    6:0913febf6439 F
@@ -1424,8 +1424,8 @@
   1 new orphan changesets
 
   $ hg rebase -d C -s E
-  note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
-  rebasing 5:66f1a38021c9 "F" (F tip)
+  note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B"
+  rebasing 5:66f1a38021c9 F tip "F"
   $ hg log -G
   o    6:c6ab0cc6d220 F
   |\
@@ -1458,10 +1458,10 @@
   1 new orphan changesets
 
   $ hg rebase -d C -b F
-  rebasing 2:b18e25de2cf5 "D" (D)
-  note: not rebasing 3:7fb047a69f22 "E" (E), already in destination as 1:112478962961 "B" (B)
-  rebasing 5:66f1a38021c9 "F" (F tip)
-  note: not rebasing 5:66f1a38021c9 "F" (F tip), its destination already has all its changes
+  rebasing 2:b18e25de2cf5 D "D"
+  note: not rebasing 3:7fb047a69f22 E "E", already in destination as 1:112478962961 B "B"
+  rebasing 5:66f1a38021c9 F tip "F"
+  note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes
   $ hg log -G
   o  6:8f47515dda15 D
   |
@@ -1494,10 +1494,10 @@
   1 new orphan changesets
 
   $ hg rebase -d C -b F
-  note: not rebasing 2:b18e25de2cf5 "D" (D), already in destination as 1:112478962961 "B" (B)
-  rebasing 3:7fb047a69f22 "E" (E)
-  rebasing 5:66f1a38021c9 "F" (F tip)
-  note: not rebasing 5:66f1a38021c9 "F" (F tip), its destination already has all its changes
+  note: not rebasing 2:b18e25de2cf5 D "D", already in destination as 1:112478962961 B "B"
+  rebasing 3:7fb047a69f22 E "E"
+  rebasing 5:66f1a38021c9 F tip "F"
+  note: not rebasing 5:66f1a38021c9 F tip "F", its destination already has all its changes
 
   $ hg log -G
   o  6:533690786a86 E
@@ -1531,8 +1531,8 @@
   > EOF
 
   $ hg rebase -d D -s B
-  rebasing 1:112478962961 "B" (B)
-  note: not rebasing 3:26805aba1e60 "C" (C) and its descendants as this would cause divergence
+  rebasing 1:112478962961 B "B"
+  note: not rebasing 3:26805aba1e60 C "C" and its descendants as this would cause divergence
 
   $ cd ..
 
@@ -1581,9 +1581,9 @@
   > EOS
   1 new orphan changesets
   $ hg rebase -r A+B+E -d F
-  note: not rebasing 4:a3d17304151f "A" (A), already in destination as 0:96cc3511f894 "C" (C)
-  note: not rebasing 5:b23a2cc00842 "B" (B), already in destination as 1:058c1e1fb10a "D" (D)
-  rebasing 7:dac5d11c5a7d "E" (E tip)
+  note: not rebasing 4:a3d17304151f A "A", already in destination as 0:96cc3511f894 C "C"
+  note: not rebasing 5:b23a2cc00842 B "B", already in destination as 1:058c1e1fb10a D "D"
+  rebasing 7:dac5d11c5a7d E tip "E"
   abort: rebasing 7:dac5d11c5a7d will include unwanted changes from 3:59c792af609c, 5:b23a2cc00842 or 2:ba2b7fa7166d, 4:a3d17304151f
   [255]
   $ cd ..
@@ -1600,9 +1600,9 @@
   > EOS
   1 new orphan changesets
   $ hg rebase -r A+B+D -d Z
-  note: not rebasing 0:426bada5c675 "A" (A), already in destination as 2:96cc3511f894 "C" (C)
-  rebasing 1:fc2b737bb2e5 "B" (B)
-  rebasing 3:b8ed089c80ad "D" (D)
+  note: not rebasing 0:426bada5c675 A "A", already in destination as 2:96cc3511f894 C "C"
+  rebasing 1:fc2b737bb2e5 B "B"
+  rebasing 3:b8ed089c80ad D "D"
 
   $ rm .hg/localtags
   $ hg log -G
@@ -1631,9 +1631,9 @@
   > EOS
   1 new orphan changesets
   $ hg rebase -r B+A+D -d Z
-  rebasing 0:426bada5c675 "A" (A)
-  note: not rebasing 1:fc2b737bb2e5 "B" (B), already in destination as 2:96cc3511f894 "C" (C)
-  rebasing 3:b8ed089c80ad "D" (D)
+  rebasing 0:426bada5c675 A "A"
+  note: not rebasing 1:fc2b737bb2e5 B "B", already in destination as 2:96cc3511f894 C "C"
+  rebasing 3:b8ed089c80ad D "D"
 
   $ rm .hg/localtags
   $ hg log -G
@@ -1669,7 +1669,7 @@
   adding b
   created new head
   $ hg rebase -r 2 -d 1
-  rebasing 2:1e9a3c00cbe9 "b" (tip)
+  rebasing 2:1e9a3c00cbe9 tip "b"
   $ hg log -r .  # working dir is at rev 3 (successor of 2)
   3:be1832deae9a b (no-eol)
   $ hg book -r 2 mybook --hidden  # rev 2 has a bookmark on it now
@@ -1679,7 +1679,7 @@
   0 files updated, 0 files merged, 1 files removed, 0 files unresolved
   2:1e9a3c00cbe9 b (rewritten using rebase as 3:be1832deae9a) (no-eol)
   $ hg rebase -r 2 -d 3 --config experimental.evolution.track-operation=1
-  note: not rebasing 2:1e9a3c00cbe9 "b" (mybook), already in destination as 3:be1832deae9a "b" (tip)
+  note: not rebasing 2:1e9a3c00cbe9 mybook "b", already in destination as 3:be1832deae9a tip "b"
 Check that working directory and bookmark was updated to rev 3 although rev 2
 was skipped
   $ hg log -r .
@@ -1706,8 +1706,8 @@
   $ hg update D1 -q
   $ hg bookmark book -i
   $ hg rebase -r B+D1 -d E
-  rebasing 1:112478962961 "B" (B)
-  note: not rebasing 5:15ecf15e0114 "D1" (book D1 tip), already in destination as 2:0807738e0be9 "D2" (D2)
+  rebasing 1:112478962961 B "B"
+  note: not rebasing 5:15ecf15e0114 book D1 tip "D1", already in destination as 2:0807738e0be9 D2 "D2"
   1 new orphan changesets
   $ hg log -G -T '{desc} {bookmarks}'
   @  B book
@@ -1788,7 +1788,7 @@
   merging D
   warning: conflicts while merging D! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ cp -R . $TESTTMP/hidden-state2
 
@@ -1819,7 +1819,7 @@
   continue: hg rebase --continue
   $ hg rebase --continue
   rebasing 1:2ec65233581b "B"
-  rebasing 3:7829726be4dc "C" (tip)
+  rebasing 3:7829726be4dc tip "C"
   $ hg log -G
   @  5:1964d5d5b547 C
   |
@@ -1875,7 +1875,7 @@
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop
   1 new orphan changesets
   $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
@@ -1937,7 +1937,7 @@
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop
   abort: cannot remove original changesets with unrebased descendants
   (either enable obsmarkers to allow unstable revisions or use --keep to keep original changesets)
@@ -1955,7 +1955,7 @@
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop
   $ hg log -G --template "{rev}:{short(node)} {person(author)}\n{firstline(desc)} {topic}\n\n"
   o  7:7fffad344617 test
@@ -2017,7 +2017,7 @@
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop
   abort: cannot stop in --collapse session
   [255]
@@ -2050,18 +2050,18 @@
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop --dry-run
   abort: cannot specify both --stop and --dry-run
-  [255]
+  [10]
 
   $ hg rebase -s 3 -d 5
   abort: rebase in progress
   (use 'hg rebase --continue', 'hg rebase --abort', or 'hg rebase --stop')
-  [255]
+  [20]
   $ hg rebase --stop --continue
   abort: cannot specify both --stop and --continue
-  [255]
+  [10]
 
 Test --stop moves bookmarks of original revisions to new rebased nodes:
 ======================================================================
@@ -2112,13 +2112,13 @@
   o  0: 1994f17a630e 'A' bookmarks:
   
   $ hg rebase -s 1 -d 5
-  rebasing 1:6c81ed0049f8 "B" (X)
-  rebasing 2:49cb3485fa0c "C" (Y)
-  rebasing 3:67a385d4e6f2 "D" (Z)
+  rebasing 1:6c81ed0049f8 X "B"
+  rebasing 2:49cb3485fa0c Y "C"
+  rebasing 3:67a385d4e6f2 Z "D"
   merging d
   warning: conflicts while merging d! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --stop
   1 new orphan changesets
   $ hg log -GT "{rev}: {node|short} '{desc}' bookmarks: {bookmarks}\n"
--- a/tests/test-rebase-parameters.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-parameters.t	Tue Jan 19 21:48:43 2021 +0530
@@ -62,7 +62,7 @@
 
   $ hg rebase --continue --abort
   abort: cannot specify both --abort and --continue
-  [255]
+  [10]
 
   $ hg rebase --continue --collapse
   abort: cannot use collapse with continue or abort
@@ -70,18 +70,18 @@
 
   $ hg rebase --continue --dest 4
   abort: cannot specify both --continue and --dest
-  [255]
+  [10]
 
   $ hg rebase --base 5 --source 4
   abort: cannot specify both --source and --base
-  [255]
+  [10]
 
   $ hg rebase --rev 5 --source 4
   abort: cannot specify both --rev and --source
-  [255]
+  [10]
   $ hg rebase --base 5 --rev 4
   abort: cannot specify both --rev and --base
-  [255]
+  [10]
 
   $ hg rebase --base 6
   abort: branch 'default' has 3 heads - please rebase to an explicit rev
@@ -451,8 +451,8 @@
   $ cd b1
 
   $ hg rebase -s 2 -d 1 --tool internal:local
-  rebasing 2:e4e3f3546619 "c2b" (tip)
-  note: not rebasing 2:e4e3f3546619 "c2b" (tip), its destination already has all its changes
+  rebasing 2:e4e3f3546619 tip "c2b"
+  note: not rebasing 2:e4e3f3546619 tip "c2b", its destination already has all its changes
   saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
 
   $ hg cat c2
@@ -465,7 +465,7 @@
   $ cd b2
 
   $ hg rebase -s 2 -d 1 --tool internal:other
-  rebasing 2:e4e3f3546619 "c2b" (tip)
+  rebasing 2:e4e3f3546619 tip "c2b"
   saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
 
   $ hg cat c2
@@ -478,9 +478,9 @@
   $ cd b3
 
   $ hg rebase -s 2 -d 1 --tool internal:fail
-  rebasing 2:e4e3f3546619 "c2b" (tip)
+  rebasing 2:e4e3f3546619 tip "c2b"
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg summary
   parent: 1:56daeba07f4b 
@@ -500,10 +500,10 @@
   $ hg graft --continue
   abort: no graft in progress
   (continue: hg rebase --continue)
-  [255]
+  [20]
   $ hg rebase -c --tool internal:fail
-  rebasing 2:e4e3f3546619 "c2b" (tip)
-  note: not rebasing 2:e4e3f3546619 "c2b" (tip), its destination already has all its changes
+  rebasing 2:e4e3f3546619 tip "c2b"
+  note: not rebasing 2:e4e3f3546619 tip "c2b", its destination already has all its changes
   saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-rebase.hg
 
   $ hg rebase -i
--- a/tests/test-rebase-partial.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-partial.t	Tue Jan 19 21:48:43 2021 +0530
@@ -35,8 +35,8 @@
   > |/
   > A
   > EOF
-  rebasing 2:b18e25de2cf5 "D" (D)
-  already rebased 3:26805aba1e60 "C" (C tip)
+  rebasing 2:b18e25de2cf5 D "D"
+  already rebased 3:26805aba1e60 C tip "C"
   o  4: fe3b4c6498fa D
   |
   | o  3: 26805aba1e60 C
@@ -56,8 +56,8 @@
   > |/
   > A
   > EOF
-  rebasing 2:b18e25de2cf5 "D" (D)
-  rebasing 3:26805aba1e60 "C" (C tip)
+  rebasing 2:b18e25de2cf5 D "D"
+  rebasing 3:26805aba1e60 C tip "C"
   o  4: a2493f4ace65 Collapsed revision
   |  * D
   |  * C
@@ -81,11 +81,11 @@
   > A
   > EOF
   $ hg rebase -r C+D -d B
-  rebasing 2:ef8c0fe0897b "D" (D)
+  rebasing 2:ef8c0fe0897b D "D"
   merging file
   warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg rebase --abort
   rebase aborted
   $ hg tglog
@@ -111,8 +111,8 @@
   > |
   > A
   > EOF
-  already rebased 1:112478962961 "B" (B)
-  rebasing 3:f585351a92f8 "D" (D tip)
+  already rebased 1:112478962961 B "B"
+  rebasing 3:f585351a92f8 D tip "D"
   o  4: 1e6da8103bc7 D
   |
   | x  3: f585351a92f8 D
--- a/tests/test-rebase-pull.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-pull.t	Tue Jan 19 21:48:43 2021 +0530
@@ -80,7 +80,7 @@
   $ hg pull --rebase
   abort: uncommitted changes
   (cannot pull with rebase: please commit or shelve your changes first)
-  [255]
+  [20]
   $ hg update --clean --quiet
 
 Abort pull early if another operation (histedit) is in progress:
@@ -88,13 +88,13 @@
   $ hg histedit . -q --commands - << EOF
   > edit d80cc2da061e histedit: generate unfinished state
   > EOF
-  Editing (d80cc2da061e), you may commit or record as needed now.
-  (hg histedit --continue to resume)
-  [1]
+  Editing (d80cc2da061e), commit as needed now to split the change
+  (to edit d80cc2da061e, `hg histedit --continue` after making changes)
+  [240]
   $ hg pull --rebase
   abort: histedit in progress
   (use 'hg histedit --continue' or 'hg histedit --abort')
-  [255]
+  [20]
   $ hg histedit --abort --quiet
 
 Abort pull early with pending uncommitted merge:
@@ -117,7 +117,7 @@
   $ hg pull --rebase
   abort: outstanding uncommitted merge
   (cannot pull with rebase: please commit or shelve your changes first)
-  [255]
+  [20]
   $ hg update --clean --quiet
 
 Abort pull early with unclean subrepo:
--- a/tests/test-rebase-rename.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-rename.t	Tue Jan 19 21:48:43 2021 +0530
@@ -60,7 +60,7 @@
 Rebase the revision containing the rename:
 
   $ hg rebase -s 3 -d 2
-  rebasing 3:73a3ee40125d "rename A" (tip)
+  rebasing 3:73a3ee40125d tip "rename A"
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-rebase.hg
 
   $ hg tglog
@@ -134,8 +134,8 @@
   o  0: 1994f17a630e 'A'
   
   $ hg rebase -s 5 -d 4
-  rebasing 5:af8ad1f97097 "E" (tip)
-  note: not rebasing 5:af8ad1f97097 "E" (tip), its destination already has all its changes
+  rebasing 5:af8ad1f97097 tip "E"
+  note: not rebasing 5:af8ad1f97097 tip "E", its destination already has all its changes
   saved backup bundle to $TESTTMP/a/.hg/strip-backup/af8ad1f97097-c3e90708-rebase.hg
   $ hg tglog
   @  4: 60f545c27784 'E'
@@ -207,7 +207,7 @@
 Rebase the revision containing the copy:
 
   $ hg rebase -s 3 -d 2
-  rebasing 3:0a8162ff18a8 "copy A" (tip)
+  rebasing 3:0a8162ff18a8 tip "copy A"
   saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-rebase.hg
 
   $ hg tglog
@@ -291,7 +291,7 @@
   
 
   $ hg rebase -s 4 -d 3
-  rebasing 4:b918d683b091 "Another unrelated change" (tip)
+  rebasing 4:b918d683b091 tip "Another unrelated change"
   saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-rebase.hg
 
   $ hg diff --stat -c .
@@ -312,14 +312,14 @@
   $ hg commit --message "File b created as copy of a and modified"
   $ hg copy b c
   $ echo c > c
-  $ hg commit --message "File c created as copy of b and modified"
+  $ hg commit --message "File c created as copy of b and modified" ##
   $ hg copy c d
   $ echo d > d
-  $ hg commit --message "File d created as copy of c and modified"
+  $ hg commit --message "File d created as copy of c and modified (child of 327f772bc074)"
 
 Note that there are four entries in the log for d
   $ hg tglog --follow d
-  @  3: 421b7e82bb85 'File d created as copy of c and modified'
+  @  3: 6be224292cfa 'File d created as copy of c and modified (child of 327f772bc074)'
   |
   o  2: 327f772bc074 'File c created as copy of b and modified'
   |
@@ -342,14 +342,14 @@
   $ hg rebase --source 1 --dest 4
   rebasing 1:79d255d24ad2 "File b created as copy of a and modified"
   rebasing 2:327f772bc074 "File c created as copy of b and modified"
-  rebasing 3:421b7e82bb85 "File d created as copy of c and modified"
-  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-rebase.hg
+  rebasing 3:6be224292cfa "File d created as copy of c and modified (child of 327f772bc074)"
+  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a3e674e3-rebase.hg
   $ hg update 4
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
 There should still be four entries in the log for d
   $ hg tglog --follow d
-  @  4: dbb9ba033561 'File d created as copy of c and modified'
+  @  4: afbdde3a60d5 'File d created as copy of c and modified (child of af74b229bc02)'
   |
   o  3: af74b229bc02 'File c created as copy of b and modified'
   |
@@ -368,9 +368,9 @@
   rebasing 2:68bf06433839 "File b created as copy of a and modified"
   rebasing 3:af74b229bc02 "File c created as copy of b and modified"
   merging b and c to c
-  rebasing 4:dbb9ba033561 "File d created as copy of c and modified"
+  rebasing 4:afbdde3a60d5 "File d created as copy of c and modified (child of af74b229bc02)"
   merging c and d to d
-  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-rebase.hg
+  saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-29d5057f-rebase.hg
   $ hg co tip
   3 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
@@ -378,11 +378,24 @@
 copy of 'a'.
 
   $ hg tglog --follow d
-  @  3: 5a46b94210e5 'Collapsed revision
+  @  3: 75708a266e56 'Collapsed revision
   :  * File b created as copy of a and modified
   :  * File c created as copy of b and modified
-  :  * File d created as copy of c and modified'
+  :  * File d created as copy of c and modified (child of af74b229bc02)'
   o  0: b220cd6d2326 'File a created'
   
+  $ hg log -G -Tcompact
+  @  3[tip]   75708a266e56   1970-01-01 00:00 +0000   test
+  |    Collapsed revision
+  |
+  o  2   15258cf0cf10   1970-01-01 00:00 +0000   test
+  |    unrelated commit is unrelated
+  |
+  o  1   1d689898494b   1970-01-01 00:00 +0000   test
+  |    Unrelated file created
+  |
+  o  0   b220cd6d2326   1970-01-01 00:00 +0000   test
+       File a created
+  
 
   $ cd ..
--- a/tests/test-rebase-scenario-global.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-scenario-global.t	Tue Jan 19 21:48:43 2021 +0530
@@ -170,7 +170,7 @@
   rebasing 5:24b6387c8c8c "F"
   rebasing 6:eea13746799a "G"
   note: not rebasing 6:eea13746799a "G", its destination already has all its changes
-  rebasing 7:02de42196ebe "H" (tip)
+  rebasing 7:02de42196ebe tip "H"
   saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg
 
   $ hg tglog
@@ -228,7 +228,7 @@
   $ hg rebase -s 5 -d 1
   rebasing 5:24b6387c8c8c "F"
   rebasing 6:eea13746799a "G"
-  rebasing 7:02de42196ebe "H" (tip)
+  rebasing 7:02de42196ebe tip "H"
   saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-rebase.hg
 
   $ hg tglog
@@ -327,44 +327,44 @@
   $ hg rebase -d 0 -b 6
   abort: cannot rebase public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
   $ hg rebase -d 5 -b 6
   abort: cannot rebase public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
   $ hg rebase -d 5 -r '1 + (6::)'
   abort: cannot rebase public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
   $ hg rebase -d 5 -b 6 --keep
   rebasing 6:e1c4361dd923 "C"
-  rebasing 7:c9659aac0000 "D" (tip)
+  rebasing 7:c9659aac0000 tip "D"
 
 Check rebasing mutable changeset
 Source phase greater or equal to destination phase: new changeset get the phase of source:
   $ hg id -n
   5
   $ hg rebase -s9 -d0
-  rebasing 9:2b23e52411f4 "D" (tip)
+  rebasing 9:2b23e52411f4 tip "D"
   saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-rebase.hg
   $ hg id -n # check we updated back to parent
   5
   $ hg log --template "{phase}\n" -r 9
   draft
   $ hg rebase -s9 -d1
-  rebasing 9:2cb10d0cfc6c "D" (tip)
+  rebasing 9:2cb10d0cfc6c tip "D"
   saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-rebase.hg
   $ hg log --template "{phase}\n" -r 9
   draft
   $ hg phase --force --secret 9
   $ hg rebase -s9 -d0
-  rebasing 9:c5b12b67163a "D" (tip)
+  rebasing 9:c5b12b67163a tip "D"
   saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-rebase.hg
   $ hg log --template "{phase}\n" -r 9
   secret
   $ hg rebase -s9 -d1
-  rebasing 9:2a0524f868ac "D" (tip)
+  rebasing 9:2a0524f868ac tip "D"
   saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-rebase.hg
   $ hg log --template "{phase}\n" -r 9
   secret
@@ -455,13 +455,13 @@
   $ hg rebase -r '2::8' -d 1
   abort: cannot rebase changeset with children
   (use --keep to keep original changesets)
-  [255]
+  [10]
   $ hg rebase -r '2::8' -d 1 -k
   rebasing 2:c9e50f6cdc55 "C"
   rebasing 3:ffd453c31098 "D"
   rebasing 6:3d8a618087a7 "G"
   rebasing 7:72434a4e60b0 "H"
-  rebasing 8:479ddb54a924 "I" (tip)
+  rebasing 8:479ddb54a924 tip "I"
   $ hg tglog
   o  13: 9bf1d9358a90 'I'
   |
@@ -501,12 +501,12 @@
   $ hg rebase -r '3::8' -d 1
   abort: cannot rebase changeset with children
   (use --keep to keep original changesets)
-  [255]
+  [10]
   $ hg rebase -r '3::8' -d 1 --keep
   rebasing 3:ffd453c31098 "D"
   rebasing 6:3d8a618087a7 "G"
   rebasing 7:72434a4e60b0 "H"
-  rebasing 8:479ddb54a924 "I" (tip)
+  rebasing 8:479ddb54a924 tip "I"
   $ hg tglog
   o  12: 9d7da0053b1c 'I'
   |
@@ -544,7 +544,7 @@
   $ hg rebase -r '3::7' -d 1
   abort: cannot rebase changeset with children
   (use --keep to keep original changesets)
-  [255]
+  [10]
   $ hg rebase -r '3::7' -d 1 --keep
   rebasing 3:ffd453c31098 "D"
   rebasing 6:3d8a618087a7 "G"
@@ -584,7 +584,7 @@
   $ hg rebase -r '3::(7+5)' -d 1
   abort: cannot rebase changeset with children
   (use --keep to keep original changesets)
-  [255]
+  [10]
   $ hg rebase -r '3::(7+5)' -d 1 --keep
   rebasing 3:ffd453c31098 "D"
   rebasing 4:c01897464e7f "E"
@@ -632,7 +632,7 @@
   $ hg rebase -r '6::' -d 2
   rebasing 6:3d8a618087a7 "G"
   rebasing 7:72434a4e60b0 "H"
-  rebasing 8:479ddb54a924 "I" (tip)
+  rebasing 8:479ddb54a924 tip "I"
   saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-rebase.hg
   $ hg tglog
   o  8: fcb52e68a694 'I'
@@ -667,7 +667,7 @@
   rebasing 5:41bfcc75ed73 "F"
   rebasing 6:3d8a618087a7 "G"
   rebasing 7:72434a4e60b0 "H"
-  rebasing 8:479ddb54a924 "I" (tip)
+  rebasing 8:479ddb54a924 tip "I"
   saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-rebase.hg
   $ hg tglog
   o  8: 9136df9a87cf 'I'
@@ -736,7 +736,7 @@
 
   $ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)'
   rebasing 8:e7ec4e813ba6 "I"
-  rebasing 10:23a4ace37988 "K" (tip)
+  rebasing 10:23a4ace37988 tip "K"
   saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-rebase.hg
   $ hg log --rev 'children(desc(G))'
   changeset:   9:adb617877056
@@ -803,7 +803,7 @@
   rebasing 2:779a07b1b7a0 "first source commit"
   current directory was removed (rmcwd !)
   (consider changing to repo root: $TESTTMP/cwd-vanish) (rmcwd !)
-  rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip)
+  rebasing 3:a7d6f3a00bf3 tip "second source with subdir"
   saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-rebase.hg
 
 Get back to the root of cwd-vanish. Note that even though `cd ..`
@@ -979,7 +979,7 @@
   > EOS
 
   $ hg rebase -s C -d B
-  rebasing 2:dc0947a82db8 "C" (C tip)
+  rebasing 2:dc0947a82db8 C tip "C"
 
   $ [ -f .hg/rebasestate ] && echo 'WRONG: rebasestate should not exist'
   [1]
--- a/tests/test-rebase-templates.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-templates.t	Tue Jan 19 21:48:43 2021 +0530
@@ -82,3 +82,42 @@
   o  0:18d04c59bb5d Added a
   
 
+Respects command-templates.oneline-summary
+
+  $ hg rebase -r 7 -d 8 -n --config command-templates.oneline-summary='rev: {rev}'
+  starting dry-run rebase; repository will not be changed
+  rebasing rev: 7
+  note: not rebasing rev: 7, its destination already has all its changes
+  dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
+
+
+command-templates.oneline-summary.rebase overrides
+
+  $ hg rebase -r 7 -d 8 -n \
+  > --config command-templates.oneline-summary='global: {rev}' \
+  > --config command-templates.oneline-summary.rebase='override: {rev}'
+  starting dry-run rebase; repository will not be changed
+  rebasing override: 7
+  note: not rebasing override: 7, its destination already has all its changes
+  dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
+
+
+check namespaces and coloring (labels)
+
+  $ hg tag -l -r 7 my-tag
+  $ hg rebase -r 7 -d 8 -n
+  starting dry-run rebase; repository will not be changed
+  rebasing 7:f48cd65c6dc3 my-tag "Added b"
+  note: not rebasing 7:f48cd65c6dc3 my-tag "Added b", its destination already has all its changes
+  dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
+  $ hg bookmark -r 7 my-bookmark
+  $ hg rebase -r 7 -d 8 -n
+  starting dry-run rebase; repository will not be changed
+  rebasing 7:f48cd65c6dc3 my-bookmark my-tag "Added b"
+  note: not rebasing 7:f48cd65c6dc3 my-bookmark my-tag "Added b", its destination already has all its changes
+  dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase
+  $ hg rebase -r 7 -d 8 -n --color=debug
+  [ ui.status|starting dry-run rebase; repository will not be changed]
+  [ ui.status|rebasing [oneline-summary.changeset|7:f48cd65c6dc3] [oneline-summary.bookmarks|my-bookmark] [oneline-summary.tags|my-tag] "[oneline-summary.desc|Added b]"]
+  [ ui.warning|note: not rebasing [oneline-summary.changeset|7:f48cd65c6dc3] [oneline-summary.bookmarks|my-bookmark] [oneline-summary.tags|my-tag] "[oneline-summary.desc|Added b]", its destination already has all its changes]
+  [ ui.status|dry-run rebase completed successfully; run without -n/--dry-run to perform this rebase]
--- a/tests/test-rebase-transaction.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rebase-transaction.t	Tue Jan 19 21:48:43 2021 +0530
@@ -103,12 +103,12 @@
   >   A
   > EOF
   $ hg rebase --collapse -b D -d Z
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:c26739dbe603 "C" (C)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:c26739dbe603 C "C"
   merging conflict
   warning: conflicts while merging conflict! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ hg tglog
   o  5: D
   |
@@ -132,9 +132,9 @@
   (no more unresolved files)
   continue: hg rebase --continue
   $ hg continue
-  already rebased 1:112478962961 "B" (B) as 79bc8f4973ce
-  rebasing 3:c26739dbe603 "C" (C)
-  rebasing 5:d24bb333861c "D" (D tip)
+  already rebased 1:112478962961 B "B" as 79bc8f4973ce
+  rebasing 3:c26739dbe603 C "C"
+  rebasing 5:d24bb333861c D tip "D"
   saved backup bundle to $TESTTMP/collapse-conflict/.hg/strip-backup/112478962961-b5b34645-rebase.hg
   $ hg tglog
   o  3: Collapsed revision
@@ -165,13 +165,13 @@
   >   A
   > EOF
   $ HGEDITOR=false hg --config ui.interactive=1 rebase --collapse -b D -d Z
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:26805aba1e60 "C" (C)
-  rebasing 5:f585351a92f8 "D" (D tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:26805aba1e60 C "C"
+  rebasing 5:f585351a92f8 D tip "D"
   transaction abort!
   rollback completed
   abort: edit failed: false exited with status 1
-  [255]
+  [250]
   $ hg tglog
   o  5: D
   |
@@ -186,9 +186,9 @@
   o  0: A
   
   $ hg continue
-  rebasing 1:112478962961 "B" (B)
-  rebasing 3:26805aba1e60 "C" (C)
-  rebasing 5:f585351a92f8 "D" (D tip)
+  rebasing 1:112478962961 B "B"
+  rebasing 3:26805aba1e60 C "C"
+  rebasing 5:f585351a92f8 D tip "D"
   saved backup bundle to $TESTTMP/collapse-cancel-editor/.hg/strip-backup/112478962961-cb2a9b47-rebase.hg
   $ hg tglog
   o  3: Collapsed revision
--- a/tests/test-record.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-record.t	Tue Jan 19 21:48:43 2021 +0530
@@ -77,7 +77,7 @@
   diff --git a/empty-rw b/empty-rw
   new file mode 100644
   abort: empty commit message
-  [255]
+  [10]
 
   $ hg tip -p
   changeset:   -1:000000000000
--- a/tests/test-releasenotes-formatting.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-releasenotes-formatting.t	Tue Jan 19 21:48:43 2021 +0530
@@ -429,11 +429,11 @@
   $ cd relnotes-raise-error
   $ hg releasenotes -r . -l
   abort: cannot specify both --list and --rev
-  [255]
+  [10]
 
   $ hg releasenotes -l -c
   abort: cannot specify both --list and --check
-  [255]
+  [10]
 
 Display release notes for specified revs if no file is mentioned
 
--- a/tests/test-remotefilelog-bgprefetch.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-bgprefetch.t	Tue Jan 19 21:48:43 2021 +0530
@@ -229,7 +229,7 @@
 .. flaky, the core of the test is checked when checking the cache dir, so
 .. hopefully this flakiness is not hiding any actual bug.
   $ hg rebase -s temporary -d foo
-  rebasing 3:d9cf06e3b5b6 "b" (temporary tip)
+  rebasing 3:d9cf06e3b5b6 temporary tip "b"
   saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/d9cf06e3b5b6-e5c3dc63-rebase.hg
   ? files fetched over ? fetches - (? misses, 0.00% hit ratio) over *s (glob)
   $ find $CACHEDIR -type f | sort
@@ -271,9 +271,7 @@
   $ hg up -r 3
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ clearcache
-  $ hg prefetch --repack
-  waiting for lock on prefetching in $TESTTMP/shallow held by process * on host * (glob) (?)
-  got lock after * seconds (glob) (?)
+  $ hg prefetch --repack --config ui.timeout.warn=-1
   (running background incremental repack)
   * files fetched over 1 fetches - (* misses, 0.00% hit ratio) over *s (glob) (?)
 
@@ -312,9 +310,7 @@
 # Check that foreground prefetch fetches revs specified by '. + draft() + bgprefetchrevs + pullprefetch'
 
   $ clearcache
-  $ hg prefetch --repack
-  waiting for lock on prefetching in $TESTTMP/shallow held by process * on host * (glob) (?)
-  got lock after * seconds (glob) (?)
+  $ hg prefetch --repack --config ui.timeout.warn=-1
   (running background incremental repack)
   * files fetched over 1 fetches - (* misses, 0.00% hit ratio) over *s (glob) (?)
 
--- a/tests/test-remotefilelog-cacheprocess.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-cacheprocess.t	Tue Jan 19 21:48:43 2021 +0530
@@ -66,7 +66,7 @@
 
   $ cat >> $HGRCPATH <<EOF
   > [remotefilelog]
-  > cacheprocess = python $TESTTMP/cacheprocess-logger.py
+  > cacheprocess = $PYTHON $TESTTMP/cacheprocess-logger.py
   > EOF
 
 Test cache keys and cache misses.
--- a/tests/test-remotefilelog-datapack.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-datapack.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 from __future__ import absolute_import, print_function
 
 import hashlib
@@ -82,8 +82,7 @@
         return self.datapackreader(path)
 
     def _testAddSingle(self, content):
-        """Test putting a simple blob into a pack and reading it out.
-        """
+        """Test putting a simple blob into a pack and reading it out."""
         filename = b"foo"
         node = self.getHash(content)
 
@@ -124,8 +123,7 @@
             self.assertEqual(content, chain[0][4])
 
     def testAddDeltas(self):
-        """Test putting multiple delta blobs into a pack and read the chain.
-        """
+        """Test putting multiple delta blobs into a pack and read the chain."""
         revisions = []
         filename = b"foo"
         lastnode = nullid
@@ -148,8 +146,7 @@
             self.assertEqual(content, chain[-i - 1][4])
 
     def testPackMany(self):
-        """Pack many related and unrelated objects.
-        """
+        """Pack many related and unrelated objects."""
         # Build a random pack file
         revisions = []
         blobs = {}
@@ -198,8 +195,7 @@
             self.assertEqual(parsedmeta, origmeta)
 
     def testGetMissing(self):
-        """Test the getmissing() api.
-        """
+        """Test the getmissing() api."""
         revisions = []
         filename = b"foo"
         lastnode = nullid
--- a/tests/test-remotefilelog-histpack.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-histpack.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 from __future__ import absolute_import
 
 import hashlib
@@ -76,8 +76,7 @@
         return historypack.historypack(path)
 
     def testAddSingle(self):
-        """Test putting a single entry into a pack and reading it out.
-        """
+        """Test putting a single entry into a pack and reading it out."""
         filename = b"foo"
         node = self.getFakeHash()
         p1 = self.getFakeHash()
@@ -140,8 +139,7 @@
             self.assertEqual(acopyfrom, copyfrom)
 
     def testPackMany(self):
-        """Pack many related and unrelated ancestors.
-        """
+        """Pack many related and unrelated ancestors."""
         # Build a random pack file
         allentries = {}
         ancestorcounts = {}
@@ -201,8 +199,7 @@
             self.assertEqual(acopyfrom, copyfrom)
 
     def testGetMissing(self):
-        """Test the getmissing() api.
-        """
+        """Test the getmissing() api."""
         revisions = []
         filename = b"foo"
         for i in range(10):
--- a/tests/test-remotefilelog-linknodes.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-linknodes.t	Tue Jan 19 21:48:43 2021 +0530
@@ -32,7 +32,7 @@
   b292c1e3311f
 
   $ hg rebase -d 1
-  rebasing 2:0632994590a8 "xx" (tip)
+  rebasing 2:0632994590a8 tip "xx"
   saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/0632994590a8-0bc786d8-rebase.hg (glob)
   $ hg log -f x --template "{node|short}\n"
   81deab2073bc
@@ -41,14 +41,14 @@
 # Rebase back, log -f still works
 
   $ hg rebase -d 0 -r 2
-  rebasing 2:81deab2073bc "xx" (tip)
+  rebasing 2:81deab2073bc tip "xx"
   saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/81deab2073bc-80cb4fda-rebase.hg (glob)
   $ hg log -f x --template "{node|short}\n"
   b3fca10fb42d
   b292c1e3311f
 
   $ hg rebase -d 1 -r 2
-  rebasing 2:b3fca10fb42d "xx" (tip)
+  rebasing 2:b3fca10fb42d tip "xx"
   saved backup bundle to $TESTTMP/shallow/.hg/strip-backup/b3fca10fb42d-da73a0c7-rebase.hg (glob)
 
   $ cd ..
--- a/tests/test-remotefilelog-local.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-local.t	Tue Jan 19 21:48:43 2021 +0530
@@ -204,5 +204,6 @@
   $ hg update -r '.^' -q
   $ echo 1 > A
   $ hg commit -m foo -A A
+  warning: commit already existed in the repository!
   $ hg log -r . -T '{node}\n'
   383ce605500277f879b7460a16ba620eb6930b7f
--- a/tests/test-remotefilelog-prefetch.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-prefetch.t	Tue Jan 19 21:48:43 2021 +0530
@@ -232,14 +232,14 @@
   $ hg mv z2 z3
   z2: not copying - file is not managed
   abort: no files to copy
-  [255]
+  [10]
   $ find $CACHEDIR -type f | sort
 .. The following output line about files fetches is globbed because it is
 .. flaky, the core of the test is checked when checking the cache dir, so
 .. hopefully this flakiness is not hiding any actual bug.
   $ hg revert -a -r 1 || true
   ? files fetched over 1 fetches - (? misses, 0.00% hit ratio) over * (glob)
-  abort: z2@109c3a557a73: not found in manifest! (?)
+  abort: z2@109c3a557a73: not found in manifest (?)
   $ find $CACHEDIR -type f | sort
   $TESTTMP/hgcache/master/11/f6ad8ec52a2984abaafd7c3b516503785c2072/ef95c5376f34698742fe34f315fd82136f8f68c0
   $TESTTMP/hgcache/master/39/5df8f7c51f007019cb30201c49e884b46b92fa/69a1b67522704ec122181c0890bd16e9d3e7516a
--- a/tests/test-remotefilelog-repack.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-repack.t	Tue Jan 19 21:48:43 2021 +0530
@@ -306,7 +306,7 @@
   $ hg prefetch -r 2
   1 files fetched over 1 fetches - (1 misses, 0.00% hit ratio) over * (glob)
   $ hg prefetch -r 38
-  abort: unknown revision '38'!
+  abort: unknown revision '38'
   [255]
   $ ls_l $TESTTMP/hgcache/master/packs/ | grep datapack
   -r--r--r--      70 052643fdcdebbd42d7c180a651a30d46098e6fe1.datapack
--- a/tests/test-remotefilelog-sparse.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-remotefilelog-sparse.t	Tue Jan 19 21:48:43 2021 +0530
@@ -99,4 +99,4 @@
   $ hg prefetch -r '. + .^' -I x -I z
   4 files fetched over 1 fetches - (4 misses, 0.00% hit ratio) over * (glob)
   $ hg rebase -d 2 --keep
-  rebasing 1:876b1317060d "x2" (foo)
+  rebasing 1:876b1317060d foo "x2"
--- a/tests/test-rename-after-merge.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rename-after-merge.t	Tue Jan 19 21:48:43 2021 +0530
@@ -124,10 +124,10 @@
 
   $ hg copy --forget --at-rev . b2
   abort: cannot mark/unmark copy in merge commit
-  [255]
+  [10]
 
   $ hg copy --after --at-rev . b1 b2
   abort: cannot mark/unmark copy in merge commit
-  [255]
+  [10]
 
   $ cd ..
--- a/tests/test-rename-merge1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rename-merge1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -216,8 +216,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 5151c134577e, local: 07fcbc9a74ed+, remote: f21419739508
+  starting 4 threads for background file closing (?)
    preserving z for resolve of z
-  starting 4 threads for background file closing (?)
    z: both renamed from y -> m (premerge)
   picked tool ':merge3' for z (binary False symlink False changedelete False)
   merging z
--- a/tests/test-rename-merge2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rename-merge2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -85,11 +85,10 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: e300d1c794ec+, remote: 4ce40f5aca24
+  starting 4 threads for background file closing (?)
    preserving a for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: remote copied from a -> m (premerge)
-  starting 4 threads for background file closing (?)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
   my b@e300d1c794ec+ other b@4ce40f5aca24 ancestor a@924404dff337
@@ -165,10 +164,10 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: e300d1c794ec+, remote: bdb19105162a
+  starting 4 threads for background file closing (?)
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-  starting 4 threads for background file closing (?)
    b: remote moved from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
@@ -204,9 +203,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 02963e448370+, remote: f4db7e329e71
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: local copied/moved from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
@@ -275,8 +274,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 97c705ade336
+  starting 4 threads for background file closing (?)
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    rev: versions differ -> m (premerge)
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
@@ -341,8 +340,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 02963e448370+, remote: 97c705ade336
+  starting 4 threads for background file closing (?)
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    rev: versions differ -> m (premerge)
   picked tool '* ../merge' for rev (binary False symlink False changedelete False) (glob)
   merging rev
@@ -372,9 +371,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both renamed from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -454,9 +453,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both created -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -496,9 +495,9 @@
    ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
    a: other deleted -> r
   removing a
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both created -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -574,9 +573,9 @@
    ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a
    a: other deleted -> r
   removing a
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both created -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -650,11 +649,10 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both renamed from a -> m (premerge)
-  starting 4 threads for background file closing (?)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
   my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor a@924404dff337
@@ -687,9 +685,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 02963e448370+, remote: 8dbce441892a
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both renamed from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -722,9 +720,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 0b76e65c8289+, remote: bdb19105162a
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: both renamed from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b
@@ -760,10 +758,10 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: e300d1c794ec+, remote: 49b6d8032493
+  starting 4 threads for background file closing (?)
    preserving a for resolve of b
    preserving rev for resolve of rev
   removing a
-  starting 4 threads for background file closing (?)
    b: remote moved from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging a and b to b
@@ -803,9 +801,9 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 924404dff337, local: 62e7bf090eba+, remote: f4db7e329e71
+  starting 4 threads for background file closing (?)
    preserving b for resolve of b
    preserving rev for resolve of rev
-  starting 4 threads for background file closing (?)
    b: local copied/moved from a -> m (premerge)
   picked tool '* ../merge' for b (binary False symlink False changedelete False) (glob)
   merging b and a to b
--- a/tests/test-rename-rev.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rename-rev.t	Tue Jan 19 21:48:43 2021 +0530
@@ -23,16 +23,16 @@
 # Errors out without --after for now
   $ hg cp --at-rev . d1/b d1/d
   abort: --at-rev requires --after
-  [255]
+  [10]
 # Errors out with non-existent source
   $ hg cp -A --at-rev . d1/non-existent d1/d
   d1/non-existent: no such file in rev 55d1fd85ef0a
   abort: no files to copy
-  [255]
+  [10]
 # Errors out with non-existent destination
   $ hg cp -A --at-rev . d1/b d1/non-existent
   abort: d1/non-existent: copy destination does not exist in 8a9d70fa20c9
-  [255]
+  [10]
 # Successful invocation
   $ hg cp -A --at-rev . d1/b d1/d
   saved backup bundle to $TESTTMP/.hg/strip-backup/8a9d70fa20c9-973ae357-copy.hg
@@ -103,5 +103,5 @@
   created new head
   $ hg cp -A --at-rev . d1 d3
   abort: d3: --at-rev does not support a directory as destination
-  [255]
+  [10]
 
--- a/tests/test-rename.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rename.t	Tue Jan 19 21:48:43 2021 +0530
@@ -13,7 +13,7 @@
   $ hg rename d1/d11/a1 d2/c
   $ hg --config ui.portablefilenames=abort rename d1/a d1/con.xml
   abort: filename contains 'con', which is reserved on Windows: d1/con.xml
-  [255]
+  [10]
   $ hg sum
   parent: 0:9b4b6e7b2c26 tip
    1
@@ -295,7 +295,7 @@
 
   $ hg rename d1/a dx/
   abort: destination dx/ is not a directory
-  [255]
+  [10]
   $ hg status -C
   $ hg update -C
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -304,7 +304,7 @@
 
   $ hg rename 'glob:d1/**' dx
   abort: with multiple sources, destination must be an existing directory
-  [255]
+  [10]
 
 move every file under d1 to d2/d21
 
--- a/tests/test-repo-compengines.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-repo-compengines.t	Tue Jan 19 21:48:43 2021 +0530
@@ -37,12 +37,33 @@
   $ cd unknownrequirement
   $ echo exp-compression-unknown >> .hg/requires
   $ hg log
-  abort: repository requires features unknown to this Mercurial: exp-compression-unknown!
+  abort: repository requires features unknown to this Mercurial: exp-compression-unknown
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
   $ cd ..
 
+Specifying a new format.compression on an existing repo won't introduce data
+with that engine or a requirement
+
+  $ cd default
+  $ touch bar
+  $ hg --config format.revlog-compression=none -q commit -A -m 'add bar with a lot of repeated repeated repeated text'
+
+  $ cat .hg/requires
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+  testonly-simplestore (reposimplestore !)
+
+  $ hg debugrevlog -c | grep 0x78
+      0x78 (x)  :   2 (100.00%)
+      0x78 (x)  : 199 (100.00%)
+  $ cd ..
+
 #if zstd
 
   $ hg --config format.revlog-compression=zstd init zstd
@@ -66,25 +87,6 @@
 
   $ cd ..
 
-Specifying a new format.compression on an existing repo won't introduce data
-with that engine or a requirement
-
-  $ cd default
-  $ touch bar
-  $ hg --config format.revlog-compression=zstd -q commit -A -m 'add bar with a lot of repeated repeated repeated text'
-
-  $ cat .hg/requires
-  dotencode
-  fncache
-  generaldelta
-  revlogv1
-  sparserevlog
-  store
-  testonly-simplestore (reposimplestore !)
-
-  $ hg debugrevlog -c | grep 0x78
-      0x78 (x)  :   2 (100.00%)
-      0x78 (x)  : 199 (100.00%)
 
 #endif
 
@@ -116,10 +118,12 @@
   > done
 
   $ $RUNTESTDIR/f -s */.hg/store/data/*
-  default/.hg/store/data/foo.i: size=64 (pure !)
+  default/.hg/store/data/bar.i: size=64
+  default/.hg/store/data/foo.i: size=64
   zlib-level-1/.hg/store/data/a.i: size=4146
   zlib-level-9/.hg/store/data/a.i: size=4138
   zlib-level-default/.hg/store/data/a.i: size=4138
+  zstd/.hg/store/data/foo.i: size=64 (zstd !)
 
 Test error cases
 
@@ -129,9 +133,9 @@
   > revlog.zlib.level=foobar
   > EOF
   $ commitone zlib-level-invalid
-  abort: storage.revlog.zlib.level is not a valid integer ('foobar')
-  abort: storage.revlog.zlib.level is not a valid integer ('foobar')
-  [255]
+  config error: storage.revlog.zlib.level is not a valid integer ('foobar')
+  config error: storage.revlog.zlib.level is not a valid integer ('foobar')
+  [30]
 
   $ hg init zlib-level-out-of-range
   $ cat << EOF >> zlib-level-out-of-range/.hg/hgrc
@@ -144,6 +148,41 @@
   abort: invalid value for `storage.revlog.zlib.level` config: 42
   [255]
 
+checking details of none compression
+====================================
+
+  $ hg init none-compression --config format.revlog-compression=none
+
+  $ commitone() {
+  >    repo=$1
+  >    cp $RUNTESTDIR/bundles/issue4438-r1.hg $repo/a
+  >    hg -R $repo add $repo/a
+  >    hg -R $repo commit -m some-commit
+  > }
+
+  $ commitone none-compression
+
+  $ hg log -R none-compression
+  changeset:   0:68b53da39cd8
+  tag:         tip
+  user:        test
+  date:        Thu Jan 01 00:00:00 1970 +0000
+  summary:     some-commit
+  
+
+  $ cat none-compression/.hg/requires
+  dotencode
+  exp-compression-none
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+  testonly-simplestore (reposimplestore !)
+
+  $ $RUNTESTDIR/f -s none-compression/.hg/store/data/*
+  none-compression/.hg/store/data/a.i: size=4216
+
 #if zstd
 
 checking zstd options
@@ -186,9 +225,9 @@
   > revlog.zstd.level=foobar
   > EOF
   $ commitone zstd-level-invalid
-  abort: storage.revlog.zstd.level is not a valid integer ('foobar')
-  abort: storage.revlog.zstd.level is not a valid integer ('foobar')
-  [255]
+  config error: storage.revlog.zstd.level is not a valid integer ('foobar')
+  config error: storage.revlog.zstd.level is not a valid integer ('foobar')
+  [30]
 
   $ hg init zstd-level-out-of-range --config format.revlog-compression=zstd
   $ cat << EOF >> zstd-level-out-of-range/.hg/hgrc
--- a/tests/test-requires.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-requires.t	Tue Jan 19 21:48:43 2021 +0530
@@ -5,16 +5,16 @@
   $ hg commit -m test
   $ rm .hg/requires
   $ hg tip
-  abort: unknown version (2) in revlog 00changelog.i!
-  [255]
+  abort: unknown version (2) in revlog 00changelog.i
+  [50]
   $ echo indoor-pool > .hg/requires
   $ hg tip
-  abort: repository requires features unknown to this Mercurial: indoor-pool!
+  abort: repository requires features unknown to this Mercurial: indoor-pool
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ echo outdoor-pool >> .hg/requires
   $ hg tip
-  abort: repository requires features unknown to this Mercurial: indoor-pool outdoor-pool!
+  abort: repository requires features unknown to this Mercurial: indoor-pool outdoor-pool
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ cd ..
@@ -71,7 +71,7 @@
   [255]
 
   $ hg clone supported clone-dst
-  abort: repository requires features unknown to this Mercurial: featuresetup-test!
+  abort: repository requires features unknown to this Mercurial: featuresetup-test
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ hg clone --pull supported clone-dst
--- a/tests/test-resolve.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-resolve.t	Tue Jan 19 21:48:43 2021 +0530
@@ -132,13 +132,13 @@
 
   $ hg resolve --all
   abort: resolve command not applicable when not merging
-  [255]
+  [20]
 
 resolve -m should abort when no merge in progress
 
   $ hg resolve -m
   abort: resolve command not applicable when not merging
-  [255]
+  [20]
 
 can not update or merge when there are unresolved conflicts
 
@@ -191,7 +191,7 @@
   $ hg resolve
   abort: no files or directories specified
   (use --all to re-merge all unresolved files)
-  [255]
+  [10]
 
 resolve --all should re-merge all unresolved files
   $ hg resolve --all
@@ -332,7 +332,7 @@
     file2
   abort: conflict markers detected
   (use --all to mark anyway)
-  [255]
+  [20]
   $ hg resolve -l
   U file1
   U file2
@@ -406,7 +406,7 @@
   R file2
   $ hg resolve --mark --re-merge
   abort: too many actions specified
-  [255]
+  [10]
   $ hg resolve --re-merge --all
   merging file1
   warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
@@ -416,7 +416,7 @@
   $ hg resolve --config commands.resolve.explicit-re-merge=1 --all
   abort: no action specified
   (use --mark, --unmark, --list or --re-merge)
-  [255]
+  [10]
   $ hg resolve --config commands.resolve.explicit-re-merge=1 --re-merge --all
   merging file1
   warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
@@ -464,7 +464,7 @@
   warning: conflicts while merging emp2! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging emp3! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
 Test when commands.resolve.confirm config option is not set:
 ===========================================================
@@ -489,13 +489,13 @@
   $ hg resolve
   abort: no files or directories specified
   (use --all to re-merge all unresolved files)
-  [255]
+  [10]
   $ hg resolve --all << EOF
   > n
   > EOF
   re-merge all unresolved files (yn)? n
   abort: user quit
-  [255]
+  [250]
 
   $ hg resolve --all << EOF
   > y
@@ -523,7 +523,7 @@
   > EOF
   mark all unresolved files as resolved (yn)? n
   abort: user quit
-  [255]
+  [250]
 
   $ hg resolve -m << EOF
   > y
@@ -551,7 +551,7 @@
   > EOF
   mark all resolved files as unresolved (yn)? n
   abort: user quit
-  [255]
+  [250]
 
   $ hg resolve -m << EOF
   > y
--- a/tests/test-revert-interactive-curses.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revert-interactive-curses.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#require tic
+#require curses
 
 Revert interactive tests with the Curses interface
 
--- a/tests/test-revert-interactive.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revert-interactive.t	Tue Jan 19 21:48:43 2021 +0530
@@ -153,7 +153,7 @@
   (enter ? for help) [Ynesfdaq?] q
   
   abort: user quit
-  [255]
+  [250]
   $ ls folder1/
   g
 
--- a/tests/test-revert.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revert.t	Tue Jan 19 21:48:43 2021 +0530
@@ -11,7 +11,7 @@
   $ hg revert
   abort: no files or directories specified
   (use --all to revert all files)
-  [255]
+  [10]
   $ hg revert --all
 
 Introduce some changes and revert them
@@ -250,7 +250,7 @@
   $ hg revert -rtip
   abort: no files or directories specified
   (use --all to revert all files, or 'hg update 1' to update)
-  [255]
+  [10]
 
 call `hg revert` with -I
 ---------------------------
--- a/tests/test-revisions.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revisions.t	Tue Jan 19 21:48:43 2021 +0530
@@ -36,8 +36,8 @@
   1:9
 7 was ambiguous and still is
   $ hg l -r 7
-  abort: 00changelog.i@7: ambiguous identifier!
-  [255]
+  abort: ambiguous revision identifier: 7
+  [10]
 7b is no longer ambiguous
   $ hg l -r 7b
   3:7b
--- a/tests/test-revlog-raw.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revlog-raw.py	Tue Jan 19 21:48:43 2021 +0530
@@ -6,9 +6,9 @@
 import hashlib
 import sys
 
+from mercurial.node import nullid
 from mercurial import (
     encoding,
-    node,
     revlog,
     transaction,
     vfs,
@@ -87,13 +87,13 @@
 
 
 def appendrev(rlog, text, tr, isext=False, isdelta=True):
-    '''Append a revision. If isext is True, set the EXTSTORED flag so flag
+    """Append a revision. If isext is True, set the EXTSTORED flag so flag
     processor will be used (and rawtext is different from text). If isdelta is
     True, force the revision to be a delta, otherwise it's full text.
-    '''
+    """
     nextrev = len(rlog)
     p1 = rlog.node(nextrev - 1)
-    p2 = node.nullid
+    p2 = nullid
     if isext:
         flags = revlog.REVIDX_EXTSTORED
     else:
@@ -111,7 +111,7 @@
 
 
 def addgroupcopy(rlog, tr, destname=b'_destrevlog.i', optimaldelta=True):
-    '''Copy revlog to destname using revlog.addgroup. Return the copied revlog.
+    """Copy revlog to destname using revlog.addgroup. Return the copied revlog.
 
     This emulates push or pull. They use changegroup. Changegroup requires
     repo to work. We don't have a repo, so a dummy changegroup is used.
@@ -122,12 +122,12 @@
 
     This exercises some revlog.addgroup (and revlog._addrevision(text=None))
     code path, which is not covered by "appendrev" alone.
-    '''
+    """
 
     class dummychangegroup(object):
         @staticmethod
         def deltachunk(pnode):
-            pnode = pnode or node.nullid
+            pnode = pnode or nullid
             parentrev = rlog.rev(pnode)
             r = parentrev + 1
             if r >= len(rlog):
@@ -142,7 +142,7 @@
             return {
                 b'node': rlog.node(r),
                 b'p1': pnode,
-                b'p2': node.nullid,
+                b'p2': nullid,
                 b'cs': rlog.node(rlog.linkrev(r)),
                 b'flags': rlog.flags(r),
                 b'deltabase': rlog.node(deltaparent),
@@ -174,14 +174,14 @@
 
 
 def lowlevelcopy(rlog, tr, destname=b'_destrevlog.i'):
-    '''Like addgroupcopy, but use the low level revlog._addrevision directly.
+    """Like addgroupcopy, but use the low level revlog._addrevision directly.
 
     It exercises some code paths that are hard to reach easily otherwise.
-    '''
+    """
     dlog = newrevlog(destname, recreate=True)
     for r in rlog:
         p1 = rlog.node(r - 1)
-        p2 = node.nullid
+        p2 = nullid
         if r == 0 or (rlog.flags(r) & revlog.REVIDX_EXTSTORED):
             text = rlog.rawdata(r)
             cachedelta = None
@@ -218,13 +218,13 @@
 
 
 def genbits(n):
-    '''Given a number n, generate (2 ** (n * 2) + 1) numbers in range(2 ** n).
+    """Given a number n, generate (2 ** (n * 2) + 1) numbers in range(2 ** n).
     i.e. the generated numbers have a width of n bits.
 
     The combination of two adjacent numbers will cover all possible cases.
     That is to say, given any x, y where both x, and y are in range(2 ** n),
     there is an x followed immediately by y in the generated sequence.
-    '''
+    """
     m = 2 ** n
 
     # Gray Code. See https://en.wikipedia.org/wiki/Gray_code
@@ -255,7 +255,7 @@
 
 
 def writecases(rlog, tr):
-    '''Write some revisions interested to the test.
+    """Write some revisions interested to the test.
 
     The test is interested in 3 properties of a revision:
 
@@ -281,7 +281,7 @@
     mentioned above.
 
     Return expected [(text, rawtext)].
-    '''
+    """
     result = []
     for i, x in enumerate(genbits(3)):
         isdelta, isext, isempty = bool(x & 1), bool(x & 2), bool(x & 4)
--- a/tests/test-revlog-v2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revlog-v2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -6,7 +6,7 @@
   $ cd invalidreq
   $ echo exp-revlogv2.unknown >> .hg/requires
   $ hg log
-  abort: repository requires features unknown to this Mercurial: exp-revlogv2.unknown!
+  abort: repository requires features unknown to this Mercurial: exp-revlogv2.unknown
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
   $ cd ..
@@ -35,8 +35,8 @@
   ...     fh.write(b'\xff\x00\xde\xad') and None
 
   $ hg log
-  abort: unknown flags (0xff00) in version 57005 revlog 00changelog.i!
-  [255]
+  abort: unknown flags (0xff00) in version 57005 revlog 00changelog.i
+  [50]
 
   $ cd ..
 
--- a/tests/test-revlog.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revlog.t	Tue Jan 19 21:48:43 2021 +0530
@@ -7,8 +7,8 @@
   ...     fh.write(b'\x00\x01\x00\x00') and None
 
   $ hg log
-  abort: unknown flags (0x01) in version 0 revlog 00changelog.i!
-  [255]
+  abort: unknown flags (0x01) in version 0 revlog 00changelog.i
+  [50]
 
 Unknown flags on revlog version 1 are rejected
 
@@ -16,8 +16,8 @@
   ...     fh.write(b'\x00\x04\x00\x01') and None
 
   $ hg log
-  abort: unknown flags (0x04) in version 1 revlog 00changelog.i!
-  [255]
+  abort: unknown flags (0x04) in version 1 revlog 00changelog.i
+  [50]
 
 Unknown version is rejected
 
@@ -25,8 +25,8 @@
   ...     fh.write(b'\x00\x00\x00\x02') and None
 
   $ hg log
-  abort: unknown version (2) in revlog 00changelog.i!
-  [255]
+  abort: unknown version (2) in revlog 00changelog.i
+  [50]
 
   $ cd ..
 
--- a/tests/test-revset-legacy-lookup.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revset-legacy-lookup.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,7 +1,7 @@
 
   $ cat >> $HGRCPATH << EOF
-  > [ui]
-  > logtemplate="{rev}:{node|short} {desc} [{tags}]\n"
+  > [command-templates]
+  > log="{rev}:{node|short} {desc} [{tags}]\n"
   > EOF
 
   $ hg init legacy-lookup
@@ -95,10 +95,10 @@
   $ hg log -r '"foo"'
   2:fb616635b18f Added tag rev(0) for changeset 43114e71eddd ["foo"]
   $ hg log -r '("foo")'
-  abort: unknown revision 'foo'!
+  abort: unknown revision 'foo'
   [255]
   $ hg log -r 'revset("foo")'
-  abort: unknown revision 'foo'!
+  abort: unknown revision 'foo'
   [255]
   $ hg log -r '("\"foo\"")'
   2:fb616635b18f Added tag rev(0) for changeset 43114e71eddd ["foo"]
@@ -125,10 +125,10 @@
   $ hg log -r 'foo+bar'
   4:bbf52b87b370 Added tag foo-bar for changeset a50aae922707 [foo+bar]
   $ hg log -r '(foo+bar)'
-  abort: unknown revision 'foo'!
+  abort: unknown revision 'foo'
   [255]
   $ hg log -r 'revset(foo+bar)'
-  abort: unknown revision 'foo'!
+  abort: unknown revision 'foo'
   [255]
   $ hg log -r '"foo+bar"'
   4:bbf52b87b370 Added tag foo-bar for changeset a50aae922707 [foo+bar]
@@ -172,10 +172,10 @@
   6:db72e24fe069 Added tag 1.2 for changeset ff42fde8edbb [release_4.1(candidate1)]
   $ hg log -r '(release_4.1(candidate1))'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
   $ hg log -r 'revset(release_4.1(candidate1))'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
   $ hg log -r '"release_4.1(candidate1)"'
   6:db72e24fe069 Added tag 1.2 for changeset ff42fde8edbb [release_4.1(candidate1)]
   $ hg log -r '("release_4.1(candidate1)")'
@@ -190,7 +190,7 @@
   6:db72e24fe069 Added tag 1.2 for changeset ff42fde8edbb [release_4.1(candidate1)]
   $ hg log -r '::release_4.1(candidate1)'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
 
 Test tag with parenthesis and other function like char
 
@@ -200,10 +200,10 @@
   7:b29b25d7d687 Added tag release_4.1(candidate1) for changeset db72e24fe069 [release_4.1(arch=x86,arm)]
   $ hg log -r '(release_4.1(arch=x86,arm))'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
   $ hg log -r 'revset(release_4.1(arch=x86,arm))'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
   $ hg log -r '"release_4.1(arch=x86,arm)"'
   7:b29b25d7d687 Added tag release_4.1(candidate1) for changeset db72e24fe069 [release_4.1(arch=x86,arm)]
   $ hg log -r '("release_4.1(arch=x86,arm)")'
@@ -219,7 +219,7 @@
   7:b29b25d7d687 Added tag release_4.1(candidate1) for changeset db72e24fe069 [release_4.1(arch=x86,arm)]
   $ hg log -r '::release_4.1(arch=x86,arm)'
   hg: parse error: unknown identifier: release_4.1
-  [255]
+  [10]
 
 Test tag conflicting with revset function
 
@@ -229,10 +229,10 @@
   8:6b2e2d4ea455 Added tag release_4.1(arch=x86,arm) for changeset b29b25d7d687 [secret(team=foo,project=bar)]
   $ hg log -r '(secret(team=foo,project=bar))'
   hg: parse error: secret takes no arguments
-  [255]
+  [10]
   $ hg log -r 'revset(secret(team=foo,project=bar))'
   hg: parse error: secret takes no arguments
-  [255]
+  [10]
   $ hg log -r '"secret(team=foo,project=bar)"'
   8:6b2e2d4ea455 Added tag release_4.1(arch=x86,arm) for changeset b29b25d7d687 [secret(team=foo,project=bar)]
   $ hg log -r '("secret(team=foo,project=bar)")'
@@ -249,7 +249,7 @@
   8:6b2e2d4ea455 Added tag release_4.1(arch=x86,arm) for changeset b29b25d7d687 [secret(team=foo,project=bar)]
   $ hg log -r '::secret(team=foo,project=bar)'
   hg: parse error: secret takes no arguments
-  [255]
+  [10]
 
 Test tag with space
 
@@ -261,12 +261,12 @@
   hg: parse error at 4: unexpected token: symbol
   ((my little version)
        ^ here)
-  [255]
+  [10]
   $ hg log -r 'revset(my little version)'
   hg: parse error at 10: unexpected token: symbol
   (revset(my little version)
              ^ here)
-  [255]
+  [10]
   $ hg log -r '"my little version"'
   9:269192bf8fc3 Added tag secret(team=foo,project=bar) for changeset 6b2e2d4ea455 [my little version]
   $ hg log -r '("my little version")'
@@ -286,4 +286,4 @@
   hg: parse error at 5: invalid token
   (::my little version
         ^ here)
-  [255]
+  [10]
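
Besides converting the parse-error exits from 255 to 10, the first hunk of this file moves the log template from ui.logtemplate to the new command-templates section. A small sketch of the new spelling, reusing the template string from the test:

  # Configure the log template under [command-templates] instead of [ui].
  cat >> "$HGRCPATH" << 'EOF'
  [command-templates]
  log = "{rev}:{node|short} {desc} [{tags}]\n"
  EOF
  hg log
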
--- a/tests/test-revset-outgoing.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revset-outgoing.t	Tue Jan 19 21:48:43 2021 +0530
@@ -105,11 +105,11 @@
   green = ../a#default
 
   $ hg tout green
-  abort: repository green does not exist!
+  abort: repository green does not exist
   [255]
 
   $ hg tlog -r 'outgoing("green")'
-  abort: repository green does not exist!
+  abort: repository green does not exist
   [255]
 
   $ cd ..
--- a/tests/test-revset.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revset.t	Tue Jan 19 21:48:43 2021 +0530
@@ -305,7 +305,7 @@
       (symbol 'c'))
     (negate
       (symbol 'a')))
-  abort: unknown revision '-a'!
+  abort: unknown revision '-a'
   [255]
   $ try é
   (symbol '\xc3\xa9')
@@ -401,29 +401,29 @@
   hg: parse error at 10: unexpected token: symbol
   (date(this is a test)
              ^ here)
-  [255]
+  [10]
   $ log 'date()'
   hg: parse error: date requires a string
-  [255]
+  [10]
   $ log 'date'
-  abort: unknown revision 'date'!
+  abort: unknown revision 'date'
   [255]
   $ log 'date('
   hg: parse error at 5: not a prefix: end
   (date(
         ^ here)
-  [255]
+  [10]
   $ log 'date("\xy")'
   hg: parse error: invalid \x escape* (glob)
-  [255]
+  [10]
   $ log 'date(tip)'
   hg: parse error: invalid date: 'tip'
-  [255]
+  [10]
   $ log '0:date'
-  abort: unknown revision 'date'!
+  abort: unknown revision 'date'
   [255]
   $ log '::"date"'
-  abort: unknown revision 'date'!
+  abort: unknown revision 'date'
   [255]
   $ hg book date -r 4
   $ log '0:date'
@@ -450,7 +450,7 @@
 
   $ log '"date"(2005)'
   hg: parse error: not a symbol
-  [255]
+  [10]
 
 keyword arguments
 
@@ -459,25 +459,25 @@
 
   $ log 'extra(branch, a, b)'
   hg: parse error: extra takes at most 2 positional arguments
-  [255]
+  [10]
   $ log 'extra(a, label=b)'
   hg: parse error: extra got multiple values for keyword argument 'label'
-  [255]
+  [10]
   $ log 'extra(label=branch, default)'
   hg: parse error: extra got an invalid argument
-  [255]
+  [10]
   $ log 'extra(branch, foo+bar=baz)'
   hg: parse error: extra got an invalid argument
-  [255]
+  [10]
   $ log 'extra(unknown=branch)'
   hg: parse error: extra got an unexpected keyword argument 'unknown'
-  [255]
+  [10]
   $ log 'extra((), x)'
   hg: parse error: first argument to extra must be a string
-  [255]
+  [10]
   $ log 'extra(label=x, ())'
   hg: parse error: extra got an invalid argument
-  [255]
+  [10]
 
   $ try 'foo=bar|baz'
   (keyvalue
@@ -487,7 +487,7 @@
         (symbol 'bar')
         (symbol 'baz'))))
   hg: parse error: can't use a key-value pair in this context
-  [255]
+  [10]
 
  right-hand side should be optimized recursively
 
@@ -506,7 +506,7 @@
       (symbol '_notpublic')
       None))
   hg: parse error: can't use a key-value pair in this context
-  [255]
+  [10]
 
 relation-subscript operator has the highest binding strength (as function call):
 
@@ -565,7 +565,7 @@
     (symbol 'tip')
     (symbol '0'))
   hg: parse error: can't use a subscript in this context
-  [255]
+  [10]
 
   $ hg debugrevspec -p analyzed 'tip#rel[0]'
   * analyzed:
@@ -574,7 +574,7 @@
     (symbol 'rel')
     (symbol '0'))
   hg: parse error: unknown identifier: rel
-  [255]
+  [10]
 
   $ hg debugrevspec -p analyzed '(tip#rel)[0]'
   * analyzed:
@@ -584,7 +584,7 @@
       (symbol 'rel'))
     (symbol '0'))
   hg: parse error: can't use a subscript in this context
-  [255]
+  [10]
 
   $ hg debugrevspec -p analyzed 'tip#rel[0][1]'
   * analyzed:
@@ -595,7 +595,7 @@
       (symbol '0'))
     (symbol '1'))
   hg: parse error: can't use a subscript in this context
-  [255]
+  [10]
 
   $ hg debugrevspec -p analyzed 'tip#rel0#rel1[1]'
   * analyzed:
@@ -606,7 +606,7 @@
     (symbol 'rel1')
     (symbol '1'))
   hg: parse error: unknown identifier: rel1
-  [255]
+  [10]
 
   $ hg debugrevspec -p analyzed 'tip#rel0[0]#rel1[1]'
   * analyzed:
@@ -618,7 +618,7 @@
     (symbol 'rel1')
     (symbol '1'))
   hg: parse error: unknown identifier: rel1
-  [255]
+  [10]
 
 parse errors of relation, subscript and relation-subscript operators:
 
@@ -626,48 +626,48 @@
   hg: parse error at 0: not a prefix: [
   ([0]
    ^ here)
-  [255]
+  [10]
   $ hg debugrevspec '.#'
   hg: parse error at 2: not a prefix: end
   (.#
      ^ here)
-  [255]
+  [10]
   $ hg debugrevspec '#rel'
   hg: parse error at 0: not a prefix: #
   (#rel
    ^ here)
-  [255]
+  [10]
   $ hg debugrevspec '.#rel[0'
   hg: parse error at 7: unexpected token: end
   (.#rel[0
           ^ here)
-  [255]
+  [10]
   $ hg debugrevspec '.]'
   hg: parse error at 1: invalid token
   (.]
     ^ here)
-  [255]
+  [10]
 
   $ hg debugrevspec '.#generations[a]'
   hg: parse error: relation subscript must be an integer or a range
-  [255]
+  [10]
   $ hg debugrevspec '.#generations[1-2]'
   hg: parse error: relation subscript must be an integer or a range
-  [255]
+  [10]
   $ hg debugrevspec '.#generations[foo:bar]'
   hg: parse error: relation subscript bounds must be integers
-  [255]
+  [10]
 
 suggested relations
 
   $ hg debugrevspec '.#generafions[0]'
   hg: parse error: unknown identifier: generafions
   (did you mean generations?)
-  [255]
+  [10]
 
   $ hg debugrevspec '.#f[0]'
   hg: parse error: unknown identifier: f
-  [255]
+  [10]
 
 parsed tree at stages:
 
@@ -686,7 +686,7 @@
   * optimized:
   None
   hg: parse error: missing argument
-  [255]
+  [10]
 
   $ hg debugrevspec --no-optimized -p all '()'
   * parsed:
@@ -701,7 +701,7 @@
   * analyzed:
   None
   hg: parse error: missing argument
-  [255]
+  [10]
 
   $ hg debugrevspec -p parsed -p analyzed -p optimized '(0|1)-1'
   * parsed:
@@ -899,7 +899,7 @@
       (rangepre
         (symbol '2'))))
   hg: parse error: ^ expects a number 0, 1, or 2
-  [255]
+  [10]
 
  x^:y should be resolved recursively
 
@@ -1005,7 +1005,7 @@
     (rangepre
       (symbol '2')))
   hg: parse error: ^ expects a number 0, 1, or 2
-  [255]
+  [10]
 
 '::' itself isn't a valid expression
 
@@ -1013,7 +1013,7 @@
   (dagrangeall
     None)
   hg: parse error: can't use '::' in this context
-  [255]
+  [10]
 
 ancestor can accept 0 or more arguments
 
@@ -1170,10 +1170,10 @@
 
   $ log 'ancestors(., depth=-1)'
   hg: parse error: negative depth
-  [255]
+  [10]
   $ log 'ancestors(., depth=foo)'
   hg: parse error: ancestors expects an integer depth
-  [255]
+  [10]
 
 test descendants
 
@@ -1447,10 +1447,10 @@
     (symbol 'grep')
     (string '('))
   hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \),.*) (re)
-  [255]
+  [10]
   $ log 'desc("re:(")'
   hg: parse error: invalid regular expression: (unbalanced parenthesis|missing \),.*) (re)
-  [255]
+  [10]
   $ try 'grep("\bissue\d+")'
   (func
     (symbol 'grep')
@@ -1472,7 +1472,7 @@
   hg: parse error at 7: unterminated string
   (grep(r"\")
           ^ here)
-  [255]
+  [10]
   $ log 'head()'
   0
   1
@@ -1530,15 +1530,15 @@
   $ log 'limit(author("re:bob|test"), offset=10)'
   $ log 'limit(all(), 1, -1)'
   hg: parse error: negative offset
-  [255]
+  [10]
   $ log 'limit(all(), -1)'
   hg: parse error: negative number to select
-  [255]
+  [10]
   $ log 'limit(all(), 0)'
 
   $ log 'last(all(), -1)'
   hg: parse error: negative number to select
-  [255]
+  [10]
   $ log 'last(all(), 0)'
   $ log 'last(all(), 1)'
   9
@@ -1861,7 +1861,7 @@
   $ log 'rev(10)'
   $ log 'rev(tip)'
   hg: parse error: rev expects a number
-  [255]
+  [10]
 
 Test hexadecimal revision
   $ log 'id(2)'
@@ -1873,8 +1873,8 @@
   3
   $ hg log --template '{rev}\n' -r 'id(x)'
   $ hg log --template '{rev}\n' -r 'x'
-  abort: 00changelog.i@: ambiguous identifier!
-  [255]
+  abort: ambiguous revision identifier: x
+  [10]
   $ log 'id(23268)'
   4
   $ log 'id(2785f51eece)'
@@ -1965,7 +1965,7 @@
   $ hg debugrevspec 'wdir()^2'
   $ hg debugrevspec 'wdir()^3'
   hg: parse error: ^ expects a number 0, 1, or 2
-  [255]
+  [10]
 For tests consistency
   $ hg up 9
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -2040,14 +2040,14 @@
   obsoleted 1 changesets
 
   $ hg debugrevspec 'fff'
-  abort: 00changelog.i@fff: ambiguous identifier!
-  [255]
+  abort: ambiguous revision identifier: fff
+  [10]
   $ hg debugrevspec 'ffff'
-  abort: 00changelog.i@ffff: ambiguous identifier!
-  [255]
+  abort: ambiguous revision identifier: ffff
+  [10]
   $ hg debugrevspec 'fffb'
-  abort: 00changelog.i@fffb: ambiguous identifier!
-  [255]
+  abort: ambiguous revision identifier: fffb
+  [10]
 BROKEN should be '2' (node lookup uses unfiltered repo)
   $ hg debugrevspec 'id(fffb)'
 BROKEN should be '2' (node lookup uses unfiltered repo)
@@ -2532,10 +2532,10 @@
 
   $ log '0:2 & sort()'
   hg: parse error: sort requires one or two arguments
-  [255]
+  [10]
   $ log '0:2 & sort(all(), -invalid)'
   hg: parse error: unknown sort key '-invalid'
-  [255]
+  [10]
 
  for 'A & f(B)', 'B' should not be affected by the order of 'A':
 
@@ -2762,7 +2762,7 @@
 
   $ log 'sort(all(), -invalid)'
   hg: parse error: unknown sort key '-invalid'
-  [255]
+  [10]
 
   $ cd ..
 
@@ -2980,11 +2980,11 @@
 
   $ hg log -r 'sort(all(), "topo user")'
   hg: parse error: topo sort order cannot be combined with other sort keys
-  [255]
+  [10]
 
   $ hg log -r 'sort(all(), user, topo.firstbranch=book1)'
   hg: parse error: topo.firstbranch can only be used when using the topo sort key
-  [255]
+  [10]
 
 topo.firstbranch should accept any kind of expressions:
 
@@ -3003,7 +3003,7 @@
   hg: parse error at 9: not a prefix: end
   ( . + .^ +
             ^ here)
-  [255]
+  [10]
   $ hg debugrevspec -v 'revset(first(rev(0)))' -p all
   * parsed:
   (func
@@ -3052,10 +3052,10 @@
 abort if the revset doesn't expect given size
   $ log 'expectsize()'
   hg: parse error: invalid set of arguments
-  [255]
+  [10]
   $ log 'expectsize(0:2, a)'
   hg: parse error: expectsize requires a size range or a positive integer
-  [255]
+  [10]
   $ log 'expectsize(0:2, 3)'
   0
   1
@@ -3066,30 +3066,30 @@
   1
   0
   $ log 'expectsize(0:1, 1)'
-  abort: revset size mismatch. expected 1, got 2!
+  abort: revset size mismatch. expected 1, got 2
   [255]
   $ log 'expectsize(0:4, -1)'
   hg: parse error: negative size
-  [255]
+  [10]
   $ log 'expectsize(0:2, 2:4)'
   0
   1
   2
   $ log 'expectsize(0:1, 3:5)'
-  abort: revset size mismatch. expected between 3 and 5, got 2!
+  abort: revset size mismatch. expected between 3 and 5, got 2
   [255]
   $ log 'expectsize(0:1, -1:2)'
   hg: parse error: negative size
-  [255]
+  [10]
   $ log 'expectsize(0:1, 1:-2)'
   hg: parse error: negative size
-  [255]
+  [10]
   $ log 'expectsize(0:2, a:4)'
   hg: parse error: size range bounds must be integers
-  [255]
+  [10]
   $ log 'expectsize(0:2, 2:b)'
   hg: parse error: size range bounds must be integers
-  [255]
+  [10]
   $ log 'expectsize(0:2, 2:)'
   0
   1
@@ -3103,8 +3103,8 @@
   1
   2
   $ log 'expectsize(0:2, 4:)'
-  abort: revset size mismatch. expected between 4 and 11, got 3!
+  abort: revset size mismatch. expected between 4 and 11, got 3
   [255]
   $ log 'expectsize(0:2, :2)'
-  abort: revset size mismatch. expected between 0 and 2, got 3!
+  abort: revset size mismatch. expected between 0 and 2, got 3
   [255]
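
The pattern throughout this file: revset parse errors and ambiguous identifiers now exit with 10, while failures that only surface at evaluation time, such as an expectsize() mismatch, keep the generic 255. A short sketch distinguishing the two, reusing expressions from the hunks above (the exact error text is version dependent):

  # Parse error in the revset -> exit status 10.
  hg log -r 'date(' >/dev/null 2>&1
  echo "parse error status: $?"

  # The revset parses, but the size assertion fails at runtime -> exit status 255.
  hg log -r 'expectsize(0:1, 1)' >/dev/null 2>&1
  echo "expectsize mismatch status: $?"
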
--- a/tests/test-revset2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-revset2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -319,7 +319,7 @@
 test unknown revision in `_list`
 
   $ log '0|unknown'
-  abort: unknown revision 'unknown'!
+  abort: unknown revision 'unknown'
   [255]
 
 test integer range in `_list`
@@ -329,11 +329,11 @@
   0
 
   $ log '-10|-11'
-  abort: unknown revision '-11'!
+  abort: unknown revision '-11'
   [255]
 
   $ log '9|10'
-  abort: unknown revision '10'!
+  abort: unknown revision '10'
   [255]
 
 test '0000' != '0' in `_list`
@@ -346,7 +346,7 @@
   $ log '0,1'
   hg: parse error: can't use a list in this context
   (see 'hg help "revsets.x or y"')
-  [255]
+  [10]
   $ try '0,1,2'
   (list
     (symbol '0')
@@ -354,7 +354,7 @@
     (symbol '2'))
   hg: parse error: can't use a list in this context
   (see 'hg help "revsets.x or y"')
-  [255]
+  [10]
 
 test that chained `or` operations make balanced addsets
 
@@ -407,7 +407,7 @@
       (symbol '0')
       None))
   hg: parse error: missing argument
-  [255]
+  [10]
 
 test that chained `or` operations never eat up stack (issue4624)
 (uses `0:1` instead of `0` to avoid future optimization of trivial revisions)
@@ -510,7 +510,7 @@
       (symbol '1'))
     None)
   hg: parse error: missing argument
-  [255]
+  [10]
 
 optimization to only() works only if ancestors() takes only one argument
 
@@ -568,20 +568,20 @@
 
   $ log '"ancestors"(6) and not ancestors(4)'
   hg: parse error: not a symbol
-  [255]
+  [10]
 
   $ log 'ancestors(6) and not "ancestors"(4)'
   hg: parse error: not a symbol
-  [255]
+  [10]
 
 test empty string
 
   $ log ''
   hg: parse error: empty query
-  [255]
+  [10]
   $ log 'parents("")'
   hg: parse error: empty string is not a valid revision
-  [255]
+  [10]
 
 test empty revset
   $ hg log 'none()'
@@ -589,7 +589,7 @@
 we can use patterns when searching for tags
 
   $ log 'tag("1..*")'
-  abort: tag '1..*' does not exist!
+  abort: tag '1..*' does not exist
   [255]
   $ log 'tag("re:1..*")'
   6
@@ -600,16 +600,16 @@
   $ log 'tag("re:0..*")'
 
   $ log 'tag(unknown)'
-  abort: tag 'unknown' does not exist!
+  abort: tag 'unknown' does not exist
   [255]
   $ log 'tag("re:unknown")'
   $ log 'present(tag("unknown"))'
   $ log 'present(tag("re:unknown"))'
   $ log 'branch(unknown)'
-  abort: unknown revision 'unknown'!
+  abort: unknown revision 'unknown'
   [255]
   $ log 'branch("literal:unknown")'
-  abort: branch 'unknown' does not exist!
+  abort: branch 'unknown' does not exist
   [255]
   $ log 'branch("re:unknown")'
   $ log 'present(branch("unknown"))'
@@ -665,7 +665,7 @@
   1
 
   $ log 'named("unknown")'
-  abort: namespace 'unknown' does not exist!
+  abort: namespace 'unknown' does not exist
   [255]
   $ log 'named("re:unknown")'
   $ log 'present(named("unknown"))'
@@ -701,7 +701,7 @@
   hg: parse error at 2: invalid token
   (1 OR 2
      ^ here)
-  [255]
+  [10]
 
 or operator should preserve ordering:
   $ log 'reverse(2::4) or tip'
@@ -755,39 +755,39 @@
 
   $ log 'tip^foo'
   hg: parse error: ^ expects a number 0, 1, or 2
-  [255]
+  [10]
 
   $ log 'branchpoint()~-1'
-  abort: revision in set has more than one child!
+  abort: revision in set has more than one child
   [255]
 
 Bogus function gets suggestions
   $ log 'add()'
   hg: parse error: unknown identifier: add
   (did you mean adds?)
-  [255]
+  [10]
   $ log 'added()'
   hg: parse error: unknown identifier: added
   (did you mean adds?)
-  [255]
+  [10]
   $ log 'remo()'
   hg: parse error: unknown identifier: remo
   (did you mean one of remote, removes?)
-  [255]
+  [10]
   $ log 'babar()'
   hg: parse error: unknown identifier: babar
-  [255]
+  [10]
 
 Bogus function with a similar internal name doesn't suggest the internal name
   $ log 'matches()'
   hg: parse error: unknown identifier: matches
   (did you mean matching?)
-  [255]
+  [10]
 
 Undocumented functions aren't suggested as similar either
   $ log 'tagged2()'
   hg: parse error: unknown identifier: tagged2
-  [255]
+  [10]
 
 multiple revspecs
 
@@ -869,7 +869,7 @@
   $ export HGPLAIN
   $ try m
   (symbol 'm')
-  abort: unknown revision 'm'!
+  abort: unknown revision 'm'
   [255]
 
   $ HGPLAINEXCEPT=revsetalias
@@ -949,7 +949,7 @@
   $ try recurse1
   (symbol 'recurse1')
   hg: parse error: infinite expansion of revset alias "recurse1" detected
-  [255]
+  [10]
 
   $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc
   $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc
@@ -1060,7 +1060,7 @@
     (func
       (symbol 'max')
       (string '$1')))
-  abort: unknown revision '$1'!
+  abort: unknown revision '$1'
   [255]
 
 test scope of alias expansion: 'universe' is expanded prior to 'shadowall(0)',
@@ -1185,13 +1185,13 @@
     (symbol 'rs')
     None)
   hg: parse error: invalid number of arguments: 0
-  [255]
+  [10]
   $ try 'rs(2)'
   (func
     (symbol 'rs')
     (symbol '2'))
   hg: parse error: invalid number of arguments: 1
-  [255]
+  [10]
   $ try 'rs(2, data, 7)'
   (func
     (symbol 'rs')
@@ -1200,7 +1200,7 @@
       (symbol 'data')
       (symbol '7')))
   hg: parse error: invalid number of arguments: 3
-  [255]
+  [10]
   $ try 'rs4(2 or 3, x, x, date)'
   (func
     (symbol 'rs4')
@@ -1575,7 +1575,7 @@
   hg: parse error at 3: syntax error in revset 'foo\\'
   (foo\\
       ^ here)
-  [255]
+  [10]
 
   $ cd ..
 
@@ -1603,7 +1603,7 @@
   $ hg debugrevspec "custom1()"
   *** failed to import extension custompredicate from $TESTTMP/custompredicate.py: intentional failure of loading extension
   hg: parse error: unknown identifier: custom1
-  [255]
+  [10]
 
 Test repo.anyrevs with customized revset overrides
 
--- a/tests/test-rhg.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rhg.t	Tue Jan 19 21:48:43 2021 +0530
@@ -2,8 +2,8 @@
 
 Define an rhg function that will only run if rhg exists
   $ rhg() {
-  > if [ -f "$RUNTESTDIR/../rust/target/debug/rhg" ]; then
-  >   "$RUNTESTDIR/../rust/target/debug/rhg" "$@"
+  > if [ -f "$RUNTESTDIR/../rust/target/release/rhg" ]; then
+  >   "$RUNTESTDIR/../rust/target/release/rhg" "$@"
   > else
   >   echo "skipped: Cannot find rhg. Try to run cargo build in rust/rhg."
   >   exit 80
@@ -74,19 +74,131 @@
   $ rm -rf repository
   $ hg init repository
   $ cd repository
-  $ for i in 1 2 3; do
-  >   echo $i >> file$i
-  >   hg add file$i
-  >   hg commit -m "commit $i" -q
+  $ for i in 1 2 3 4 5 6; do
+  >   echo $i >> file-$i
+  >   hg add file-$i
+  >   hg commit -m "Commit $i" -q
   > done
   $ rhg debugdata -c 2
-  e36fa63d37a576b27a69057598351db6ee5746bd
+  8d0267cb034247ebfa5ee58ce59e22e57a492297
+  test
+  0 0
+  file-3
+  
+  Commit 3 (no-eol)
+  $ rhg debugdata -m 2
+  file-1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
+  file-2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc)
+  file-3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc)
+
+Debugging with a full node id
+  $ rhg debugdata -c `hg log -r 0 -T '{node}'`
+  d1d1c679d3053e8926061b6f45ca52009f011e3f
   test
   0 0
-  file3
+  file-1
   
-  commit 3 (no-eol)
-  $ rhg debugdata -m 2
-  file1\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
-  file2\x005d9299349fc01ddd25d0070d149b124d8f10411e (esc)
-  file3\x002661d26c649684b482d10f91960cc3db683c38b4 (esc)
+  Commit 1 (no-eol)
+
+Specifying revisions by changeset ID
+  $ hg log -T '{node}\n'
+  c6ad58c44207b6ff8a4fbbca7045a5edaa7e908b
+  d654274993d0149eecc3cc03214f598320211900
+  f646af7e96481d3a5470b695cf30ad8e3ab6c575
+  cf8b83f14ead62b374b6e91a0e9303b85dfd9ed7
+  91c6f6e73e39318534dc415ea4e8a09c99cd74d6
+  6ae9681c6d30389694d8701faf24b583cf3ccafe
+  $ rhg files -r cf8b83
+  file-1
+  file-2
+  file-3
+  $ rhg cat -r cf8b83 file-2
+  2
+  $ rhg cat -r c file-2
+  abort: ambiguous revision identifier c
+  [255]
+  $ rhg cat -r d file-2
+  2
+
+Cat files
+  $ cd $TESTTMP
+  $ rm -rf repository
+  $ hg init repository
+  $ cd repository
+  $ echo "original content" > original
+  $ hg add original
+  $ hg commit -m "add original" original
+  $ rhg cat -r 0 original
+  original content
+Cat of a copied file should not display copy metadata
+  $ hg copy original copy_of_original
+  $ hg commit -m "add copy of original"
+  $ rhg cat -r 1 copy_of_original
+  original content
+
+Requirements
+  $ rhg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+
+  $ echo indoor-pool >> .hg/requires
+  $ rhg files
+  [252]
+
+  $ rhg cat -r 1 copy_of_original
+  [252]
+
+  $ rhg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+  indoor-pool
+
+  $ echo -e '\xFF' >> .hg/requires
+  $ rhg debugrequirements
+  abort: .hg/requires is corrupted
+  [255]
+
+Persistent nodemap
+  $ cd $TESTTMP
+  $ rm -rf repository
+  $ hg init repository
+  $ cd repository
+  $ rhg debugrequirements | grep nodemap
+  [1]
+  $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
+  $ hg id -r tip
+  c3ae8dec9fad tip
+  $ ls .hg/store/00changelog*
+  .hg/store/00changelog.d
+  .hg/store/00changelog.i
+  $ rhg files -r c3ae8dec9fad
+  of
+
+  $ cd $TESTTMP
+  $ rm -rf repository
+  $ hg --config format.use-persistent-nodemap=True init repository
+  $ cd repository
+  $ rhg debugrequirements | grep nodemap
+  persistent-nodemap
+  $ hg debugbuilddag .+5000 --overwritten-file --config "storage.revlog.nodemap.mode=warn"
+  $ hg id -r tip
+  c3ae8dec9fad tip
+  $ ls .hg/store/00changelog*
+  .hg/store/00changelog-*.nd (glob)
+  .hg/store/00changelog.d
+  .hg/store/00changelog.i
+  .hg/store/00changelog.n
+
+Specifying revisions by changeset ID
+  $ rhg files -r c3ae8dec9fad
+  of
+  $ rhg cat -r c3ae8dec9fad of
+  r5000
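
The rhg test now looks for a release build, commits a few more files, and exercises additional read-only subcommands (debugdata, files, cat, debugrequirements), falling back with exit status 252 when the repository carries a requirement rhg does not support. A rough sketch of building and invoking it, assuming the in-tree cargo layout implied by the paths above; the file name in the cat call is hypothetical:

  # Build the Rust client in release mode and point at the resulting binary.
  (cd rust/rhg && cargo build --release)
  RHG=rust/target/release/rhg

  # Basic read-only operations inside an existing repository.
  "$RHG" files -r 0
  "$RHG" cat -r 0 some-tracked-file   # hypothetical file name
  "$RHG" debugrequirements
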
--- a/tests/test-run-tests.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-run-tests.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1412,7 +1412,7 @@
   > This should print the start of check-code. If this passes but the
   > previous check failed, that means we found a copy of check-code at whatever
   > RUNTESTSDIR ended up containing, even though it doesn't match TESTDIR.
-  >   $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@'
+  >   $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python3@#!USRBINENVPY@'
   >   #!USRBINENVPY
   >   #
   >   # check-code - a style and portability checker for Mercurial
@@ -1956,9 +1956,12 @@
   $ cat << EOF >> test-config-opt.t
   >   $ hg init test-config-opt
   >   $ hg -R test-config-opt purge
+  >   $ echo "HGTESTEXTRAEXTENSIONS: \$HGTESTEXTRAEXTENSIONS"
+  >   HGTESTEXTRAEXTENSIONS: purge
   > EOF
 
-  $ rt --extra-config-opt extensions.purge= test-config-opt.t
+  $ rt --extra-config-opt extensions.purge= \
+  >    --extra-config-opt not.an.extension=True test-config-opt.t
   running 1 tests using 1 parallel processes 
   .
   # Ran 1 tests, 0 skipped, 0 failed.
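
The run-tests change exercised here passes --extra-config-opt more than once and lets the generated test read the enabled extensions back through HGTESTEXTRAEXTENSIONS. A sketch of the equivalent direct invocation; the rt helper used above is assumed to wrap run-tests.py roughly like this:

  # Run a single test with extra per-test configuration options.
  python3 tests/run-tests.py \
    --extra-config-opt extensions.purge= \
    --extra-config-opt not.an.extension=True \
    test-config-opt.t
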
--- a/tests/test-rust-ancestor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rust-ancestor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -2,10 +2,8 @@
 import sys
 import unittest
 
-from mercurial import (
-    error,
-    node,
-)
+from mercurial.node import wdirrev
+from mercurial import error
 
 from mercurial.testing import revlog as revlogtesting
 
@@ -150,7 +148,7 @@
         # WdirUnsupported directly
         idx = self.parseindex()
         with self.assertRaises(error.WdirUnsupported):
-            list(AncestorsIterator(idx, [node.wdirrev], -1, False))
+            list(AncestorsIterator(idx, [wdirrev], -1, False))
 
     def testheadrevs(self):
         idx = self.parseindex()
--- a/tests/test-rust-revlog.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-rust-revlog.py	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,8 @@
 
 
 @unittest.skipIf(
-    rustext is None, "rustext module revlog relies on is not available",
+    rustext is None,
+    "rustext module revlog relies on is not available",
 )
 class RustRevlogIndexTest(revlogtesting.RevlogBasedTestBase):
     def test_heads(self):
--- a/tests/test-serve.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-serve.t	Tue Jan 19 21:48:43 2021 +0530
@@ -90,10 +90,10 @@
 
   $ hg id http://localhost:$HGPORT/some/dir7
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
   $ hg id http://localhost:$HGPORT/some
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
   $ cat access.log errors.log
   $LOCALIP - - [$LOGDATE$] "GET /some/dir7?cmd=capabilities HTTP/1.1" 404 - (glob)
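
Remote failures get a dedicated status as well: the 404 responses above now abort with 100 instead of 255. A one-line check against a running hgweb instance, using the same URL as the test:

  # Identify a non-existent repository path on a running "hg serve".
  hg id "http://localhost:$HGPORT/some/dir7"
  echo "status: $?"   # 100 per the expected output above
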
--- a/tests/test-setdiscovery.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-setdiscovery.t	Tue Jan 19 21:48:43 2021 +0530
@@ -44,6 +44,7 @@
   searching for changes
   unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          2
       also local heads:          2
@@ -57,7 +58,16 @@
       unknown:                   2
   local changesets:              7
     common:                      7
+      heads:                     2
+      roots:                     1
     missing:                     0
+      heads:                     0
+      roots:                     0
+    first undecided set:         3
+      heads:                     1
+      roots:                     1
+      common:                    3
+      missing:                   0
   common heads: 01241442b3c2 b5714e113bc0
   
   % -- a -> b set
@@ -66,6 +76,7 @@
   searching for changes
   all local changesets known remotely
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          2
       also local heads:          2
@@ -79,7 +90,16 @@
       unknown:                   2
   local changesets:              7
     common:                      7
+      heads:                     2
+      roots:                     1
     missing:                     0
+      heads:                     0
+      roots:                     0
+    first undecided set:         3
+      heads:                     1
+      roots:                     1
+      common:                    3
+      missing:                   0
   common heads: 01241442b3c2 b5714e113bc0
   
   % -- a -> b set (tip only)
@@ -88,6 +108,7 @@
   searching for changes
   all local changesets known remotely
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -101,7 +122,16 @@
       unknown:                   3
   local changesets:              7
     common:                      6
+      heads:                     1
+      roots:                     1
     missing:                     1
+      heads:                     1
+      roots:                     1
+    first undecided set:         6
+      heads:                     2
+      roots:                     1
+      common:                    5
+      missing:                   1
   common heads: b5714e113bc0
   
   % -- b -> a tree
@@ -109,6 +139,7 @@
   searching for changes
   unpruned common: 01241442b3c2 b5714e113bc0
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          2
       also local heads:          1
@@ -122,7 +153,16 @@
       unknown:                   0
   local changesets:             15
     common:                      7
+      heads:                     2
+      roots:                     1
     missing:                     8
+      heads:                     2
+      roots:                     2
+    first undecided set:         8
+      heads:                     2
+      roots:                     2
+      common:                    0
+      missing:                   8
   common heads: 01241442b3c2 b5714e113bc0
   
   % -- b -> a set
@@ -131,6 +171,7 @@
   searching for changes
   all remote heads known locally
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          2
       also local heads:          1
@@ -144,7 +185,16 @@
       unknown:                   0
   local changesets:             15
     common:                      7
+      heads:                     2
+      roots:                     1
     missing:                     8
+      heads:                     2
+      roots:                     2
+    first undecided set:         8
+      heads:                     2
+      roots:                     2
+      common:                    0
+      missing:                   8
   common heads: 01241442b3c2 b5714e113bc0
   
   % -- b -> a set (tip only)
@@ -153,6 +203,7 @@
   searching for changes
   all remote heads known locally
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          2
       also local heads:          1
@@ -166,7 +217,16 @@
       unknown:                   0
   local changesets:             15
     common:                      7
+      heads:                     2
+      roots:                     1
     missing:                     8
+      heads:                     2
+      roots:                     2
+    first undecided set:         8
+      heads:                     2
+      roots:                     2
+      common:                    0
+      missing:                   8
   common heads: 01241442b3c2 b5714e113bc0
 
 
@@ -181,6 +241,7 @@
   searching for changes
   unpruned common: bebd167eb94d
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -194,7 +255,16 @@
       unknown:                   1
   local changesets:             35
     common:                      5
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        34
+      heads:                     2
+      roots:                     1
+      common:                    4
+      missing:                  30
   common heads: bebd167eb94d
   
   % -- a -> b set
@@ -206,6 +276,7 @@
   query 2; still undecided: 29, sample size is: 29
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -219,7 +290,16 @@
       unknown:                   1
   local changesets:             35
     common:                      5
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        34
+      heads:                     2
+      roots:                     1
+      common:                    4
+      missing:                  30
   common heads: bebd167eb94d
   
   % -- a -> b set (tip only)
@@ -231,6 +311,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -244,7 +325,16 @@
       unknown:                   1
   local changesets:             35
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    33
+      heads:                     2
+      roots:                     2
+    first undecided set:        35
+      heads:                     2
+      roots:                     1
+      common:                    2
+      missing:                  33
   common heads: 66f7d451a68b
   
   % -- b -> a tree
@@ -252,6 +342,7 @@
   searching for changes
   unpruned common: 66f7d451a68b bebd167eb94d
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -265,7 +356,16 @@
       unknown:                   1
   local changesets:              8
     common:                      5
+      heads:                     1
+      roots:                     1
     missing:                     3
+      heads:                     1
+      roots:                     1
+    first undecided set:         3
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                   3
   common heads: bebd167eb94d
   
   % -- b -> a set
@@ -277,6 +377,7 @@
   query 2; still undecided: 2, sample size is: 2
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -290,7 +391,16 @@
       unknown:                   1
   local changesets:              8
     common:                      5
+      heads:                     1
+      roots:                     1
     missing:                     3
+      heads:                     1
+      roots:                     1
+    first undecided set:         3
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                   3
   common heads: bebd167eb94d
   
   % -- b -> a set (tip only)
@@ -302,6 +412,7 @@
   query 2; still undecided: 2, sample size is: 2
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -315,7 +426,16 @@
       unknown:                   1
   local changesets:              8
     common:                      5
+      heads:                     1
+      roots:                     1
     missing:                     3
+      heads:                     1
+      roots:                     1
+    first undecided set:         3
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                   3
   common heads: bebd167eb94d
 
 Both sides many new with stub:
@@ -329,6 +449,7 @@
   searching for changes
   unpruned common: 2dc09a01254d
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -342,7 +463,16 @@
       unknown:                   1
   local changesets:             34
     common:                      4
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        33
+      heads:                     2
+      roots:                     1
+      common:                    3
+      missing:                  30
   common heads: 2dc09a01254d
   
   % -- a -> b set
@@ -354,6 +484,7 @@
   query 2; still undecided: 29, sample size is: 29
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -367,7 +498,16 @@
       unknown:                   1
   local changesets:             34
     common:                      4
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        33
+      heads:                     2
+      roots:                     1
+      common:                    3
+      missing:                  30
   common heads: 2dc09a01254d
   
   % -- a -> b set (tip only)
@@ -379,6 +519,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -392,7 +533,16 @@
       unknown:                   1
   local changesets:             34
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    32
+      heads:                     2
+      roots:                     2
+    first undecided set:        34
+      heads:                     2
+      roots:                     1
+      common:                    2
+      missing:                  32
   common heads: 66f7d451a68b
   
   % -- b -> a tree
@@ -400,6 +550,7 @@
   searching for changes
   unpruned common: 2dc09a01254d 66f7d451a68b
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -413,7 +564,16 @@
       unknown:                   1
   local changesets:             34
     common:                      4
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        30
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                  30
   common heads: 2dc09a01254d
   
   % -- b -> a set
@@ -425,6 +585,7 @@
   query 2; still undecided: 29, sample size is: 29
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -438,7 +599,16 @@
       unknown:                   1
   local changesets:             34
     common:                      4
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        30
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                  30
   common heads: 2dc09a01254d
   
   % -- b -> a set (tip only)
@@ -450,6 +620,7 @@
   query 2; still undecided: 29, sample size is: 29
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -463,7 +634,16 @@
       unknown:                   1
   local changesets:             34
     common:                      4
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        30
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                  30
   common heads: 2dc09a01254d
 
 
@@ -478,6 +658,7 @@
   searching for changes
   unpruned common: 66f7d451a68b
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -491,7 +672,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- a -> b set
@@ -503,6 +693,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -516,7 +707,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- a -> b set (tip only)
@@ -528,6 +728,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -541,7 +742,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- b -> a tree
@@ -549,6 +759,7 @@
   searching for changes
   unpruned common: 66f7d451a68b
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -562,7 +773,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- b -> a set
@@ -574,6 +794,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -587,7 +808,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- b -> a set (tip only)
@@ -599,6 +829,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -612,7 +843,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
 
 
@@ -627,6 +867,7 @@
   searching for changes
   unpruned common: 66f7d451a68b
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -640,7 +881,16 @@
       unknown:                   1
   local changesets:             52
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:        52
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  50
   common heads: 66f7d451a68b
   
   % -- a -> b set
@@ -652,6 +902,7 @@
   query 2; still undecided: 51, sample size is: 51
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -665,7 +916,16 @@
       unknown:                   1
   local changesets:             52
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:        52
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  50
   common heads: 66f7d451a68b
   
   % -- a -> b set (tip only)
@@ -677,6 +937,7 @@
   query 2; still undecided: 51, sample size is: 51
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -690,7 +951,16 @@
       unknown:                   1
   local changesets:             52
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:        52
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  50
   common heads: 66f7d451a68b
   
   % -- b -> a tree
@@ -698,6 +968,7 @@
   searching for changes
   unpruned common: 66f7d451a68b
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -711,7 +982,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- b -> a set
@@ -723,6 +1003,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -736,7 +1017,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
   
   % -- b -> a set (tip only)
@@ -748,6 +1038,7 @@
   query 2; still undecided: 31, sample size is: 31
   2 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   2
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -761,7 +1052,16 @@
       unknown:                   1
   local changesets:             32
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:        32
+      heads:                     1
+      roots:                     1
+      common:                    2
+      missing:                  30
   common heads: 66f7d451a68b
 
 
@@ -776,6 +1076,7 @@
   searching for changes
   unpruned common: 7ead0cba2838
   elapsed time:  * seconds (glob)
+  round-trips:                   4
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -789,7 +1090,16 @@
       unknown:                   1
   local changesets:           1050
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:      1050
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  50
   common heads: 7ead0cba2838
   
   % -- a -> b set
@@ -804,6 +1114,7 @@
   query 3; still undecided: 31, sample size is: 31
   3 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -817,7 +1128,16 @@
       unknown:                   1
   local changesets:           1050
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:      1050
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  50
   common heads: 7ead0cba2838
   
   % -- a -> b set (tip only)
@@ -832,6 +1152,7 @@
   query 3; still undecided: 31, sample size is: 31
   3 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -845,7 +1166,16 @@
       unknown:                   1
   local changesets:           1050
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    50
+      heads:                     1
+      roots:                     1
+    first undecided set:      1050
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  50
   common heads: 7ead0cba2838
   
   % -- b -> a tree
@@ -853,6 +1183,7 @@
   searching for changes
   unpruned common: 7ead0cba2838
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -866,7 +1197,16 @@
       unknown:                   1
   local changesets:           1030
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:      1030
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  30
   common heads: 7ead0cba2838
   
   % -- b -> a set
@@ -881,6 +1221,7 @@
   query 3; still undecided: 15, sample size is: 15
   3 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -894,7 +1235,16 @@
       unknown:                   1
   local changesets:           1030
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:      1030
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  30
   common heads: 7ead0cba2838
   
   % -- b -> a set (tip only)
@@ -909,6 +1259,7 @@
   query 3; still undecided: 15, sample size is: 15
   3 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -922,7 +1273,16 @@
       unknown:                   1
   local changesets:           1030
     common:                   1000
+      heads:                     1
+      roots:                     1
     missing:                    30
+      heads:                     1
+      roots:                     1
+    first undecided set:      1030
+      heads:                     1
+      roots:                     1
+      common:                 1000
+      missing:                  30
   common heads: 7ead0cba2838
 
 
@@ -989,6 +1349,44 @@
   query 6; still undecided: 63, sample size is: 63
   6 total queries in *.????s (glob)
   elapsed time:  * seconds (glob)
+  round-trips:                   6
+  heads summary:
+    total common heads:          1
+      also local heads:          0
+      also remote heads:         0
+      both:                      0
+    local heads:               260
+      common:                    0
+      missing:                 260
+    remote heads:                1
+      common:                    0
+      unknown:                   1
+  local changesets:           1340
+    common:                    300
+      heads:                     1
+      roots:                     1
+    missing:                  1040
+      heads:                   260
+      roots:                   260
+    first undecided set:      1340
+      heads:                   260
+      roots:                     1
+      common:                  300
+      missing:                1040
+  common heads: 3ee37d65064a
+  $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
+  comparing with b
+  query 1; heads
+  searching for changes
+  taking quick initial sample
+  searching: 2 queries
+  query 2; still undecided: 303, sample size is: 9
+  sampling from both directions
+  searching: 3 queries
+  query 3; still undecided: 3, sample size is: 3
+  3 total queries in *.????s (glob)
+  elapsed time:  * seconds (glob)
+  round-trips:                   3
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -1002,20 +1400,34 @@
       unknown:                   1
   local changesets:           1340
     common:                    300
+      heads:                     1
+      roots:                     1
     missing:                  1040
+      heads:                   260
+      roots:                   260
+    first undecided set:      1340
+      heads:                   260
+      roots:                     1
+      common:                  300
+      missing:                1040
   common heads: 3ee37d65064a
-  $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
+
+  $ hg -R a debugdiscovery b --debug --config devel.discovery.exchange-heads=false --config devel.discovery.randomize=false --config devel.discovery.grow-sample.rate=1.01
   comparing with b
-  query 1; heads
   searching for changes
-  taking quick initial sample
-  searching: 2 queries
-  query 2; still undecided: 303, sample size is: 9
+  sampling from both directions
+  query 1; still undecided: 1340, sample size is: 200
+  sampling from both directions
+  query 2; still undecided: 795, sample size is: 202
+  sampling from both directions
+  query 3; still undecided: 525, sample size is: 204
   sampling from both directions
-  searching: 3 queries
-  query 3; still undecided: 3, sample size is: 3
-  3 total queries in *.????s (glob)
-  elapsed time:  * seconds (glob)
+  query 4; still undecided: 252, sample size is: 206
+  sampling from both directions
+  query 5; still undecided: 44, sample size is: 44
+  5 total queries in *s (glob)
+  elapsed time: * seconds (glob)
+  round-trips:                   5
   heads summary:
     total common heads:          1
       also local heads:          0
@@ -1029,7 +1441,16 @@
       unknown:                   1
   local changesets:           1340
     common:                    300
+      heads:                     1
+      roots:                     1
     missing:                  1040
+      heads:                   260
+      roots:                   260
+    first undecided set:      1340
+      heads:                   260
+      roots:                     1
+      common:                  300
+      missing:                1040
   common heads: 3ee37d65064a
 
 Test actual protocol when pulling one new head in addition to common heads
@@ -1134,6 +1555,7 @@
   comparing with $TESTTMP/ancestorsof/a
   searching for changes
   elapsed time:  * seconds (glob)
+  round-trips:                   1
   heads summary:
     total common heads:          1
       also local heads:          1
@@ -1147,5 +1569,14 @@
       unknown:                   0
   local changesets:              3
     common:                      2
+      heads:                     1
+      roots:                     1
     missing:                     1
+      heads:                     1
+      roots:                     1
+    first undecided set:         1
+      heads:                     1
+      roots:                     1
+      common:                    0
+      missing:                   1
   common heads: 66f7d451a68b
--- a/tests/test-share-bookmarks.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-share-bookmarks.t	Tue Jan 19 21:48:43 2021 +0530
@@ -3,7 +3,7 @@
 
 #if safe
   $ echo "[format]"         >> $HGRCPATH
-  $ echo "exp-share-safe = True" >> $HGRCPATH
+  $ echo "use-share-safe = True" >> $HGRCPATH
 #endif
 
   $ echo "[extensions]"      >> $HGRCPATH
@@ -290,4 +290,4 @@
 
   $ hg init brokenrepo --config format.bookmarks-in-store=True --config format.usestore=false
   ignoring enabled 'format.bookmarks-in-store' config beacuse it is incompatible with disabled 'format.usestore' config
-  ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
+  ignoring enabled 'format.use-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
--- a/tests/test-share-safe.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-share-safe.t	Tue Jan 19 21:48:43 2021 +0530
@@ -4,7 +4,9 @@
   > [extensions]
   > share =
   > [format]
-  > exp-share-safe = True
+  > use-share-safe = True
+  > [storage]
+  > revlog.persistent-nodemap.slow-path=allow
   > EOF
 
 prepare source repo
@@ -12,7 +14,7 @@
   $ hg init source
   $ cd source
   $ cat .hg/requires
-  exp-sharesafe
+  share-safe
   $ cat .hg/store/requires
   dotencode
   fncache
@@ -22,10 +24,10 @@
   store
   $ hg debugrequirements
   dotencode
-  exp-sharesafe
   fncache
   generaldelta
   revlogv1
+  share-safe
   sparserevlog
   store
 
@@ -36,7 +38,7 @@
 
   $ HGEDITOR=cat hg config --shared
   abort: repository is not shared; can't use --shared
-  [255]
+  [10]
   $ cd ..
 
 Create a shared repo and check the requirements are shared and read correctly
@@ -45,24 +47,24 @@
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd shared1
   $ cat .hg/requires
-  exp-sharesafe
+  share-safe
   shared
 
   $ hg debugrequirements -R ../source
   dotencode
-  exp-sharesafe
   fncache
   generaldelta
   revlogv1
+  share-safe
   sparserevlog
   store
 
   $ hg debugrequirements
   dotencode
-  exp-sharesafe
   fncache
   generaldelta
   revlogv1
+  share-safe
   shared
   sparserevlog
   store
@@ -81,6 +83,36 @@
   $ hg showconfig ui.curses
   true
 
+Test that extensions of the source repository are also loaded
+
+  $ hg debugextensions
+  share
+  $ hg extdiff -p echo
+  hg: unknown command 'extdiff'
+  'extdiff' is provided by the following extension:
+  
+      extdiff       command to allow external programs to compare revisions
+  
+  (use 'hg help extensions' for information on enabling extensions)
+  [10]
+
+  $ echo "[extensions]" >> ../source/.hg/hgrc
+  $ echo "extdiff=" >> ../source/.hg/hgrc
+
+  $ hg debugextensions -R ../source
+  extdiff
+  share
+  $ hg extdiff -R ../source -p echo
+
+BROKEN: the command below will not work if the config of the shared source is not
+loaded on dispatch, but debugextensions says that the extension is loaded
+  $ hg debugextensions
+  extdiff
+  share
+
+  $ hg extdiff -p echo
+
 However, local .hg/hgrc should override the config set by share source
 
   $ echo "[ui]" >> .hg/hgrc
@@ -92,6 +124,8 @@
   $ HGEDITOR=cat hg config --shared
   [ui]
   curses=true
+  [extensions]
+  extdiff=
 
   $ HGEDITOR=cat hg config --local
   [ui]
@@ -180,9 +214,14 @@
   upgrade will perform the following actions:
   
   requirements
-     preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store
+     preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store
      added: revlog-compression-zstd
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg log -r .
   changeset:   1:5f6d8a4bf34a
   user:        test
@@ -201,10 +240,15 @@
   upgrade will perform the following actions:
   
   requirements
-     preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlogv1, sparserevlog, store (no-zstd !)
-     preserved: dotencode, exp-sharesafe, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !)
+     preserved: dotencode, fncache, generaldelta, revlogv1, share-safe, sparserevlog, store (no-zstd !)
+     preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, share-safe, sparserevlog, store (zstd !)
      added: persistent-nodemap
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg log -r .
   changeset:   1:5f6d8a4bf34a
   user:        test
@@ -221,6 +265,25 @@
   date:        Thu Jan 01 00:00:00 1970 +0000
   summary:     added c
   
+
+Testing that hgrc-not-shared is loaded for the source repo and not for the share
+
+  $ cd ../source
+  $ touch .hg/hgrc-not-shared
+  $ echo "[ui]" >> .hg/hgrc-not-shared
+  $ echo "traceback=true" >> .hg/hgrc-not-shared
+
+  $ hg showconfig ui.traceback
+  true
+
+  $ HGEDITOR=cat hg config --non-shared
+  [ui]
+  traceback=true
+
+  $ cd ../shared1
+  $ hg showconfig ui.traceback
+  [1]
+
 Unsharing works
 
   $ hg unshare
@@ -241,3 +304,271 @@
   [255]
   $ hg showconfig ui.curses -R ../shared1
   false
+
+  $ cd ../
+
+Test that upgrading using debugupgraderepo works
+=================================================
+
+  $ hg init non-share-safe --config format.use-share-safe=false
+  $ cd non-share-safe
+  $ hg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+  $ echo foo > foo
+  $ hg ci -Aqm 'added foo'
+  $ echo bar > bar
+  $ hg ci -Aqm 'added bar'
+
+Create a share before upgrading
+
+  $ cd ..
+  $ hg share non-share-safe nss-share
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ hg debugrequirements -R nss-share
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  shared
+  sparserevlog
+  store
+  $ cd non-share-safe
+
+Upgrade
+
+  $ hg debugupgraderepo -q
+  requirements
+     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
+     added: share-safe
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
+  $ hg debugupgraderepo --run -q
+  upgrade will perform the following actions:
+  
+  requirements
+     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
+     added: share-safe
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
+  repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode.
+
+  $ hg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  share-safe
+  sparserevlog
+  store
+
+  $ cat .hg/requires
+  share-safe
+
+  $ cat .hg/store/requires
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+
+  $ hg log -GT "{node}: {desc}\n"
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+Make sure existing shares don't work with the default config
+
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share
+  abort: version mismatch: source uses share-safe functionality while the current share does not
+  [255]
+
+
+Create a safe share from the upgraded one
+
+  $ cd ..
+  $ hg share non-share-safe ss-share
+  updating working directory
+  2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd ss-share
+  $ hg log -GT "{node}: {desc}\n"
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+  $ cd ../non-share-safe
+
+Test that downgrading works too
+
+  $ cat >> $HGRCPATH <<EOF
+  > [extensions]
+  > share =
+  > [format]
+  > use-share-safe = False
+  > EOF
+
+  $ hg debugupgraderepo -q
+  requirements
+     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
+     removed: share-safe
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
+  $ hg debugupgraderepo -q --run
+  upgrade will perform the following actions:
+  
+  requirements
+     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
+     removed: share-safe
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
+  repository downgraded to not use share safe mode, existing shares will not work and needs to be reshared.
+
+  $ hg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+
+  $ cat .hg/requires
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  sparserevlog
+  store
+
+  $ test -f .hg/store/requires
+  [1]
+
+  $ hg log -GT "{node}: {desc}\n"
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+Make sure existing shares still work
+
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+  $ hg log -GT "{node}: {desc}\n" -R ../ss-share
+  abort: share source does not support exp-sharesafe requirement
+  [255]
+
+Testing automatic downgrade of shares when config is set
+
+  $ touch ../ss-share/.hg/wlock
+  $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort
+  abort: failed to downgrade share, got error: Lock held
+  [255]
+  $ rm ../ss-share/.hg/wlock
+
+  $ hg log -GT "{node}: {desc}\n" -R ../ss-share --config share.safe-mismatch.source-not-safe=downgrade-abort
+  repository downgraded to not use share-safe mode
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+  $ hg log -GT "{node}: {desc}\n" -R ../ss-share
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+
+Testing automatic upgrade of shares when config is set
+
+  $ hg debugupgraderepo -q --run --config format.use-share-safe=True
+  upgrade will perform the following actions:
+  
+  requirements
+     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
+     added: share-safe
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
+  repository upgraded to share safe mode, existing shares will still work in old non-safe mode. Re-share existing shares to use them in safe mode New shares will be created in safe mode.
+  $ hg debugrequirements
+  dotencode
+  fncache
+  generaldelta
+  revlogv1
+  share-safe
+  sparserevlog
+  store
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share
+  abort: version mismatch: source uses share-safe functionality while the current share does not
+  [255]
+
+Check that if the lock is taken, upgrade fails but read operations are successful
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgra
+  abort: share-safe mismatch with source.
+  Unrecognized value 'upgra' of `share.safe-mismatch.source-safe` set.
+  (run `hg help config.share.safe-mismatch.source-safe`)
+  [255]
+  $ touch ../nss-share/.hg/wlock
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow
+  failed to upgrade share, got error: Lock held
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-allow --config share.safe-mismatch.source-safe.warn=False
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort
+  abort: failed to upgrade share, got error: Lock held
+  [255]
+
+  $ rm ../nss-share/.hg/wlock
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share --config share.safe-mismatch.source-safe=upgrade-abort
+  repository upgraded to use share-safe mode
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
+
+Test that unshare works
+
+  $ hg unshare -R ../nss-share
+  $ hg log -GT "{node}: {desc}\n" -R ../nss-share
+  @  f63db81e6dde1d9c78814167f77fb1fb49283f4f: added bar
+  |
+  o  f3ba8b99bb6f897c87bbc1c07b75c6ddf43a4f77: added foo
+  
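
Editor's note (not part of the changeset): the test-share-safe.t hunks above exercise
the share-safe options end to end. As a reading aid, here is a minimal sketch in the
tests' own style that collects the option names used above; the chosen values are
illustrative, not mandated by the changeset:

  $ cat >> $HGRCPATH <<EOF
  > [format]
  > # new repos and 'hg debugupgraderepo' gain or drop the share-safe requirement
  > use-share-safe = yes
  > [share]
  > # how an existing share reacts when its source disagrees about share-safe
  > safe-mismatch.source-safe = upgrade-allow
  > safe-mismatch.source-safe.warn = yes
  > safe-mismatch.source-not-safe = downgrade-abort
  > EOF

With settings like these, a mismatching share is upgraded or downgraded on access
instead of aborting outright; the -allow/-abort suffixes control what happens when
that conversion itself fails, for example when the lock is held, as tested above.
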
--- a/tests/test-share.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-share.t	Tue Jan 19 21:48:43 2021 +0530
@@ -2,7 +2,7 @@
 
 #if safe
   $ echo "[format]"         >> $HGRCPATH
-  $ echo "exp-share-safe = True" >> $HGRCPATH
+  $ echo "use-share-safe = True" >> $HGRCPATH
 #endif
 
   $ echo "[extensions]"      >> $HGRCPATH
@@ -56,6 +56,25 @@
   rbc-revs-v1
   tags2-visible
 
+Cloning a shared repo should pick up the full cache dir on the other hand.
+
+  $ hg clone . ../repo2-clone
+  updating to branch default
+  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ ls -1 ../repo2-clone/.hg/cache
+  branch2-base
+  branch2-immutable
+  branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
+  hgtagsfnodes1
+  rbc-names-v1
+  rbc-revs-v1
+  tags2
+  tags2-served
+  tags2-visible
+
 Some sed versions append a newline, some don't, and some just fail
 
   $ cat .hg/sharedpath; echo
@@ -211,7 +230,7 @@
   $TESTTMP/thisdir/rel
   $ mv thisdir thatdir
   $ hg -R thatdir/abs root
-  abort: .hg/sharedpath points to nonexistent directory $TESTTMP/thisdir/orig/.hg!
+  abort: .hg/sharedpath points to nonexistent directory $TESTTMP/thisdir/orig/.hg
   [255]
   $ hg -R thatdir/rel root
   $TESTTMP/thatdir/rel
@@ -247,7 +266,7 @@
 We cannot open the repo with the unknown requirement
 
   $ hg -R sharenewrequires status
-  abort: repository requires features unknown to this Mercurial: missing-requirement!
+  abort: repository requires features unknown to this Mercurial: missing-requirement
   (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
   [255]
 
@@ -262,7 +281,7 @@
 Test sharing a repository which was created with the store requirement disabled
 
   $ hg init nostore --config format.usestore=false
-  ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
+  ignoring enabled 'format.use-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
   $ hg share nostore sharednostore
   abort: cannot create shared repository as source was created with 'format.usestore' config disabled
   [255]
--- a/tests/test-shelve.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-shelve.t	Tue Jan 19 21:48:43 2021 +0530
@@ -209,7 +209,7 @@
 
   $ hg shelve --list --addremove
   abort: options '--list' and '--addremove' may not be used together
-  [255]
+  [10]
 
 delete our older shelved change
 
@@ -278,10 +278,10 @@
 
   $ hg unshelve
   abort: no shelved changes to apply!
-  [255]
+  [20]
   $ hg unshelve foo
   abort: shelved change 'foo' not found
-  [255]
+  [10]
 
 named shelves, specific filenames, and "commit messages" should all work
 (this tests also that editor is invoked, if '--edit' is specified)
@@ -366,7 +366,7 @@
   merging a/a
   warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ hg status -v
   M a/a
   M b.rename/b
@@ -442,7 +442,7 @@
   $ hg shelve
   abort: unshelve already in progress
   (use 'hg unshelve --continue' or 'hg unshelve --abort')
-  [255]
+  [20]
 
 abort the unshelve and be happy
 
@@ -474,7 +474,7 @@
 
   $ hg unshelve -c
   abort: no unshelve in progress
-  [255]
+  [20]
   $ hg status
   A foo/foo
   ? a/a.orig
@@ -484,7 +484,7 @@
   $ hg unshelve -q
   warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 
 attempt to continue
 
@@ -501,12 +501,12 @@
   $ hg commit -m 'commit while unshelve in progress'
   abort: unshelve already in progress
   (use 'hg unshelve --continue' or 'hg unshelve --abort')
-  [255]
+  [20]
 
   $ hg graft --continue
   abort: no graft in progress
   (continue: hg unshelve --continue)
-  [255]
+  [20]
   $ hg unshelve -c
   unshelve of 'default' complete
 
@@ -648,13 +648,13 @@
 
   $ hg shelve --cleanup --delete
   abort: options '--cleanup' and '--delete' may not be used together
-  [255]
+  [10]
   $ hg shelve --cleanup --patch
   abort: options '--cleanup' and '--patch' may not be used together
-  [255]
+  [10]
   $ hg shelve --cleanup --message MESSAGE
   abort: options '--cleanup' and '--message' may not be used together
-  [255]
+  [10]
 
 test bookmarks
 
@@ -705,7 +705,7 @@
   merging a/a
   warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ hg bookmark
      test                      (4|13):33f7f61e6c5e (re)
 
@@ -737,10 +737,10 @@
 
   $ hg shelve --delete --stat
   abort: options '--delete' and '--stat' may not be used together
-  [255]
+  [10]
   $ hg shelve --delete --name NAME
   abort: options '--delete' and '--name' may not be used together
-  [255]
+  [10]
 
 Test interactive shelve
   $ cat <<EOF >> $HGRCPATH
@@ -767,7 +767,7 @@
   a
   $ hg shelve --interactive --config ui.interactive=false
   abort: running non-interactively
-  [255]
+  [10]
   $ hg shelve --interactive << EOF
   > y
   > y
@@ -810,6 +810,8 @@
   ? foo/foo
   $ hg bookmark
    \* test                      (4|13):33f7f61e6c5e (re)
+there shouldn't be a merge state
+  $ hg resolve -l
   $ hg unshelve
   unshelving change 'test'
   temporarily committing pending changes (restore with 'hg unshelve --abort')
@@ -954,13 +956,13 @@
   unshelving change 'default'
   $ hg shelve --keep --list
   abort: options '--list' and '--keep' may not be used together
-  [255]
+  [10]
   $ hg shelve --keep --patch
   abort: options '--patch' and '--keep' may not be used together
-  [255]
+  [10]
   $ hg shelve --keep --delete
   abort: options '--delete' and '--keep' may not be used together
-  [255]
+  [10]
   $ hg shelve --keep
   shelved as default
   $ hg diff
@@ -977,7 +979,7 @@
   default         (*s ago)    changes to: create conflict (glob)
   $ hg shelve --delete doesnotexist
   abort: shelved change 'doesnotexist' not found
-  [255]
+  [10]
   $ hg shelve --delete default
 
   $ cd ..
@@ -1184,7 +1186,7 @@
   $ hg unshelve
   abort: outstanding uncommitted merge
   (use 'hg commit' or 'hg merge --abort')
-  [255]
+  [20]
 
   $ cd ..
 
@@ -1387,7 +1389,7 @@
   warning: conflicts while merging bar1! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging bar2! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 
   $ cat > bar1 <<EOF
   > A
@@ -1406,7 +1408,7 @@
 -- using --continue with --interactive should throw an error
   $ hg unshelve --continue -i
   abort: cannot use both continue and interactive
-  [255]
+  [10]
 
   $ cat bar1
   A
@@ -1483,7 +1485,7 @@
 
   $ hg unshelve --continue
   abort: no unshelve in progress
-  [255]
+  [20]
 
   $ hg shelve --list
   default-01      (*)* changes to: add A to bars (glob)
@@ -1509,4 +1511,27 @@
 -- test for --interactive --keep
   $ hg unshelve -i --keep
   abort: --keep on --interactive is not yet supported
-  [255]
+  [10]
+
+  $ hg update -q --clean .
+
+Test that we can successfully shelve and unshelve a file with a trailing space
+in the filename. Such filenames are supposedly unsupported on Windows, so we
+wrap it in the no-windows check. Also test `hg patch` of the .patch file
+produced by `hg shelve`.
+#if no-windows
+  $ echo hi > 'my filename '
+  $ hg add 'my filename '
+  warning: filename ends with ' ', which is not allowed on Windows: 'my filename '
+  $ hg shelve
+  shelved as default-01
+  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+  $ cp .hg/shelved/default-01.patch test_patch.patch
+  $ hg unshelve
+  unshelving change 'default-01'
+  $ cat 'my filename '
+  hi
+  $ hg update -q --clean .
+  $ hg patch -p1 test_patch.patch
+  applying test_patch.patch
+#endif
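
Editor's note (not part of the changeset): the exit-code churn in test-shelve.t above
reflects a move to more fine-grained exit codes; in these hunks, invalid option
combinations exit with 10, missing or conflicting unshelve state with 20, and
unresolved merge conflicts with 240. A hedged sketch of a wrapper reacting only to the
codes observed above:

  $ hg unshelve; rc=$?
  $ case $rc in
  >   0)   echo "unshelved cleanly" ;;
  >   10)  echo "invalid arguments" ;;
  >   20)  echo "state problem (nothing shelved, or an unshelve already in progress)" ;;
  >   240) echo "unresolved conflicts: fix them, then run 'hg unshelve --continue'" ;;
  >   *)   echo "failed with status $rc" ;;
  > esac
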
--- a/tests/test-shelve2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-shelve2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -54,7 +54,7 @@
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
   $ hg rebase -d 6c103be8f4e4 --config extensions.rebase=
-  rebasing 2:323bfa07f744 "xyz"( \(tip\))? (re)
+  rebasing 2:323bfa07f744( tip)? "xyz" (re)
   merging x
   saved backup bundle to \$TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-(78114325|7ae538ef)-rebase.hg (re)
   $ hg unshelve
@@ -219,7 +219,7 @@
   merging f
   warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 
 #if phasebased
   $ hg log -G --template '{rev}  {desc|firstline}  {author}  {date|isodate}'
@@ -290,7 +290,7 @@
   merging f
   warning: conflicts while merging f! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ hg st
   M f
   ? f.orig
@@ -345,7 +345,7 @@
   $ hg unshelve -q --config 'ui.origbackuppath=.hg/origbackups'
   warning: conflicts while merging root! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ ls .hg/origbackups
   root
   $ rm -rf .hg/origbackups
@@ -563,7 +563,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ echo "aaabbbccc" > a
   $ rm a.orig
   $ hg resolve --mark a
@@ -637,7 +637,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 
 Removing restore branch information from the shelvedstate file (making it look like
 it did in previous versions) and running unshelve --continue
@@ -715,7 +715,7 @@
   merging file
   warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ echo somethingsomething > .hg/shelvedstate
 
 Unshelve --continue fails with appropriate message if shelvedstate is corrupted
@@ -737,7 +737,7 @@
 #if abortflag
   $ hg unshelve --abort
   abort: no unshelve in progress
-  [255]
+  [20]
 #else
   $ hg abort
   aborting the merge, updating back to 9451eaa6eee3
@@ -745,6 +745,55 @@
 #endif
   $ cd ..
 
+Test corrupt shelves (in .hg/shelved/, not .hg/shelvedstate)
+  $ hg init corrupt-shelves
+  $ cd corrupt-shelves
+  $ mkdir .hg/shelved
+
+# A (corrupt) .patch file without a .hg file
+  $ touch .hg/shelved/junk1.patch
+  $ hg shelve -l
+  $ hg unshelve
+  abort: no shelved changes to apply!
+  [20]
+  $ hg shelve -d junk1
+  abort: shelved change 'junk1' not found
+  [10]
+  $ find .hg/shelve* | sort
+  .hg/shelved
+  .hg/shelved/junk1.patch
+
+# A .hg file without a .patch file
+  $ touch .hg/shelved/junk2.hg
+  $ hg shelve -l
+  $ hg unshelve
+  abort: no shelved changes to apply!
+  [20]
+  $ hg shelve -d junk2
+  abort: shelved change 'junk2' not found
+  [10]
+  $ find .hg/shelve* | sort
+  .hg/shelved
+  .hg/shelved/junk1.patch
+  .hg/shelved/junk2.hg
+
+# A file with an unexpected extension
+  $ touch .hg/shelved/junk3
+  $ hg shelve -l
+  $ hg unshelve
+  abort: no shelved changes to apply!
+  [20]
+  $ hg shelve -d junk3
+  abort: shelved change 'junk3' not found
+  [10]
+  $ find .hg/shelve* | sort
+  .hg/shelved
+  .hg/shelved/junk1.patch
+  .hg/shelved/junk2.hg
+  .hg/shelved/junk3
+
+  $ cd ..
+
 Unshelve respects --keep even if user intervention is needed
   $ hg init unshelvekeep && cd unshelvekeep
   $ echo 1 > file && hg ci -Am 1
@@ -762,7 +811,7 @@
   merging file
   warning: conflicts while merging file! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ hg resolve --mark file
   (no more unresolved files)
   continue: hg unshelve --continue
@@ -819,7 +868,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 putting a v1 shelvedstate file in place of the created v2
   $ cat << EOF > .hg/shelvedstate
   > 1
@@ -860,7 +909,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ hg abort
   unshelve of 'default' aborted
 
@@ -877,7 +926,7 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ cat .hg/shelved/default.shelve
   node=82e0cb9893247d12667017593ce1e5655860f1ac
   $ hg abort
@@ -905,14 +954,14 @@
   merging a
   warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
 
   $ hg log --template '{desc|firstline}  {author}  {date|isodate} \n' -r .
   pending changes temporary commit  shelve@localhost  1970-01-01 00:00 +0000 
   $ hg merge --abort
   abort: cannot abort merge with unshelve in progress
   (use 'hg unshelve --continue' or 'hg unshelve --abort')
-  [255]
+  [20]
 
   $ hg unshelve --abort
   unshelve of 'default' aborted
@@ -934,7 +983,7 @@
   $ hg unshelve -q
   warning: conflicts while merging foo! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue')
-  [1]
+  [240]
   $ cat foo
   r0
   <<<<<<< working-copy: 0b2fcf2a90e9 - shelve: pending changes temporary commit
--- a/tests/test-sidedata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-sidedata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -54,6 +54,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -66,6 +67,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no    yes      no
   persistent-nodemap:  no     no      no
@@ -84,6 +86,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes     no      no
   persistent-nodemap:  no     no      no
@@ -96,6 +99,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes     no      no
   persistent-nodemap:  no     no      no
--- a/tests/test-simple-update.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-simple-update.t	Tue Jan 19 21:48:43 2021 +0530
@@ -55,7 +55,7 @@
 
   $ hg upd -d foo 0
   abort: you can't specify a revision and a date
-  [255]
+  [10]
 
 update by date
 
@@ -71,7 +71,7 @@
 
   $ hg update -d '>1970-01-01 00:00:02 +0000'
   abort: revision matching date not found
-  [255]
+  [10]
   $ hg update -d '>1970-01-01 00:00:01 +0000'
   found revision 1 from Thu Jan 01 00:00:01 1970 +0000
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-single-head.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-single-head.t	Tue Jan 19 21:48:43 2021 +0530
@@ -277,7 +277,7 @@
   c_aL0
   c_aM0
 
-Let's make a new head and push everythin. The server feedback will mention
+Let's make a new head and push everything. The server feedback will mention
 exactly one new head because c_aM0 is closed.
 
   $ hg up 'desc("c_aG0")'
@@ -291,3 +291,98 @@
   adding manifests
   adding file changes
   added 3 changesets with 3 changes to 3 files (+1 heads)
+  $ cd ..
+
+
+Test that single-head-per-branch can be restricted to public changes
+-------------------------------------------------------------------
+
+  $ hg clone -r 49003e504178 single-head-server public-only
+  adding changesets
+  adding manifests
+  adding file changes
+  added 9 changesets with 9 changes to 9 files
+  1 new obsolescence markers
+  new changesets ea207398892e:49003e504178 (9 drafts)
+  updating to branch branch_A
+  9 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  $ cd public-only
+  $ cat <<EOF >> .hg/hgrc
+  > [phases]
+  > publish = no
+  > [experimental]
+  > single-head-per-branch = yes
+  > single-head-per-branch:public-changes-only = yes
+  > EOF
+  > hg phase -p :
+  $ hg update 'desc("c_aG0")'
+  0 files updated, 0 files merged, 2 files removed, 0 files unresolved
+  $ mkcommit c_dO0
+  created new head
+  $ hg log -G
+  @  changeset:   9:8058fd35cc2b
+  |  branch:      branch_A
+  |  tag:         tip
+  |  parent:      7:a33fb808fb4b
+  |  user:        test
+  |  date:        Thu Jan 01 00:00:00 1970 +0000
+  |  summary:     c_dO0
+  |
+  | o  changeset:   8:49003e504178
+  |/|  branch:      branch_A
+  | |  parent:      7:a33fb808fb4b
+  | |  parent:      3:840af1c6bc88
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     c_aI0
+  | |
+  o |  changeset:   7:a33fb808fb4b
+  | |  branch:      branch_A
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     c_aG0
+  | |
+  o |  changeset:   6:99a2dc242c5d
+  | |  user:        test
+  | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | |  summary:     c_dF1
+  | |
+  o |    changeset:   5:6ed1df20edb1
+  |\ \   parent:      4:9bf953aa81f6
+  | | |  parent:      2:286d02a6e2a2
+  | | |  user:        test
+  | | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |  summary:     c_dE0
+  | | |
+  | o |  changeset:   4:9bf953aa81f6
+  | | |  parent:      1:134bc3852ad2
+  | | |  user:        test
+  | | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |  summary:     c_dD0
+  | | |
+  | | o  changeset:   3:840af1c6bc88
+  | | |  branch:      branch_A
+  | | |  parent:      0:ea207398892e
+  | | |  user:        test
+  | | |  date:        Thu Jan 01 00:00:00 1970 +0000
+  | | |  summary:     c_aC0
+  | | |
+  o | |  changeset:   2:286d02a6e2a2
+  |/ /   user:        test
+  | |    date:        Thu Jan 01 00:00:00 1970 +0000
+  | |    summary:     c_dB0
+  | |
+  o |  changeset:   1:134bc3852ad2
+  |/   user:        test
+  |    date:        Thu Jan 01 00:00:00 1970 +0000
+  |    summary:     c_dA0
+  |
+  o  changeset:   0:ea207398892e
+     user:        test
+     date:        Thu Jan 01 00:00:00 1970 +0000
+     summary:     ROOT
+  
+  $ hg phase -p .
+  abort: rejecting multiple heads on branch "branch_A"
+  (2 heads: 49003e504178 8058fd35cc2b)
+  [255]
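
Editor's note (not part of the changeset): the new public-only test above is driven
entirely by configuration. The two experimental options it relies on, restated in the
tests' own style with an editorial gloss as comments:

  $ cat >> .hg/hgrc <<EOF
  > [experimental]
  > single-head-per-branch = yes
  > # with the next line, extra heads are tolerated while they remain drafts and
  > # are only rejected when someone tries to make them public, as the
  > # 'hg phase -p .' abort above shows
  > single-head-per-branch:public-changes-only = yes
  > EOF
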
--- a/tests/test-sparse-profiles.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-sparse-profiles.t	Tue Jan 19 21:48:43 2021 +0530
@@ -201,7 +201,7 @@
   warning: conflicts while merging backend.sparse! (edit, then use 'hg resolve --mark')
   warning: conflicts while merging data.py! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
   $ rm *.orig
   $ ls -A
   .hg
--- a/tests/test-sparse-requirement.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-sparse-requirement.t	Tue Jan 19 21:48:43 2021 +0530
@@ -46,7 +46,7 @@
 Client without sparse enabled reacts properly
 
   $ hg files
-  abort: repository is using sparse feature but sparse is not enabled; enable the "sparse" extensions to access!
+  abort: repository is using sparse feature but sparse is not enabled; enable the "sparse" extensions to access
   [255]
 
 Requirement for sparse is removed when sparse is disabled
--- a/tests/test-sparse.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-sparse.t	Tue Jan 19 21:48:43 2021 +0530
@@ -196,12 +196,12 @@
 Verify rebase temporarily includes excluded files
 
   $ hg rebase -d 1 -r 2 --config extensions.rebase=
-  rebasing 2:b91df4f39e75 "edit hide" (tip)
+  rebasing 2:b91df4f39e75 tip "edit hide"
   temporarily included 2 file(s) in the sparse checkout for merging
   merging hide
   warning: conflicts while merging hide! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg rebase --continue')
-  [1]
+  [240]
 
   $ hg debugsparse
   [exclude]
@@ -279,7 +279,7 @@
 
   $ hg commit -Aqm "add show2"
   $ hg rebase -d 1 --config extensions.rebase=
-  rebasing 2:bdde55290160 "add show2" (tip)
+  rebasing 2:bdde55290160 tip "add show2"
   saved backup bundle to $TESTTMP/myrepo/.hg/strip-backup/bdde55290160-216ed9c6-rebase.hg
 
 Verify log --sparse only shows commits that affect the sparse checkout
--- a/tests/test-split.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-split.t	Tue Jan 19 21:48:43 2021 +0530
@@ -51,7 +51,7 @@
   $ hg commit -m empty --config ui.allowemptycommit=1
   $ hg split
   abort: cannot split an empty revision
-  [255]
+  [10]
 
   $ rm -rf .hg
   $ hg init
@@ -60,7 +60,7 @@
 
   $ hg split -r 'wdir()'
   abort: cannot split working directory
-  [255]
+  [10]
 
 Generate some content.  The sed filter drops CR on Windows, which is dropped in
 the a > b line.
@@ -79,7 +79,7 @@
   $ hg split .
   abort: cannot split public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
   $ hg phase --draft -f -r 'all()'
 
@@ -89,7 +89,7 @@
   $ hg add dirty
   $ hg split .
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg forget dirty
   $ rm dirty
 
@@ -103,7 +103,7 @@
 
   $ hg split 'all()'
   abort: cannot split multiple revisions
-  [255]
+  [10]
 
 This function splits a bit strangely, primarily to avoid changing the behavior of
 the test after a bug was fixed with how split/commit --interactive handled
@@ -159,7 +159,7 @@
   transaction abort!
   rollback completed
   abort: edit failed: false exited with status 1
-  [255]
+  [250]
   $ hg status
 
   $ HGEDITOR="\"$PYTHON\" $TESTTMP/editor.py"
@@ -216,7 +216,7 @@
   (enter ? for help) [Ynesfdaq?] y
   
   EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
-  EDITOR: HG: - e704349bd21b: split 1
+  EDITOR: HG: - 2:e704349bd21b tip "split 1"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: a2
   EDITOR: 
@@ -239,8 +239,8 @@
   (enter ? for help) [Ynesfdaq?] y
   
   EDITOR: HG: Splitting 1df0d5c5a3ab. So far it has been split into:
-  EDITOR: HG: - e704349bd21b: split 1
-  EDITOR: HG: - a09ad58faae3: split 2
+  EDITOR: HG: - 2:e704349bd21b tip "split 1"
+  EDITOR: HG: - 3:a09ad58faae3 "split 2"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: a2
   EDITOR: 
@@ -368,9 +368,9 @@
   $ cp -R . ../d
 
   $ runsplit -r 1 | grep rebasing
-  rebasing 2:b5c5ea414030 "d1" (d1)
-  rebasing 3:f4a0a8d004cc "d2" (d2)
-  rebasing 4:777940761eba "d3" (d3)
+  rebasing 2:b5c5ea414030 d1 "d1"
+  rebasing 3:f4a0a8d004cc d2 "d2"
+  rebasing 4:777940761eba d3 "d3"
 #if obsstore-off
   $ hg bookmark
      d1                        4:c4b449ef030e
@@ -467,7 +467,7 @@
 #if obsstore-off
   $ runsplit -r 1 --no-rebase
   abort: cannot split changeset with children
-  [255]
+  [10]
 #else
   $ runsplit -r 1 --no-rebase >/dev/null
   3 new orphan changesets
@@ -518,7 +518,7 @@
   $ rm .hg/localtags
   $ hg split $B --config experimental.evolution=createmarkers
   abort: cannot split changeset with children
-  [255]
+  [10]
   $ cat > $TESTTMP/messages <<EOF
   > Split B
   > EOF
@@ -661,7 +661,7 @@
   (enter ? for help) [Ynesfdaq?] f
   
   EDITOR: HG: Splitting dd3c45017cbf. So far it has been split into:
-  EDITOR: HG: - f205aea1c624: split 1
+  EDITOR: HG: - 2:f205aea1c624 tip "split 1"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: splitme
   EDITOR: 
@@ -718,7 +718,7 @@
   (enter ? for help) [Ynesfdaq?] f
   
   EDITOR: HG: Splitting 904c80b40a4a. So far it has been split into:
-  EDITOR: HG: - ffecf40fa954: split 1
+  EDITOR: HG: - 2:ffecf40fa954 tip "split 1"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: splitme
   EDITOR: 
@@ -787,7 +787,7 @@
 
   $ printf 'y\ny\ny\n' | hg split
   abort: cannot split an empty revision
-  [255]
+  [10]
 #endif
 
 Test that splitting moves works properly (issue5723)
@@ -843,7 +843,7 @@
   (enter ? for help) [Ynesfdaq?] a
   
   EDITOR: HG: Splitting 8c42fa635116. So far it has been split into:
-  EDITOR: HG: - 478be2a70c27: split1, keeping only the numbered lines
+  EDITOR: HG: - 2:478be2a70c27 tip "split1, keeping only the numbered lines"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: move and modify
   EDITOR: 
@@ -941,7 +941,7 @@
   (enter ? for help) [Ynesfdaq?] a
   
   EDITOR: HG: Splitting 41c861dfa61e. So far it has been split into:
-  EDITOR: HG: - 4b19e06610eb: split1, keeping "file" and only the numbered lines in file2
+  EDITOR: HG: - 2:4b19e06610eb tip "split1, keeping "file" and only the numbered lines in file2"
   EDITOR: HG: Write commit message for the next split changeset.
   EDITOR: copy file->file2, modify both
   EDITOR: 
@@ -976,3 +976,73 @@
   2
   3
   4
+
+Test that color codes don't end up in the commit message template
+----------------------------------------------------
+
+  $ hg init $TESTTMP/colorless
+  $ cd $TESTTMP/colorless
+  $ echo 1 > file1
+  $ echo 1 > file2
+  $ hg ci -qAm initial
+  $ echo 2 > file1
+  $ echo 2 > file2
+  $ cat > $TESTTMP/messages <<EOF
+  > split1, modifying file1
+  > --
+  > split2, modifying file2
+  > EOF
+  $ hg ci
+  EDITOR: 
+  EDITOR: 
+  EDITOR: HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  EDITOR: HG: Leave message empty to abort commit.
+  EDITOR: HG: --
+  EDITOR: HG: user: test
+  EDITOR: HG: branch 'default'
+  EDITOR: HG: changed file1
+  EDITOR: HG: changed file2
+  $ printf 'f\nn\na\n' | hg split --color=debug \
+  > --config command-templates.oneline-summary='{label("rev", rev)} {desc}'
+  [diff.diffline|diff --git a/file1 b/file1]
+  1 hunks, 1 lines changed
+  [ ui.prompt|examine changes to 'file1'?
+  (enter ? for help) [Ynesfdaq?]] [ ui.promptecho|f]
+  
+  [diff.diffline|diff --git a/file2 b/file2]
+  1 hunks, 1 lines changed
+  [ ui.prompt|examine changes to 'file2'?
+  (enter ? for help) [Ynesfdaq?]] [ ui.promptecho|n]
+  
+  EDITOR: HG: Splitting 6432c65c3078. Write commit message for the first split changeset.
+  EDITOR: split1, modifying file1
+  EDITOR: 
+  EDITOR: 
+  EDITOR: HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  EDITOR: HG: Leave message empty to abort commit.
+  EDITOR: HG: --
+  EDITOR: HG: user: test
+  EDITOR: HG: branch 'default'
+  EDITOR: HG: changed file1
+  [ ui.status|created new head]
+  [diff.diffline|diff --git a/file2 b/file2]
+  1 hunks, 1 lines changed
+  [ ui.prompt|examine changes to 'file2'?
+  (enter ? for help) [Ynesfdaq?]] [ ui.promptecho|a]
+  
+  EDITOR: HG: Splitting 6432c65c3078. So far it has been split into:
+  EDITOR: HG: - 2 split2, modifying file2
+  EDITOR: HG: Write commit message for the next split changeset.
+  EDITOR: split1, modifying file1
+  EDITOR: 
+  EDITOR: 
+  EDITOR: HG: Enter commit message.  Lines beginning with 'HG:' are removed.
+  EDITOR: HG: Leave message empty to abort commit.
+  EDITOR: HG: --
+  EDITOR: HG: user: test
+  EDITOR: HG: branch 'default'
+  EDITOR: HG: changed file2
+  [ ui.warning|transaction abort!]
+  [ ui.warning|rollback completed]
+  [ ui.error|abort: empty commit message]
+  [10]
--- a/tests/test-sqlitestore.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-sqlitestore.t	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,9 @@
-#require sqlite
+#require sqlite no-chg
+
+The sqlitestore backend leaves transactions around when used with chg.
+Since this backend is primarily intended as a proof of concept for
+alternative storage backends, disable it for chg test runs to avoid
+the instability.
 
   $ cat >> $HGRCPATH <<EOF
   > [extensions]
--- a/tests/test-ssh-bundle1.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-ssh-bundle1.t	Tue Jan 19 21:48:43 2021 +0530
@@ -53,16 +53,16 @@
 repo not found error
 
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
-  remote: abort: repository nonexistent not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository nonexistent not found
+  abort: no suitable response from remote hg
   [255]
 
 non-existent absolute path
 
 #if no-msys
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local
-  remote: abort: repository /$TESTTMP/nonexistent not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository /$TESTTMP/nonexistent not found
+  abort: no suitable response from remote hg
   [255]
 #endif
 
@@ -147,8 +147,8 @@
 
   $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
   pulling from ssh://user@dummy/doesnotexist
-  remote: abort: repository doesnotexist not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository doesnotexist not found
+  abort: no suitable response from remote hg
   [255]
 
 local change
@@ -340,7 +340,7 @@
 
   $ hg push ssh://user:erroneouspwd@dummy/remote
   pushing to ssh://user:*@dummy/remote (glob)
-  abort: password in URL not supported!
+  abort: password in URL not supported
   [255]
 
   $ cd $TESTTMP
@@ -359,7 +359,7 @@
   73649e48688a
 
   $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
-  abort: unknown revision 'noNoNO'!
+  abort: unknown revision 'noNoNO'
   [255]
 
 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
@@ -367,7 +367,7 @@
   $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
   destination directory: a repo
   abort: destination 'a repo' is not empty
-  [255]
+  [10]
 
 Test hg-ssh using a helper script that will restore PYTHONPATH (which might
 have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right
@@ -387,12 +387,12 @@
 
   $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
   remote: Illegal repository "$TESTTMP/a'repo"
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
   remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ SSH_ORIGINAL_COMMAND="'hg' serve -R 'a'repo' --stdio" "$PYTHON" "$TESTDIR/../contrib/hg-ssh"
--- a/tests/test-ssh-proto.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-ssh-proto.t	Tue Jan 19 21:48:43 2021 +0530
@@ -270,7 +270,7 @@
   sending between command
   remote: 0
   remote: 1
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 Sending an unknown command to the server results in an empty response to that command
--- a/tests/test-ssh-repoerror.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-ssh-repoerror.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,7 +17,7 @@
 
   $ hg id ssh://user@dummy/no-read
   remote: abort: Permission denied: *$TESTTMP/no-read/.hg* (glob)
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 special case files are visible, but unreadable
@@ -35,7 +35,7 @@
 
   $ hg id ssh://user@dummy/other
   remote: abort: Permission denied: '$TESTTMP/other/.hg/requires'
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 directory toward the repository is read only
@@ -51,7 +51,7 @@
 
   $ hg id ssh://user@dummy/deep/nested
   remote: abort: Permission denied: *$TESTTMP/deep/nested/.hg* (glob)
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 repository has wrong requirement
@@ -62,7 +62,7 @@
   000000000000
   $ echo flying-car >> repo-future/.hg/requires
   $ hg id ssh://user@dummy/repo-future
-  remote: abort: repository requires features unknown to this Mercurial: flying-car!
+  remote: abort: repository requires features unknown to this Mercurial: flying-car
   remote: (see https://mercurial-scm.org/wiki/MissingRequirement for more information)
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
--- a/tests/test-ssh.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-ssh.t	Tue Jan 19 21:48:43 2021 +0530
@@ -43,19 +43,19 @@
 repo not found error
 
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
-  remote: abort: repository nonexistent not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository nonexistent not found
+  abort: no suitable response from remote hg
   [255]
   $ hg clone -q -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local
-  remote: abort: repository nonexistent not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository nonexistent not found
+  abort: no suitable response from remote hg
   [255]
 
 non-existent absolute path
 
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local
-  remote: abort: repository $TESTTMP/nonexistent not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository $TESTTMP/nonexistent not found
+  abort: no suitable response from remote hg
   [255]
 
 clone remote via stream
@@ -84,8 +84,8 @@
   $ hg -R local-stream book mybook
   $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" --stream ssh://user@dummy/local-stream stream2
   streaming all changes
-  9 files to transfer, 870 bytes of data
-  transferred 870 bytes in * seconds (*) (glob)
+  16 files to transfer, * of data (glob)
+  transferred * in * seconds (*) (glob)
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ cd stream2
@@ -135,8 +135,8 @@
 
   $ hg pull -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist
   pulling from ssh://user@dummy/doesnotexist
-  remote: abort: repository doesnotexist not found!
-  abort: no suitable response from remote hg!
+  remote: abort: repository doesnotexist not found
+  abort: no suitable response from remote hg
   [255]
 
 local change
@@ -331,9 +331,10 @@
   remote: adding changesets
   remote: adding manifests
   remote: adding file changes
+  remote: added 1 changesets with 1 changes to 1 files (py3 !)
   remote: KABOOM
   remote: KABOOM IN PROCESS
-  remote: added 1 changesets with 1 changes to 1 files
+  remote: added 1 changesets with 1 changes to 1 files (no-py3 !)
 
 #endif
 
@@ -360,7 +361,7 @@
 
   $ hg push ssh://user:erroneouspwd@dummy/remote
   pushing to ssh://user:*@dummy/remote (glob)
-  abort: password in URL not supported!
+  abort: password in URL not supported
   [255]
 
   $ cd $TESTTMP
@@ -379,7 +380,7 @@
   73649e48688a
 
   $ hg id --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO"
-  abort: unknown revision 'noNoNO'!
+  abort: unknown revision 'noNoNO'
   [255]
 
 Test (non-)escaping of remote paths with spaces when cloning (issue3145):
@@ -387,7 +388,7 @@
   $ hg clone --ssh "\"$PYTHON\" \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo"
   destination directory: a repo
   abort: destination 'a repo' is not empty
-  [255]
+  [10]
 
 Make sure hg is really paranoid in serve --stdio mode. It used to be
 possible to get a debugger REPL by specifying a repo named --debugger.
@@ -420,12 +421,12 @@
 
   $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo"
   remote: Illegal repository "$TESTTMP/a'repo"
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo"
   remote: Illegal command "hacking -R 'a'\''repo' serve --stdio"
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
   $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" "$PYTHON" "$TESTDIR/../contrib/hg-ssh"
@@ -675,7 +676,7 @@
 
   $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\""
   pulling from ssh://brokenrepository/
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
 abort with configured error hint when there is a ssh problem when pulling
@@ -683,7 +684,7 @@
   $ hg pull ssh://brokenrepository -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" \
   > --config ui.ssherrorhint="Please see http://company/internalwiki/ssh.html"
   pulling from ssh://brokenrepository/
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   (Please see http://company/internalwiki/ssh.html)
   [255]
 
@@ -696,11 +697,11 @@
   $ hg pull ssh://something --config ui.ssh="sh dumpenv"
   pulling from ssh://something/
   remote: 
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ hg pull ssh://something --config ui.ssh="sh dumpenv" --config sshenv.VAR=17
   pulling from ssh://something/
   remote: 17
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
 
--- a/tests/test-state-extension.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-state-extension.t	Tue Jan 19 21:48:43 2021 +0530
@@ -101,7 +101,7 @@
   merging file1
   warning: conflicts while merging file1! (edit, then use 'hg resolve --mark')
   unresolved conflicts (see 'hg resolve', then 'hg chainify --continue')
-  [1]
+  [240]
   $ hg status --config commands.status.verbose=True
   M file1
   ? file1.orig
--- a/tests/test-static-http.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-static-http.t	Tue Jan 19 21:48:43 2021 +0530
@@ -2,7 +2,7 @@
 
   $ hg clone http://localhost:$HGPORT/ copy
   abort: * (glob)
-  [255]
+  [100]
   $ test -d copy
   [1]
 
@@ -94,7 +94,7 @@
 
   $ cd ..
   $ hg clone -r doesnotexist static-http://localhost:$HGPORT/remote local0
-  abort: unknown revision 'doesnotexist'!
+  abort: unknown revision 'doesnotexist'
   [255]
   $ hg clone -r 0 static-http://localhost:$HGPORT/remote local0
   adding changesets
@@ -169,7 +169,7 @@
   $ cd ..
   $ mkdir notarepo
   $ hg clone static-http://localhost:$HGPORT/notarepo local3
-  abort: 'http://localhost:$HGPORT/notarepo' does not appear to be an hg repository!
+  abort: 'http://localhost:$HGPORT/notarepo' does not appear to be an hg repository
   [255]
 
 Clone with tags and branches works
--- a/tests/test-status-color.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-status-color.t	Tue Jan 19 21:48:43 2021 +0530
@@ -394,7 +394,7 @@
   $ hg unknowncommand > /dev/null
   hg: unknown command 'unknowncommand'
   (use 'hg help' for a list of commands)
-  [255]
+  [10]
 
 color coding of error message without curses
 
@@ -402,6 +402,6 @@
   $ PYTHONPATH=`pwd`:$PYTHONPATH hg unknowncommand > /dev/null
   hg: unknown command 'unknowncommand'
   (use 'hg help' for a list of commands)
-  [255]
+  [10]
 
   $ cd ..
--- a/tests/test-status-inprocess.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-status-inprocess.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 from __future__ import absolute_import, print_function
 
 import sys
--- a/tests/test-status-terse.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-status-terse.t	Tue Jan 19 21:48:43 2021 +0530
@@ -24,7 +24,7 @@
   ? x/bb.o
   $ hg status --terse f
   abort: 'f' not recognized
-  [255]
+  [10]
 
 Add a .hgignore so that we can also have ignored files
 
@@ -224,7 +224,7 @@
 
   $ hg status --terse marduic --rev 0 --rev 1
   abort: cannot use --terse with --rev
-  [255]
+  [10]
 
 Config item to set the default terseness
   $ cat <<EOF >> $HGRCPATH
--- a/tests/test-stdio.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-stdio.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """
 Tests the buffering behavior of stdio streams in `mercurial.utils.procutil`.
 """
--- a/tests/test-stream-bundle-v2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-stream-bundle-v2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -117,6 +117,7 @@
    E: remote created -> g
   getting E
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ hg clone --stream http://localhost:$HGPORT streamv2-clone-explicit --debug
@@ -173,4 +174,5 @@
    E: remote created -> g
   getting E
   5 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 4 HTTP requests and * bytes; received * bytes in responses) (glob)
--- a/tests/test-strict.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-strict.t	Tue Jan 19 21:48:43 2021 +0530
@@ -16,7 +16,7 @@
   $ hg an a
   hg: unknown command 'an'
   (use 'hg help' for a list of commands)
-  [255]
+  [10]
   $ hg annotate a
   0: a
 
--- a/tests/test-strip.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-strip.t	Tue Jan 19 21:48:43 2021 +0530
@@ -10,7 +10,7 @@
   >     hg up -C $1
   >     echo % before update $1, strip $2
   >     hg log -G -T '{rev}:{node}'
-  >     hg --traceback strip $2
+  >     hg --traceback debugstrip $2
   >     echo % after update $1, strip $2
   >     hg log -G -T '{rev}:{node}'
   >     restore
@@ -317,7 +317,7 @@
   $ hg strip 4
   abort: outstanding uncommitted merge
   (use 'hg commit' or 'hg merge --abort')
-  [255]
+  [20]
 ##strip allowed --force with merge in progress
   $ hg strip 4 --force
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -606,7 +606,7 @@
   $ echo c > b
   $ hg strip tip
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg strip tip --keep
   saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob)
   $ hg log --graph
@@ -708,7 +708,7 @@
   saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/*-backup.hg (glob)
   bookmark 'todelete' deleted
   $ hg id -ir dcbb326fdec2
-  abort: unknown revision 'dcbb326fdec2'!
+  abort: unknown revision 'dcbb326fdec2'
   [255]
   $ hg id -ir d62d843c9a01
   d62d843c9a01
@@ -724,17 +724,17 @@
   bookmark 'multipledelete1' deleted
   bookmark 'multipledelete2' deleted
   $ hg id -ir e46a4836065c
-  abort: unknown revision 'e46a4836065c'!
+  abort: unknown revision 'e46a4836065c'
   [255]
   $ hg id -ir b4594d867745
-  abort: unknown revision 'b4594d867745'!
+  abort: unknown revision 'b4594d867745'
   [255]
   $ hg strip -B singlenode1 -B singlenode2
   saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/43227190fef8-8da858f2-backup.hg
   bookmark 'singlenode1' deleted
   bookmark 'singlenode2' deleted
   $ hg id -ir 43227190fef8
-  abort: unknown revision '43227190fef8'!
+  abort: unknown revision '43227190fef8'
   [255]
   $ hg strip -B unknownbookmark
   abort: bookmark 'unknownbookmark' not found
@@ -749,7 +749,7 @@
   saved backup bundle to $TESTTMP/bookmarks/.hg/strip-backup/*-backup.hg (glob)
   bookmark 'delete' deleted
   $ hg id -ir 6:2702dd0c91e7
-  abort: unknown revision '2702dd0c91e7'!
+  abort: unknown revision '2702dd0c91e7'
   [255]
   $ hg update B
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
@@ -758,20 +758,18 @@
   $ hg add a
   $ hg strip -B B
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg bookmarks
    * B                         6:ff43616e5d0f
 
 Make sure no one adds back a -b option:
 
   $ hg strip -b tip
-  hg strip: option -b not recognized
-  hg strip [-k] [-f] [-B bookmark] [-r] REV...
+  hg debugstrip: option -b not recognized
+  hg debugstrip [-k] [-f] [-B bookmark] [-r] REV...
   
   strip changesets and all their descendants from the repository
   
-  (use 'hg help -e strip' to show help for the strip extension)
-  
   options ([+] can be repeated):
   
    -r --rev REV [+]           strip specified revision (optional, can specify
@@ -783,8 +781,8 @@
    -B --bookmark BOOKMARK [+] remove revs only reachable from given bookmark
       --mq                    operate on patch repository
   
-  (use 'hg strip -h' to show more help)
-  [255]
+  (use 'hg debugstrip -h' to show more help)
+  [10]
 
   $ cd ..
 
--- a/tests/test-subrepo-deep-nested-change.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-subrepo-deep-nested-change.t	Tue Jan 19 21:48:43 2021 +0530
@@ -110,7 +110,7 @@
   updating to branch default
   cloning subrepo sub1 from http://localhost:$HGPORT/../sub1
   abort: HTTP Error 404: Not Found
-  [255]
+  [100]
 
   $ cat access.log
   * "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
--- a/tests/test-subrepo-git.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-subrepo-git.t	Tue Jan 19 21:48:43 2021 +0530
@@ -17,6 +17,10 @@
   $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE
   $ GIT_CONFIG_NOSYSTEM=1; export GIT_CONFIG_NOSYSTEM
 
+set default branch to value compatible with new and old git version
+
+  $ git config --global init.defaultBranch master
+
 root hg repo
 
   $ hg init t
--- a/tests/test-subrepo-recursion.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-subrepo-recursion.t	Tue Jan 19 21:48:43 2021 +0530
@@ -575,7 +575,7 @@
   $ hg --config progress.disable=True clone ../empty2 ../empty_clone
   updating to branch default
   cloning subrepo foo from $TESTTMP/empty2/foo
-  abort: repository $TESTTMP/empty2/foo not found!
+  abort: repository $TESTTMP/empty2/foo not found
   [255]
 
 Disable progress extension and cleanup:
@@ -682,7 +682,7 @@
 
   $ hg incoming -S --bundle incoming.hg
   abort: cannot specify both --subrepos and --bundle
-  [255]
+  [10]
 
 Test missing subrepo:
 
--- a/tests/test-subrepo-relative-path.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-subrepo-relative-path.t	Tue Jan 19 21:48:43 2021 +0530
@@ -85,7 +85,7 @@
   searching for changes
   no changes found
   abort: HTTP Error 403: ssl required
-  [255]
+  [100]
 
 Checking cloned repo ids
 
--- a/tests/test-subrepo.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-subrepo.t	Tue Jan 19 21:48:43 2021 +0530
@@ -315,8 +315,8 @@
   resolving manifests
    branchmerge: True, force: False, partial: False
    ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198
+  starting 4 threads for background file closing (?)
    preserving t for resolve of t
-  starting 4 threads for background file closing (?)
    t: versions differ -> m (premerge)
   picked tool ':merge' for t (binary False symlink False changedelete False)
   merging t
@@ -571,7 +571,7 @@
   no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss
   pushing subrepo s to $TESTTMP/t/s
   searching for changes
-  abort: push creates new remote head 12a213df6fa9! (in subrepository "s")
+  abort: push creates new remote head 12a213df6fa9 (in subrepository "s")
   (merge or see 'hg help push' for details about pushing new heads)
   [255]
   $ hg push -f
@@ -1013,7 +1013,7 @@
   created new head
   $ hg -R repo2 ci -m3
   $ hg -q -R repo2 push
-  abort: push creates new remote head cc505f09a8b2! (in subrepository "s")
+  abort: push creates new remote head cc505f09a8b2 (in subrepository "s")
   (merge or see 'hg help push' for details about pushing new heads)
   [255]
   $ hg -R repo update
@@ -1280,7 +1280,7 @@
   ../shared/subrepo-2/.hg/wcache/manifestfulltextcache (reporevlogstore !)
   ../shared/subrepo-2/file
   $ hg -R ../shared in
-  abort: repository default not found!
+  abort: repository default not found
   [255]
   $ hg -R ../shared/subrepo-2 showconfig paths
   paths.default=$TESTTMP/subrepo-status/subrepo-2
@@ -1964,7 +1964,7 @@
   $ hg clone malicious-proxycommand malicious-proxycommand-clone
   updating to branch default
   cloning subrepo s from ssh://fakehost%7Ctouch%24%7BIFS%7Downed/path
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ [ ! -f owned ] || echo 'you got owned'
 
@@ -1978,7 +1978,7 @@
   $ hg clone malicious-proxycommand malicious-proxycommand-clone
   updating to branch default
   cloning subrepo s from ssh://fakehost%7Ctouch%20owned/path
-  abort: no suitable response from remote hg!
+  abort: no suitable response from remote hg
   [255]
   $ [ ! -f owned ] || echo 'you got owned'
 
--- a/tests/test-symlinks.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-symlinks.t	Tue Jan 19 21:48:43 2021 +0530
@@ -198,7 +198,7 @@
   $ ln -s nothing dangling
   $ hg commit -m 'commit symlink without adding' dangling
   abort: dangling: file not tracked!
-  [255]
+  [10]
   $ hg add dangling
   $ hg commit -m 'add symlink'
 
--- a/tests/test-tag.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-tag.t	Tue Jan 19 21:48:43 2021 +0530
@@ -30,7 +30,7 @@
 
   $ hg tag ' '
   abort: tag names cannot consist entirely of whitespace
-  [255]
+  [10]
 
 (this tests also that editor is not invoked, if '--edit' is not
 specified)
@@ -61,29 +61,29 @@
   $ hg revert .hgtags
   $ hg tag -r 0 x y z y y z
   abort: tag names must be unique
-  [255]
+  [10]
   $ hg tag tap nada dot tip
   abort: the name 'tip' is reserved
-  [255]
+  [10]
   $ hg tag .
   abort: the name '.' is reserved
-  [255]
+  [10]
   $ hg tag null
   abort: the name 'null' is reserved
-  [255]
+  [10]
   $ hg tag "bleah"
   abort: tag 'bleah' already exists (use -f to force)
-  [255]
+  [10]
   $ hg tag "blecch" "bleah"
   abort: tag 'bleah' already exists (use -f to force)
-  [255]
+  [10]
 
   $ hg tag --remove "blecch"
   abort: tag 'blecch' does not exist
-  [255]
+  [10]
   $ hg tag --remove "bleah" "blecch" "blough"
   abort: tag 'blecch' does not exist
-  [255]
+  [10]
 
   $ hg tag -r 0 "bleah0"
   hook: tag changes detected
@@ -105,13 +105,13 @@
 
   $ hg tag "bleah "
   abort: tag 'bleah' already exists (use -f to force)
-  [255]
+  [10]
   $ hg tag " bleah"
   abort: tag 'bleah' already exists (use -f to force)
-  [255]
+  [10]
   $ hg tag " bleah"
   abort: tag 'bleah' already exists (use -f to force)
-  [255]
+  [10]
   $ hg tag -r 0 "  bleahbleah  "
   hook: tag changes detected
   hook: +A acb14030fe0a21b60322c440ad2d20cf7685a376 bleahbleah
@@ -144,7 +144,7 @@
   $ hg tag -l localblah
   $ hg tag "foobar"
   abort: working directory is not at a branch head (use -f to force)
-  [255]
+  [10]
   $ hg tag -f "foobar"
   hook: tag changes detected
   hook: +A acb14030fe0a21b60322c440ad2d20cf7685a376 foobar
@@ -157,10 +157,10 @@
   $ hg tag -l 'xx
   > newline'
   abort: '\n' cannot be used in a name
-  [255]
+  [10]
   $ hg tag -l 'xx:xx'
   abort: ':' cannot be used in a name
-  [255]
+  [10]
 
 cloning local tags
 
@@ -401,16 +401,16 @@
   0 files updated, 0 files merged, 2 files removed, 0 files unresolved
   $ hg tag nullrev
   abort: working directory is not at a branch head (use -f to force)
-  [255]
+  [10]
 
   $ hg init empty
   $ hg tag -R empty nullrev
   abort: cannot tag null revision
-  [255]
+  [10]
 
   $ hg tag -R empty -r 00000000000 -f nulltag
   abort: cannot tag null revision
-  [255]
+  [10]
 
 issue5539: pruned tags do not appear in .hgtags
 
@@ -492,11 +492,11 @@
 
   $ hg tag t1
   abort: uncommitted merge
-  [255]
+  [20]
   $ hg status
   $ hg tag --rev 1 t2
   abort: uncommitted merge
-  [255]
+  [20]
   $ hg tag --rev 1 --local t3
   $ hg tags -v
   tip                                2:2a156e8887cc
--- a/tests/test-tags.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-tags.t	Tue Jan 19 21:48:43 2021 +0530
@@ -505,7 +505,7 @@
 
   $ hg tag --remove foobar
   abort: tag 'foobar' does not exist
-  [255]
+  [10]
   $ hg tip
   changeset:   5:5f6e8655b1c7
   tag:         tip
@@ -555,7 +555,7 @@
 
   $ hg tag -r 3 bar
   abort: tag 'bar' already exists (use -f to force)
-  [255]
+  [10]
   $ hg tags
   tip                                6:735c3ca72986
   bar                                0:bbd179dfa0a7
@@ -622,12 +622,12 @@
   $ hg tag -r 0 -l localtag
   $ hg tag --remove localtag
   abort: tag 'localtag' is not a global tag
-  [255]
+  [10]
   $ 
   $ hg tag -r 0 globaltag
   $ hg tag --remove -l globaltag
   abort: tag 'globaltag' is not a local tag
-  [255]
+  [10]
   $ hg tags -v
   tip                                1:a0b6fe111088
   localtag                           0:bbd179dfa0a7 local
@@ -721,19 +721,26 @@
 
   $ ls tagsclient/.hg/cache
   branch2-base
+  branch2-immutable
+  branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
   hgtagsfnodes1
   rbc-names-v1
   rbc-revs-v1
+  tags2
+  tags2-served
 
 Cache should contain the head only, even though other nodes have tags data
 
   $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
   tagsclient/.hg/cache/hgtagsfnodes1: size=96
-  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0030: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....|
+  0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
+  0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
+  0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
+  0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
+  0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
   0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
 
 Running hg tags should produce tags2* file and not change cache
@@ -745,18 +752,25 @@
 
   $ ls tagsclient/.hg/cache
   branch2-base
+  branch2-immutable
+  branch2-served
+  branch2-served.hidden
+  branch2-visible
+  branch2-visible-hidden
   hgtagsfnodes1
   rbc-names-v1
   rbc-revs-v1
+  tags2
+  tags2-served
   tags2-visible
 
   $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1
   tagsclient/.hg/cache/hgtagsfnodes1: size=96
-  0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0030: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................|
-  0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....|
+  0000: 96 ee 1d 73 00 00 00 00 00 00 00 00 00 00 00 00 |...s............|
+  0010: 00 00 00 00 00 00 00 00 c4 da b0 c2 94 65 e1 c6 |.............e..|
+  0020: 0d f7 f0 dd 32 04 ea 57 78 c8 97 97 79 fc d5 95 |....2..Wx...y...|
+  0030: f6 3c c8 fe 94 65 e1 c6 0d f7 f0 dd 32 04 ea 57 |.<...e......2..W|
+  0040: 78 c8 97 97 79 fc d5 95 40 f0 35 8c 19 e0 a7 d3 |x...y...@.5.....|
   0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.|
 
 Check that the bundle includes cache data
@@ -779,6 +793,8 @@
   updating to branch default
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ (cd tags-local-clone/.hg/cache/; ls -1 tag*)
+  tags2
+  tags2-served
   tags2-visible
 
 Avoid writing logs on trying to delete an already deleted tag
@@ -824,7 +840,7 @@
   
   $ hg tag --remove a
   abort: tag 'a' is already removed
-  [255]
+  [10]
   $ hg log
   changeset:   2:e7feacc7ec9e
   tag:         tip
--- a/tests/test-template-basic.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-template-basic.t	Tue Jan 19 21:48:43 2021 +0530
@@ -139,7 +139,7 @@
   * keywords: 
   * functions: stringify
   hg: parse error: arithmetic only defined on integers
-  [255]
+  [10]
   $ hg debugtemplate -r0 -v '{-3|stringify}\n'
   (template
     (|
@@ -179,7 +179,7 @@
   * keywords: bar, foo
   * functions: baz
   hg: parse error: can't use a key-value pair in this context
-  [255]
+  [10]
 
   $ hg debugtemplate '{pad("foo", width=10, left=true)}\n'
          foo
@@ -190,7 +190,7 @@
   $ hg debugtemplate '{("not", "an", "argument", "list")|separate}'
   hg: parse error: can't use a list in this context
   (check place of comma and parens)
-  [255]
+  [10]
 
 Second branch starting at nullrev:
 
@@ -265,7 +265,7 @@
   > EOF
   $ hg log --style ./issue4758
   abort: recursive reference 'changeset' in template
-  [255]
+  [10]
 
  not a recursion if a keyword of the same name exists:
 
@@ -307,34 +307,34 @@
   $ echo 'style = t' >> .hg/hgrc
   $ hg log
   hg: parse error at t:3: unmatched quotes
-  [255]
+  [10]
 
   $ hg log -T '{date'
   hg: parse error at 1: unterminated template expansion
   ({date
     ^ here)
-  [255]
+  [10]
   $ hg log -T '{date(}'
   hg: parse error at 6: not a prefix: end
   ({date(}
          ^ here)
-  [255]
+  [10]
   $ hg log -T '{date)}'
   hg: parse error at 5: invalid token
   ({date)}
         ^ here)
-  [255]
+  [10]
   $ hg log -T '{date date}'
   hg: parse error at 6: invalid token
   ({date date}
          ^ here)
-  [255]
+  [10]
 
   $ hg log -T '{}'
   hg: parse error at 1: not a prefix: end
   ({}
     ^ here)
-  [255]
+  [10]
   $ hg debugtemplate -v '{()}'
   (template
     (group
@@ -342,7 +342,7 @@
   * keywords: 
   * functions: 
   hg: parse error: missing argument
-  [255]
+  [10]
 
 Behind the scenes, this would throw TypeError without intype=bytes
 
@@ -356,7 +356,7 @@
   $ hg log -l 3 --template 'line: {desc|shortdate}\n'
   hg: parse error: invalid date: 'Modify, add, remove, rename'
   (template filter 'shortdate' is not compatible with keyword 'desc')
-  [255]
+  [10]
 
 Behind the scenes, this would throw AttributeError without intype=bytes
 
@@ -367,23 +367,23 @@
 
   $ hg log -l 3 --template 'line: {extras|localdate}\n'
   hg: parse error: localdate expects a date information
-  [255]
+  [10]
 
 Behind the scenes, this will throw ValueError
 
   $ hg tip --template '{author|email|date}\n'
   hg: parse error: date expects a date information
-  [255]
+  [10]
 
   $ hg tip -T '{author|email|shortdate}\n'
   hg: parse error: invalid date: 'test'
   (template filter 'shortdate' is not compatible with keyword 'author')
-  [255]
+  [10]
 
   $ hg tip -T '{get(extras, "branch")|shortdate}\n'
   hg: parse error: invalid date: 'default'
   (incompatible use of template filter 'shortdate')
-  [255]
+  [10]
 
 Error in nested template:
 
@@ -391,19 +391,19 @@
   hg: parse error at 2: unterminated string
   ({"date
      ^ here)
-  [255]
+  [10]
 
   $ hg log -T '{"foo{date|?}"}'
   hg: parse error at 11: syntax error
   ({"foo{date|?}"}
               ^ here)
-  [255]
+  [10]
 
 Thrown an error if a template function doesn't exist
 
   $ hg tip --template '{foo()}\n'
   hg: parse error: unknown function 'foo'
-  [255]
+  [10]
 
   $ cd ..
 
@@ -453,13 +453,13 @@
   $ hg log -R latesttag -r tip -T '{rev % "a"}\n'
   hg: parse error: 11 is not iterable of mappings
   (keyword 'rev' does not support map operation)
-  [255]
+  [10]
   $ hg log -R latesttag -r tip -T '{get(extras, "unknown") % "a"}\n'
   hg: parse error: None is not iterable of mappings
-  [255]
+  [10]
   $ hg log -R latesttag -r tip -T '{extras % "{key}\n" % "{key}\n"}'
   hg: parse error: list of strings is not mappable
-  [255]
+  [10]
 
 Test new-style inline templating of non-list/dict type:
 
@@ -474,7 +474,7 @@
   branch: default
   $ hg log -R latesttag -r tip -T '{get(extras, "unknown") % "{key}\n"}'
   hg: parse error: None is not iterable of mappings
-  [255]
+  [10]
   $ hg log -R latesttag -r tip -T '{min(extras) % "{key}: {value}\n"}'
   branch: default
   $ hg log -R latesttag -l1 -T '{min(revset("0:9")) % "{rev}:{node|short}\n"}'
@@ -506,7 +506,7 @@
         (symbol 'bar')
         (symbol 'baz')))
     (string '\n'))
-  [255]
+  [10]
   $ hg debugtemplate -R latesttag -r0 -v '{foo.bar()}\n' 2> /dev/null
   (template
     (.
@@ -517,7 +517,7 @@
     (string '\n'))
   * keywords: foo
   * functions: bar
-  [255]
+  [10]
 
 Test evaluation of dot operator:
 
@@ -531,10 +531,10 @@
   $ hg log -R latesttag -l1 -T '{author.invalid}\n'
   hg: parse error: 'test' is not a dictionary
   (keyword 'author' does not support member operation)
-  [255]
+  [10]
   $ hg log -R latesttag -l1 -T '{min("abc").invalid}\n'
   hg: parse error: 'a' is not a dictionary
-  [255]
+  [10]
 
 Test integer literal:
 
@@ -567,10 +567,10 @@
   hg: parse error at 3: not a prefix: )
   ({(-)}\n
       ^ here)
-  [255]
+  [10]
   $ hg debugtemplate '{(-a)}\n'
   hg: parse error: negation needs an integer argument
-  [255]
+  [10]
 
 top-level integer literal is interpreted as symbol (i.e. variable name):
 
@@ -607,10 +607,10 @@
 
   $ hg log -Ra -r0 -T '{desc|1}\n'
   hg: parse error: expected a symbol, got 'integer'
-  [255]
+  [10]
   $ hg log -Ra -r0 -T '{1()}\n'
   hg: parse error: expected a symbol, got 'integer'
-  [255]
+  [10]
 
 Test string literal:
 
@@ -748,13 +748,13 @@
   hg: parse error at 21: unterminated string
   ({if(rev, "{if(rev, \")}")}\n
                         ^ here)
-  [255]
+  [10]
   $ hg log -r 2 -T '{if(rev, \"\\"")}\n'
   hg: parse error: trailing \ in string
-  [255]
+  [10]
   $ hg log -r 2 -T '{if(rev, r\"\\"")}\n'
   hg: parse error: trailing \ in string
-  [255]
+  [10]
 
   $ cd ..
 
@@ -900,7 +900,7 @@
   text.1:wrapped (no-eol)
   $ hg log -l1 -T '{fill(desc, date, "", "")}\n'
   hg: parse error: fill expects an integer width
-  [255]
+  [10]
 
   $ hg log -l 1 --template '{sub(r"[0-9]", "-", author)}'
   {node|short} (no-eol)
@@ -926,16 +926,16 @@
 
   $ hg log -Gv -R a --template '{desc|user()}'
   hg: parse error: expected a symbol, got 'func'
-  [255]
+  [10]
 
 Test broken string escapes:
 
   $ hg log -T "bogus\\" -R a
   hg: parse error: trailing \ in string
-  [255]
+  [10]
   $ hg log -T "\\xy" -R a
   hg: parse error: invalid \x escape* (glob)
-  [255]
+  [10]
 
 Templater supports aliases of symbol and func() styles:
 
--- a/tests/test-template-functions.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-template-functions.t	Tue Jan 19 21:48:43 2021 +0530
@@ -240,7 +240,7 @@
   $ hg log -r null -T '{rev|commondir}'
   hg: parse error: argument is not a list of text
   (template filter 'commondir' is not compatible with keyword 'rev')
-  [255]
+  [10]
 
 Add a dummy commit to make up for the instability of the above:
 
@@ -283,7 +283,7 @@
   $ hg log -l1 -T '{termwidth|count}\n'
   hg: parse error: not countable
   (template filter 'count' is not compatible with keyword 'termwidth')
-  [255]
+  [10]
 
 Upper/lower filters:
 
@@ -478,25 +478,25 @@
 
   $ hg log -R a -r 0 -T '{filter()}\n'
   hg: parse error: filter expects one or two arguments
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(date)}\n'
   hg: parse error: date is not iterable
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(rev)}\n'
   hg: parse error: 0 is not iterable
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(desc|firstline)}\n'
   hg: parse error: 'line 1' is not filterable
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(manifest)}\n'
   hg: parse error: '0:a0c8bcbbb45c' is not filterable
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(succsandmarkers)}\n'
   hg: parse error: not filterable without template
-  [255]
+  [10]
   $ hg log -R a -r 0 -T '{filter(desc|splitlines % "{line}", "")}\n'
   hg: parse error: not filterable by expression
-  [255]
+  [10]
 
 Test manifest/get() can be join()-ed as string, though it's silly:
 
@@ -514,7 +514,7 @@
 
   $ hg log -R latesttag -r tip -T '{join(rev, "")}\n'
   hg: parse error: 11 is not iterable
-  [255]
+  [10]
 
 Test min/max of integers
 
@@ -542,47 +542,47 @@
   $ hg debugtemplate '{min(1)}'
   hg: parse error: 1 is not iterable
   (min first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{max(2)}'
   hg: parse error: 2 is not iterable
   (max first argument should be an iterable)
-  [255]
+  [10]
 
   $ hg log -R latesttag -l1 -T '{min(date)}'
   hg: parse error: date is not iterable
   (min first argument should be an iterable)
-  [255]
+  [10]
   $ hg log -R latesttag -l1 -T '{max(date)}'
   hg: parse error: date is not iterable
   (max first argument should be an iterable)
-  [255]
+  [10]
 
 Test min/max of empty sequence:
 
   $ hg debugtemplate '{min("")}'
   hg: parse error: empty string
   (min first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{max("")}'
   hg: parse error: empty string
   (max first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{min(dict())}'
   hg: parse error: empty sequence
   (min first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{max(dict())}'
   hg: parse error: empty sequence
   (max first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{min(dict() % "")}'
   hg: parse error: empty sequence
   (min first argument should be an iterable)
-  [255]
+  [10]
   $ hg debugtemplate '{max(dict() % "")}'
   hg: parse error: empty sequence
   (max first argument should be an iterable)
-  [255]
+  [10]
 
 Test min/max of if() result
 
@@ -606,7 +606,7 @@
   $ hg debugtemplate '{count(0)}'
   hg: parse error: not countable
   (incompatible use of template filter 'count')
-  [255]
+  [10]
   $ hg debugtemplate '{if(true, "", count(0))}'
   $ hg debugtemplate '{if(false, count(0), "")}'
   $ hg debugtemplate '{ifcontains("a", "aa", "", count(0))}'
@@ -654,10 +654,10 @@
 
   $ hg log -R a -r2 -T '{search(r"(?P<0>.)", desc) % "{0}"}\n'
   hg: parse error: search got an invalid pattern: (?P<0>.)
-  [255]
+  [10]
   $ hg log -R a -r2 -T '{search(r"(?P<repo>.)", desc) % "{repo}"}\n'
   hg: parse error: invalid group 'repo' in search pattern: (?P<repo>.)
-  [255]
+  [10]
 
 Test the sub function of templating for expansion:
 
@@ -666,10 +666,10 @@
 
   $ hg log -R latesttag -r 10 -T '{sub("[", "x", rev)}\n'
   hg: parse error: sub got an invalid pattern: [
-  [255]
+  [10]
   $ hg log -R latesttag -r 10 -T '{sub("[0-9]", r"\1", rev)}\n'
   hg: parse error: sub got an invalid replacement: \1
-  [255]
+  [10]
 
 Test the strip function with chars specified:
 
@@ -721,7 +721,7 @@
 
   $ hg log -R latesttag -T '{date(rev)}\n'
   hg: parse error: date expects a date information
-  [255]
+  [10]
 
 Set up repository containing template fragments in commit metadata:
 
@@ -789,17 +789,17 @@
 
   $ hg log -r 0 -T '{dict(rev, rev=rev)}\n'
   hg: parse error: duplicated dict key 'rev' inferred
-  [255]
+  [10]
   $ hg log -r 0 -T '{dict(node, node|short)}\n'
   hg: parse error: duplicated dict key 'node' inferred
-  [255]
+  [10]
   $ hg log -r 0 -T '{dict(1 + 2)}'
   hg: parse error: dict key cannot be inferred
-  [255]
+  [10]
 
   $ hg log -r 0 -T '{dict(x=rev, x=node)}'
   hg: parse error: dict got multiple values for keyword argument 'x'
-  [255]
+  [10]
 
 Test get function:
 
@@ -810,7 +810,7 @@
   $ hg log -r 0 --template '{get(files, "should_fail")}\n'
   hg: parse error: not a dictionary
   (get() expects a dict as first argument)
-  [255]
+  [10]
 
 Test json filter applied to wrapped object:
 
@@ -836,7 +836,7 @@
   1970-01-01 00:00 +0000
   $ TZ=JST-09 hg log -r0 -T '{localdate(date, "blahUTC")|isodate}\n'
   hg: parse error: localdate expects a timezone
-  [255]
+  [10]
   $ TZ=JST-09 hg log -r0 -T '{localdate(date, "+0200")|isodate}\n'
   1970-01-01 02:00 +0200
   $ TZ=JST-09 hg log -r0 -T '{localdate(date, "0")|isodate}\n'
@@ -845,10 +845,10 @@
   1970-01-01 00:00 +0000
   $ hg log -r0 -T '{localdate(date, "invalid")|isodate}\n'
   hg: parse error: localdate expects a timezone
-  [255]
+  [10]
   $ hg log -r0 -T '{localdate(date, date)|isodate}\n'
   hg: parse error: localdate expects a timezone
-  [255]
+  [10]
 
 Test shortest(node) function:
 
@@ -871,7 +871,7 @@
   f7769ec2ab
   $ hg log -r 0 -T '{shortest(node, "not an int")}\n'
   hg: parse error: shortest() expects an integer minlength
-  [255]
+  [10]
 
   $ hg log -r 'wdir()' -T '{node|shortest}\n'
   ffff
@@ -1070,16 +1070,16 @@
   0          test
   $ hg log -r 0 -T '{pad(rev, "not an int")}\n'
   hg: parse error: pad() expects an integer width
-  [255]
+  [10]
 
 Test invalid fillchar passed to pad function
 
   $ hg log -r 0 -T '{pad(rev, 10, "")}\n'
   hg: parse error: pad() expects a single fill character
-  [255]
+  [10]
   $ hg log -r 0 -T '{pad(rev, 10, "--")}\n'
   hg: parse error: pad() expects a single fill character
-  [255]
+  [10]
 
 Test boolean argument passed to pad function
 
@@ -1294,60 +1294,60 @@
   $ hg log -T '{revset("%d", rev)}\n' -r'null'
   -1
   $ hg log -T '{revset("%d", rev + 1)}\n' -r'tip'
-  abort: unknown revision '3'!
+  abort: unknown revision '3'
   [255]
   $ hg log -T '{revset("%d", rev - 1)}\n' -r'null'
-  abort: unknown revision '-2'!
+  abort: unknown revision '-2'
   [255]
 
 Invalid arguments passed to revset()
 
   $ hg log -T '{revset("%whatever", 0)}\n'
   hg: parse error: unexpected revspec format character w
-  [255]
+  [10]
   $ hg log -T '{revset("%lwhatever", files)}\n'
   hg: parse error: unexpected revspec format character w
-  [255]
+  [10]
   $ hg log -T '{revset("%s %s", 0)}\n'
   hg: parse error: missing argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("", 0)}\n'
   hg: parse error: too many revspec arguments specified
-  [255]
+  [10]
   $ hg log -T '{revset("%s", 0, 1)}\n'
   hg: parse error: too many revspec arguments specified
-  [255]
+  [10]
   $ hg log -T '{revset("%", 0)}\n'
   hg: parse error: incomplete revspec format character
-  [255]
+  [10]
   $ hg log -T '{revset("%l", 0)}\n'
   hg: parse error: incomplete revspec format character
-  [255]
+  [10]
   $ hg log -T '{revset("%d", 'foo')}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("%ld", files)}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("%ls", 0)}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("%b", 'foo')}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("%lb", files)}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
   $ hg log -T '{revset("%r", 0)}\n'
   hg: parse error: invalid argument for revspec
-  [255]
+  [10]
 
 Invalid operation on revset()
 
   $ hg log -T '{get(revset(":"), "foo")}\n'
   hg: parse error: not a dictionary
   (get() expects a dict as first argument)
-  [255]
+  [10]
 
 Test files function
 
@@ -1432,7 +1432,7 @@
 Test startswith
   $ hg log -Gv -R a --template "{startswith(desc)}"
   hg: parse error: startswith expects two arguments
-  [255]
+  [10]
 
   $ hg log -Gv -R a --template "{startswith('line', desc)}"
   @
@@ -1514,11 +1514,11 @@
 
   $ hg log -Gv -R a --template "{word('0')}"
   hg: parse error: word expects two or three arguments, got 1
-  [255]
+  [10]
 
   $ hg log -Gv -R a --template "{word('0', desc, 'o', 'h', 'b', 'o', 'y')}"
   hg: parse error: word expects two or three arguments, got 7
-  [255]
+  [10]
 
 Test word for integer literal
 
@@ -1529,7 +1529,7 @@
 
   $ hg log -Gv -R a --template "{word('a', desc)}"
   hg: parse error: word expects an integer index
-  [255]
+  [10]
 
 Test word for out of range
 
--- a/tests/test-template-graph.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-template-graph.t	Tue Jan 19 21:48:43 2021 +0530
@@ -381,13 +381,13 @@
 
   $ hg log -T '{subsetparents()}\n'
   hg: parse error: subsetparents expects two arguments
-  [255]
+  [10]
   $ hg log -T '{subsetparents("a")}\n'
   hg: parse error: subsetparents expects two arguments
-  [255]
+  [10]
   $ hg log -T '{subsetparents(rev, extras)}\n'
   hg: parse error: subsetparents expects a queried revset
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-template-keywords.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-template-keywords.t	Tue Jan 19 21:48:43 2021 +0530
@@ -114,6 +114,30 @@
   |
   o  0 (public): -1 (public) -1 (public)
   
+Test {onelinesummary}
+
+  $ hg log -G -T '{onelinesummary}'
+  @  8:95c24699272e tip "third"
+  |
+  o  7:29114dbae42b "second"
+  
+  o    6:d41e714fe50d "merge"
+  |\
+  | o  5:13207e5a10d9 "new head"
+  | |
+  o |  4:bbe44766e73d "new branch"
+  |/
+  o  3:10e46f2dcbf4 "no user, no domain"
+  |
+  o  2:97054abb4ab8 "no person"
+  |
+  o  1:b608e9d1a3f0 "other 1"
+  |
+  o  0:1e4e1b8f71e0 "line 1"
+  
+  $ hg log -T '{onelinesummary}' -r 0 \
+  > --config command-templates.oneline-summary='{rev} - {desc}'
+  0 - line 1 (no-eol)
 
 Keys work:
 
--- a/tests/test-template-map.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-template-map.t	Tue Jan 19 21:48:43 2021 +0530
@@ -48,8 +48,9 @@
 
 Make sure user/global hgrc does not affect tests
 
+  $ echo '[command-templates]' > .hg/hgrc
+  $ echo 'log =' >> .hg/hgrc
   $ echo '[ui]' > .hg/hgrc
-  $ echo 'logtemplate =' >> .hg/hgrc
   $ echo 'style =' >> .hg/hgrc
 
 Add some simple styles to settings
@@ -138,7 +139,8 @@
   $ mkdir somedir
   $ echo "__base__ = somedir" > map-base-dir
   $ hg log -l1 -T./map-base-dir
-  abort: Is a directory: '$TESTTMP/a/somedir'
+  abort: Is a directory: '$TESTTMP/a/somedir' (no-windows !)
+  abort: $TESTTMP/a/somedir: Access is denied (windows !)
   [255]
 
 Test including a built-in template map
@@ -1241,7 +1243,7 @@
   hg: parse error at 6: not a prefix: )
   (json(-)
          ^ here)
-  [255]
+  [10]
 
 For backward compatibility, the following examples are not parsed as
 function-style references:
@@ -1278,7 +1280,8 @@
 Error if style is a directory:
 
   $ hg log --style somedir
-  abort: Is a directory: 'somedir'
+  abort: Is a directory: 'somedir' (no-windows !)
+  abort: somedir: Access is denied (windows !)
   [255]
 
 Error if style is a directory whose name is a built-in style:
@@ -1300,7 +1303,7 @@
   $ echo 'changeset =' > t
   $ hg log --style t
   hg: parse error at t:1: missing value
-  [255]
+  [10]
 
 Error if include fails:
 
@@ -1969,6 +1972,7 @@
   > EOF
 
   $ HOME=`pwd`/home; export HOME
+  $ USERPROFILE=`pwd`/home; export USERPROFILE
 
   $ cat > latesttag/.hg/hgrc <<EOF
   > [ui]
--- a/tests/test-transplant.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-transplant.t	Tue Jan 19 21:48:43 2021 +0530
@@ -19,10 +19,10 @@
   [255]
   $ hg transplant --continue --all
   abort: cannot specify both --continue and --all
-  [255]
+  [10]
   $ hg transplant --stop --all
   abort: cannot specify both --stop and --all
-  [255]
+  [10]
   $ hg transplant --all tip
   abort: --all requires a branch revision
   [255]
@@ -53,12 +53,12 @@
   $ hg transplant 1
   abort: outstanding uncommitted merge
   (use 'hg commit' or 'hg merge --abort')
-  [255]
+  [20]
   $ hg up -qC tip
   $ echo b0 > b1
   $ hg transplant 1
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg up -qC tip
   $ echo b2 > b2
   $ hg ci -Amb2 -d '1 0'
@@ -513,10 +513,10 @@
   $ hg continue
   abort: no transplant to continue (continueflag !)
   abort: no operation in progress (no-continueflag !)
-  [255]
+  [20]
   $ hg transplant --stop
   abort: no interrupted transplant found
-  [255]
+  [20]
   $ hg transplant 1
   applying 46ae92138f3c
   patching file foo
@@ -571,7 +571,7 @@
   $ hg transplant 1:3
   abort: transplant in progress
   (use 'hg transplant --continue' or 'hg transplant --stop')
-  [255]
+  [20]
   $ hg status -v
   A bar
   ? added.rej
--- a/tests/test-treediscovery-legacy.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-treediscovery-legacy.t	Tue Jan 19 21:48:43 2021 +0530
@@ -122,7 +122,7 @@
   $ cd empty1
   $ hg incoming $remote --rev name1
   comparing with http://localhost:$HGPORT/
-  abort: cannot look up remote changes; remote repository does not support the 'changegroupsubset' capability!
+  abort: cannot look up remote changes; remote repository does not support the 'changegroupsubset' capability
   [255]
   $ hg incoming $remote
   comparing with http://localhost:$HGPORT/
@@ -285,9 +285,9 @@
   $ hg push $remote
   pushing to http://localhost:$HGPORT/
   searching for changes
-  abort: push creates new remote branches: both, name1!
+  abort: push creates new remote branches: both, name1
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
   $ hg push $remote --new-branch
   pushing to http://localhost:$HGPORT/
   searching for changes
@@ -311,7 +311,7 @@
   $ hg init partial; cd partial
   $ hg incoming $remote --rev name2
   comparing with http://localhost:$HGPORT/
-  abort: cannot look up remote changes; remote repository does not support the 'changegroupsubset' capability!
+  abort: cannot look up remote changes; remote repository does not support the 'changegroupsubset' capability
   [255]
   $ hg pull $remote --rev name2
   pulling from http://localhost:$HGPORT/
--- a/tests/test-treediscovery.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-treediscovery.t	Tue Jan 19 21:48:43 2021 +0530
@@ -3,8 +3,8 @@
   $ CAP="getbundle bundle2"
   $ . "$TESTDIR/notcapable"
   $ cat >> $HGRCPATH <<EOF
-  > [ui]
-  > logtemplate="{rev} {node|short}: {desc} {branches}\n"
+  > [command-templates]
+  > log="{rev} {node|short}: {desc} {branches}\n"
   > EOF
 
 Setup HTTP server control:
@@ -269,9 +269,9 @@
   $ hg push $remote
   pushing to http://localhost:$HGPORT/
   searching for changes
-  abort: push creates new remote branches: both, name1!
+  abort: push creates new remote branches: both, name1
   (use 'hg push --new-branch' to create new remote branches)
-  [255]
+  [20]
   $ hg push $remote --new-branch
   pushing to http://localhost:$HGPORT/
   searching for changes
--- a/tests/test-treemanifest.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-treemanifest.t	Tue Jan 19 21:48:43 2021 +0530
@@ -792,7 +792,7 @@
   $ hg clone --config experimental.changegroup3=True --stream -U \
   >   http://localhost:$HGPORT1 stream-clone-basicstore
   streaming all changes
-  21 files to transfer, * of data (glob)
+  29 files to transfer, * of data (glob)
   transferred * in * seconds (*) (glob)
   $ hg -R stream-clone-basicstore verify
   checking changesets
@@ -806,7 +806,7 @@
   $ hg clone --config experimental.changegroup3=True --stream -U \
   >   http://localhost:$HGPORT2 stream-clone-encodedstore
   streaming all changes
-  21 files to transfer, * of data (glob)
+  29 files to transfer, * of data (glob)
   transferred * in * seconds (*) (glob)
   $ hg -R stream-clone-encodedstore verify
   checking changesets
--- a/tests/test-trusted.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-trusted.py	Tue Jan 19 21:48:43 2021 +0530
@@ -254,26 +254,15 @@
 f.write(b'foo')
 f.close()
 
-# This is a hack to remove b'' prefixes from ParseError.__bytes__ on
-# Python 3.
-def normalizeparseerror(e):
-    if pycompat.ispy3:
-        args = [a.decode('utf-8') for a in e.args]
-    else:
-        args = e.args
-
-    return error.ParseError(*args)
-
-
 try:
     testui(user=b'abc', group=b'def', silent=True)
-except error.ParseError as inst:
-    bprint(normalizeparseerror(inst))
+except error.ConfigError as inst:
+    bprint(inst.format())
 
 try:
     testui(debug=True, silent=True)
-except error.ParseError as inst:
-    bprint(normalizeparseerror(inst))
+except error.ConfigError as inst:
+    bprint(inst.format())
 
 print()
 bprint(b'# access typed information')
--- a/tests/test-trusted.py.out	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-trusted.py.out	Tue Jan 19 21:48:43 2021 +0530
@@ -174,9 +174,10 @@
 # parse error
 # different user, different group
 not trusting file .hg/hgrc from untrusted user abc, group def
-ignored: ('foo', '.hg/hgrc:1')
+ignored .hg/hgrc:1: foo
 # same user, same group
-ParseError('foo', '.hg/hgrc:1')
+config error at .hg/hgrc:1: foo
+
 
 # access typed information
 # different user, different group
--- a/tests/test-unamend.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-unamend.t	Tue Jan 19 21:48:43 2021 +0530
@@ -284,7 +284,7 @@
 
   $ hg --config experimental.evolution=createmarkers unamend
   abort: cannot unamend changeset with children
-  [255]
+  [10]
 
   $ hg unamend
   3 new orphan changesets
@@ -298,7 +298,7 @@
   $ hg unamend
   abort: cannot unamend public changesets
   (see 'hg help phases' for details)
-  [255]
+  [10]
 
 Testing whether unamend retains copies or not
 
@@ -405,8 +405,10 @@
   $ hg co -q 0
   $ hg mv a b
   $ hg ci -qm 'move to a b'
+  warning: commit already existed in the repository!
   $ hg mv b c
   $ hg amend
+  warning: commit already existed in the repository!
   $ hg mv c d
   $ hg unamend
   $ hg st --copies --change .
--- a/tests/test-uncommit.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-uncommit.t	Tue Jan 19 21:48:43 2021 +0530
@@ -53,7 +53,7 @@
   $ hg uncommit
   abort: cannot uncommit null changeset
   (no changeset checked out)
-  [255]
+  [10]
 
 Create some commits
 
@@ -168,11 +168,11 @@
   $ hg uncommit
   abort: uncommitted changes
   (requires --allow-dirty-working-copy to uncommit)
-  [255]
+  [20]
   $ hg uncommit files
   abort: uncommitted changes
   (requires --allow-dirty-working-copy to uncommit)
-  [255]
+  [20]
   $ cat files
   abcde
   foo
@@ -184,7 +184,7 @@
   $ hg uncommit
   abort: uncommitted changes
   (requires --allow-dirty-working-copy to uncommit)
-  [255]
+  [20]
   $ hg uncommit --config experimental.uncommitondirtywdir=True
   $ hg commit -m "files abcde + foo"
 
@@ -407,11 +407,11 @@
   $ hg uncommit
   abort: outstanding uncommitted merge
   (requires --allow-dirty-working-copy to uncommit)
-  [255]
+  [20]
 
   $ hg uncommit --config experimental.uncommitondirtywdir=True
   abort: cannot uncommit while merging
-  [255]
+  [20]
 
   $ hg status
   M a
@@ -507,7 +507,7 @@
   $ hg uncommit b
   abort: uncommitted changes
   (requires --allow-dirty-working-copy to uncommit)
-  [255]
+  [20]
   $ hg uncommit --allow-dirty-working-copy b
   $ hg log
   changeset:   3:30fa958635b2
@@ -556,10 +556,10 @@
   $ hg rollback -q --config ui.rollback=True
   $ hg uncommit -U --user 'user'
   abort: cannot specify both --user and --currentuser
-  [255]
+  [10]
   $ hg uncommit -D --date today
   abort: cannot specify both --date and --currentdate
-  [255]
+  [10]
 
 `uncommit <dir>` and `cd <dir> && uncommit .` behave the same...
 
--- a/tests/test-up-local-change.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-up-local-change.t	Tue Jan 19 21:48:43 2021 +0530
@@ -70,8 +70,8 @@
    ancestor: 1e71731e6fbb, local: 1e71731e6fbb+, remote: c19d34741b0a
    b: other deleted -> r
   removing b
+  starting 4 threads for background file closing (?)
    preserving a for resolve of a
-  starting 4 threads for background file closing (?)
    a: versions differ -> m (premerge)
   picked tool 'true' for a (binary False symlink False changedelete False)
   merging a
--- a/tests/test-update-atomic.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-update-atomic.t	Tue Jan 19 21:48:43 2021 +0530
@@ -54,7 +54,7 @@
   $ hg update -r 1 --config extensions.showwrites=.hg/showwrites.py 2>&1 | grep "a1'.*wb"
   ('vfs open', ('a1', 'wb'), [('atomictemp', False), ('backgroundclose', True)])
 
-  $ python $TESTTMP/show_mode.py *
+  $ $PYTHON $TESTTMP/show_mode.py *
   a1:0644
   a2:0755
   b1:0644
@@ -76,7 +76,7 @@
   $ hg update -r 1
   6 files updated, 0 files merged, 1 files removed, 0 files unresolved
 
-  $ python $TESTTMP/show_mode.py *
+  $ $PYTHON $TESTTMP/show_mode.py *
   a1:0644
   a2:0755
   b1:0644
@@ -88,7 +88,7 @@
 
   $ chmod a-w ro
 
-  $ python $TESTTMP/show_mode.py ro
+  $ $PYTHON $TESTTMP/show_mode.py ro
   ro:0444
 
 Now the file is present, try to update and check the permissions of the file
@@ -96,7 +96,7 @@
   $ hg up -r 2
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
-  $ python $TESTTMP/show_mode.py ro
+  $ $PYTHON $TESTTMP/show_mode.py ro
   ro:0644
 
 # The file which was read-only is now writable in the default behavior
@@ -121,7 +121,7 @@
   C ro
 
 Check the file permission after update
-  $ python $TESTTMP/show_mode.py *
+  $ $PYTHON $TESTTMP/show_mode.py *
   a1:0644
   a2:0755
   b1:0644
@@ -133,7 +133,7 @@
 
   $ chmod a-w ro
 
-  $ python $TESTTMP/show_mode.py ro
+  $ $PYTHON $TESTTMP/show_mode.py ro
   ro:0444
 
 Now the file is present, try to update and check the permissions of the file
@@ -141,7 +141,7 @@
   $ hg update -r 2 --traceback
   1 files updated, 0 files merged, 0 files removed, 0 files unresolved
 
-  $ python $TESTTMP/show_mode.py ro
+  $ $PYTHON $TESTTMP/show_mode.py ro
   ro:0644
 
 # The behavior is the same as without atomic update
--- a/tests/test-update-branches.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-update-branches.t	Tue Jan 19 21:48:43 2021 +0530
@@ -57,8 +57,8 @@
 
   $ cd ..
   $ hg update b1
-  abort: no repository found in '$TESTTMP' (.hg not found)!
-  [255]
+  abort: no repository found in '$TESTTMP' (.hg not found)
+  [10]
   $ cd b1
 
 Test helper functions:
@@ -348,7 +348,7 @@
   $ rm a.orig
   $ hg ci -m empty
   abort: unresolved merge conflicts (see 'hg help resolve')
-  [255]
+  [20]
   $ hg resolve -m a
   (no more unresolved files)
   $ hg resolve -l
@@ -701,6 +701,6 @@
   > EOF
   $ hg co 2
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg co --no-check 2
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
--- a/tests/test-update-dest.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-update-dest.t	Tue Jan 19 21:48:43 2021 +0530
@@ -11,7 +11,7 @@
   $ hg up
   abort: you must specify a destination
   (for example: hg update ".::")
-  [255]
+  [10]
   $ hg up .
   0 files updated, 0 files merged, 0 files removed, 0 files unresolved
   $ HGPLAIN=1 hg up
@@ -32,7 +32,7 @@
   $ hg pull --update
   abort: update destination required by configuration
   (use hg pull followed by hg update DEST)
-  [255]
+  [10]
 
   $ cd ..
 
--- a/tests/test-update-issue1456.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-update-issue1456.t	Tue Jan 19 21:48:43 2021 +0530
@@ -19,7 +19,7 @@
   $ echo dirty > foo
   $ hg up -c
   abort: uncommitted changes
-  [255]
+  [20]
   $ hg up -q
   $ cat foo
   dirty
--- a/tests/test-upgrade-repo.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-upgrade-repo.t	Tue Jan 19 21:48:43 2021 +0530
@@ -56,6 +56,7 @@
   fncache:            yes
   dotencode:          yes
   generaldelta:       yes
+  share-safe:          no
   sparserevlog:       yes
   sidedata:            no
   persistent-nodemap:  no
@@ -68,6 +69,7 @@
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -80,6 +82,7 @@
   fncache:            yes     no     yes
   dotencode:          yes     no     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -92,6 +95,7 @@
   [formatvariant.name.mismatchconfig|fncache:           ][formatvariant.repo.mismatchconfig| yes][formatvariant.config.special|     no][formatvariant.default|     yes]
   [formatvariant.name.mismatchconfig|dotencode:         ][formatvariant.repo.mismatchconfig| yes][formatvariant.config.special|     no][formatvariant.default|     yes]
   [formatvariant.name.uptodate|generaldelta:      ][formatvariant.repo.uptodate| yes][formatvariant.config.default|    yes][formatvariant.default|     yes]
+  [formatvariant.name.uptodate|share-safe:        ][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
   [formatvariant.name.uptodate|sparserevlog:      ][formatvariant.repo.uptodate| yes][formatvariant.config.default|    yes][formatvariant.default|     yes]
   [formatvariant.name.uptodate|sidedata:          ][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
   [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
@@ -120,6 +124,12 @@
     "repo": true
    },
    {
+    "config": false,
+    "default": false,
+    "name": "share-safe",
+    "repo": false
+   },
+   {
     "config": true,
     "default": true,
     "name": "sparserevlog",
@@ -163,12 +173,17 @@
    }
   ]
   $ hg debugupgraderepo
-  (no feature deficiencies found in existing repository)
+  (no format upgrades found in existing repository)
   performing an upgrade with "--run" will make the following changes:
   
   requirements
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   additional optimizations are available by specifying "--optimize <name>":
   
   re-delta-parent
@@ -188,11 +203,16 @@
   requirements
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
 
 --optimize can be used to add optimizations
 
-  $ hg debugupgrade --optimize redeltaparent
-  (no feature deficiencies found in existing repository)
+  $ hg debugupgrade --optimize 're-delta-parent'
+  (no format upgrades found in existing repository)
   performing an upgrade with "--run" will make the following changes:
   
   requirements
@@ -203,6 +223,11 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   additional optimizations are available by specifying "--optimize <name>":
   
   re-delta-multibase
@@ -218,7 +243,7 @@
 modern form of the option
 
   $ hg debugupgrade --optimize re-delta-parent
-  (no feature deficiencies found in existing repository)
+  (no format upgrades found in existing repository)
   performing an upgrade with "--run" will make the following changes:
   
   requirements
@@ -229,6 +254,11 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   additional optimizations are available by specifying "--optimize <name>":
   
   re-delta-multibase
@@ -246,6 +276,11 @@
   
   optimisations: re-delta-parent
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
 
 unknown optimization:
 
@@ -266,6 +301,7 @@
   fncache:             no
   dotencode:           no
   generaldelta:        no
+  share-safe:          no
   sparserevlog:        no
   sidedata:            no
   persistent-nodemap:  no
@@ -278,6 +314,7 @@
   fncache:             no    yes     yes
   dotencode:           no    yes     yes
   generaldelta:        no    yes     yes
+  share-safe:          no     no      no
   sparserevlog:        no    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -290,6 +327,7 @@
   fncache:             no    yes     yes
   dotencode:           no    yes     yes
   generaldelta:        no     no     yes
+  share-safe:          no     no      no
   sparserevlog:        no     no     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -302,6 +340,7 @@
   [formatvariant.name.mismatchconfig|fncache:           ][formatvariant.repo.mismatchconfig|  no][formatvariant.config.default|    yes][formatvariant.default|     yes]
   [formatvariant.name.mismatchconfig|dotencode:         ][formatvariant.repo.mismatchconfig|  no][formatvariant.config.default|    yes][formatvariant.default|     yes]
   [formatvariant.name.mismatchdefault|generaldelta:      ][formatvariant.repo.mismatchdefault|  no][formatvariant.config.special|     no][formatvariant.default|     yes]
+  [formatvariant.name.uptodate|share-safe:        ][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
   [formatvariant.name.mismatchdefault|sparserevlog:      ][formatvariant.repo.mismatchdefault|  no][formatvariant.config.special|     no][formatvariant.default|     yes]
   [formatvariant.name.uptodate|sidedata:          ][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
   [formatvariant.name.uptodate|persistent-nodemap:][formatvariant.repo.uptodate|  no][formatvariant.config.default|     no][formatvariant.default|      no]
@@ -343,6 +382,11 @@
   sparserevlog
      Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   additional optimizations are available by specifying "--optimize <name>":
   
   re-delta-parent
@@ -362,6 +406,11 @@
      preserved: revlogv1, store
      added: dotencode, fncache, generaldelta, sparserevlog
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
 
   $ hg --config format.dotencode=false debugupgraderepo
   repository lacks features recommended by current config options:
@@ -396,6 +445,11 @@
   sparserevlog
      Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   additional optimizations are available by specifying "--optimize <name>":
   
   re-delta-parent
@@ -417,25 +471,7 @@
 
   $ hg init modern
   $ hg -R modern debugupgraderepo --run
-  upgrade will perform the following actions:
-  
-  requirements
-     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
-  
-  beginning upgrade...
-  repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/modern/.hg/upgrade.* (glob)
-  (it is safe to interrupt this process any time before data migration completes)
-  data fully migrated to temporary repository
-  marking source repository as being upgraded; clients will be unable to read from repository
-  starting in-place swap of repository data
-  replaced files will be backed up at $TESTTMP/modern/.hg/upgradebackup.* (glob)
-  replacing store...
-  store replacement complete; repository was inconsistent for *s (glob)
-  finalizing requirements file and making repository readable again
-  removing temporary repository $TESTTMP/modern/.hg/upgrade.* (glob)
-  copy of old repository backed up at $TESTTMP/modern/.hg/upgradebackup.* (glob)
-  the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+  nothing to do
 
 Upgrading a repository to generaldelta works
 
@@ -473,9 +509,14 @@
   generaldelta
      repository storage will be able to create optimal deltas; new repository data will be smaller and read times should decrease; interacting with other repositories using this storage model should require less network and CPU resources, making "hg push" and "hg pull" faster
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -487,7 +528,7 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: -17 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
@@ -569,9 +610,14 @@
   sparserevlog
      Revlog supports delta chain with more unused data between payload. These gaps will be skipped at read time. This allows for better delta chains, making a better compression and faster exchange with server.
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -583,14 +629,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for * (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ ls -1 .hg/ | grep upgradebackup
   [1]
@@ -608,9 +654,12 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -627,14 +676,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
 
 Check that the repo still works fine
@@ -684,9 +733,13 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -703,14 +756,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ hg verify
   checking changesets
@@ -732,9 +785,12 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - changelog
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -751,14 +807,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ hg verify
   checking changesets
@@ -780,9 +836,12 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -799,14 +858,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ hg verify
   checking changesets
@@ -833,9 +892,14 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -852,14 +916,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ hg verify
   checking changesets
@@ -887,9 +951,14 @@
   re-delta-parent
      deltas within internal storage will choose a new base revision if needed
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 519 KB in store; 1.05 MB tracked data
@@ -906,14 +975,14 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: 0 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   replacing store...
   store replacement complete; repository was inconsistent for *s (glob)
   finalizing requirements file and making repository readable again
-  removing old repository content$TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
+  removing old repository content $TESTTMP/upgradegd/.hg/upgradebackup.* (glob)
   removing temporary repository $TESTTMP/upgradegd/.hg/upgrade.* (glob)
   $ hg verify
   checking changesets
@@ -933,14 +1002,26 @@
   $ touch .hg/store/.XX_special_filename
 
   $ hg debugupgraderepo --run
+  nothing to do
+  $ hg debugupgraderepo --run --optimize 're-delta-fulladd'
   upgrade will perform the following actions:
   
   requirements
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
   
+  optimisations: re-delta-fulladd
+  
+  re-delta-fulladd
+     each revision will be added as new content to the internal storage; this will likely drastically slow down execution time, but some extensions might need it
+  
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/store-filenames/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/store-filenames/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
   migrating 301 bytes in store; 107 bytes tracked data
@@ -953,43 +1034,7 @@
   finished migrating 3 total revisions; total change in store size: 0 bytes
   copying .XX_special_filename
   copying phaseroots
-  data fully migrated to temporary repository
-  marking source repository as being upgraded; clients will be unable to read from repository
-  starting in-place swap of repository data
-  replaced files will be backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
-  replacing store...
-  store replacement complete; repository was inconsistent for *s (glob)
-  finalizing requirements file and making repository readable again
-  removing temporary repository $TESTTMP/store-filenames/.hg/upgrade.* (glob)
-  copy of old repository backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
-  the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
-  $ hg debugupgraderepo --run --optimize redeltafulladd
-  upgrade will perform the following actions:
-  
-  requirements
-     preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
-  
-  optimisations: re-delta-fulladd
-  
-  re-delta-fulladd
-     each revision will be added as new content to the internal storage; this will likely drastically slow down execution time, but some extensions might need it
-  
-  beginning upgrade...
-  repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/store-filenames/.hg/upgrade.* (glob)
-  (it is safe to interrupt this process any time before data migration completes)
-  migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
-  migrating 301 bytes in store; 107 bytes tracked data
-  migrating 1 filelogs containing 1 revisions (64 bytes in store; 0 bytes tracked data)
-  finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
-  migrating 1 manifests containing 1 revisions (110 bytes in store; 45 bytes tracked data)
-  finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
-  migrating changelog containing 1 revisions (127 bytes in store; 62 bytes tracked data)
-  finished migrating 1 changelog revisions; change in size: 0 bytes
-  finished migrating 3 total revisions; total change in store size: 0 bytes
-  copying .XX_special_filename
-  copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/store-filenames/.hg/upgradebackup.* (glob)
@@ -1030,35 +1075,7 @@
   store
 
   $ hg debugupgraderepo --run
-  upgrade will perform the following actions:
-  
-  requirements
-     preserved: dotencode, fncache, generaldelta, largefiles, revlogv1, sparserevlog, store
-  
-  beginning upgrade...
-  repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
-  (it is safe to interrupt this process any time before data migration completes)
-  migrating 3 total revisions (1 in filelogs, 1 in manifests, 1 in changelog)
-  migrating 355 bytes in store; 160 bytes tracked data
-  migrating 1 filelogs containing 1 revisions (106 bytes in store; 41 bytes tracked data)
-  finished migrating 1 filelog revisions across 1 filelogs; change in size: 0 bytes
-  migrating 1 manifests containing 1 revisions (116 bytes in store; 51 bytes tracked data)
-  finished migrating 1 manifest revisions across 1 manifests; change in size: 0 bytes
-  migrating changelog containing 1 revisions (133 bytes in store; 68 bytes tracked data)
-  finished migrating 1 changelog revisions; change in size: 0 bytes
-  finished migrating 3 total revisions; total change in store size: 0 bytes
-  copying phaseroots
-  data fully migrated to temporary repository
-  marking source repository as being upgraded; clients will be unable to read from repository
-  starting in-place swap of repository data
-  replaced files will be backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
-  replacing store...
-  store replacement complete; repository was inconsistent for *s (glob)
-  finalizing requirements file and making repository readable again
-  removing temporary repository $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
-  copy of old repository backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
-  the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+  nothing to do
   $ cat .hg/requires
   dotencode
   fncache
@@ -1083,36 +1100,7 @@
   .hg/store/lfs/objects/d0/beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
 
   $ hg debugupgraderepo --run
-  upgrade will perform the following actions:
-  
-  requirements
-     preserved: dotencode, fncache, generaldelta, largefiles, lfs, revlogv1, sparserevlog, store
-  
-  beginning upgrade...
-  repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
-  (it is safe to interrupt this process any time before data migration completes)
-  migrating 6 total revisions (2 in filelogs, 2 in manifests, 2 in changelog)
-  migrating 801 bytes in store; 467 bytes tracked data
-  migrating 2 filelogs containing 2 revisions (296 bytes in store; 182 bytes tracked data)
-  finished migrating 2 filelog revisions across 2 filelogs; change in size: 0 bytes
-  migrating 1 manifests containing 2 revisions (241 bytes in store; 151 bytes tracked data)
-  finished migrating 2 manifest revisions across 1 manifests; change in size: 0 bytes
-  migrating changelog containing 2 revisions (264 bytes in store; 134 bytes tracked data)
-  finished migrating 2 changelog revisions; change in size: 0 bytes
-  finished migrating 6 total revisions; total change in store size: 0 bytes
-  copying phaseroots
-  copying lfs blob d0beab232adff5ba365880366ad30b1edb85c4c5372442b5d2fe27adc96d653f
-  data fully migrated to temporary repository
-  marking source repository as being upgraded; clients will be unable to read from repository
-  starting in-place swap of repository data
-  replaced files will be backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
-  replacing store...
-  store replacement complete; repository was inconsistent for *s (glob)
-  finalizing requirements file and making repository readable again
-  removing temporary repository $TESTTMP/largefilesrepo/.hg/upgrade.* (glob)
-  copy of old repository backed up at $TESTTMP/largefilesrepo/.hg/upgradebackup.* (glob)
-  the old repository will not be deleted; remove it to free up disk space once the upgraded repository is verified
+  nothing to do
 
   $ grep lfs .hg/requires
   lfs
@@ -1177,7 +1165,7 @@
         1       1        2        0      p1         21        191         98   0.51309        98         0    0.00000         98         98   1.00000        1
         2       1        2        0   other         30        200        107   0.53500       128        21    0.19626        128        128   0.83594        1
 
-  $ hg debugupgraderepo --run --optimize redeltaall
+  $ hg debugupgraderepo --run --optimize 're-delta-all'
   upgrade will perform the following actions:
   
   requirements
@@ -1188,9 +1176,14 @@
   re-delta-all
      deltas within internal storage will be fully recomputed; this will likely drastically slow down execution time
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   beginning upgrade...
   repository locked and read-only
-  creating temporary repository to stage migrated data: $TESTTMP/localconfig/.hg/upgrade.* (glob)
+  creating temporary repository to stage upgraded data: $TESTTMP/localconfig/.hg/upgrade.* (glob)
   (it is safe to interrupt this process any time before data migration completes)
   migrating 9 total revisions (3 in filelogs, 3 in manifests, 3 in changelog)
   migrating 1019 bytes in store; 882 bytes tracked data
@@ -1202,7 +1195,7 @@
   finished migrating 3 changelog revisions; change in size: 0 bytes
   finished migrating 9 total revisions; total change in store size: -9 bytes
   copying phaseroots
-  data fully migrated to temporary repository
+  data fully upgraded in a temporary repository
   marking source repository as being upgraded; clients will be unable to read from repository
   starting in-place swap of repository data
   replaced files will be backed up at $TESTTMP/localconfig/.hg/upgradebackup.* (glob)
@@ -1247,6 +1240,11 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, store
      added: sparserevlog
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ cat .hg/requires
   dotencode
   fncache
@@ -1263,6 +1261,11 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, store
      removed: sparserevlog
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ cat .hg/requires
   dotencode
   fncache
@@ -1284,11 +1287,17 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, store
      added: revlog-compression-zstd, sparserevlog
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -1314,11 +1323,17 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
      removed: revlog-compression-zstd
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -1347,11 +1362,17 @@
      preserved: dotencode, fncache, generaldelta, revlogv1, sparserevlog, store
      added: revlog-compression-zstd
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -1384,11 +1405,17 @@
      added: exp-sidedata-flag (zstd !)
      added: exp-sidedata-flag, sparserevlog (no-zstd !)
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes     no      no
   persistent-nodemap:  no     no      no
@@ -1421,11 +1448,17 @@
      preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !)
      removed: exp-sidedata-flag
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:            no     no      no
   persistent-nodemap:  no     no      no
@@ -1458,11 +1491,17 @@
      preserved: dotencode, fncache, generaldelta, revlog-compression-zstd, revlogv1, sparserevlog, store (zstd !)
      added: exp-sidedata-flag
   
+  processed revlogs:
+    - all-filelogs
+    - changelog
+    - manifest
+  
   $ hg debugformat -v
   format-variant     repo config default
   fncache:            yes    yes     yes
   dotencode:          yes    yes     yes
   generaldelta:       yes    yes     yes
+  share-safe:          no     no      no
   sparserevlog:       yes    yes     yes
   sidedata:           yes    yes      no
   persistent-nodemap:  no     no      no
@@ -1481,3 +1520,8 @@
   sparserevlog
   store
   $ hg debugsidedata -c 0
+
+Demonstrate that an upgrade with nothing to do still runs all the way through
+
+  $ hg debugupgraderepo --run
+  nothing to do
--- a/tests/test-username-newline.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-username-newline.t	Tue Jan 19 21:48:43 2021 +0530
@@ -22,6 +22,6 @@
   adding a
   transaction abort!
   rollback completed
-  abort: username 'foo\nbar3' contains a newline!
-  [255]
+  abort: username 'foo\nbar3' contains a newline
+  [50]
 
--- a/tests/test-verify-repo-operations.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-verify-repo-operations.py	Tue Jan 19 21:48:43 2021 +0530
@@ -11,7 +11,9 @@
 import sys
 
 # Only run if slow tests are allowed
-if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'slow']):
+if subprocess.call(
+    [os.environ['PYTHON'], '%s/hghave' % os.environ['TESTDIR'], 'slow']
+):
     sys.exit(80)
 
 # These tests require Hypothesis and pytz to be installed.
@@ -112,7 +114,13 @@
     min_size=1,
 ).map(lambda s: s.encode('utf-8'))
 
-extensions = st.sampled_from(('shelve', 'mq', 'blackbox',))
+extensions = st.sampled_from(
+    (
+        'shelve',
+        'mq',
+        'blackbox',
+    )
+)
 
 
 @contextmanager
@@ -233,7 +241,12 @@
                         t = r.read()
                         assert ext not in t, t
                     output = subprocess.check_output(
-                        [runtests, tf, "--local",], stderr=subprocess.STDOUT
+                        [
+                            runtests,
+                            tf,
+                            "--local",
+                        ],
+                        stderr=subprocess.STDOUT,
                     )
                     assert "Ran 1 test" in output, output
             except subprocess.CalledProcessError as e:
@@ -307,7 +320,8 @@
         return content
 
     @rule(
-        target=branches, name=safetext,
+        target=branches,
+        name=safetext,
     )
     def genbranch(self, name):
         return name
@@ -340,10 +354,13 @@
             o.write(content)
         self.log.append(
             (
-                "$ python -c 'import binascii; "
+                "$ $PYTHON -c 'import binascii; "
                 "print(binascii.unhexlify(\"%s\"))' > %s"
             )
-            % (binascii.hexlify(content), pipes.quote(path),)
+            % (
+                binascii.hexlify(content),
+                pipes.quote(path),
+            )
         )
 
     @rule(path=paths)
@@ -354,7 +371,9 @@
     @rule(path=paths)
     def forgetpath(self, path):
         if os.path.exists(path):
-            with acceptableerrors("file is already untracked",):
+            with acceptableerrors(
+                "file is already untracked",
+            ):
                 self.hg("forget", "--", path)
 
     @rule(s=st.none() | st.integers(0, 100))
@@ -420,7 +439,9 @@
         return self.configperrepo.setdefault(self.currentrepo, {})
 
     @rule(
-        target=repos, source=repos, name=reponames,
+        target=repos,
+        source=repos,
+        name=reponames,
     )
     def clone(self, source, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -430,7 +451,8 @@
         return name
 
     @rule(
-        target=repos, name=reponames,
+        target=repos,
+        name=reponames,
     )
     def fresh(self, name):
         if not os.path.exists(os.path.join("..", name)):
@@ -453,14 +475,16 @@
     @rule()
     def pull(self, repo=repos):
         with acceptableerrors(
-            "repository default not found", "repository is unrelated",
+            "repository default not found",
+            "repository is unrelated",
         ):
             self.hg("pull")
 
     @rule(newbranch=st.booleans())
     def push(self, newbranch):
         with acceptableerrors(
-            "default repository not configured", "no changes found",
+            "default repository not configured",
+            "no changes found",
         ):
             if newbranch:
                 self.hg("push", "--new-branch")
@@ -507,7 +531,8 @@
     @rule(branch=branches, clean=st.booleans())
     def update(self, branch, clean):
         with acceptableerrors(
-            'unknown revision', 'parse error',
+            'unknown revision',
+            'parse error',
         ):
             if clean:
                 self.hg("update", "-C", "--", branch)
@@ -570,7 +595,12 @@
 
 
 settings.register_profile(
-    'default', settings(timeout=300, stateful_step_count=50, max_examples=10,)
+    'default',
+    settings(
+        timeout=300,
+        stateful_step_count=50,
+        max_examples=10,
+    ),
 )
 
 settings.register_profile(
--- a/tests/test-wireproto-caching.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-caching.t	Tue Jan 19 21:48:43 2021 +0530
@@ -451,7 +451,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending manifestdata command
-  abort: unknown node: \xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa! (esc)
+  abort: unknown node: \xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa (esc)
   [255]
 
   $ cat .hg/blackbox.log
--- a/tests/test-wireproto-clientreactor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-clientreactor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -486,13 +486,19 @@
 
         response1 = b''.join(
             cborutil.streamencode(
-                {b'status': b'ok', b'extra': b'response1' * 10,}
+                {
+                    b'status': b'ok',
+                    b'extra': b'response1' * 10,
+                }
             )
         )
 
         response2 = b''.join(
             cborutil.streamencode(
-                {b'status': b'error', b'extra': b'response2' * 10,}
+                {
+                    b'status': b'error',
+                    b'extra': b'response2' * 10,
+                }
             )
         )
 
@@ -678,13 +684,19 @@
 
         response1 = b''.join(
             cborutil.streamencode(
-                {b'status': b'ok', b'extra': b'response1' * 10,}
+                {
+                    b'status': b'ok',
+                    b'extra': b'response1' * 10,
+                }
             )
         )
 
         response2 = b''.join(
             cborutil.streamencode(
-                {b'status': b'error', b'extra': b'response2' * 10,}
+                {
+                    b'status': b'error',
+                    b'extra': b'response2' * 10,
+                }
             )
         )
 
--- a/tests/test-wireproto-command-changesetdata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-command-changesetdata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -44,7 +44,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: missing required arguments: revisions!
+  abort: missing required arguments: revisions
   [255]
 
 Missing nodes for changesetexplicit results in error
@@ -55,7 +55,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: nodes key not present in changesetexplicit revision specifier!
+  abort: nodes key not present in changesetexplicit revision specifier
   [255]
 
 changesetexplicitdepth requires nodes and depth keys
@@ -66,7 +66,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: nodes key not present in changesetexplicitdepth revision specifier!
+  abort: nodes key not present in changesetexplicitdepth revision specifier
   [255]
 
   $ sendhttpv2peer << EOF
@@ -75,7 +75,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: depth key not present in changesetexplicitdepth revision specifier!
+  abort: depth key not present in changesetexplicitdepth revision specifier
   [255]
 
   $ sendhttpv2peer << EOF
@@ -84,7 +84,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: nodes key not present in changesetexplicitdepth revision specifier!
+  abort: nodes key not present in changesetexplicitdepth revision specifier
   [255]
 
 changesetdagrange requires roots and heads keys
@@ -95,7 +95,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: roots key not present in changesetdagrange revision specifier!
+  abort: roots key not present in changesetdagrange revision specifier
   [255]
 
   $ sendhttpv2peer << EOF
@@ -104,7 +104,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: heads key not present in changesetdagrange revision specifier!
+  abort: heads key not present in changesetdagrange revision specifier
   [255]
 
   $ sendhttpv2peer << EOF
@@ -113,7 +113,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: roots key not present in changesetdagrange revision specifier!
+  abort: roots key not present in changesetdagrange revision specifier
   [255]
 
 Empty changesetdagrange heads results in an error
@@ -124,7 +124,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending changesetdata command
-  abort: heads key in changesetdagrange cannot be empty!
+  abort: heads key in changesetdagrange cannot be empty
   [255]
 
 Sending just dagrange heads sends all revisions
--- a/tests/test-wireproto-command-filedata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-command-filedata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -78,7 +78,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filedata command
-  abort: missing required arguments: nodes, path!
+  abort: missing required arguments: nodes, path
   [255]
 
   $ sendhttpv2peer << EOF
@@ -87,7 +87,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filedata command
-  abort: missing required arguments: path!
+  abort: missing required arguments: path
   [255]
 
 Unknown node is an error
@@ -99,7 +99,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filedata command
-  abort: unknown file node: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa!
+  abort: unknown file node: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
   [255]
 
 Fetching a single revision returns just metadata by default
--- a/tests/test-wireproto-command-filesdata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-command-filesdata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -71,7 +71,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filesdata command
-  abort: missing required arguments: revisions!
+  abort: missing required arguments: revisions
   [255]
 
 Bad pattern to pathfilter is rejected
@@ -87,7 +87,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filesdata command
-  abort: include pattern must begin with `path:` or `rootfilesin:`; got bad:foo!
+  abort: include pattern must begin with `path:` or `rootfilesin:`; got bad:foo
   [255]
 
   $ sendhttpv2peer << EOF
@@ -101,7 +101,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending filesdata command
-  abort: exclude pattern must begin with `path:` or `rootfilesin:`; got glob:foo!
+  abort: exclude pattern must begin with `path:` or `rootfilesin:`; got glob:foo
   [255]
 
 Fetching a single changeset without parents fetches all files
--- a/tests/test-wireproto-command-manifestdata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-command-manifestdata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -49,7 +49,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending manifestdata command
-  abort: missing required arguments: nodes, tree!
+  abort: missing required arguments: nodes, tree
   [255]
 
   $ sendhttpv2peer << EOF
@@ -58,7 +58,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending manifestdata command
-  abort: missing required arguments: tree!
+  abort: missing required arguments: tree
   [255]
 
 Unknown node is an error
@@ -70,7 +70,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending manifestdata command
-  abort: unknown node: \xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa! (esc)
+  abort: unknown node: \xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa\xaa (esc)
   [255]
 
 Fetching a single revision returns just metadata by default
--- a/tests/test-wireproto-command-rawstorefiledata.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-command-rawstorefiledata.t	Tue Jan 19 21:48:43 2021 +0530
@@ -31,7 +31,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending rawstorefiledata command
-  abort: missing required arguments: files!
+  abort: missing required arguments: files
   [255]
 
 Unknown files value results in error
@@ -42,7 +42,7 @@
   > EOF
   creating http peer for wire protocol version 2
   sending rawstorefiledata command
-  abort: unknown file type: unknown!
+  abort: unknown file type: unknown
   [255]
 
 Requesting just changelog works
--- a/tests/test-wireproto-exchangev2-shallow.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-exchangev2-shallow.t	Tue Jan 19 21:48:43 2021 +0530
@@ -98,13 +98,14 @@
   received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   received frame(size=1170; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   add changeset 3390ef850073
   add changeset b709380892b1
   add changeset 47fe012ab237
   add changeset 97765fc3cd62
   add changeset dc666cf9ecf3
   add changeset 93a8bd067ed2
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   checking for updated bookmarks
   sending 1 commands
   sending command manifestdata: {
@@ -167,6 +168,7 @@
    dir1/f: remote created -> g
   getting dir1/f
   6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ sqlite3 -line client-shallow-1/.hg/store/db.sqlite << EOF
@@ -332,6 +334,7 @@
    dir0/d: remote created -> g
   getting dir0/d
   2 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ sqlite3 -line client-shallow-narrow-1/.hg/store/db.sqlite << EOF
@@ -402,11 +405,12 @@
   received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   received frame(size=783; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   add changeset 3390ef850073
   add changeset b709380892b1
   add changeset 47fe012ab237
   add changeset 97765fc3cd62
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   checking for updated bookmarks
   sending 1 commands
   sending command manifestdata: {
@@ -467,6 +471,7 @@
    dir1/f: remote created -> g
   getting dir1/f
   6 files updated, 0 files merged, 0 files removed, 0 files unresolved
+  updating the branch cache
   (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob)
 
 Incremental pull of shallow clone fetches new changesets
@@ -515,9 +520,10 @@
   received frame(size=9; request=1; stream=2; streamflags=stream-begin; type=stream-settings; flags=eos)
   received frame(size=11; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
   received frame(size=400; request=1; stream=2; streamflags=encoded; type=command-response; flags=continuation)
-  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos)
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   add changeset dc666cf9ecf3
   add changeset 93a8bd067ed2
+  received frame(size=0; request=1; stream=2; streamflags=; type=command-response; flags=eos) (?)
   checking for updated bookmarks
   sending 1 commands
   sending command manifestdata: {
--- a/tests/test-wireproto-exchangev2.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-exchangev2.t	Tue Jan 19 21:48:43 2021 +0530
@@ -111,6 +111,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:caa2a465451d (3 drafts)
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -239,6 +240,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:4432d83626e8
+  updating the branch cache
   (sent 6 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -557,6 +559,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:caa2a465451d (1 drafts)
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -758,6 +761,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:97765fc3cd62
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -872,6 +876,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:97765fc3cd62
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -988,6 +993,7 @@
   }
   updating the branch cache
   new changesets 3390ef850073:97765fc3cd62
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -1087,6 +1093,7 @@
       }
     ]
   }
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
@@ -1183,6 +1190,7 @@
       }
     ]
   }
+  updating the branch cache
   (sent 5 HTTP requests and * bytes; received * bytes in responses) (glob)
 
   $ cat clone-output | grep "received frame"
--- a/tests/test-wireproto-framing.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-framing.py	Tue Jan 19 21:48:43 2021 +0530
@@ -192,7 +192,12 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo', [], []), (b'bar', [], []),]
+                stream,
+                1,
+                [
+                    (b'foo', [], []),
+                    (b'bar', [], []),
+                ],
             )
         )
 
@@ -210,7 +215,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s', [b'val1'], []),]
+                stream,
+                1,
+                [
+                    (b'foo %s', [b'val1'], []),
+                ],
             )
         )
 
@@ -228,7 +237,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s %s', [b'val', b'value'], []),]
+                stream,
+                1,
+                [
+                    (b'foo %s %s', [b'val', b'value'], []),
+                ],
             )
         )
 
@@ -246,7 +259,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo', [], [b'label']),]
+                stream,
+                1,
+                [
+                    (b'foo', [], [b'label']),
+                ],
             )
         )
 
@@ -264,7 +281,11 @@
         stream = framing.stream(1)
         val = list(
             framing.createtextoutputframe(
-                stream, 1, [(b'foo %s', [b'arg'], [b'label']),]
+                stream,
+                1,
+                [
+                    (b'foo %s', [b'arg'], [b'label']),
+                ],
             )
         )
 
--- a/tests/test-wireproto-serverreactor.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wireproto-serverreactor.py	Tue Jan 19 21:48:43 2021 +0530
@@ -202,7 +202,10 @@
             {
                 b'requestid': 1,
                 b'command': b'command',
-                b'args': {b'key': b'val', b'foo': b'bar',},
+                b'args': {
+                    b'key': b'val',
+                    b'foo': b'bar',
+                },
                 b'redirect': None,
                 b'data': b'value1value2',
             },
@@ -356,7 +359,10 @@
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
         self.assertEqual(
-            results[1][1], {b'message': b'request with ID 1 already received',}
+            results[1][1],
+            {
+                b'message': b'request with ID 1 already received',
+            },
         )
 
     def testinterleavedcommands(self):
@@ -364,7 +370,10 @@
             cborutil.streamencode(
                 {
                     b'name': b'command1',
-                    b'args': {b'foo': b'bar', b'key1': b'val',},
+                    b'args': {
+                        b'foo': b'bar',
+                        b'key1': b'val',
+                    },
                 }
             )
         )
@@ -372,7 +381,10 @@
             cborutil.streamencode(
                 {
                     b'name': b'command3',
-                    b'args': {b'biz': b'baz', b'key': b'val',},
+                    b'args': {
+                        b'biz': b'baz',
+                        b'key': b'val',
+                    },
                 }
             )
         )
@@ -461,7 +473,10 @@
         self.assertaction(results[0], b'wantframe')
         self.assertaction(results[1], b'error')
         self.assertEqual(
-            results[1][1], {b'message': b'command data frame without flags',}
+            results[1][1],
+            {
+                b'message': b'command data frame without flags',
+            },
         )
 
     def testframefornonreceivingrequest(self):
@@ -651,7 +666,10 @@
 
         self.assertaction(results[0], b'error')
         self.assertEqual(
-            results[0][1], {b'message': b'request with ID 1 is already active',}
+            results[0][1],
+            {
+                b'message': b'request with ID 1 is already active',
+            },
         )
 
     def testduplicaterequestonactivecommandnosend(self):
@@ -668,7 +686,10 @@
         results = list(sendcommandframes(reactor, instream, 1, b'command1', {}))
         self.assertaction(results[0], b'error')
         self.assertEqual(
-            results[0][1], {b'message': b'request with ID 1 is already active',}
+            results[0][1],
+            {
+                b'message': b'request with ID 1 is already active',
+            },
         )
 
     def testduplicaterequestaftersend(self):
@@ -763,7 +784,9 @@
 
         data = b''.join(
             cborutil.streamencode(
-                {b'contentencodings': [b'value1', b'value2'],}
+                {
+                    b'contentencodings': [b'value1', b'value2'],
+                }
             )
         )
 
@@ -811,7 +834,10 @@
         )
         self.assertaction(result, b'error')
         self.assertEqual(
-            result[1], {b'message': b'expected command request frame; got 8',}
+            result[1],
+            {
+                b'message': b'expected command request frame; got 8',
+            },
         )
 
 
--- a/tests/test-worker.t	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-worker.t	Tue Jan 19 21:48:43 2021 +0530
@@ -85,11 +85,13 @@
   [255]
 
   $ hg --config "extensions.t=$abspath" --config 'worker.numcpus=8' \
-  > test 100000.0 abort --traceback 2>&1 | egrep '(SystemExit|Abort)'
+  > test 100000.0 abort --traceback 2>&1 | egrep '(WorkerError|Abort)'
       raise error.Abort(b'known exception')
   mercurial.error.Abort: known exception (py3 !)
   Abort: known exception (no-py3 !)
-  SystemExit: 255
+      raise error.WorkerError(status)
+  WorkerError: 255 (no-py3 !)
+  mercurial.error.WorkerError: 255 (py3 !)
 
 Traceback must be printed for unknown exceptions
 
--- a/tests/test-wsgirequest.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/test-wsgirequest.py	Tue Jan 19 21:48:43 2021 +0530
@@ -49,7 +49,12 @@
         self.assertEqual(len(r.headers), 0)
 
     def testcustomport(self):
-        r = parse(DEFAULT_ENV, extra={'SERVER_PORT': '8000',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SERVER_PORT': '8000',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver:8000')
         self.assertEqual(r.baseurl, b'http://testserver:8000')
@@ -58,7 +63,10 @@
 
         r = parse(
             DEFAULT_ENV,
-            extra={'SERVER_PORT': '4000', 'wsgi.url_scheme': 'https',},
+            extra={
+                'SERVER_PORT': '4000',
+                'wsgi.url_scheme': 'https',
+            },
         )
 
         self.assertEqual(r.url, b'https://testserver:4000')
@@ -67,7 +75,12 @@
         self.assertEqual(r.advertisedbaseurl, b'https://testserver:4000')
 
     def testhttphost(self):
-        r = parse(DEFAULT_ENV, extra={'HTTP_HOST': 'altserver',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'HTTP_HOST': 'altserver',
+            },
+        )
 
         self.assertEqual(r.url, b'http://altserver')
         self.assertEqual(r.baseurl, b'http://altserver')
@@ -75,7 +88,12 @@
         self.assertEqual(r.advertisedbaseurl, b'http://testserver')
 
     def testscriptname(self):
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -85,7 +103,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '/script',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '/script',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/script')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -95,7 +118,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertIsNone(r.dispatchpath)
 
-        r = parse(DEFAULT_ENV, extra={'SCRIPT_NAME': '/multiple words',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'SCRIPT_NAME': '/multiple words',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/multiple%20words')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -106,7 +134,12 @@
         self.assertIsNone(r.dispatchpath)
 
     def testpathinfo(self):
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -116,7 +149,12 @@
         self.assertEqual(r.dispatchparts, [])
         self.assertEqual(r.dispatchpath, b'')
 
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '/pathinfo',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '/pathinfo',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/pathinfo')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -126,7 +164,12 @@
         self.assertEqual(r.dispatchparts, [b'pathinfo'])
         self.assertEqual(r.dispatchpath, b'pathinfo')
 
-        r = parse(DEFAULT_ENV, extra={'PATH_INFO': '/one/two/',})
+        r = parse(
+            DEFAULT_ENV,
+            extra={
+                'PATH_INFO': '/one/two/',
+            },
+        )
 
         self.assertEqual(r.url, b'http://testserver/one/two/')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -139,7 +182,10 @@
     def testscriptandpathinfo(self):
         r = parse(
             DEFAULT_ENV,
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/pathinfo',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/pathinfo',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/script/pathinfo')
@@ -208,7 +254,9 @@
             parse(
                 DEFAULT_ENV,
                 reponame=b'repo',
-                extra={'PATH_INFO': '/pathinfo',},
+                extra={
+                    'PATH_INFO': '/pathinfo',
+                },
             )
 
         with self.assertRaisesRegex(
@@ -217,13 +265,17 @@
             parse(
                 DEFAULT_ENV,
                 reponame=b'repo',
-                extra={'PATH_INFO': '/repoextra/path',},
+                extra={
+                    'PATH_INFO': '/repoextra/path',
+                },
             )
 
         r = parse(
             DEFAULT_ENV,
             reponame=b'repo',
-            extra={'PATH_INFO': '/repo/path1/path2',},
+            extra={
+                'PATH_INFO': '/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/repo/path1/path2')
@@ -238,7 +290,9 @@
         r = parse(
             DEFAULT_ENV,
             reponame=b'prefix/repo',
-            extra={'PATH_INFO': '/prefix/repo/path1/path2',},
+            extra={
+                'PATH_INFO': '/prefix/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/prefix/repo/path1/path2')
@@ -307,7 +361,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -347,7 +403,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altpath',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -365,7 +423,9 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altpath/',
-            extra={'PATH_INFO': '/path1/path2',},
+            extra={
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -383,7 +443,10 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -399,7 +462,10 @@
         r = parse(
             DEFAULT_ENV,
             altbaseurl=b'http://altserver/altroot',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/path1/path2',
+            },
         )
         self.assertEqual(r.url, b'http://testserver/script/path1/path2')
         self.assertEqual(r.baseurl, b'http://testserver')
@@ -418,7 +484,10 @@
             DEFAULT_ENV,
             reponame=b'repo',
             altbaseurl=b'http://altserver/altroot',
-            extra={'SCRIPT_NAME': '/script', 'PATH_INFO': '/repo/path1/path2',},
+            extra={
+                'SCRIPT_NAME': '/script',
+                'PATH_INFO': '/repo/path1/path2',
+            },
         )
 
         self.assertEqual(r.url, b'http://testserver/script/repo/path1/path2')
--- a/tests/testlib/exchange-obsmarker-util.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/testlib/exchange-obsmarker-util.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -14,9 +14,9 @@
 push_ssl = false
 allow_push = *
 
-[ui]
+[command-templates]
 # simpler log output
-logtemplate ="{node|short} ({phase}): {desc}\n"
+log ="{node|short} ({phase}): {desc}\n"
 
 [phases]
 # non publishing server
--- a/tests/testlib/ext-sidedata.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/testlib/ext-sidedata.py	Tue Jan 19 21:48:43 2021 +0530
@@ -10,14 +10,18 @@
 import hashlib
 import struct
 
+from mercurial.node import (
+    nullid,
+    nullrev,
+)
 from mercurial import (
     extensions,
-    node,
     requirements,
     revlog,
-    upgrade,
 )
 
+from mercurial.upgrade_utils import engine as upgrade_engine
+
 from mercurial.revlogutils import sidedata
 
 
@@ -40,7 +44,7 @@
     text = orig(self, nodeorrev, *args, **kwargs)
     if getattr(self, 'sidedatanocheck', False):
         return text
-    if nodeorrev != node.nullrev and nodeorrev != node.nullid:
+    if nodeorrev != nullrev and nodeorrev != nullid:
         sd = self.sidedata(nodeorrev)
         if len(text) != struct.unpack('>I', sd[sidedata.SD_TEST1])[0]:
             raise RuntimeError('text size mismatch')
@@ -79,5 +83,5 @@
     extensions.wrapfunction(revlog.revlog, 'addrevision', wrapaddrevision)
     extensions.wrapfunction(revlog.revlog, 'revision', wraprevision)
     extensions.wrapfunction(
-        upgrade, 'getsidedatacompanion', wrapgetsidedatacompanion
+        upgrade_engine, 'getsidedatacompanion', wrapgetsidedatacompanion
     )
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/testlib/merge-combination-util.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -0,0 +1,73 @@
+# genmerges is the workhorse of the test-merge-combination-*.t tests.
+
+# Given:
+# - a `range` function describing the possible values for file a
+# - an `isgood` function to filter out uninteresting combinations
+# - a `createfile` function to actually write the values for file a on the
+#   filesystem
+#
+# it prints a series of lines that look like: abcd C: output of -T {files}
+# describing the contents of file a at the base, p2, p1, and merge revisions
+# respectively. "C" indicates that hg merge had conflicts.
+
+genmerges () {
+
+  (LC_ALL=C type range | grep -q 'function') || (echo >&2 "missing function: range")
+  (LC_ALL=C type isgood | grep -q 'function') || (echo >&2 "missing function: isgood")
+  (LC_ALL=C type createfile | grep -q 'function') || (echo >&2 "missing function: createfile")
+
+  for base in `range` -; do
+    for r1 in `range $base` -; do
+      for r2 in `range $base $r1` -; do
+        for m in `range $base $r1 $r2` -; do
+          line="$base$r1$r2$m"
+          isgood $line || continue
+          hg init repo
+          cd repo
+          make_commit () {
+            v=$1; msg=$2; file=$3;
+            if [ $v != - ]; then
+              createfile $v
+            else
+              if [ -f a ]
+              then rm a
+              else touch $file
+              fi
+            fi
+            hg commit -q -Am $msg || exit 123
+          }
+          echo foo > foo
+          make_commit $base base b
+          make_commit $r1 r1 c
+          hg up -r 0 -q
+          make_commit $r2 r2 d
+          hg merge -q -r 1 > ../output 2>&1
+          if [ $? -ne 0 ]; then rm -f *.orig; hg resolve -m --all -q; fi
+          if [ -s ../output ]; then conflicts=" C"; else conflicts="  "; fi
+          make_commit $m m e
+          if [ $m = $r1 ] && [ $m = $r2 ]
+          then expected=
+          elif [ $m = $r1 ]
+          then if [ $base = $r2 ]
+               then expected=
+               else expected=a
+               fi
+          elif [ $m = $r2 ]
+          then if [ $base = $r1 ]
+               then expected=
+               else expected=a
+               fi
+          else expected=a
+          fi
+          got=`hg log -r 3 --template '{files}\n' | tr -d 'e '`
+          if [ "$got" = "$expected" ]
+          then echo "$line$conflicts: agree on \"$got\""
+          else echo "$line$conflicts: hg said \"$got\", expected \"$expected\""
+          fi
+          cd ../
+          rm -rf repo
+        done
+      done
+    done
+  done
+}
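
For orientation, here is a minimal sketch of how a test could consume this
helper: genmerges expects the caller to define range, isgood and createfile
before invoking it. The function bodies below are illustrative assumptions
only; the real test-merge-combination-*.t files define more elaborate
versions.

  # hypothetical caller; the bodies of range/isgood/createfile are assumptions
  . "$TESTDIR/testlib/merge-combination-util.sh"

  range () {
    # candidate contents for file a; values already chosen for earlier
    # revisions arrive as arguments but are ignored in this sketch
    echo 1
    echo 2
  }

  isgood () {
    # keep every combination in this sketch
    true
  }

  createfile () {
    # write the chosen value into file a
    echo "$1" > a
  }

  genmerges

In an actual .t file these commands would of course be written inside the
usual "  $ " / "  > " test-session syntax.
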
--- a/tests/testlib/push-checkheads-util.sh	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/testlib/push-checkheads-util.sh	Tue Jan 19 21:48:43 2021 +0530
@@ -1,9 +1,9 @@
 # setup config and various utility to test new heads checks on push
 
 cat >> $HGRCPATH <<EOF
-[ui]
+[command-templates]
 # simpler log output
-logtemplate ="{node|short} ({phase}): {desc}\n"
+log ="{node|short} ({phase}): {desc}\n"
 
 [phases]
 # non publishing server
--- a/tests/tinyproxy.py	Thu Dec 24 15:58:08 2020 +0900
+++ b/tests/tinyproxy.py	Tue Jan 19 21:48:43 2021 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 from __future__ import absolute_import, print_function