--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/.gitattributes Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,2 @@
+# So GitLab doesn't think we're using tons of Perl
+*.t -linguist-detectable
--- a/.hgignore Thu May 04 14:17:28 2023 +0200
+++ b/.hgignore Tue May 09 11:35:50 2023 +0200
@@ -19,6 +19,7 @@
*.zip
\#*\#
.\#*
+result/
tests/artifacts/cache/big-file-churn.hg
tests/.coverage*
tests/.testtimes*
--- a/contrib/heptapod-ci.yml Thu May 04 14:17:28 2023 +0200
+++ b/contrib/heptapod-ci.yml Tue May 09 11:35:50 2023 +0200
@@ -26,6 +26,7 @@
- clang-format --version
script:
- echo "python used, $PYTHON"
+ - $PYTHON --version
- echo "$RUNTEST_ARGS"
- HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS
--- a/contrib/import-checker.py Thu May 04 14:17:28 2023 +0200
+++ b/contrib/import-checker.py Tue May 09 11:35:50 2023 +0200
@@ -44,6 +44,7 @@
# third-party imports should be directly imported
'mercurial.thirdparty',
'mercurial.thirdparty.attr',
+ 'mercurial.thirdparty.jaraco.collections',
'mercurial.thirdparty.zope',
'mercurial.thirdparty.zope.interface',
'typing',
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/nix/flake.lock Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,94 @@
+{
+ "nodes": {
+ "flake-utils": {
+ "inputs": {
+ "systems": "systems"
+ },
+ "locked": {
+ "lastModified": 1681202837,
+ "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
+ "type": "github"
+ },
+ "original": {
+ "owner": "numtide",
+ "repo": "flake-utils",
+ "type": "github"
+ }
+ },
+ "flaky-utils": {
+ "locked": {
+ "lastModified": 1668472805,
+ "narHash": "sha256-hjRe8QFh2JMo9u6AaxQNGWfDWZxk3psULmPglqsjsLk=",
+ "ref": "refs/heads/master",
+ "rev": "c3f9daf4ec56276e040bc33e29c7eeaf1b99d91c",
+ "revCount": 33,
+ "type": "git",
+ "url": "https://cgit.pacien.net/libs/flaky-utils"
+ },
+ "original": {
+ "type": "git",
+ "url": "https://cgit.pacien.net/libs/flaky-utils"
+ }
+ },
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1681482634,
+ "narHash": "sha256-cT/nr3L8khEYZSGp8qqwxFH+/q4/547MfyOdSj6MhBk=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "fda0d99c2cbbb5c89d8855d258cb0821bd9113ad",
+ "type": "github"
+ },
+ "original": {
+ "owner": "NixOS",
+ "ref": "nixos-22.11",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "nixpkgs-black": {
+ "locked": {
+ "lastModified": 1605911135,
+ "narHash": "sha256-PoVe4Nu7UzYtOboytSzRY9sks6euoEzeCckBN+AIoTU=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "c7cb72b0cae397d311236d6773338efb4bd4f2d1",
+ "type": "github"
+ },
+ "original": {
+ "owner": "NixOS",
+ "ref": "c7cb72b0",
+ "repo": "nixpkgs",
+ "type": "github"
+ }
+ },
+ "root": {
+ "inputs": {
+ "flake-utils": "flake-utils",
+ "flaky-utils": "flaky-utils",
+ "nixpkgs": "nixpkgs",
+ "nixpkgs-black": "nixpkgs-black"
+ }
+ },
+ "systems": {
+ "locked": {
+ "lastModified": 1681028828,
+ "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+ "owner": "nix-systems",
+ "repo": "default",
+ "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-systems",
+ "repo": "default",
+ "type": "github"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/contrib/nix/flake.nix Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,177 @@
+# flake.nix - Nix-defined package and devel env for the Mercurial project.
+#
+# Copyright 2021-2023 Pacien TRAN-GIRARD <pacien.trangirard@pacien.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# Usage summary, from the root of this repository:
+#
+# Enter a shell with development tools:
+# nix develop 'hg+file:.?dir=contrib/nix'
+#
+# Running mercurial:
+# nix run 'hg+file:.?dir=contrib/nix' -- version
+#
+# Running the test suite in a sandbox:
+# nix build 'hg+file:.?dir=contrib/nix#mercurial-tests' -L
+
+{
+ inputs = {
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11";
+ nixpkgs-black.url = "github:NixOS/nixpkgs/c7cb72b0"; # black 20.8b1
+ # rust-overlay.url = "github:oxalica/rust-overlay";
+ flake-utils.url = "github:numtide/flake-utils";
+ flaky-utils.url = "git+https://cgit.pacien.net/libs/flaky-utils";
+ };
+
+ outputs = {
+ self
+ , nixpkgs
+ , nixpkgs-black
+ # , rust-overlay
+ , flake-utils
+ , flaky-utils
+ }:
+ flake-utils.lib.eachDefaultSystem (system:
+ let
+ # overlays = [ (import rust-overlay) ];
+ pkgs = import nixpkgs { inherit system; };
+
+ # We're in the contrib/nix sub-directory.
+ src = ../..;
+
+ # For snapshots, to satisfy extension minimum version requirements.
+ dummyVersion = "99.99";
+
+ pin = {
+ # The test suite has issues with the latest/current versions of Python.
+ # Use an older recommended version instead, matching the CI.
+ python = pkgs.python39;
+
+ # The project uses a pinned version (rust/clippy.toml) for compiling,
+ # but uses formatter features from nightly.
+ # TODO: make cargo use the formatter from nightly automatically
+ # (not supported by rustup/cargo yet? workaround?)
+ # rustPlatform = pkgs.rust-bin.stable."1.61.0".default;
+ # rustPlatformFormatter = pkgs.rust-bin.nightly."2023-04-20".default;
+
+ # The CI uses an old version of the Black code formatter,
+ # itself depending on old Python libraries.
+ # The formatting rules have changed in more recent versions.
+ inherit (import nixpkgs-black { inherit system; }) black;
+ };
+
+ in rec {
+ apps.mercurial = apps.mercurial-rust;
+ apps.default = apps.mercurial;
+ apps.mercurial-c = flake-utils.lib.mkApp {
+ drv = packages.mercurial-c;
+ };
+ apps.mercurial-rust = flake-utils.lib.mkApp {
+ drv = packages.mercurial-rust;
+ };
+
+ packages.mercurial = packages.mercurial-rust;
+ packages.default = packages.mercurial;
+
+ packages.mercurial-c = pin.python.pkgs.buildPythonApplication {
+ format = "other";
+ pname = "mercurial";
+ version = "SNAPSHOT";
+ passthru.exePath = "/bin/hg";
+ inherit src;
+
+ postPatch = ''
+ echo 'version = b"${toString dummyVersion}"' \
+ > mercurial/__version__.py
+
+ patchShebangs .
+
+ for f in **/*.{py,c,t}; do
+ # not only used in shebangs
+ substituteAllInPlace "$f" '/bin/sh' '${pkgs.stdenv.shell}'
+ done
+ '';
+
+ buildInputs = with pin.python.pkgs; [
+ docutils
+ ];
+
+ nativeBuildInputs = with pkgs; [
+ gettext
+ installShellFiles
+ ];
+
+ makeFlags = [
+ "PREFIX=$(out)"
+ ];
+
+ buildPhase = ''
+ make local
+ '';
+
+      # Test suite is huge; run on-demand in a separate package instead.
+ doCheck = false;
+ };
+
+ packages.mercurial-rust = packages.mercurial-c.overrideAttrs (super: {
+ cargoRoot = "rust";
+ cargoDeps = pkgs.rustPlatform.importCargoLock {
+ lockFile = "${src}/rust/Cargo.lock";
+ };
+
+ nativeBuildInputs = (super.nativeBuildInputs or []) ++ (
+ with pkgs.rustPlatform; [
+ cargoSetupHook
+ rust.cargo
+ rust.rustc
+ ]
+ );
+
+ makeFlags = (super.makeFlags or []) ++ [
+ "PURE=--rust"
+ ];
+ });
+
+ packages.mercurial-tests = pkgs.stdenv.mkDerivation {
+ pname = "mercurial-tests";
+ version = "SNAPSHOT";
+ inherit src;
+
+ buildInputs = with pkgs; [
+ pin.python
+ pin.black
+ unzip
+ which
+ sqlite
+ ];
+
+ postPatch = (packages.mercurial.postPatch or "") + ''
+ # * paths emitted by our wrapped hg look like ..hg-wrapped-wrapped
+ # * 'hg' is a wrapper; don't run using python directly
+ for f in **/*.t; do
+ substituteInPlace 2>/dev/null "$f" \
+ --replace '*/hg:' '*/*hg*:' \
+ --replace '"$PYTHON" "$BINDIR"/hg' '"$BINDIR"/hg'
+ done
+ '';
+
+ buildPhase = ''
+ export HGTEST_REAL_HG="${packages.mercurial}/bin/hg"
+ export HGMODULEPOLICY="rust+c"
+ export HGTESTFLAGS="--blacklist blacklists/nix"
+ make check 2>&1 | tee "$out"
+ '';
+ };
+
+ devShell = flaky-utils.lib.mkDevShell {
+ inherit pkgs;
+
+ tools = [
+ pin.python
+ pin.black
+ ];
+ };
+ });
+}
--- a/contrib/perf.py Thu May 04 14:17:28 2023 +0200
+++ b/contrib/perf.py Tue May 09 11:35:50 2023 +0200
@@ -1900,6 +1900,57 @@
fm.end()
+@command(
+ b'perf::stream-locked-section',
+ [
+ (
+ b'',
+ b'stream-version',
+ b'latest',
+            b'stream version to use ("v1", "v2" or "latest", (the default))',
+ ),
+ ]
+ + formatteropts,
+)
+def perf_stream_clone_scan(ui, repo, stream_version, **opts):
+ """benchmark the initial, repo-locked, section of a stream-clone"""
+ import mercurial.streamclone
+
+ generatev1 = mercurial.streamclone.generatev1
+ generatev2 = mercurial.streamclone.generatev2
+
+ opts = _byteskwargs(opts)
+ timer, fm = gettimer(ui, opts)
+
+ # deletion of the generator may trigger some cleanup that we do not want to
+ # measure
+ result_holder = [None]
+
+ def setupone():
+ result_holder[0] = None
+
+ def runone_v1():
+        # the lock is held for the duration of the initialisation
+ result_holder[0] = generatev1(repo)
+
+ def runone_v2():
+        # the lock is held for the duration of the initialisation
+ result_holder[0] = generatev2(repo, None, None, True)
+
+ if stream_version == b'latest':
+ runone = runone_v2
+ elif stream_version == b'v2':
+ runone = runone_v2
+ elif stream_version == b'v1':
+ runone = runone_v1
+ else:
+ msg = b'unknown stream version: "%s"' % stream_version
+ raise error.Abort(msg)
+
+ timer(runone, setup=setupone, title=b"load")
+ fm.end()
+
+
@command(b'perf::parents|perfparents', formatteropts)
def perfparents(ui, repo, **opts):
"""benchmark the time necessary to fetch one changeset's parents.
--- a/hgext/clonebundles.py Thu May 04 14:17:28 2023 +0200
+++ b/hgext/clonebundles.py Tue May 09 11:35:50 2023 +0200
@@ -200,15 +200,105 @@
occurs. So server operators should prepare for some people to follow these
instructions when a failure occurs, thus driving more load to the original
Mercurial server when the bundle hosting service fails.
+
+
+auto-generation of clone bundles
+--------------------------------
+
+It is possible to set Mercurial to automatically re-generate clone bundles when
+enough new content is available.
+
+Mercurial will take care of the process asynchronously. The defined list of
+bundle-type will be generated, uploaded, and advertised. Older bundles will get
+decommissioned as newer ones replace them.
+
+Bundles Generation:
+...................
+
+The extension can generate multiple variants of the clone bundle. Each
+different variant will be defined by the "bundle-spec" it uses::
+
+ [clone-bundles]
+ auto-generate.formats= zstd-v2, gzip-v2
+
+See `hg help bundlespec` for details about available options.
+
+By default, new bundles are generated when 5% of the repository contents or at
+least 1000 revisions are not contained in the cached bundles. This option can
+be controlled by the `clone-bundles.trigger.below-bundled-ratio` option
+(default 0.95) and the `clone-bundles.trigger.revs` option (default 1000)::
+
+ [clone-bundles]
+ trigger.below-bundled-ratio=0.95
+ trigger.revs=1000
+
+This logic can be manually triggered using the `admin::clone-bundles-refresh`
+command, or automatically on each repository change if
+`clone-bundles.auto-generate.on-change` is set to `yes`::
+
+ [clone-bundles]
+ auto-generate.on-change=yes
+ auto-generate.formats= zstd-v2, gzip-v2
+
+Bundles Upload and Serving:
+...........................
+
+The generated bundles need to be made available to users through a "public" URL.
+This should be done through `clone-bundles.upload-command` configuration. The
+value of this command should be a shell command. It will have access to the
+bundle file path through the `$HGCB_BUNDLE_PATH` variable, and the expected
+basename in the "public" URL through the `$HGCB_BUNDLE_BASENAME` variable::
+
+ [clone-bundles]
+ upload-command=sftp put $HGCB_BUNDLE_PATH \
+ sftp://bundles.host/clone-bundles/$HGCB_BUNDLE_BASENAME
+
+If the file was already uploaded, the command must still succeed.
+
+After upload, the file should be available at a URL defined by
+`clone-bundles.url-template`::
+
+ [clone-bundles]
+ url-template=https://bundles.host/cache/clone-bundles/{basename}
+
+Old bundles cleanup:
+....................
+
+When new bundles are generated, the older ones are no longer necessary and can
+be removed from storage. This is done through the `clone-bundles.delete-command`
+configuration. The command is given the URL of the artifact to delete through
+the `$HGCB_BUNDLE_URL` environment variable::
+
+ [clone-bundles]
+    delete-command=sftp rm sftp://bundles.host/clone-bundles/$HGCB_BASENAME
+
+If the file was already deleted, the command must still succeed.
"""
+import os
+import weakref
+
+from mercurial.i18n import _
+
from mercurial import (
bundlecaches,
+ commands,
+ error,
extensions,
+ localrepo,
+ lock,
+ node,
+ registrar,
+ util,
wireprotov1server,
)
+
+from mercurial.utils import (
+ procutil,
+)
+
testedwith = b'ships-with-hg-core'
@@ -226,3 +316,707 @@
def extsetup(ui):
extensions.wrapfunction(wireprotov1server, b'_capabilities', capabilities)
+
+
+# logic for bundle auto-generation
+
+
+configtable = {}
+configitem = registrar.configitem(configtable)
+
+cmdtable = {}
+command = registrar.command(cmdtable)
+
+configitem(b'clone-bundles', b'auto-generate.on-change', default=False)
+configitem(b'clone-bundles', b'auto-generate.formats', default=list)
+configitem(b'clone-bundles', b'trigger.below-bundled-ratio', default=0.95)
+configitem(b'clone-bundles', b'trigger.revs', default=1000)
+
+configitem(b'clone-bundles', b'upload-command', default=None)
+
+configitem(b'clone-bundles', b'delete-command', default=None)
+
+configitem(b'clone-bundles', b'url-template', default=None)
+
+configitem(b'devel', b'debug.clonebundles', default=False)
+
+
+# category for the post-close transaction hooks
+CAT_POSTCLOSE = b"clonebundles-autobundles"
+
+# template for bundle file names
+BUNDLE_MASK = (
+ b"full-%(bundle_type)s-%(revs)d_revs-%(tip_short)s_tip-%(op_id)s.hg"
+)
+
+
+# file in .hg/ use to track clonebundles being auto-generated
+AUTO_GEN_FILE = b'clonebundles.auto-gen'
+
+
+class BundleBase(object):
+ """represents the core of properties that matters for us in a bundle
+
+ :bundle_type: the bundlespec (see hg help bundlespec)
+ :revs: the number of revisions in the repo at bundle creation time
+ :tip_rev: the rev-num of the tip revision
+ :tip_node: the node id of the tip-most revision in the bundle
+
+ :ready: True if the bundle is ready to be served
+ """
+
+ ready = False
+
+ def __init__(self, bundle_type, revs, tip_rev, tip_node):
+ self.bundle_type = bundle_type
+ self.revs = revs
+ self.tip_rev = tip_rev
+ self.tip_node = tip_node
+
+ def valid_for(self, repo):
+ """is this bundle applicable to the current repository
+
+ This is useful for detecting bundles made irrelevant by stripping.
+ """
+ tip_node = node.bin(self.tip_node)
+ return repo.changelog.index.get_rev(tip_node) == self.tip_rev
+
+ def __eq__(self, other):
+ left = (self.ready, self.bundle_type, self.tip_rev, self.tip_node)
+ right = (other.ready, other.bundle_type, other.tip_rev, other.tip_node)
+ return left == right
+
+ def __neq__(self, other):
+ return not self == other
+
+ def __cmp__(self, other):
+ if self == other:
+ return 0
+ return -1
+
+
+class RequestedBundle(BundleBase):
+ """A bundle that should be generated.
+
+ Additional attributes compared to BundleBase
+ :heads: list of head revisions (as rev-num)
+ :op_id: a "unique" identifier for the operation triggering the change
+ """
+
+ def __init__(self, bundle_type, revs, tip_rev, tip_node, head_revs, op_id):
+ self.head_revs = head_revs
+ self.op_id = op_id
+ super(RequestedBundle, self).__init__(
+ bundle_type,
+ revs,
+ tip_rev,
+ tip_node,
+ )
+
+ @property
+ def suggested_filename(self):
+ """A filename that can be used for the generated bundle"""
+ data = {
+ b'bundle_type': self.bundle_type,
+ b'revs': self.revs,
+ b'heads': self.head_revs,
+ b'tip_rev': self.tip_rev,
+ b'tip_node': self.tip_node,
+ b'tip_short': self.tip_node[:12],
+ b'op_id': self.op_id,
+ }
+ return BUNDLE_MASK % data
+
+ def generate_bundle(self, repo, file_path):
+ """generate the bundle at `filepath`"""
+ commands.bundle(
+ repo.ui,
+ repo,
+ file_path,
+ base=[b"null"],
+ rev=self.head_revs,
+ type=self.bundle_type,
+ quiet=True,
+ )
+
+ def generating(self, file_path, hostname=None, pid=None):
+ """return a GeneratingBundle object from this object"""
+ if pid is None:
+ pid = os.getpid()
+ if hostname is None:
+ hostname = lock._getlockprefix()
+ return GeneratingBundle(
+ self.bundle_type,
+ self.revs,
+ self.tip_rev,
+ self.tip_node,
+ hostname,
+ pid,
+ file_path,
+ )
+
+
+class GeneratingBundle(BundleBase):
+ """A bundle being generated
+
+ extra attributes compared to BundleBase:
+
+ :hostname: the hostname of the machine generating the bundle
+ :pid: the pid of the process generating the bundle
+ :filepath: the target filename of the bundle
+
+ These attributes exist to help detect stalled generation processes.
+ """
+
+ ready = False
+
+ def __init__(
+ self, bundle_type, revs, tip_rev, tip_node, hostname, pid, filepath
+ ):
+ self.hostname = hostname
+ self.pid = pid
+ self.filepath = filepath
+ super(GeneratingBundle, self).__init__(
+ bundle_type, revs, tip_rev, tip_node
+ )
+
+ @classmethod
+ def from_line(cls, line):
+ """create an object by deserializing a line from AUTO_GEN_FILE"""
+ assert line.startswith(b'PENDING-v1 ')
+ (
+ __,
+ bundle_type,
+ revs,
+ tip_rev,
+ tip_node,
+ hostname,
+ pid,
+ filepath,
+ ) = line.split()
+ hostname = util.urlreq.unquote(hostname)
+ filepath = util.urlreq.unquote(filepath)
+ revs = int(revs)
+ tip_rev = int(tip_rev)
+ pid = int(pid)
+ return cls(
+ bundle_type, revs, tip_rev, tip_node, hostname, pid, filepath
+ )
+
+ def to_line(self):
+ """serialize the object to include as a line in AUTO_GEN_FILE"""
+ templ = b"PENDING-v1 %s %d %d %s %s %d %s"
+ data = (
+ self.bundle_type,
+ self.revs,
+ self.tip_rev,
+ self.tip_node,
+ util.urlreq.quote(self.hostname),
+ self.pid,
+ util.urlreq.quote(self.filepath),
+ )
+ return templ % data
+
+ def __eq__(self, other):
+ if not super(GeneratingBundle, self).__eq__(other):
+ return False
+ left = (self.hostname, self.pid, self.filepath)
+ right = (other.hostname, other.pid, other.filepath)
+ return left == right
+
+ def uploaded(self, url, basename):
+ """return a GeneratedBundle from this object"""
+ return GeneratedBundle(
+ self.bundle_type,
+ self.revs,
+ self.tip_rev,
+ self.tip_node,
+ url,
+ basename,
+ )
+
+
+class GeneratedBundle(BundleBase):
+ """A bundle that is done being generated and can be served
+
+ extra attributes compared to BundleBase:
+
+ :file_url: the url where the bundle is available.
+ :basename: the "basename" used to upload (useful for deletion)
+
+ These attributes exist to generate a bundle manifest
+ (.hg/pullbundles.manifest)
+ """
+
+ ready = True
+
+ def __init__(
+ self, bundle_type, revs, tip_rev, tip_node, file_url, basename
+ ):
+ self.file_url = file_url
+ self.basename = basename
+ super(GeneratedBundle, self).__init__(
+ bundle_type, revs, tip_rev, tip_node
+ )
+
+ @classmethod
+ def from_line(cls, line):
+ """create an object by deserializing a line from AUTO_GEN_FILE"""
+ assert line.startswith(b'DONE-v1 ')
+ (
+ __,
+ bundle_type,
+ revs,
+ tip_rev,
+ tip_node,
+ file_url,
+ basename,
+ ) = line.split()
+ revs = int(revs)
+ tip_rev = int(tip_rev)
+ file_url = util.urlreq.unquote(file_url)
+ return cls(bundle_type, revs, tip_rev, tip_node, file_url, basename)
+
+ def to_line(self):
+ """serialize the object to include as a line in AUTO_GEN_FILE"""
+ templ = b"DONE-v1 %s %d %d %s %s %s"
+ data = (
+ self.bundle_type,
+ self.revs,
+ self.tip_rev,
+ self.tip_node,
+ util.urlreq.quote(self.file_url),
+ self.basename,
+ )
+ return templ % data
+
+ def manifest_line(self):
+ """serialize the object to include as a line in pullbundles.manifest"""
+ templ = b"%s BUNDLESPEC=%s REQUIRESNI=true"
+ return templ % (self.file_url, self.bundle_type)
+
+ def __eq__(self, other):
+ if not super(GeneratedBundle, self).__eq__(other):
+ return False
+ return self.file_url == other.file_url
+
+
+def parse_auto_gen(content):
+ """parse the AUTO_GEN_FILE to return a list of Bundle object"""
+ bundles = []
+ for line in content.splitlines():
+ if line.startswith(b'PENDING-v1 '):
+ bundles.append(GeneratingBundle.from_line(line))
+ elif line.startswith(b'DONE-v1 '):
+ bundles.append(GeneratedBundle.from_line(line))
+ return bundles
+
+
+def dumps_auto_gen(bundles):
+ """serialize a list of Bundle as a AUTO_GEN_FILE content"""
+ lines = []
+ for b in bundles:
+ lines.append(b"%s\n" % b.to_line())
+ lines.sort()
+ return b"".join(lines)
+
+
+def read_auto_gen(repo):
+ """read the AUTO_GEN_FILE for the <repo> a list of Bundle object"""
+ data = repo.vfs.tryread(AUTO_GEN_FILE)
+ if not data:
+ return []
+ return parse_auto_gen(data)
+
+
+def write_auto_gen(repo, bundles):
+ """write a list of Bundle objects into the repo's AUTO_GEN_FILE"""
+ assert repo._cb_lock_ref is not None
+ data = dumps_auto_gen(bundles)
+ with repo.vfs(AUTO_GEN_FILE, mode=b'wb', atomictemp=True) as f:
+ f.write(data)
+
+
+def generate_manifest(bundles):
+ """write a list of Bundle objects into the repo's AUTO_GEN_FILE"""
+ bundles = list(bundles)
+ bundles.sort(key=lambda b: b.bundle_type)
+ lines = []
+ for b in bundles:
+ lines.append(b"%s\n" % b.manifest_line())
+ return b"".join(lines)
+
+
+def update_ondisk_manifest(repo):
+ """update the clonebundle manifest with latest url"""
+ with repo.clonebundles_lock():
+ bundles = read_auto_gen(repo)
+
+ per_types = {}
+ for b in bundles:
+ if not (b.ready and b.valid_for(repo)):
+ continue
+ current = per_types.get(b.bundle_type)
+ if current is not None and current.revs >= b.revs:
+ continue
+ per_types[b.bundle_type] = b
+ manifest = generate_manifest(per_types.values())
+ with repo.vfs(
+ bundlecaches.CB_MANIFEST_FILE, mode=b"wb", atomictemp=True
+ ) as f:
+ f.write(manifest)
+
+
+def update_bundle_list(repo, new_bundles=(), del_bundles=()):
+ """modify the repo's AUTO_GEN_FILE
+
+ This method also regenerates the clone bundle manifest when needed"""
+ with repo.clonebundles_lock():
+ bundles = read_auto_gen(repo)
+ if del_bundles:
+ bundles = [b for b in bundles if b not in del_bundles]
+ new_bundles = [b for b in new_bundles if b not in bundles]
+ bundles.extend(new_bundles)
+ write_auto_gen(repo, bundles)
+ all_changed = []
+ all_changed.extend(new_bundles)
+ all_changed.extend(del_bundles)
+ if any(b.ready for b in all_changed):
+ update_ondisk_manifest(repo)
+
+
+def cleanup_tmp_bundle(repo, target):
+ """remove a GeneratingBundle file and entry"""
+ assert not target.ready
+ with repo.clonebundles_lock():
+ repo.vfs.tryunlink(target.filepath)
+ update_bundle_list(repo, del_bundles=[target])
+
+
+def finalize_one_bundle(repo, target):
+ """upload a generated bundle and advertise it in the clonebundles.manifest"""
+ with repo.clonebundles_lock():
+ bundles = read_auto_gen(repo)
+ if target in bundles and target.valid_for(repo):
+ result = upload_bundle(repo, target)
+ update_bundle_list(repo, new_bundles=[result])
+ cleanup_tmp_bundle(repo, target)
+
+
+def find_outdated_bundles(repo, bundles):
+ """finds outdated bundles"""
+ olds = []
+ per_types = {}
+ for b in bundles:
+ if not b.valid_for(repo):
+ olds.append(b)
+ continue
+ l = per_types.setdefault(b.bundle_type, [])
+ l.append(b)
+ for key in sorted(per_types):
+ all = per_types[key]
+ if len(all) > 1:
+ all.sort(key=lambda b: b.revs, reverse=True)
+ olds.extend(all[1:])
+ return olds
+
+
+def collect_garbage(repo):
+ """finds outdated bundles and get them deleted"""
+ with repo.clonebundles_lock():
+ bundles = read_auto_gen(repo)
+ olds = find_outdated_bundles(repo, bundles)
+ for o in olds:
+ delete_bundle(repo, o)
+ update_bundle_list(repo, del_bundles=olds)
+
+
+def upload_bundle(repo, bundle):
+ """upload the result of a GeneratingBundle and return a GeneratedBundle
+
+ The upload is done using the `clone-bundles.upload-command`
+ """
+ cmd = repo.ui.config(b'clone-bundles', b'upload-command')
+ url = repo.ui.config(b'clone-bundles', b'url-template')
+ basename = repo.vfs.basename(bundle.filepath)
+ filepath = procutil.shellquote(bundle.filepath)
+ variables = {
+ b'HGCB_BUNDLE_PATH': filepath,
+ b'HGCB_BUNDLE_BASENAME': basename,
+ }
+ env = procutil.shellenviron(environ=variables)
+ ret = repo.ui.system(cmd, environ=env)
+ if ret:
+ raise error.Abort(b"command returned status %d: %s" % (ret, cmd))
+ url = (
+ url.decode('utf8')
+ .format(basename=basename.decode('utf8'))
+ .encode('utf8')
+ )
+ return bundle.uploaded(url, basename)
+
+
+def delete_bundle(repo, bundle):
+ """delete a bundle from storage"""
+ assert bundle.ready
+ msg = b'clone-bundles: deleting bundle %s\n'
+ msg %= bundle.basename
+ if repo.ui.configbool(b'devel', b'debug.clonebundles'):
+ repo.ui.write(msg)
+ else:
+ repo.ui.debug(msg)
+
+ cmd = repo.ui.config(b'clone-bundles', b'delete-command')
+ variables = {
+ b'HGCB_BUNDLE_URL': bundle.file_url,
+ b'HGCB_BASENAME': bundle.basename,
+ }
+ env = procutil.shellenviron(environ=variables)
+ ret = repo.ui.system(cmd, environ=env)
+ if ret:
+ raise error.Abort(b"command returned status %d: %s" % (ret, cmd))
+
+
+def auto_bundle_needed_actions(repo, bundles, op_id):
+ """find the list of bundles that need action
+
+    returns a (create, delete) pair: RequestedBundle objects that need to be
+    generated and uploaded, and existing bundles that became outdated."""
+ create_bundles = []
+ delete_bundles = []
+ repo = repo.filtered(b"immutable")
+ targets = repo.ui.configlist(b'clone-bundles', b'auto-generate.formats')
+ ratio = float(
+ repo.ui.config(b'clone-bundles', b'trigger.below-bundled-ratio')
+ )
+ abs_revs = repo.ui.configint(b'clone-bundles', b'trigger.revs')
+ revs = len(repo.changelog)
+ generic_data = {
+ 'revs': revs,
+ 'head_revs': repo.changelog.headrevs(),
+ 'tip_rev': repo.changelog.tiprev(),
+ 'tip_node': node.hex(repo.changelog.tip()),
+ 'op_id': op_id,
+ }
+ for t in targets:
+ if new_bundle_needed(repo, bundles, ratio, abs_revs, t, revs):
+ data = generic_data.copy()
+ data['bundle_type'] = t
+ b = RequestedBundle(**data)
+ create_bundles.append(b)
+ delete_bundles.extend(find_outdated_bundles(repo, bundles))
+ return create_bundles, delete_bundles
+
+
+def new_bundle_needed(repo, bundles, ratio, abs_revs, bundle_type, revs):
+ """consider the current cached content and trigger new bundles if needed"""
+ threshold = max((revs * ratio), (revs - abs_revs))
+ for b in bundles:
+ if not b.valid_for(repo) or b.bundle_type != bundle_type:
+ continue
+ if b.revs > threshold:
+ return False
+ return True
+
+
+def start_one_bundle(repo, bundle):
+ """start the generation of a single bundle file
+
+ the `bundle` argument should be a RequestedBundle object.
+
+ This data is passed to the `debugmakeclonebundles` "as is".
+ """
+ data = util.pickle.dumps(bundle)
+ cmd = [procutil.hgexecutable(), b'--cwd', repo.path, INTERNAL_CMD]
+ env = procutil.shellenviron()
+ msg = b'clone-bundles: starting bundle generation: %s\n'
+ stdout = None
+ stderr = None
+ waits = []
+ record_wait = None
+ if repo.ui.configbool(b'devel', b'debug.clonebundles'):
+ stdout = procutil.stdout
+ stderr = procutil.stderr
+ repo.ui.write(msg % bundle.bundle_type)
+ record_wait = waits.append
+ else:
+ repo.ui.debug(msg % bundle.bundle_type)
+ bg = procutil.runbgcommand
+ bg(
+ cmd,
+ env,
+ stdin_bytes=data,
+ stdout=stdout,
+ stderr=stderr,
+ record_wait=record_wait,
+ )
+ for f in waits:
+ f()
+
+
+INTERNAL_CMD = b'debug::internal-make-clone-bundles'
+
+
+@command(INTERNAL_CMD, [], b'')
+def debugmakeclonebundles(ui, repo):
+ """Internal command to auto-generate debug bundles"""
+ requested_bundle = util.pickle.load(procutil.stdin)
+ procutil.stdin.close()
+
+ collect_garbage(repo)
+
+ fname = requested_bundle.suggested_filename
+ fpath = repo.vfs.makedirs(b'tmp-bundles')
+ fpath = repo.vfs.join(b'tmp-bundles', fname)
+ bundle = requested_bundle.generating(fpath)
+ update_bundle_list(repo, new_bundles=[bundle])
+
+ requested_bundle.generate_bundle(repo, fpath)
+
+ repo.invalidate()
+ finalize_one_bundle(repo, bundle)
+
+
+def make_auto_bundler(source_repo):
+ reporef = weakref.ref(source_repo)
+
+ def autobundle(tr):
+ repo = reporef()
+ assert repo is not None
+ bundles = read_auto_gen(repo)
+ new, __ = auto_bundle_needed_actions(repo, bundles, b"%d_txn" % id(tr))
+ for data in new:
+ start_one_bundle(repo, data)
+ return None
+
+ return autobundle
+
+
+def reposetup(ui, repo):
+ """install the two pieces needed for automatic clonebundle generation
+
+ - add a "post-close" hook that fires bundling when needed
+ - introduce a clone-bundle lock to let multiple processes meddle with the
+ state files.
+ """
+ if not repo.local():
+ return
+
+ class autobundlesrepo(repo.__class__):
+ def transaction(self, *args, **kwargs):
+ tr = super(autobundlesrepo, self).transaction(*args, **kwargs)
+ enabled = repo.ui.configbool(
+ b'clone-bundles',
+ b'auto-generate.on-change',
+ )
+ targets = repo.ui.configlist(
+ b'clone-bundles', b'auto-generate.formats'
+ )
+ if enabled and targets:
+ tr.addpostclose(CAT_POSTCLOSE, make_auto_bundler(self))
+ return tr
+
+ @localrepo.unfilteredmethod
+ def clonebundles_lock(self, wait=True):
+ '''Lock the repository file related to clone bundles'''
+ if not util.safehasattr(self, '_cb_lock_ref'):
+ self._cb_lock_ref = None
+ l = self._currentlock(self._cb_lock_ref)
+ if l is not None:
+ l.lock()
+ return l
+
+ l = self._lock(
+ vfs=self.vfs,
+ lockname=b"clonebundleslock",
+ wait=wait,
+ releasefn=None,
+ acquirefn=None,
+ desc=_(b'repository %s') % self.origroot,
+ )
+ self._cb_lock_ref = weakref.ref(l)
+ return l
+
+ repo._wlockfreeprefix.add(AUTO_GEN_FILE)
+ repo._wlockfreeprefix.add(bundlecaches.CB_MANIFEST_FILE)
+ repo.__class__ = autobundlesrepo
+
+
+@command(
+ b'admin::clone-bundles-refresh',
+ [
+ (
+ b'',
+ b'background',
+ False,
+ _(b'start bundle generation in the background'),
+ ),
+ ],
+ b'',
+)
+def cmd_admin_clone_bundles_refresh(
+ ui,
+ repo: localrepo.localrepository,
+ background=False,
+):
+ """generate clone bundles according to the configuration
+
+ This runs the logic for automatic generation, removing outdated bundles and
+ generating new ones if necessary. See :hg:`help -e clone-bundles` for
+ details about how to configure this feature.
+ """
+ debug = repo.ui.configbool(b'devel', b'debug.clonebundles')
+ bundles = read_auto_gen(repo)
+ op_id = b"%d_acbr" % os.getpid()
+ create, delete = auto_bundle_needed_actions(repo, bundles, op_id)
+
+ # if some bundles are scheduled for creation in the background, they will
+    # deal with garbage collection too, so no need to synchronously do it.
+ #
+ # However if no bundles are scheduled for creation, we need to explicitly do
+ # it here.
+ if not (background and create):
+ # we clean up outdated bundles before generating new ones to keep the
+ # last two versions of the bundle around for a while and avoid having to
+ # deal with clients that just got served a manifest.
+ for o in delete:
+ delete_bundle(repo, o)
+ update_bundle_list(repo, del_bundles=delete)
+
+ if create:
+ fpath = repo.vfs.makedirs(b'tmp-bundles')
+
+ if background:
+ for requested_bundle in create:
+ start_one_bundle(repo, requested_bundle)
+ else:
+ for requested_bundle in create:
+ if debug:
+ msg = b'clone-bundles: starting bundle generation: %s\n'
+ repo.ui.write(msg % requested_bundle.bundle_type)
+ fname = requested_bundle.suggested_filename
+ fpath = repo.vfs.join(b'tmp-bundles', fname)
+ generating_bundle = requested_bundle.generating(fpath)
+ update_bundle_list(repo, new_bundles=[generating_bundle])
+ requested_bundle.generate_bundle(repo, fpath)
+ result = upload_bundle(repo, generating_bundle)
+ update_bundle_list(repo, new_bundles=[result])
+ update_ondisk_manifest(repo)
+ cleanup_tmp_bundle(repo, generating_bundle)
+
+
+@command(b'admin::clone-bundles-clear', [], b'')
+def cmd_admin_clone_bundles_clear(ui, repo: localrepo.localrepository):
+ """remove existing clone bundle caches
+
+ See `hg help admin::clone-bundles-refresh` for details on how to regenerate
+ them.
+
+ This command will only affect bundles currently available, it will not
+ affect bundles being asynchronously generated.
+ """
+ bundles = read_auto_gen(repo)
+ delete = [b for b in bundles if b.ready]
+ for o in delete:
+ delete_bundle(repo, o)
+ update_bundle_list(repo, del_bundles=delete)
--- a/hgext/fastexport.py Thu May 04 14:17:28 2023 +0200
+++ b/hgext/fastexport.py Tue May 09 11:35:50 2023 +0200
@@ -69,10 +69,10 @@
return b"refs/heads/" + branch
-def write_data(buf, data, skip_newline):
+def write_data(buf, data, add_newline=False):
buf.append(b"data %d\n" % len(data))
buf.append(data)
- if not skip_newline or data[-1:] != b"\n":
+ if add_newline or data[-1:] != b"\n":
buf.append(b"\n")
@@ -103,7 +103,7 @@
marks[filerev] = mark
data = filectx.data()
buf = [b"blob\n", b"mark :%d\n" % mark]
- write_data(buf, data, False)
+ write_data(buf, data, True)
ui.write(*buf, keepprogressbar=True)
del buf
@@ -122,7 +122,7 @@
convert_to_git_date(ctx.date()),
),
]
- write_data(buf, ctx.description(), True)
+ write_data(buf, ctx.description())
if parents:
buf.append(b"from :%d\n" % marks[parents[0].hex()])
if len(parents) == 2:
--- a/hgext/infinitepush/__init__.py Thu May 04 14:17:28 2023 +0200
+++ b/hgext/infinitepush/__init__.py Tue May 09 11:35:50 2023 +0200
@@ -330,6 +330,11 @@
clientextsetup(ui)
+def uipopulate(ui):
+ if not ui.hasconfig(b"experimental", b"changegroup3"):
+ ui.setconfig(b"experimental", b"changegroup3", False, b"infinitepush")
+
+
def commonsetup(ui):
wireprotov1server.commands[b'listkeyspatterns'] = (
wireprotolistkeyspatterns,
--- a/hgext/narrow/narrowrepo.py Thu May 04 14:17:28 2023 +0200
+++ b/hgext/narrow/narrowrepo.py Tue May 09 11:35:50 2023 +0200
@@ -19,8 +19,8 @@
dirstate = super(narrowrepository, self)._makedirstate()
return narrowdirstate.wrapdirstate(self, dirstate)
- def peer(self, path=None):
- peer = super(narrowrepository, self).peer(path=path)
+ def peer(self, *args, **kwds):
+ peer = super(narrowrepository, self).peer(*args, **kwds)
peer._caps.add(wireprototypes.NARROWCAP)
peer._caps.add(wireprototypes.ELLIPSESCAP)
return peer
--- a/hgext/rebase.py Thu May 04 14:17:28 2023 +0200
+++ b/hgext/rebase.py Tue May 09 11:35:50 2023 +0200
@@ -24,6 +24,7 @@
wdirrev,
)
from mercurial.pycompat import open
+from mercurial.thirdparty.jaraco.collections import Projection
from mercurial import (
bookmarks,
cmdutil,
@@ -52,6 +53,7 @@
util,
)
+
# The following constants are used throughout the rebase module. The ordering of
# their values must be maintained.
@@ -84,13 +86,17 @@
return 1
-def _savegraft(ctx, extra):
- s = ctx.extra().get(b'source', None)
- if s is not None:
- extra[b'source'] = s
- s = ctx.extra().get(b'intermediate-source', None)
- if s is not None:
- extra[b'intermediate-source'] = s
+def retained_extras():
+ """
+ Yield the names of the extras to be retained.
+ """
+ # graft
+ yield b'source'
+ yield b'intermediate-source'
+
+
+def _save_extras(ctx, extra):
+ extra.update(Projection(retained_extras(), ctx.extra()))
def _savebranch(ctx, extra):
@@ -193,7 +199,7 @@
self.date = opts.get('date', None)
e = opts.get('extrafn') # internal, used by e.g. hgsubversion
- self.extrafns = [_savegraft]
+ self.extrafns = [_save_extras]
if e:
self.extrafns = [e]
--- a/mercurial/bundle2.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/bundle2.py Tue May 09 11:35:50 2023 +0200
@@ -1703,6 +1703,7 @@
vfs=None,
compression=None,
compopts=None,
+ allow_internal=False,
):
if bundletype.startswith(b'HG10'):
cg = changegroup.makechangegroup(repo, outgoing, b'01', source)
@@ -1718,6 +1719,14 @@
elif not bundletype.startswith(b'HG20'):
raise error.ProgrammingError(b'unknown bundle type: %s' % bundletype)
+ # enforce that no internal phases are to be bundled
+ bundled_internal = repo.revs(b"%ln and _internal()", outgoing.ancestorsof)
+ if bundled_internal and not allow_internal:
+ count = len(repo.revs(b'%ln and _internal()', outgoing.missing))
+ msg = "backup bundle would contain %d internal changesets"
+ msg %= count
+ raise error.ProgrammingError(msg)
+
caps = {}
if opts.get(b'obsolescence', False):
caps[b'obsmarkers'] = (b'V1',)
@@ -1750,12 +1759,16 @@
part.addparam(
b'nbchanges', b'%d' % cg.extras[b'clcount'], mandatory=False
)
- if opts.get(b'phases') and repo.revs(
- b'%ln and secret()', outgoing.ancestorsof
- ):
- part.addparam(
- b'targetphase', b'%d' % phases.secret, mandatory=False
- )
+ if opts.get(b'phases'):
+ target_phase = phases.draft
+ for head in outgoing.ancestorsof:
+ target_phase = max(target_phase, repo[head].phase())
+ if target_phase > phases.draft:
+ part.addparam(
+ b'targetphase',
+ b'%d' % target_phase,
+ mandatory=False,
+ )
if repository.REPO_FEATURE_SIDE_DATA in repo.features:
part.addparam(b'exp-sidedata', b'1')
--- a/mercurial/bundlecaches.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/bundlecaches.py Tue May 09 11:35:50 2023 +0200
@@ -60,6 +60,7 @@
_bundlespeccgversions = {
b'v1': b'01',
b'v2': b'02',
+ b'v3': b'03',
b'packed1': b's1',
b'bundle2': b'02', # legacy
}
@@ -82,6 +83,14 @@
b'tagsfnodescache': True,
b'revbranchcache': True,
},
+ b'v3': {
+ b'changegroup': True,
+ b'cg.version': b'03',
+ b'obsolescence': False,
+ b'phases': True,
+ b'tagsfnodescache': True,
+ b'revbranchcache': True,
+ },
b'streamv2': {
b'changegroup': False,
b'cg.version': b'02',
--- a/mercurial/bundlerepo.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/bundlerepo.py Tue May 09 11:35:50 2023 +0200
@@ -484,8 +484,8 @@
def cancopy(self):
return False
- def peer(self, path=None):
- return bundlepeer(self, path=path)
+ def peer(self, path=None, remotehidden=False):
+ return bundlepeer(self, path=path, remotehidden=remotehidden)
def getcwd(self):
return encoding.getcwd() # always outside the repo
--- a/mercurial/cmdutil.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/cmdutil.py Tue May 09 11:35:50 2023 +0200
@@ -2754,7 +2754,6 @@
def cat(ui, repo, ctx, matcher, basefm, fntemplate, prefix, **opts):
err = 1
- opts = pycompat.byteskwargs(opts)
def write(path):
filename = None
@@ -2768,7 +2767,7 @@
except OSError:
pass
with formatter.maybereopen(basefm, filename) as fm:
- _updatecatformatter(fm, ctx, matcher, path, opts.get(b'decode'))
+ _updatecatformatter(fm, ctx, matcher, path, opts.get('decode'))
# Automation often uses hg cat on single files, so special case it
# for performance to avoid the cost of parsing the manifest.
@@ -2803,7 +2802,7 @@
basefm,
fntemplate,
subprefix,
- **pycompat.strkwargs(opts),
+ **opts,
):
err = 0
except error.RepoLookupError:
--- a/mercurial/commands.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/commands.py Tue May 09 11:35:50 2023 +0200
@@ -69,6 +69,7 @@
)
from .utils import (
dateutil,
+ procutil,
stringutil,
urlutil,
)
@@ -1665,6 +1666,14 @@
scmutil.nochangesfound(ui, repo, not base and excluded)
return 1
+ # internal changesets are internal implementation details that should not
+ # leave the repository. Bundling with `hg bundle` creates such a risk.
+ bundled_internal = repo.revs(b"%ln and _internal()", missing)
+ if bundled_internal:
+ msg = _(b"cannot bundle internal changesets")
+ hint = _(b"%d internal changesets selected") % len(bundled_internal)
+ raise error.Abort(msg, hint=hint)
+
if heads:
outgoing = discovery.outgoing(
repo, missingroots=missing, ancestorsof=heads
@@ -1714,8 +1723,9 @@
bundlespec.set_param(
b'obsolescence-mandatory', obs_mand_cfg, overwrite=False
)
- phases_cfg = cfg(b'experimental', b'bundle-phases')
- bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
+ if not bundlespec.params.get(b'phases', False):
+ phases_cfg = cfg(b'experimental', b'bundle-phases')
+ bundlespec.set_param(b'phases', phases_cfg, overwrite=False)
bundle2.writenewbundle(
ui,
@@ -3529,22 +3539,20 @@
"""
cmdutil.check_incompatible_arguments(opts, 'all_files', ['all', 'diff'])
- opts = pycompat.byteskwargs(opts)
- diff = opts.get(b'all') or opts.get(b'diff')
- follow = opts.get(b'follow')
- if opts.get(b'all_files') is None and not diff:
- opts[b'all_files'] = True
+
+ diff = opts.get('all') or opts.get('diff')
+ follow = opts.get('follow')
+ if opts.get('all_files') is None and not diff:
+ opts['all_files'] = True
plaingrep = (
- opts.get(b'all_files')
- and not opts.get(b'rev')
- and not opts.get(b'follow')
+ opts.get('all_files') and not opts.get('rev') and not opts.get('follow')
)
- all_files = opts.get(b'all_files')
+ all_files = opts.get('all_files')
if plaingrep:
- opts[b'rev'] = [b'wdir()']
+ opts['rev'] = [b'wdir()']
reflags = re.M
- if opts.get(b'ignore_case'):
+ if opts.get('ignore_case'):
reflags |= re.I
try:
regexp = util.re.compile(pattern, reflags)
@@ -3555,7 +3563,7 @@
)
return 1
sep, eol = b':', b'\n'
- if opts.get(b'print0'):
+ if opts.get('print0'):
sep = eol = b'\0'
searcher = grepmod.grepsearcher(
@@ -3603,7 +3611,7 @@
b'linenumber',
b'%d',
l.linenum,
- opts.get(b'line_number'),
+ opts.get('line_number'),
b'',
),
]
@@ -3625,14 +3633,14 @@
b'user',
b'%s',
formatuser(ctx.user()),
- opts.get(b'user'),
+ opts.get('user'),
b'',
),
(
b'date',
b'%s',
fm.formatdate(ctx.date(), datefmt),
- opts.get(b'date'),
+ opts.get('date'),
b'',
),
]
@@ -3643,15 +3651,15 @@
field = fieldnamemap.get(name, name)
label = extra_label + (b'grep.%s' % name)
fm.condwrite(cond, field, fmt, data, label=label)
- if not opts.get(b'files_with_matches'):
+ if not opts.get('files_with_matches'):
fm.plain(sep, label=b'grep.sep')
- if not opts.get(b'text') and binary():
+ if not opts.get('text') and binary():
fm.plain(_(b" Binary file matches"))
else:
displaymatches(fm.nested(b'texts', tmpl=b'{text}'), l)
fm.plain(eol)
found = True
- if opts.get(b'files_with_matches'):
+ if opts.get('files_with_matches'):
break
return found
@@ -3677,9 +3685,9 @@
wopts = logcmdutil.walkopts(
pats=pats,
opts=opts,
- revspec=opts[b'rev'],
- include_pats=opts[b'include'],
- exclude_pats=opts[b'exclude'],
+ revspec=opts['rev'],
+ include_pats=opts['include'],
+ exclude_pats=opts['exclude'],
follow=follow,
force_changelog_traversal=all_files,
filter_revisions_by_pats=not all_files,
@@ -3687,7 +3695,7 @@
revs, makefilematcher = logcmdutil.makewalker(repo, wopts)
ui.pager(b'grep')
- fm = ui.formatter(b'grep', opts)
+ fm = ui.formatter(b'grep', pycompat.byteskwargs(opts))
for fn, ctx, pstates, states in searcher.searchfiles(revs, makefilematcher):
r = display(fm, fn, ctx, pstates, states)
found = found or r
@@ -5398,6 +5406,12 @@
_(b'a specific branch you would like to pull'),
_(b'BRANCH'),
),
+ (
+ b'',
+ b'remote-hidden',
+ False,
+ _(b"include changesets hidden on the remote (EXPERIMENTAL)"),
+ ),
]
+ remoteopts,
_(b'[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]...'),
@@ -5435,6 +5449,14 @@
Specifying bookmark as ``.`` is equivalent to specifying the active
bookmark's name.
+ .. container:: verbose
+
+ One can use the `--remote-hidden` flag to pull changesets
+ hidden on the remote. This flag is "best effort", and will only
+ work if the server supports the feature and is configured to
+ allow the user to access hidden changesets. This option is
+ experimental and backwards compatibility is not guaranteed.
+
Returns 0 on success, 1 if an update had unresolved files.
"""
@@ -5449,12 +5471,16 @@
for path in urlutil.get_pull_paths(repo, ui, sources):
ui.status(_(b'pulling from %s\n') % urlutil.hidepassword(path.loc))
ui.flush()
- other = hg.peer(repo, opts, path)
+ other = hg.peer(repo, opts, path, remotehidden=opts[b'remote_hidden'])
update_conflict = None
try:
branches = (path.branch, opts.get(b'branch', []))
revs, checkout = hg.addbranchrevs(
- repo, other, branches, opts.get(b'rev')
+ repo,
+ other,
+ branches,
+ opts.get(b'rev'),
+ remotehidden=opts[b'remote_hidden'],
)
pullopargs = {}
@@ -6647,7 +6673,25 @@
raise error.RepoError(
_(b"there is no Mercurial repository here (.hg not found)")
)
- s = wireprotoserver.sshserver(ui, repo)
+ accesshidden = False
+ if repo.filtername is None:
+ allow = ui.configlist(
+ b'experimental', b'server.allow-hidden-access'
+ )
+ user = procutil.getuser()
+ if allow and scmutil.ismember(ui, user, allow):
+ accesshidden = True
+ else:
+ msg = (
+ _(
+ b'ignoring request to access hidden changeset by '
+ b'unauthorized user: %s\n'
+ )
+ % user
+ )
+ ui.warn(msg)
+
+ s = wireprotoserver.sshserver(ui, repo, accesshidden=accesshidden)
s.serve_forever()
return
--- a/mercurial/configitems.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/configitems.py Tue May 09 11:35:50 2023 +0200
@@ -975,7 +975,7 @@
coreconfigitem(
b'experimental',
b'changegroup3',
- default=False,
+ default=True,
)
coreconfigitem(
b'experimental',
@@ -1248,6 +1248,11 @@
)
coreconfigitem(
b'experimental',
+ b'server.allow-hidden-access',
+ default=list,
+)
+coreconfigitem(
+ b'experimental',
b'server.filesdata.recommended-batch-size',
default=50000,
)
--- a/mercurial/crecord.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/crecord.py Tue May 09 11:35:50 2023 +0200
@@ -1990,7 +1990,7 @@
)
# newwin([height, width,] begin_y, begin_x)
self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
- self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
+ self.statuswin.keypad(True) # interpret arrow-key, etc. esc sequences
# figure out how much space to allocate for the chunk-pad which is
# used for displaying the patch
--- a/mercurial/debugcommands.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/debugcommands.py Tue May 09 11:35:50 2023 +0200
@@ -93,6 +93,7 @@
wireprotoserver,
)
from .interfaces import repository
+from .stabletailgraph import stabletailsort
from .utils import (
cborutil,
compression,
@@ -3644,6 +3645,30 @@
@command(
+ b'debug::stable-tail-sort',
+ [
+ (
+ b'T',
+ b'template',
+ b'{rev}\n',
+ _(b'display with template'),
+ _(b'TEMPLATE'),
+ ),
+ ],
+ b'REV',
+)
+def debug_stable_tail_sort(ui, repo, revspec, template, **opts):
+ """display the stable-tail sort of the ancestors of a given node"""
+ rev = logcmdutil.revsingle(repo, revspec).rev()
+ cl = repo.changelog
+
+ displayer = logcmdutil.maketemplater(ui, repo, template)
+ sorted_revs = stabletailsort._stable_tail_sort(cl, rev)
+ for ancestor_rev in sorted_revs:
+ displayer.show(repo[ancestor_rev])
+
+
+@command(
b"debugbackupbundle",
[
(
@@ -4512,7 +4537,7 @@
peer = None
else:
ui.write(_(b'creating ssh peer from handshake results\n'))
- peer = sshpeer.makepeer(
+ peer = sshpeer._make_peer(
ui,
url,
proc,
@@ -4568,7 +4593,7 @@
)
else:
peer_path = urlutil.try_path(ui, path)
- peer = httppeer.makepeer(ui, peer_path, opener=opener)
+ peer = httppeer._make_peer(ui, peer_path, opener=opener)
# We /could/ populate stdin/stdout with sock.makefile()...
else:
--- a/mercurial/dirstate.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/dirstate.py Tue May 09 11:35:50 2023 +0200
@@ -1760,12 +1760,6 @@
return list(files)
return [f for f in dmap if match(f)]
- def _actualfilename(self, tr):
- if tr:
- return self._pendingfilename
- else:
- return self._filename
-
def all_file_names(self):
"""list all filename currently used by this dirstate
--- a/mercurial/discovery.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/discovery.py Tue May 09 11:35:50 2023 +0200
@@ -104,14 +104,14 @@
if ancestorsof is None:
ancestorsof = cl.heads()
if missingroots:
- discbases = []
- for n in missingroots:
- discbases.extend([p for p in cl.parents(n) if p != repo.nullid])
# TODO remove call to nodesbetween.
# TODO populate attributes on outgoing instance instead of setting
# discbases.
csets, roots, heads = cl.nodesbetween(missingroots, ancestorsof)
included = set(csets)
+ discbases = []
+ for n in csets:
+ discbases.extend([p for p in cl.parents(n) if p != repo.nullid])
ancestorsof = heads
commonheads = [n for n in discbases if n not in included]
elif not commonheads:
--- a/mercurial/dispatch.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/dispatch.py Tue May 09 11:35:50 2023 +0200
@@ -367,12 +367,18 @@
# shenanigans wherein a user does something like pass
# --debugger or --config=ui.debugger=1 as a repo
# name. This used to actually run the debugger.
+ nbargs = 4
+ hashiddenaccess = b'--hidden' in cmdargs
+ if hashiddenaccess:
+ nbargs += 1
if (
- len(req.args) != 4
+ len(req.args) != nbargs
or req.args[0] != b'-R'
or req.args[1].startswith(b'--')
or req.args[2] != b'serve'
or req.args[3] != b'--stdio'
+ or hashiddenaccess
+ and req.args[4] != b'--hidden'
):
raise error.Abort(
_(b'potentially unsafe serve --stdio invocation: %s')
--- a/mercurial/encoding.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/encoding.py Tue May 09 11:35:50 2023 +0200
@@ -657,7 +657,7 @@
pass
s = pycompat.bytestr(s)
- r = b""
+ r = bytearray()
pos = 0
l = len(s)
while pos < l:
@@ -673,7 +673,7 @@
c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
pos += 1
r += c
- return r
+ return bytes(r)
def fromutf8b(s):
@@ -712,7 +712,7 @@
# helper again to walk the string without "decoding" it.
s = pycompat.bytestr(s)
- r = b""
+ r = bytearray()
pos = 0
l = len(s)
while pos < l:
@@ -722,4 +722,4 @@
if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
r += c
- return r
+ return bytes(r)
--- a/mercurial/helptext/config.txt Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/helptext/config.txt Tue May 09 11:35:50 2023 +0200
@@ -1622,7 +1622,7 @@
in ``http_proxy.no``. (default: False)
``http``
-----------
+--------
Used to configure access to Mercurial repositories via HTTP.
--- a/mercurial/helptext/rust.txt Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/helptext/rust.txt Tue May 09 11:35:50 2023 +0200
@@ -76,8 +76,9 @@
MSRV
====
-The minimum supported Rust version is currently 1.61.0. The project's policy is
-to follow the version from Debian testing, to make the distributions' job easier.
+The minimum supported Rust version is defined in `rust/clippy.toml`.
+The project's policy is to keep it at or below the version from Debian testing,
+to make the distributions' job easier.
rhg
===
--- a/mercurial/hg.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/hg.py Tue May 09 11:35:50 2023 +0200
@@ -65,10 +65,10 @@
sharedbookmarks = b'bookmarks'
-def addbranchrevs(lrepo, other, branches, revs):
+def addbranchrevs(lrepo, other, branches, revs, remotehidden=False):
if util.safehasattr(other, 'peer'):
# a courtesy to callers using a localrepo for other
- peer = other.peer()
+ peer = other.peer(remotehidden=remotehidden)
else:
peer = other
hashbranch, branches = branches
@@ -242,7 +242,15 @@
return repo.filtered(b'visible')
-def peer(uiorrepo, opts, path, create=False, intents=None, createopts=None):
+def peer(
+ uiorrepo,
+ opts,
+ path,
+ create=False,
+ intents=None,
+ createopts=None,
+ remotehidden=False,
+):
'''return a repository peer for the specified path'''
ui = getattr(uiorrepo, 'ui', uiorrepo)
rui = remoteui(uiorrepo, opts)
@@ -260,6 +268,7 @@
create,
intents=intents,
createopts=createopts,
+ remotehidden=remotehidden,
)
_setup_repo_or_peer(rui, peer)
else:
@@ -274,7 +283,7 @@
intents=intents,
createopts=createopts,
)
- peer = repo.peer(path=peer_path)
+ peer = repo.peer(path=peer_path, remotehidden=remotehidden)
return peer
--- a/mercurial/hgweb/common.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/hgweb/common.py Tue May 09 11:35:50 2023 +0200
@@ -13,6 +13,7 @@
import os
import stat
+from ..i18n import _
from ..pycompat import (
getattr,
open,
@@ -20,6 +21,7 @@
from .. import (
encoding,
pycompat,
+ scmutil,
templater,
util,
)
@@ -38,15 +40,33 @@
HTTP_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_SERVER_ERROR = 500
+ismember = scmutil.ismember
-def ismember(ui, username, userlist):
- """Check if username is a member of userlist.
- If userlist has a single '*' member, all users are considered members.
- Can be overridden by extensions to provide more complex authorization
- schemes.
- """
- return userlist == [b'*'] or username in userlist
+def hashiddenaccess(repo, req):
+ if bool(req.qsparams.get(b'access-hidden')):
+ # Disable this by default for now. Main risk is to get critical
+ # information exposed through this. This is expecially risky if
+ # someone decided to make a changeset secret for good reason, but
+ # its predecessors are still draft.
+ #
+ # The feature is currently experimental, so we can still decide to
+ # change the default.
+ ui = repo.ui
+ allow = ui.configlist(b'experimental', b'server.allow-hidden-access')
+ user = req.remoteuser
+ if allow and ismember(ui, user, allow):
+ return True
+ else:
+ msg = (
+ _(
+ b'ignoring request to access hidden changeset by '
+ b'unauthorized user: %r\n'
+ )
+ % user
+ )
+ ui.warn(msg)
+ return False
def checkauthz(hgweb, req, op):
--- a/mercurial/hgweb/hgweb_mod.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/hgweb/hgweb_mod.py Tue May 09 11:35:50 2023 +0200
@@ -39,6 +39,7 @@
)
from . import (
+ common,
request as requestmod,
webcommands,
webutil,
@@ -124,6 +125,16 @@
self.req = req
self.res = res
+ # Only works if the filter actually support being upgraded to show
+ # visible changesets
+ current_filter = repo.filtername
+ if (
+ common.hashiddenaccess(repo, req)
+ and current_filter is not None
+ and current_filter + b'.hidden' in repoview.filtertable
+ ):
+ self.repo = self.repo.filtered(repo.filtername + b'.hidden')
+
self.maxchanges = self.configint(b'web', b'maxchanges')
self.stripecount = self.configint(b'web', b'stripes')
self.maxshortchanges = self.configint(b'web', b'maxshortchanges')
--- a/mercurial/httppeer.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/httppeer.py Tue May 09 11:35:50 2023 +0200
@@ -108,7 +108,14 @@
def makev1commandrequest(
- ui, requestbuilder, caps, capablefn, repobaseurl, cmd, args
+ ui,
+ requestbuilder,
+ caps,
+ capablefn,
+ repobaseurl,
+ cmd,
+ args,
+ remotehidden=False,
):
"""Make an HTTP request to run a command for a version 1 client.
@@ -127,6 +134,8 @@
ui.debug(b"sending %s command\n" % cmd)
q = [(b'cmd', cmd)]
+ if remotehidden:
+ q.append(('access-hidden', '1'))
headersize = 0
# Important: don't use self.capable() here or else you end up
# with infinite recursion when trying to look up capabilities
@@ -381,13 +390,16 @@
class httppeer(wireprotov1peer.wirepeer):
- def __init__(self, ui, path, url, opener, requestbuilder, caps):
- super().__init__(ui, path=path)
+ def __init__(
+ self, ui, path, url, opener, requestbuilder, caps, remotehidden=False
+ ):
+ super().__init__(ui, path=path, remotehidden=remotehidden)
self._url = url
self._caps = caps
self.limitedarguments = caps is not None and b'httppostargs' not in caps
self._urlopener = opener
self._requestbuilder = requestbuilder
+ self._remotehidden = remotehidden
def __del__(self):
for h in self._urlopener.handlers:
@@ -442,6 +454,7 @@
self._url,
cmd,
args,
+ self._remotehidden,
)
resp = sendrequest(self.ui, self._urlopener, req)
@@ -592,7 +605,9 @@
return respurl, info
-def makepeer(ui, path, opener=None, requestbuilder=urlreq.request):
+def _make_peer(
+ ui, path, opener=None, requestbuilder=urlreq.request, remotehidden=False
+):
"""Construct an appropriate HTTP peer instance.
``opener`` is an ``url.opener`` that should be used to establish
@@ -615,11 +630,19 @@
respurl, info = performhandshake(ui, url, opener, requestbuilder)
return httppeer(
- ui, path, respurl, opener, requestbuilder, info[b'v1capabilities']
+ ui,
+ path,
+ respurl,
+ opener,
+ requestbuilder,
+ info[b'v1capabilities'],
+ remotehidden=remotehidden,
)
-def make_peer(ui, path, create, intents=None, createopts=None):
+def make_peer(
+ ui, path, create, intents=None, createopts=None, remotehidden=False
+):
if create:
raise error.Abort(_(b'cannot create new http repository'))
try:
@@ -628,7 +651,7 @@
_(b'Python support for SSL and HTTPS is not installed')
)
- inst = makepeer(ui, path)
+ inst = _make_peer(ui, path, remotehidden=remotehidden)
return inst
except error.RepoError as httpexception:
--- a/mercurial/interfaces/repository.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/interfaces/repository.py Tue May 09 11:35:50 2023 +0200
@@ -388,7 +388,7 @@
limitedarguments = False
- def __init__(self, ui, path=None):
+ def __init__(self, ui, path=None, remotehidden=False):
self.ui = ui
self.path = path
--- a/mercurial/localrepo.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/localrepo.py Tue May 09 11:35:50 2023 +0200
@@ -307,13 +307,17 @@
class localpeer(repository.peer):
'''peer for a local repo; reflects only the most recent API'''
- def __init__(self, repo, caps=None, path=None):
- super(localpeer, self).__init__(repo.ui, path=path)
+ def __init__(self, repo, caps=None, path=None, remotehidden=False):
+ super(localpeer, self).__init__(
+ repo.ui, path=path, remotehidden=remotehidden
+ )
if caps is None:
caps = moderncaps.copy()
- self._repo = repo.filtered(b'served')
-
+ if remotehidden:
+ self._repo = repo.filtered(b'served.hidden')
+ else:
+ self._repo = repo.filtered(b'served')
if repo._wanted_sidedata:
formatted = bundle2.format_remote_wanted_sidedata(repo)
caps.add(b'exp-wanted-sidedata=' + formatted)
@@ -455,8 +459,10 @@
"""peer extension which implements legacy methods too; used for tests with
restricted capabilities"""
- def __init__(self, repo, path=None):
- super(locallegacypeer, self).__init__(repo, caps=legacycaps, path=path)
+ def __init__(self, repo, path=None, remotehidden=False):
+ super(locallegacypeer, self).__init__(
+ repo, caps=legacycaps, path=path, remotehidden=remotehidden
+ )
# Begin of baselegacywirecommands interface.
@@ -1657,8 +1663,10 @@
parts.pop()
return False
- def peer(self, path=None):
- return localpeer(self, path=path) # not cached to avoid reference cycle
+ def peer(self, path=None, remotehidden=False):
+ return localpeer(
+ self, path=path, remotehidden=remotehidden
+ ) # not cached to avoid reference cycle
def unfiltered(self):
"""Return unfiltered version of the repository
--- a/mercurial/mail.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/mail.py Tue May 09 11:35:50 2023 +0200
@@ -54,9 +54,9 @@
self._ui = ui
self._host = host
- def starttls(self, keyfile=None, certfile=None):
+ def starttls(self, keyfile=None, certfile=None, context=None):
if not self.has_extn("starttls"):
- msg = b"STARTTLS extension not supported by server"
+ msg = "STARTTLS extension not supported by server"
raise smtplib.SMTPException(msg)
(resp, reply) = self.docmd("STARTTLS")
if resp == 220:
--- a/mercurial/phases.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/phases.py Tue May 09 11:35:50 2023 +0200
@@ -154,6 +154,7 @@
internal = 96 # non-continuous for compatibility
allphases = (public, draft, secret, archived, internal)
trackedphases = (draft, secret, archived, internal)
+not_public_phases = trackedphases
# record phase names
cmdphasenames = [b'public', b'draft', b'secret'] # known to `hg phase` command
phasenames = dict(enumerate(cmdphasenames))
@@ -171,6 +172,10 @@
remotehiddenphases = (secret, archived, internal)
localhiddenphases = (internal, archived)
+all_internal_phases = tuple(p for p in allphases if p & internal)
+# We do not want any internal content to exit the repository, ever.
+no_bundle_phases = all_internal_phases
+
def supportinternal(repo):
# type: (localrepo.localrepository) -> bool
@@ -826,10 +831,8 @@
cl = repo.changelog
headsbyphase = {i: [] for i in allphases}
- # No need to keep track of secret phase; any heads in the subset that
- # are not mentioned are implicitly secret.
- for phase in allphases[:secret]:
- revset = b"heads(%%ln & %s())" % phasenames[phase]
+ for phase in allphases:
+ revset = b"heads(%%ln & _phase(%d))" % phase
headsbyphase[phase] = [cl.node(r) for r in repo.revs(revset, subset)]
return headsbyphase
--- a/mercurial/repair.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/repair.py Tue May 09 11:35:50 2023 +0200
@@ -34,7 +34,14 @@
def backupbundle(
- repo, bases, heads, node, suffix, compress=True, obsolescence=True
+ repo,
+ bases,
+ heads,
+ node,
+ suffix,
+ compress=True,
+ obsolescence=True,
+ tmp_backup=False,
):
"""create a bundle with the specified revisions as a backup"""
@@ -81,6 +88,7 @@
contentopts,
vfs,
compression=comp,
+ allow_internal=tmp_backup,
)
@@ -197,6 +205,7 @@
b'temp',
compress=False,
obsolescence=False,
+ tmp_backup=True,
)
with ui.uninterruptible():
@@ -335,8 +344,26 @@
def _createstripbackup(repo, stripbases, node, topic):
# backup the changeset we are about to strip
vfs = repo.vfs
- cl = repo.changelog
- backupfile = backupbundle(repo, stripbases, cl.heads(), node, topic)
+ unfi = repo.unfiltered()
+ to_node = unfi.changelog.node
+ # internal changesets are internal implementation details that should not
+ # leave the repository and not be exposed to the users. In addition, features
+ # using them must be resistant to strip. See test case for more
+ # details.
+ all_backup = unfi.revs(
+ b"(%ln)::(%ld) and not _internal()",
+ stripbases,
+ unfi.changelog.headrevs(),
+ )
+ if not all_backup:
+ return None
+
+ def to_nodes(revs):
+ return [to_node(r) for r in revs]
+
+ bases = to_nodes(unfi.revs("roots(%ld)", all_backup))
+ heads = to_nodes(unfi.revs("heads(%ld)", all_backup))
+ backupfile = backupbundle(repo, bases, heads, node, topic)
repo.ui.status(_(b"saved backup bundle to %s\n") % vfs.join(backupfile))
repo.ui.log(
b"backupbundle", b"saved backup bundle to %s\n", vfs.join(backupfile)
--- a/mercurial/revlogutils/flagutil.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/revlogutils/flagutil.py Tue May 09 11:35:50 2023 +0200
@@ -176,8 +176,12 @@
vhash = True
if flag not in revlog._flagprocessors:
+ hint = None
+ if flag == REVIDX_EXTSTORED:
+ hint = _(b"the lfs extension must be enabled")
+
message = _(b"missing processor for flag '%#x'") % flag
- raise revlog._flagserrorclass(message)
+ raise revlog._flagserrorclass(message, hint=hint)
processor = revlog._flagprocessors[flag]
if processor is not None:
--- a/mercurial/revset.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/revset.py Tue May 09 11:35:50 2023 +0200
@@ -1967,6 +1967,12 @@
return repo._phasecache.getrevset(repo, targets, subset)
+@predicate(b'_internal()', safe=True)
+def _internal(repo, subset, x):
+ getargs(x, 0, 0, _(b"_internal takes no arguments"))
+ return _phase(repo, subset, *phases.all_internal_phases)
+
+
@predicate(b'_phase(idx)', safe=True)
def phase(repo, subset, x):
l = getargs(x, 1, 1, b"_phase requires one argument")
@@ -2061,7 +2067,7 @@
@predicate(b'_notpublic', safe=True)
def _notpublic(repo, subset, x):
getargs(x, 0, 0, b"_notpublic takes no arguments")
- return _phase(repo, subset, phases.draft, phases.secret)
+ return _phase(repo, subset, *phases.not_public_phases)
# for internal use
--- a/mercurial/scmutil.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/scmutil.py Tue May 09 11:35:50 2023 +0200
@@ -2313,3 +2313,13 @@
mark,
mark,
)
+
+
+def ismember(ui, username, userlist):
+ """Check if username is a member of userlist.
+
+ If userlist has a single '*' member, all users are considered members.
+ Can be overridden by extensions to provide more complex authorization
+ schemes.
+ """
+ return userlist == [b'*'] or username in userlist
--- a/mercurial/sshpeer.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/sshpeer.py Tue May 09 11:35:50 2023 +0200
@@ -177,7 +177,9 @@
ui.develwarn(b'missing close on SSH connection created at:\n%s' % warn)
-def _makeconnection(ui, sshcmd, args, remotecmd, path, sshenv=None):
+def _makeconnection(
+ ui, sshcmd, args, remotecmd, path, sshenv=None, remotehidden=False
+):
"""Create an SSH connection to a server.
Returns a tuple of (process, stdin, stdout, stderr) for the
@@ -187,8 +189,12 @@
sshcmd,
args,
procutil.shellquote(
- b'%s -R %s serve --stdio'
- % (_serverquote(remotecmd), _serverquote(path))
+ b'%s -R %s serve --stdio%s'
+ % (
+ _serverquote(remotecmd),
+ _serverquote(path),
+ b' --hidden' if remotehidden else b'',
+ )
),
)
@@ -372,7 +378,16 @@
class sshv1peer(wireprotov1peer.wirepeer):
def __init__(
- self, ui, path, proc, stdin, stdout, stderr, caps, autoreadstderr=True
+ self,
+ ui,
+ path,
+ proc,
+ stdin,
+ stdout,
+ stderr,
+ caps,
+ autoreadstderr=True,
+ remotehidden=False,
):
"""Create a peer from an existing SSH connection.
@@ -383,7 +398,7 @@
``autoreadstderr`` denotes whether to automatically read from
stderr and to forward its output.
"""
- super().__init__(ui, path=path)
+ super().__init__(ui, path=path, remotehidden=remotehidden)
# self._subprocess is unused. Keeping a handle on the process
# holds a reference and prevents it from being garbage collected.
self._subprocess = proc
@@ -400,6 +415,7 @@
self._caps = caps
self._autoreadstderr = autoreadstderr
self._initstack = b''.join(util.getstackframes(1))
+ self._remotehidden = remotehidden
# Commands that have a "framed" response where the first line of the
# response contains the length of that response.
@@ -568,7 +584,16 @@
self._readerr()
-def makepeer(ui, path, proc, stdin, stdout, stderr, autoreadstderr=True):
+def _make_peer(
+ ui,
+ path,
+ proc,
+ stdin,
+ stdout,
+ stderr,
+ autoreadstderr=True,
+ remotehidden=False,
+):
"""Make a peer instance from existing pipes.
``path`` and ``proc`` are stored on the eventual peer instance and may
@@ -598,6 +623,7 @@
stderr,
caps,
autoreadstderr=autoreadstderr,
+ remotehidden=remotehidden,
)
else:
_cleanuppipes(ui, stdout, stdin, stderr, warn=None)
@@ -606,7 +632,9 @@
)
-def make_peer(ui, path, create, intents=None, createopts=None):
+def make_peer(
+ ui, path, create, intents=None, createopts=None, remotehidden=False
+):
"""Create an SSH peer.
The returned object conforms to the ``wireprotov1peer.wirepeer`` interface.
@@ -655,10 +683,18 @@
raise error.RepoError(_(b'could not create remote repo'))
proc, stdin, stdout, stderr = _makeconnection(
- ui, sshcmd, args, remotecmd, remotepath, sshenv
+ ui,
+ sshcmd,
+ args,
+ remotecmd,
+ remotepath,
+ sshenv,
+ remotehidden=remotehidden,
)
- peer = makepeer(ui, path, proc, stdin, stdout, stderr)
+ peer = _make_peer(
+ ui, path, proc, stdin, stdout, stderr, remotehidden=remotehidden
+ )
# Finally, if supported by the server, notify it about our own
# capabilities.
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/stabletailgraph/stabletailsort.py Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,111 @@
+# stabletailsort.py - stable ordering of revisions
+#
+# Copyright 2021-2023 Pacien TRAN-GIRARD <pacien.trangirard@pacien.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""
+Stable-tail sort computation.
+
+The "stable-tail sort", or STS, is a reverse topological ordering of the
+ancestors of a node, which tends to share large suffixes with the stable-tail
+sort of ancestors and other nodes, giving it its name.
+
+Its properties should make it suitable for making chunks of ancestors with high
+reuse and incrementality for example.
+
+This module and implementation are experimental. Most functions are not yet
+optimised to operate on large production graphs.
+"""
+
+import itertools
+from ..node import nullrev
+from .. import ancestor
+
+
+def _sorted_parents(cl, p1, p2):
+ """
+ Chooses and returns the pair (px, pt) from (p1, p2).
+
+ Where
+ "px" denotes the parent starting the "exclusive" part, and
+ "pt" denotes the parent starting the "Tail" part.
+
+ "px" is chosen as the parent with the lowest rank with the goal of
+ minimising the size of the exclusive part and maximise the size of the
+ tail part, hopefully reducing the overall complexity of the stable-tail
+ sort.
+
+ In case of equal ranks, the stable node ID is used as a tie-breaker.
+ """
+ r1, r2 = cl.fast_rank(p1), cl.fast_rank(p2)
+ if r1 < r2:
+ return (p1, p2)
+ elif r1 > r2:
+ return (p2, p1)
+ elif cl.node(p1) < cl.node(p2):
+ return (p1, p2)
+ else:
+ return (p2, p1)
+
+
+def _nonoedipal_parent_revs(cl, rev):
+ """
+ Returns the non-œdipal parent pair of the given revision.
+
+ An œdipal merge is a merge with parents p1, p2 with either
+ p1 in ancestors(p2) or p2 in ancestors(p1).
+ In the first case, p1 is the œdipal parent.
+ In the second case, p2 is the œdipal parent.
+
+ Œdipal edges start empty exclusive parts. They do not bring new ancestors.
+ As such, they can be skipped when computing any topological sort or any
+ iteration over the ancestors of a node.
+
+ The œdipal edges are eliminated here using the rank information.
+ """
+ p1, p2 = cl.parentrevs(rev)
+ if p1 == nullrev or cl.fast_rank(p2) == cl.fast_rank(rev) - 1:
+ return p2, nullrev
+ elif p2 == nullrev or cl.fast_rank(p1) == cl.fast_rank(rev) - 1:
+ return p1, nullrev
+ else:
+ return p1, p2
+
+
+def _stable_tail_sort(cl, head_rev):
+ """
+ Naive topological iterator of the ancestors given by the stable-tail sort.
+
+ The stable-tail sort of a node "h" is defined as the sequence:
+ sts(h) := [h] + excl(h) + sts(pt(h))
+ where excl(h) := u for u in sts(px(h)) if u not in ancestors(pt(h))
+
+ This implementation uses a call-stack whose size is
+ O(number of open merges).
+
+ As such, this implementation exists mainly as a defining reference.
+ """
+ cursor_rev = head_rev
+ while cursor_rev != nullrev:
+ yield cursor_rev
+
+ p1, p2 = _nonoedipal_parent_revs(cl, cursor_rev)
+ if p1 == nullrev:
+ cursor_rev = p2
+ elif p2 == nullrev:
+ cursor_rev = p1
+ else:
+ px, pt = _sorted_parents(cl, p1, p2)
+
+ tail_ancestors = ancestor.lazyancestors(
+ cl.parentrevs, (pt,), inclusive=True
+ )
+ exclusive_ancestors = (
+ a for a in _stable_tail_sort(cl, px) if a not in tail_ancestors
+ )
+
+ excl_part_size = cl.fast_rank(cursor_rev) - cl.fast_rank(pt) - 1
+ yield from itertools.islice(exclusive_ancestors, excl_part_size)
+ cursor_rev = pt
--- a/mercurial/statichttprepo.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/statichttprepo.py Tue May 09 11:35:50 2023 +0200
@@ -119,7 +119,7 @@
def http_error_416(self, req, fp, code, msg, hdrs):
# HTTP's Range Not Satisfiable error
- raise _RangeError(b'Requested Range Not Satisfiable')
+ raise _RangeError('Requested Range Not Satisfiable')
def build_opener(ui, authinfo):
@@ -134,13 +134,13 @@
def __call__(self, path, mode=b'r', *args, **kw):
if mode not in (b'r', b'rb'):
- raise IOError(b'Permission denied')
+ raise IOError('Permission denied')
f = b"/".join((self.base, urlreq.quote(path)))
return httprangereader(f, urlopener)
- def join(self, path):
+ def join(self, path, *insidef):
if path:
- return pathutil.join(self.base, path)
+ return pathutil.join(self.base, path, *insidef)
else:
return self.base
@@ -237,8 +237,8 @@
def local(self):
return False
- def peer(self, path=None):
- return statichttppeer(self, path=path)
+ def peer(self, path=None, remotehidden=False):
+ return statichttppeer(self, path=path, remotehidden=remotehidden)
def wlock(self, wait=True):
raise error.LockUnavailable(
@@ -260,8 +260,12 @@
pass # statichttprepository are read only
-def make_peer(ui, path, create, intents=None, createopts=None):
+def make_peer(
+ ui, path, create, intents=None, createopts=None, remotehidden=False
+):
if create:
raise error.Abort(_(b'cannot create new static-http repository'))
url = path.loc[7:]
- return statichttprepository(ui, url).peer(path=path)
+ return statichttprepository(ui, url).peer(
+ path=path, remotehidden=remotehidden
+ )
--- a/mercurial/store.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/store.py Tue May 09 11:35:50 2023 +0200
@@ -524,7 +524,7 @@
yield (FILETYPE_OTHER | t, u, s)
def walk(self, matcher=None):
- """return file related to data storage (ie: revlogs)
+ """return files related to data storage (ie: revlogs)
yields (file_type, unencoded, size)
--- a/mercurial/templates/json/map Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/templates/json/map Tue May 09 11:35:50 2023 +0200
@@ -65,6 +65,7 @@
"tags": [{join(changesettag, ", ")}],
"user": {author|utf8|json},
"parents": [{join(parent%changesetparent, ", ")}],
+ "children": [{join(child%changesetparent, ", ")}],
"files": [{join(files, ", ")}],
"diff": [{join(diff, ", ")}],
"phase": {phase|json}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/mercurial/thirdparty/jaraco/collections.py Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,56 @@
+# adapted from jaraco.collections 3.9
+
+import collections
+
+
+class Projection(collections.abc.Mapping):
+ """
+ Project a set of keys over a mapping
+
+ >>> sample = {'a': 1, 'b': 2, 'c': 3}
+ >>> prj = Projection(['a', 'c', 'd'], sample)
+ >>> prj == {'a': 1, 'c': 3}
+ True
+
+ Keys should only appear if they were specified and exist in the space.
+
+ >>> sorted(list(prj.keys()))
+ ['a', 'c']
+
+ Attempting to access a key not in the projection
+ results in a KeyError.
+
+ >>> prj['b']
+ Traceback (most recent call last):
+ ...
+ KeyError: 'b'
+
+ Use the projection to update another dict.
+
+ >>> target = {'a': 2, 'b': 2}
+ >>> target.update(prj)
+ >>> target == {'a': 1, 'b': 2, 'c': 3}
+ True
+
+ Also note that Projection keeps a reference to the original dict, so
+ if you modify the original dict, that could modify the Projection.
+
+ >>> del sample['a']
+ >>> dict(prj)
+ {'c': 3}
+ """
+
+ def __init__(self, keys, space):
+ self._keys = tuple(keys)
+ self._space = space
+
+ def __getitem__(self, key):
+ if key not in self._keys:
+ raise KeyError(key)
+ return self._space[key]
+
+ def __iter__(self):
+ return iter(set(self._keys).intersection(self._space))
+
+ def __len__(self):
+ return len(tuple(iter(self)))
--- a/mercurial/thirdparty/sha1dc/lib/sha1.c Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/thirdparty/sha1dc/lib/sha1.c Tue May 09 11:35:50 2023 +0200
@@ -102,6 +102,10 @@
*/
#define SHA1DC_BIGENDIAN
+#elif (defined(__APPLE__) && defined(__BIG_ENDIAN__) && !defined(SHA1DC_BIGENDIAN))
+/* older gcc compilers which are the default on Apple PPC do not define __BYTE_ORDER__ */
+#define SHA1DC_BIGENDIAN
+
/* Not under GCC-alike or glibc or *BSD or newlib or <processor whitelist> or <os whitelist> */
#elif defined(SHA1DC_ON_INTEL_LIKE_PROCESSOR)
/*
--- a/mercurial/transaction.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/transaction.py Tue May 09 11:35:50 2023 +0200
@@ -314,7 +314,7 @@
self._abortcallback = {}
def __repr__(self):
- name = '/'.join(self._names)
+ name = b'/'.join(self._names)
return '<transaction name=%s, count=%d, usages=%d>' % (
name,
self._count,
--- a/mercurial/ui.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/ui.py Tue May 09 11:35:50 2023 +0200
@@ -1107,10 +1107,16 @@
def fout(self):
return self._fout
+ @util.propertycache
+ def _fout_is_a_tty(self):
+ return self._isatty(self._fout)
+
@fout.setter
def fout(self, f):
self._fout = f
self._fmsgout, self._fmsgerr = _selectmsgdests(self)
+ if '_fout_is_a_tty' in vars(self):
+ del self._fout_is_a_tty
@property
def ferr(self):
@@ -1234,7 +1240,7 @@
return
# inlined _writenobuf() for speed
- if not opts.get('keepprogressbar', False):
+ if not opts.get('keepprogressbar', self._fout_is_a_tty):
self._progclear()
msg = b''.join(args)
@@ -1273,7 +1279,7 @@
def _writenobuf(self, dest, *args: bytes, **opts: _MsgOpts) -> None:
# update write() as well if you touch this code
- if not opts.get('keepprogressbar', False):
+ if not opts.get('keepprogressbar', self._fout_is_a_tty):
self._progclear()
msg = b''.join(args)
--- a/mercurial/unionrepo.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/unionrepo.py Tue May 09 11:35:50 2023 +0200
@@ -270,8 +270,8 @@
def cancopy(self):
return False
- def peer(self, path=None):
- return unionpeer(self, path=None)
+ def peer(self, path=None, remotehidden=False):
+ return unionpeer(self, path=None, remotehidden=remotehidden)
def getcwd(self):
return encoding.getcwd() # always outside the repo
--- a/mercurial/wireprotoserver.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/wireprotoserver.py Tue May 09 11:35:50 2023 +0200
@@ -317,7 +317,8 @@
proto.checkperm(wireprotov1server.commands[cmd].permission)
- rsp = wireprotov1server.dispatch(repo, proto, cmd)
+ accesshidden = hgwebcommon.hashiddenaccess(repo, req)
+ rsp = wireprotov1server.dispatch(repo, proto, cmd, accesshidden)
if isinstance(rsp, bytes):
setresponse(HTTP_OK, HGTYPE, bodybytes=rsp)
@@ -445,7 +446,7 @@
pass
-def _runsshserver(ui, repo, fin, fout, ev):
+def _runsshserver(ui, repo, fin, fout, ev, accesshidden=False):
# This function operates like a state machine of sorts. The following
# states are defined:
#
@@ -486,7 +487,9 @@
_sshv1respondbytes(fout, b'')
continue
- rsp = wireprotov1server.dispatch(repo, proto, request)
+ rsp = wireprotov1server.dispatch(
+ repo, proto, request, accesshidden=accesshidden
+ )
repo.ui.fout.flush()
repo.ui.ferr.flush()
@@ -521,10 +524,11 @@
class sshserver:
- def __init__(self, ui, repo, logfh=None):
+ def __init__(self, ui, repo, logfh=None, accesshidden=False):
self._ui = ui
self._repo = repo
self._fin, self._fout = ui.protectfinout()
+ self._accesshidden = accesshidden
# Log write I/O to stdout and stderr if configured.
if logfh:
@@ -541,4 +545,6 @@
def serveuntil(self, ev):
"""Serve until a threading.Event is set."""
- _runsshserver(self._ui, self._repo, self._fin, self._fout, ev)
+ _runsshserver(
+ self._ui, self._repo, self._fin, self._fout, ev, self._accesshidden
+ )
--- a/mercurial/wireprotov1server.py Thu May 04 14:17:28 2023 +0200
+++ b/mercurial/wireprotov1server.py Tue May 09 11:35:50 2023 +0200
@@ -23,6 +23,7 @@
exchange,
pushkey as pushkeymod,
pycompat,
+ repoview,
requirements as requirementsmod,
streamclone,
util,
@@ -60,7 +61,7 @@
# wire protocol command can either return a string or one of these classes.
-def getdispatchrepo(repo, proto, command):
+def getdispatchrepo(repo, proto, command, accesshidden=False):
"""Obtain the repo used for processing wire protocol commands.
The intent of this function is to serve as a monkeypatch point for
@@ -68,11 +69,21 @@
specialized circumstances.
"""
viewconfig = repo.ui.config(b'server', b'view')
+
+ # Only works if the filter actually supports being upgraded to show hidden
+ # changesets.
+ if (
+ accesshidden
+ and viewconfig is not None
+ and viewconfig + b'.hidden' in repoview.filtertable
+ ):
+ viewconfig += b'.hidden'
+
return repo.filtered(viewconfig)
-def dispatch(repo, proto, command):
- repo = getdispatchrepo(repo, proto, command)
+def dispatch(repo, proto, command, accesshidden=False):
+ repo = getdispatchrepo(repo, proto, command, accesshidden=accesshidden)
func, spec = commands[command]
args = proto.getargs(spec)
--- a/rust/README.rst Thu May 04 14:17:28 2023 +0200
+++ b/rust/README.rst Tue May 09 11:35:50 2023 +0200
@@ -7,17 +7,19 @@
improves performance in some areas.
There are currently four independent Rust projects:
+
- chg. An implementation of chg, in Rust instead of C.
- hgcli. A project that provides a (mostly) self-contained "hg" binary,
for ease of deployment and a bit of speed, using PyOxidizer. See
- hgcli/README.md.
+ ``hgcli/README.md``.
- hg-core (and hg-cpython): implementation of some
functionality of mercurial in Rust, e.g. ancestry computations in
revision graphs, status or pull discovery. The top-level ``Cargo.toml`` file
defines a workspace containing these crates.
- rhg: a pure Rust implementation of Mercurial, with a fallback mechanism for
- unsupported invocations. It reuses the logic `hg-core` but completely forgoes
- interaction with Python. See `rust/rhg/README.md` for more details.
+ unsupported invocations. It reuses the logic ``hg-core`` but
+ completely forgoes interaction with Python. See
+ ``rust/rhg/README.md`` for more details.
Using Rust code
===============
@@ -41,10 +43,10 @@
================
In the future, compile-time opt-ins may be added
-to the `features` section in ``hg-cpython/Cargo.toml``.
+to the ``features`` section in ``hg-cpython/Cargo.toml``.
-To use features from the Makefile, use the `HG_RUST_FEATURES` environment
-variable: for instance `HG_RUST_FEATURES="some-feature other-feature"`
+To use features from the Makefile, use the ``HG_RUST_FEATURES`` environment
+variable: for instance ``HG_RUST_FEATURES="some-feature other-feature"``.
Profiling
=========
@@ -57,7 +59,7 @@
Creating a ``.cargo/config`` file with the following content enables
debug information in optimized builds. This make profiles more informative
with source file name and line number for Rust stack frames and
-(in some cases) stack frames for Rust functions that have been inlined.
+(in some cases) stack frames for Rust functions that have been inlined::
[profile.release]
debug = true
@@ -69,7 +71,7 @@
as opposed to tools for native code like ``perf``, which attribute
time to the python interpreter instead of python functions).
-Example usage:
+Example usage::
$ make PURE=--rust local # Don't forget to recompile after a code change
$ py-spy record --native --output /tmp/profile.svg -- ./hg ...
@@ -77,9 +79,25 @@
Developing Rust
===============
-The current version of Rust in use is ``1.61.0``, because it's what Debian
-testing has. You can use ``rustup override set 1.61.0`` at the root of the repo
-to make it easier on you.
+Minimum Supported Rust Version
+------------------------------
+
+The minimum supported rust version (MSRV) is specified in the `Clippy`_
+configuration file at ``rust/clippy.toml``. It is set to be ``1.61.0`` as of
+this writing, but keep in mind that the authoritative value is the one
+from the configuration file.
+
+We bump it from time to time, with the general rule being that our
+MSRV should not be greater than the version of the Rust toolchain
+shipping with Debian testing, so that the Rust enhanced Mercurial can
+be eventually packaged in Debian.
+
+To ensure that you are not depending on features introduced in later
+versions, you can issue ``rustup override set x.y.z`` at the root of
+the repository.
+
+Build and development
+---------------------
Go to the ``hg-cpython`` folder::
@@ -117,8 +135,28 @@
using the nightly version because it has been stable enough and provides
comment folding.
-To format the entire Rust workspace::
+Our CI enforces that the code does not need reformatting. Before
+submitting your changes, please format the entire Rust workspace by running::
+
$ cargo +nightly fmt
This requires you to have the nightly toolchain installed.
+
+Linting: code sanity
+--------------------
+
+We're using `Clippy`_, the standard code diagnosis tool of the Rust
+community.
+
+Our CI enforces that the code is free of Clippy warnings, so you might
+want to run it on your side before submitting your changes. Simply do::
+
+ % cargo clippy
+
+from the top of the Rust workspace. Clippy is part of the default
+``rustup`` install, so it should work right away. In case it would
+not, you can install it with ``rustup component add``.
+
+
+.. _Clippy: https://doc.rust-lang.org/stable/clippy/
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/rust/clippy.toml Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,1 @@
+msrv = "1.61.0"
--- a/rust/hg-core/src/checkexec.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/checkexec.rs Tue May 09 11:35:50 2023 +0200
@@ -112,8 +112,10 @@
Ok(false)
}
-/// This function is a rust rewrite of [checkexec] function from [posix.py]
-/// Returns true if the filesystem supports execute permissions.
+/// This function is a Rust rewrite of the `checkexec` function from
+/// `posix.py`.
+///
+/// Returns `true` if the filesystem supports execute permissions.
pub fn check_exec(path: impl AsRef<Path>) -> bool {
check_exec_impl(path).unwrap_or(false)
}
--- a/rust/hg-core/src/revlog/changelog.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/revlog/changelog.rs Tue May 09 11:35:50 2023 +0200
@@ -1,6 +1,6 @@
use crate::errors::HgError;
-use crate::revlog::Revision;
use crate::revlog::{Node, NodePrefix};
+use crate::revlog::{Revision, NULL_REVISION};
use crate::revlog::{Revlog, RevlogEntry, RevlogError};
use crate::utils::hg_path::HgPath;
use crate::vfs::Vfs;
@@ -9,7 +9,7 @@
use std::borrow::Cow;
use std::fmt::{Debug, Formatter};
-/// A specialized `Revlog` to work with `changelog` data format.
+/// A specialized `Revlog` to work with changelog data format.
pub struct Changelog {
/// The generic `revlog` format.
pub(crate) revlog: Revlog,
@@ -23,7 +23,7 @@
Ok(Self { revlog })
}
- /// Return the `ChangelogEntry` for the given node ID.
+ /// Return the `ChangelogRevisionData` for the given node ID.
pub fn data_for_node(
&self,
node: NodePrefix,
@@ -32,30 +32,29 @@
self.data_for_rev(rev)
}
- /// Return the `RevlogEntry` of the given revision number.
+ /// Return the [`ChangelogEntry`] for the given revision number.
pub fn entry_for_rev(
&self,
rev: Revision,
- ) -> Result<RevlogEntry, RevlogError> {
- self.revlog.get_entry(rev)
+ ) -> Result<ChangelogEntry, RevlogError> {
+ let revlog_entry = self.revlog.get_entry(rev)?;
+ Ok(ChangelogEntry { revlog_entry })
}
- /// Return the `ChangelogEntry` of the given revision number.
+ /// Return the [`ChangelogRevisionData`] for the given revision number.
+ ///
+ /// This is a useful shortcut in case the caller does not need the
+ /// generic revlog information (parents, hashes etc). Otherwise
+ /// consider taking a [`ChangelogEntry`] with
+ /// [entry_for_rev](`Self::entry_for_rev`) and doing everything from there.
pub fn data_for_rev(
&self,
rev: Revision,
) -> Result<ChangelogRevisionData, RevlogError> {
- let bytes = self.revlog.get_rev_data(rev)?;
- if bytes.is_empty() {
- Ok(ChangelogRevisionData::null())
- } else {
- Ok(ChangelogRevisionData::new(bytes).map_err(|err| {
- RevlogError::Other(HgError::CorruptedRepository(format!(
- "Invalid changelog data for revision {}: {:?}",
- rev, err
- )))
- })?)
+ if rev == NULL_REVISION {
+ return Ok(ChangelogRevisionData::null());
}
+ self.entry_for_rev(rev)?.data()
}
pub fn node_from_rev(&self, rev: Revision) -> Option<&Node> {
@@ -70,6 +69,59 @@
}
}
+/// A specialized `RevlogEntry` for `changelog` data format
+///
+/// This is a `RevlogEntry` with the added semantics that the associated
+/// data should meet the requirements for `changelog`, materialized by
+/// the fact that `data()` constructs a `ChangelogRevisionData`.
+/// In case that promise would be broken, the `data` method returns an error.
+#[derive(Clone)]
+pub struct ChangelogEntry<'changelog> {
+ /// Same data, as a generic `RevlogEntry`.
+ pub(crate) revlog_entry: RevlogEntry<'changelog>,
+}
+
+impl<'changelog> ChangelogEntry<'changelog> {
+ pub fn data<'a>(
+ &'a self,
+ ) -> Result<ChangelogRevisionData<'changelog>, RevlogError> {
+ let bytes = self.revlog_entry.data()?;
+ if bytes.is_empty() {
+ Ok(ChangelogRevisionData::null())
+ } else {
+ Ok(ChangelogRevisionData::new(bytes).map_err(|err| {
+ RevlogError::Other(HgError::CorruptedRepository(format!(
+ "Invalid changelog data for revision {}: {:?}",
+ self.revlog_entry.revision(),
+ err
+ )))
+ })?)
+ }
+ }
+
+ /// Obtain a reference to the underlying `RevlogEntry`.
+ ///
+ /// This allows the caller to access the information that is common
+ /// to all revlog entries: revision number, node id, parent revisions etc.
+ pub fn as_revlog_entry(&self) -> &RevlogEntry {
+ &self.revlog_entry
+ }
+
+ pub fn p1_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
+ Ok(self
+ .revlog_entry
+ .p1_entry()?
+ .map(|revlog_entry| Self { revlog_entry }))
+ }
+
+ pub fn p2_entry(&self) -> Result<Option<ChangelogEntry>, RevlogError> {
+ Ok(self
+ .revlog_entry
+ .p2_entry()?
+ .map(|revlog_entry| Self { revlog_entry }))
+ }
+}
+
/// `Changelog` entry which knows how to interpret the `changelog` data bytes.
#[derive(PartialEq)]
pub struct ChangelogRevisionData<'changelog> {
@@ -215,6 +267,8 @@
#[cfg(test)]
mod tests {
use super::*;
+ use crate::vfs::Vfs;
+ use crate::NULL_REVISION;
use pretty_assertions::assert_eq;
#[test]
@@ -268,4 +322,20 @@
);
assert_eq!(data.description(), b"some\ncommit\nmessage");
}
+
+ #[test]
+ fn test_data_from_rev_null() -> Result<(), RevlogError> {
+ // an empty revlog will be enough for this case
+ let temp = tempfile::tempdir().unwrap();
+ let vfs = Vfs { base: temp.path() };
+ std::fs::write(temp.path().join("foo.i"), b"").unwrap();
+ let revlog = Revlog::open(&vfs, "foo.i", None, false).unwrap();
+
+ let changelog = Changelog { revlog };
+ assert_eq!(
+ changelog.data_for_rev(NULL_REVISION)?,
+ ChangelogRevisionData::null()
+ );
+ Ok(())
+ }
}
--- a/rust/hg-core/src/revlog/mod.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/revlog/mod.rs Tue May 09 11:35:50 2023 +0200
@@ -400,10 +400,10 @@
/// The revlog entry's bytes and the necessary informations to extract
/// the entry's data.
#[derive(Clone)]
-pub struct RevlogEntry<'a> {
- revlog: &'a Revlog,
+pub struct RevlogEntry<'revlog> {
+ revlog: &'revlog Revlog,
rev: Revision,
- bytes: &'a [u8],
+ bytes: &'revlog [u8],
compressed_len: u32,
uncompressed_len: i32,
base_rev_or_base_of_delta_chain: Option<Revision>,
@@ -413,7 +413,7 @@
hash: Node,
}
-impl<'a> RevlogEntry<'a> {
+impl<'revlog> RevlogEntry<'revlog> {
pub fn revision(&self) -> Revision {
self.rev
}
@@ -430,7 +430,9 @@
self.p1 != NULL_REVISION
}
- pub fn p1_entry(&self) -> Result<Option<RevlogEntry>, RevlogError> {
+ pub fn p1_entry(
+ &self,
+ ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
if self.p1 == NULL_REVISION {
Ok(None)
} else {
@@ -438,7 +440,9 @@
}
}
- pub fn p2_entry(&self) -> Result<Option<RevlogEntry>, RevlogError> {
+ pub fn p2_entry(
+ &self,
+ ) -> Result<Option<RevlogEntry<'revlog>>, RevlogError> {
if self.p2 == NULL_REVISION {
Ok(None)
} else {
@@ -473,7 +477,7 @@
}
/// The data for this entry, after resolving deltas if any.
- pub fn rawdata(&self) -> Result<Cow<'a, [u8]>, HgError> {
+ pub fn rawdata(&self) -> Result<Cow<'revlog, [u8]>, HgError> {
let mut entry = self.clone();
let mut delta_chain = vec![];
@@ -503,8 +507,8 @@
fn check_data(
&self,
- data: Cow<'a, [u8]>,
- ) -> Result<Cow<'a, [u8]>, HgError> {
+ data: Cow<'revlog, [u8]>,
+ ) -> Result<Cow<'revlog, [u8]>, HgError> {
if self.revlog.check_hash(
self.p1,
self.p2,
@@ -525,7 +529,7 @@
}
}
- pub fn data(&self) -> Result<Cow<'a, [u8]>, HgError> {
+ pub fn data(&self) -> Result<Cow<'revlog, [u8]>, HgError> {
let data = self.rawdata()?;
if self.is_censored() {
return Err(HgError::CensoredNodeError);
@@ -535,7 +539,7 @@
/// Extract the data contained in the entry.
/// This may be a delta. (See `is_delta`.)
- fn data_chunk(&self) -> Result<Cow<'a, [u8]>, HgError> {
+ fn data_chunk(&self) -> Result<Cow<'revlog, [u8]>, HgError> {
if self.bytes.is_empty() {
return Ok(Cow::Borrowed(&[]));
}
--- a/rust/hg-core/src/revlog/nodemap.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/revlog/nodemap.rs Tue May 09 11:35:50 2023 +0200
@@ -25,6 +25,9 @@
#[derive(Debug, PartialEq)]
pub enum NodeMapError {
+ /// A `NodePrefix` matches several [`Revision`]s.
+ ///
+ /// This can be returned by methods meant for (at most) one match.
MultipleResults,
/// A `Revision` stored in the nodemap could not be found in the index
RevisionNotInIndex(Revision),
@@ -35,8 +38,8 @@
/// ## `RevlogIndex` and `NodeMap`
///
/// One way to think about their relationship is that
-/// the `NodeMap` is a prefix-oriented reverse index of the `Node` information
-/// carried by a [`RevlogIndex`].
+/// the `NodeMap` is a prefix-oriented reverse index of the [`Node`]
+/// information carried by a [`RevlogIndex`].
///
/// Many of the methods in this trait take a `RevlogIndex` argument
/// which is used for validation of their results. This index must naturally
@@ -45,14 +48,10 @@
/// Notably, the `NodeMap` must not store
/// information about more `Revision` values than there are in the index.
/// In these methods, an encountered `Revision` is not in the index, a
-/// [`RevisionNotInIndex`] error is returned.
+/// [RevisionNotInIndex](NodeMapError) error is returned.
///
/// In insert operations, the rule is thus that the `NodeMap` must always
-/// be updated after the `RevlogIndex`
-/// be updated first, and the `NodeMap` second.
-///
-/// [`RevisionNotInIndex`]: enum.NodeMapError.html#variant.RevisionNotInIndex
-/// [`RevlogIndex`]: ../trait.RevlogIndex.html
+/// be updated after the `RevlogIndex` it is about.
pub trait NodeMap {
/// Find the unique `Revision` having the given `Node`
///
@@ -69,8 +68,8 @@
///
/// If no Revision matches the given prefix, `Ok(None)` is returned.
///
- /// If several Revisions match the given prefix, a [`MultipleResults`]
- /// error is returned.
+ /// If several Revisions match the given prefix, a
+ /// [MultipleResults](NodeMapError) error is returned.
fn find_bin(
&self,
idx: &impl RevlogIndex,
@@ -84,17 +83,18 @@
/// returns the number of hexadecimal digits that would had sufficed
/// to find the revision uniquely.
///
- /// Returns `None` if no `Revision` could be found for the prefix.
+ /// Returns `None` if no [`Revision`] could be found for the prefix.
///
- /// If several Revisions match the given prefix, a [`MultipleResults`]
- /// error is returned.
+ /// If several Revisions match the given prefix, a
+ /// [MultipleResults](NodeMapError) error is returned.
fn unique_prefix_len_bin(
&self,
idx: &impl RevlogIndex,
node_prefix: NodePrefix,
) -> Result<Option<usize>, NodeMapError>;
- /// Same as `unique_prefix_len_bin`, with a full `Node` as input
+ /// Same as [unique_prefix_len_bin](Self::unique_prefix_len_bin), with
+ /// a full [`Node`] as input
fn unique_prefix_len_node(
&self,
idx: &impl RevlogIndex,
@@ -113,7 +113,7 @@
) -> Result<(), NodeMapError>;
}
-/// Low level NodeTree [`Blocks`] elements
+/// Low level NodeTree [`Block`] elements
///
/// These are exactly as for instance on persistent storage.
type RawElement = unaligned::I32Be;
@@ -156,7 +156,9 @@
}
}
-/// A logical block of the `NodeTree`, packed with a fixed size.
+const ELEMENTS_PER_BLOCK: usize = 16; // number of different values in a nybble
+
+/// A logical block of the [`NodeTree`], packed with a fixed size.
///
/// These are always used in container types implementing `Index<Block>`,
/// such as `&Block`
@@ -167,21 +169,18 @@
///
/// - absent (value -1)
/// - another `Block` in the same indexable container (value ≥ 0)
-/// - a `Revision` leaf (value ≤ -2)
+/// - a [`Revision`] leaf (value ≤ -2)
///
/// Endianness has to be fixed for consistency on shared storage across
/// different architectures.
///
/// A key difference with the C `nodetree` is that we need to be
/// able to represent the [`Block`] at index 0, hence -1 is the empty marker
-/// rather than 0 and the `Revision` range upper limit of -2 instead of -1.
+/// rather than 0 and the [`Revision`] range upper limit of -2 instead of -1.
///
/// Another related difference is that `NULL_REVISION` (-1) is not
/// represented at all, because we want an immutable empty nodetree
/// to be valid.
-
-const ELEMENTS_PER_BLOCK: usize = 16; // number of different values in a nybble
-
#[derive(Copy, Clone, BytesCast, PartialEq)]
#[repr(transparent)]
pub struct Block([RawElement; ELEMENTS_PER_BLOCK]);
@@ -218,7 +217,7 @@
/// Because of the append only nature of our node trees, we need to
/// keep the original untouched and store new blocks separately.
///
-/// The mutable root `Block` is kept apart so that we don't have to rebump
+/// The mutable root [`Block`] is kept apart so that we don't have to rebump
/// it on each insertion.
pub struct NodeTree {
readonly: Box<dyn Deref<Target = [Block]> + Send>,
@@ -242,7 +241,7 @@
}
}
-/// Return `None` unless the `Node` for `rev` has given prefix in `index`.
+/// Return `None` unless the [`Node`] for `rev` has given prefix in `idx`.
fn has_prefix_or_none(
idx: &impl RevlogIndex,
prefix: NodePrefix,
@@ -260,7 +259,7 @@
}
/// validate that the candidate's node starts indeed with given prefix,
-/// and treat ambiguities related to `NULL_REVISION`.
+/// and treat ambiguities related to [`NULL_REVISION`].
///
/// From the data in the NodeTree, one can only conclude that some
/// revision is the only one for a *subprefix* of the one being looked up.
@@ -304,12 +303,10 @@
/// Create from an opaque bunch of bytes
///
- /// The created `NodeTreeBytes` from `buffer`,
+ /// The [`NodeTreeBytes`] is created from `bytes`,
/// of which exactly `amount` bytes are used.
///
/// - `buffer` could be derived from `PyBuffer` and `Mmap` objects.
- /// - `offset` allows for the final file format to include fixed data
- /// (generation number, behavioural flags)
/// - `amount` is expressed in bytes, and is not automatically derived from
/// `bytes`, so that a caller that manages them atomically can perform
/// temporary disk serializations and still rollback easily if needed.
@@ -323,7 +320,7 @@
NodeTree::new(Box::new(NodeTreeBytes::new(bytes, amount)))
}
- /// Retrieve added `Block` and the original immutable data
+ /// Retrieve added [`Block`]s and the original immutable data
pub fn into_readonly_and_added(
self,
) -> (Box<dyn Deref<Target = [Block]> + Send>, Vec<Block>) {
@@ -335,7 +332,7 @@
(readonly, vec)
}
- /// Retrieve added `Blocks` as bytes, ready to be written to persistent
+ /// Retrieve added [`Block`]s as bytes, ready to be written to persistent
/// storage
pub fn into_readonly_and_added_bytes(
self,
@@ -381,16 +378,17 @@
///
/// The first returned value is the result of analysing `NodeTree` data
/// *alone*: whereas `None` guarantees that the given prefix is absent
- /// from the `NodeTree` data (but still could match `NULL_NODE`), with
- /// `Some(rev)`, it is to be understood that `rev` is the unique `Revision`
- /// that could match the prefix. Actually, all that can be inferred from
+ /// from the [`NodeTree`] data (but still could match [`NULL_NODE`]), with
+ /// `Some(rev)`, it is to be understood that `rev` is the unique
+ /// [`Revision`] that could match the prefix. Actually, all that can
+ /// be inferred from
/// the `NodeTree` data is that `rev` is the revision with the longest
/// common node prefix with the given prefix.
///
/// The second returned value is the size of the smallest subprefix
/// of `prefix` that would give the same result, i.e. not the
- /// `MultipleResults` error variant (again, using only the data of the
- /// `NodeTree`).
+ /// [MultipleResults](NodeMapError) error variant (again, using only the
+ /// data of the [`NodeTree`]).
fn lookup(
&self,
prefix: NodePrefix,
--- a/rust/hg-core/src/utils.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/utils.rs Tue May 09 11:35:50 2023 +0200
@@ -301,7 +301,7 @@
/// calling `merge(key, left_value, right_value)` to resolve keys that exist in
/// both.
///
-/// CC https://github.com/bodil/im-rs/issues/166
+/// CC <https://github.com/bodil/im-rs/issues/166>
pub(crate) fn ordmap_union_with_merge<K, V>(
left: OrdMap<K, V>,
right: OrdMap<K, V>,
--- a/rust/hg-core/src/utils/hg_path.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/hg-core/src/utils/hg_path.rs Tue May 09 11:35:50 2023 +0200
@@ -479,10 +479,11 @@
}
}
-/// TODO: Once https://www.mercurial-scm.org/wiki/WindowsUTF8Plan is
+/// Create a new [`OsString`] from types referenceable as [`HgPath`].
+///
+/// TODO: Once <https://www.mercurial-scm.org/wiki/WindowsUTF8Plan> is
/// implemented, these conversion utils will have to work differently depending
/// on the repository encoding: either `UTF-8` or `MBCS`.
-
pub fn hg_path_to_os_string<P: AsRef<HgPath>>(
hg_path: P,
) -> Result<OsString, HgPathError> {
@@ -498,12 +499,14 @@
Ok(os_str.to_os_string())
}
+/// Create a new [`PathBuf`] from types referenceable as [`HgPath`].
pub fn hg_path_to_path_buf<P: AsRef<HgPath>>(
hg_path: P,
) -> Result<PathBuf, HgPathError> {
Ok(Path::new(&hg_path_to_os_string(hg_path)?).to_path_buf())
}
+/// Create a new [`HgPathBuf`] from types referenceable as [`OsStr`].
pub fn os_string_to_hg_path_buf<S: AsRef<OsStr>>(
os_string: S,
) -> Result<HgPathBuf, HgPathError> {
@@ -520,6 +523,7 @@
Ok(buf)
}
+/// Create a new [`HgPathBuf`] from types referenceable as [`Path`].
pub fn path_to_hg_path_buf<P: AsRef<Path>>(
path: P,
) -> Result<HgPathBuf, HgPathError> {
--- a/rust/rhg/src/commands/status.rs Thu May 04 14:17:28 2023 +0200
+++ b/rust/rhg/src/commands/status.rs Tue May 09 11:35:50 2023 +0200
@@ -111,6 +111,13 @@
.long("copies"),
)
.arg(
+ Arg::new("print0")
+ .help("end filenames with NUL, for use with xargs")
+ .short('0')
+ .action(clap::ArgAction::SetTrue)
+ .long("print0"),
+ )
+ .arg(
Arg::new("no-status")
.help("hide status prefix")
.short('n')
@@ -213,10 +220,11 @@
let config = invocation.config;
let args = invocation.subcommand_args;
- // TODO add `!args.get_flag("print0") &&` when we support `print0`
+ let print0 = args.get_flag("print0");
let verbose = args.get_flag("verbose")
|| config.get_bool(b"ui", b"verbose")?
|| config.get_bool(b"commands", b"status.verbose")?;
+ let verbose = verbose && !print0;
let all = args.get_flag("all");
let display_states = if all {
@@ -363,6 +371,7 @@
} else {
None
},
+ print0,
};
if display_states.modified {
output.display(b"M ", "status.modified", ds_status.modified)?;
@@ -527,6 +536,7 @@
ui: &'a Ui,
no_status: bool,
relativize: Option<RelativizePaths>,
+ print0: bool,
}
impl DisplayStatusPaths<'_> {
@@ -560,12 +570,15 @@
if !self.no_status {
self.ui.write_stdout_labelled(status_prefix, label)?
}
- self.ui
- .write_stdout_labelled(&format_bytes!(b"{}\n", path), label)?;
+ let linebreak = if self.print0 { b"\x00" } else { b"\n" };
+ self.ui.write_stdout_labelled(
+ &format_bytes!(b"{}{}", path, linebreak),
+ label,
+ )?;
if let Some(source) = copy_source.filter(|_| !self.no_status) {
let label = "status.copied";
self.ui.write_stdout_labelled(
- &format_bytes!(b" {}\n", source),
+ &format_bytes!(b" {}{}", source, linebreak),
label,
)?
}
--- a/setup.py Thu May 04 14:17:28 2023 +0200
+++ b/setup.py Tue May 09 11:35:50 2023 +0200
@@ -1299,9 +1299,11 @@
'mercurial.hgweb',
'mercurial.interfaces',
'mercurial.pure',
+ 'mercurial.stabletailgraph',
'mercurial.templates',
'mercurial.thirdparty',
'mercurial.thirdparty.attr',
+ 'mercurial.thirdparty.jaraco',
'mercurial.thirdparty.zope',
'mercurial.thirdparty.zope.interface',
'mercurial.upgrade_utils',
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/blacklists/nix Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,8 @@
+# Tests to be disabled when building and testing in the Nix sandbox.
+
+# tests enforcing "/usr/bin/env" shebangs, which are patched for nix
+test-run-tests.t
+test-check-shbang.t
+
+# doesn't like the extra setlocale warnings emitted by the nix bash wrappers
+test-locale.t
--- a/tests/common-pattern.py Thu May 04 14:17:28 2023 +0200
+++ b/tests/common-pattern.py Tue May 09 11:35:50 2023 +0200
@@ -10,7 +10,7 @@
(
br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
br'bookmarks%250A'
- br'changegroup%253D01%252C02%250A'
+ br'changegroup%253D01%252C02%252C03%250A'
br'checkheads%253Drelated%250A'
br'digests%253Dmd5%252Csha1%252Csha512%250A'
br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
@@ -26,7 +26,7 @@
(
br'bundlecaps=HG20%2Cbundle2%3DHG20%250A'
br'bookmarks%250A'
- br'changegroup%253D01%252C02%250A'
+ br'changegroup%253D01%252C02%252C03%250A'
br'checkheads%3Drelated%0A'
br'digests%253Dmd5%252Csha1%252Csha512%250A'
br'error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A'
@@ -42,7 +42,7 @@
(
br'bundle2=HG20%0A'
br'bookmarks%0A'
- br'changegroup%3D01%2C02%0A'
+ br'changegroup%3D01%2C02%2C03%0A'
br'checkheads%3Drelated%0A'
br'digests%3Dmd5%2Csha1%2Csha512%0A'
br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
@@ -59,7 +59,7 @@
(
br'bundle2=HG20%0A'
br'bookmarks%0A'
- br'changegroup%3D01%2C02%0A'
+ br'changegroup%3D01%2C02%2C03%0A'
br'checkheads%3Drelated%0A'
br'digests%3Dmd5%2Csha1%2Csha512%0A'
br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
@@ -74,7 +74,7 @@
(
br'bundle2=HG20%0A'
br'bookmarks%0A'
- br'changegroup%3D01%2C02%0A'
+ br'changegroup%3D01%2C02%2C03%0A'
br'digests%3Dmd5%2Csha1%2Csha512%0A'
br'error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A'
br'hgtagsfnodes%0A'
@@ -122,6 +122,11 @@
% (m.group(1), m.group(2))
),
),
+ # discovery debug output
+ (
+ br'\b(\d+) total queries in \d.\d\d\d\ds\b',
+ lambda m: (br'%s total queries in *.????s (glob)' % m.group(1)),
+ ),
]
# Various platform error strings, keyed on a common replacement string
--- a/tests/filterpyflakes.py Thu May 04 14:17:28 2023 +0200
+++ b/tests/filterpyflakes.py Tue May 09 11:35:50 2023 +0200
@@ -24,10 +24,9 @@
break # pattern matches
if keep:
fn = line.split(':', 1)[0]
- f = open(fn)
- data = f.read()
- f.close()
- if 'no-' 'check-code' in data:
+ with open(fn, 'rb') as f:
+ data = f.read()
+ if b'no-' b'check-code' in data:
continue
lines.append(line)
--- a/tests/notcapable Thu May 04 14:17:28 2023 +0200
+++ b/tests/notcapable Tue May 09 11:35:50 2023 +0200
@@ -15,10 +15,10 @@
if name in b'$CAP'.split(b' '):
return False
return orig(self, name, *args, **kwargs)
-def wrappeer(orig, self, path=None):
+def wrappeer(orig, self, *args, **kwargs):
# Since we're disabling some newer features, we need to make sure local
# repos add in the legacy features again.
- return localrepo.locallegacypeer(self, path=path)
+ return localrepo.locallegacypeer(self, *args, **kwargs)
EOF
echo '[extensions]' >> $HGRCPATH
--- a/tests/test-acl.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-acl.t Tue May 09 11:35:50 2023 +0200
@@ -109,7 +109,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -175,7 +175,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -245,7 +245,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -325,7 +325,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -396,7 +396,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -471,7 +471,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -543,7 +543,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -620,7 +620,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -694,7 +694,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -767,7 +767,7 @@
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
bundle2-output-bundle: "HG20", 7 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:bookmarks" 37 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
@@ -856,7 +856,7 @@
list of changesets:
ef1ea85a6374b77d6da9dcda9541f498f2d17df7
bundle2-output-bundle: "HG20", 7 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:bookmarks" 37 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
@@ -947,7 +947,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1033,7 +1033,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1117,7 +1117,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1195,7 +1195,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1284,7 +1284,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1374,7 +1374,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1461,7 +1461,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1544,7 +1544,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1631,7 +1631,7 @@
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd
911600dab2ae7a9baff75958b84fe606851ce955
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1754,7 +1754,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1841,7 +1841,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1919,7 +1919,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -1993,7 +1993,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -2061,7 +2061,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -2153,7 +2153,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -2244,7 +2244,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -2317,7 +2317,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
@@ -2402,7 +2402,7 @@
911600dab2ae7a9baff75958b84fe606851ce955
e8fc755d4d8217ee5b0c2bb41558c40d43b92c01
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 48 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
--- a/tests/test-bad-extension.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-bad-extension.t Tue May 09 11:35:50 2023 +0200
@@ -63,14 +63,11 @@
Exception: bit bucket overflow
*** failed to import extension "badext2": No module named 'badext2'
Traceback (most recent call last):
- ImportError: No module named 'hgext.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext.badext2'
Traceback (most recent call last):
- ImportError: No module named 'hgext3rd.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext3rd.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext3rd.badext2'
Traceback (most recent call last):
- ImportError: No module named 'badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'badext2' (py36 !)
+ ModuleNotFoundError: No module named 'badext2'
names of extensions failed to load can be accessed via extensions.notloaded()
@@ -111,25 +108,19 @@
YYYY/MM/DD HH:MM:SS (PID)> - loading extension: badext2
YYYY/MM/DD HH:MM:SS (PID)> - could not import hgext.badext2 (No module named *badext2*): trying hgext3rd.badext2 (glob)
Traceback (most recent call last):
- ImportError: No module named 'hgext.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext.badext2'
YYYY/MM/DD HH:MM:SS (PID)> - could not import hgext3rd.badext2 (No module named *badext2*): trying badext2 (glob)
Traceback (most recent call last):
- ImportError: No module named 'hgext.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext.badext2'
Traceback (most recent call last):
- ImportError: No module named 'hgext3rd.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext3rd.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext3rd.badext2'
*** failed to import extension "badext2": No module named 'badext2'
Traceback (most recent call last):
- ImportError: No module named 'hgext.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext.badext2'
Traceback (most recent call last):
- ImportError: No module named 'hgext3rd.badext2' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext3rd.badext2' (py36 !)
+ ModuleNotFoundError: No module named 'hgext3rd.badext2'
Traceback (most recent call last):
- ModuleNotFoundError: No module named 'badext2' (py36 !)
- ImportError: No module named 'badext2' (no-py36 !)
+ ModuleNotFoundError: No module named 'badext2'
YYYY/MM/DD HH:MM:SS (PID)> > loaded 2 extensions, total time * (glob)
YYYY/MM/DD HH:MM:SS (PID)> - loading configtable attributes
YYYY/MM/DD HH:MM:SS (PID)> - executing uisetup hooks
--- a/tests/test-bookmarks-pushpull.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-bookmarks-pushpull.t Tue May 09 11:35:50 2023 +0200
@@ -129,10 +129,10 @@
bundle2-output: bundle parameter:
bundle2-output: start of parts
bundle2-output: bundle part: "replycaps"
- bundle2-output-part: "replycaps" 224 bytes payload
+ bundle2-output-part: "replycaps" 227 bytes payload
bundle2-output: part 0: "REPLYCAPS"
bundle2-output: header chunk size: 16
- bundle2-output: payload chunk size: 224
+ bundle2-output: payload chunk size: 227
bundle2-output: closing payload chunk
bundle2-output: bundle part: "check:bookmarks"
bundle2-output-part: "check:bookmarks" 23 bytes payload
@@ -162,9 +162,9 @@
bundle2-input: part parameters: 0
bundle2-input: found a handler for part replycaps
bundle2-input-part: "replycaps" supported
- bundle2-input: payload chunk size: 224
+ bundle2-input: payload chunk size: 227
bundle2-input: payload chunk size: 0
- bundle2-input-part: total payload size 224
+ bundle2-input-part: total payload size 227
bundle2-input: part header size: 22
bundle2-input: part type: "CHECK:BOOKMARKS"
bundle2-input: part id: "1"
@@ -241,10 +241,10 @@
bundle2-output: bundle parameter:
bundle2-output: start of parts
bundle2-output: bundle part: "replycaps"
- bundle2-output-part: "replycaps" 224 bytes payload
+ bundle2-output-part: "replycaps" 227 bytes payload
bundle2-output: part 0: "REPLYCAPS"
bundle2-output: header chunk size: 16
- bundle2-output: payload chunk size: 224
+ bundle2-output: payload chunk size: 227
bundle2-output: closing payload chunk
bundle2-output: bundle part: "check:bookmarks"
bundle2-output-part: "check:bookmarks" 23 bytes payload
@@ -275,9 +275,9 @@
bundle2-input: part parameters: 0
bundle2-input: found a handler for part replycaps
bundle2-input-part: "replycaps" supported
- bundle2-input: payload chunk size: 224
+ bundle2-input: payload chunk size: 227
bundle2-input: payload chunk size: 0
- bundle2-input-part: total payload size 224
+ bundle2-input-part: total payload size 227
bundle2-input: part header size: 22
bundle2-input: part type: "CHECK:BOOKMARKS"
bundle2-input: part id: "1"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-bundle-phase-internal.t Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,286 @@
+=====================================================
+test behavior of the `internal` phase around bundling
+=====================================================
+
+Long story short, internal changesets are internal implementation details and
+they should never leave the repository. Hence, they should never be in a
+bundle.
+
+Setup
+=====
+
+ $ cat << EOF >> $HGRCPATH
+ > [ui]
+ > logtemplate="{node|short} [{phase}] {desc|firstline}"
+ > EOF
+
+
+ $ hg init reference-repo --config format.use-internal-phase=yes
+ $ cd reference-repo
+ $ echo a > a
+ $ hg add a
+ $ hg commit -m "a"
+ $ echo b > b
+ $ hg add b
+ $ hg commit -m "b"
+ $ echo b > c
+ $ hg add c
+ $ hg commit -m "c"
+ $ hg log -G
+ @ 07f0cc02c068 [draft] c
+ |
+ o d2ae7f538514 [draft] b
+ |
+ o cb9a9f314b8b [draft] a
+
+ $ hg up ".^"
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+
+do a shelve
+
+ $ touch a_file.txt
+ $ hg shelve -A
+ adding a_file.txt
+ shelved as default
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ hg log -G --hidden
+ o 2ec3cf310d86 [internal] changes to: b
+ |
+ | o 07f0cc02c068 [draft] c
+ |/
+ @ d2ae7f538514 [draft] b
+ |
+ o cb9a9f314b8b [draft] a
+
+ $ shelved_node=`hg log --rev tip --hidden -T '{node|short}'`
+
+add more changeset above it
+
+ $ hg up 'desc(a)'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ $ echo d > d
+ $ hg add d
+ $ hg commit -m "d"
+ created new head
+ $ echo d > e
+ $ hg add e
+ $ hg commit -m "e"
+ $ hg up null
+ 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
+ $ hg log -G
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ | o 07f0cc02c068 [draft] c
+ | |
+ | o d2ae7f538514 [draft] b
+ |/
+ o cb9a9f314b8b [draft] a
+
+ $ hg log -G --hidden
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ | o 2ec3cf310d86 [internal] changes to: b
+ | |
+ | | o 07f0cc02c068 [draft] c
+ | |/
+ | o d2ae7f538514 [draft] b
+ |/
+ o cb9a9f314b8b [draft] a
+
+ $ cd ..
+
+backup bundle from strip
+========================
+
+strip an ancestors of the internal changeset
+--------------------------------------------
+
+ $ cp -ar reference-repo strip-ancestor
+ $ cd strip-ancestor
+
+The internal change is stripped, yet it should be skipped from the backup bundle.
+
+ $ hg log -G
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ | o 07f0cc02c068 [draft] c
+ | |
+ | o d2ae7f538514 [draft] b
+ |/
+ o cb9a9f314b8b [draft] a
+
+ $ hg debugstrip 'desc(b)'
+ saved backup bundle to $TESTTMP/strip-ancestor/.hg/strip-backup/d2ae7f538514-59bd8bc3-backup.hg
+
+The change should be either gone or hidden
+
+ $ hg log -G
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ o cb9a9f314b8b [draft] a
+
+
+The backup should not include it (as people tend to manipulate these directly)
+
+ $ ls -1 .hg/strip-backup/
+ d2ae7f538514-59bd8bc3-backup.hg
+ $ hg debugbundle .hg/strip-backup/*.hg
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 2, version: 03} (mandatory: True)
+ d2ae7f538514cd87c17547b0de4cea71fe1af9fb
+ 07f0cc02c06869c81ebf33867edef30554020c0d
+ cache:rev-branch-cache -- {} (mandatory: False)
+ phase-heads -- {} (mandatory: True)
+ 07f0cc02c06869c81ebf33867edef30554020c0d draft
+
+Shelve should still work
+
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ $ hg status
+ A a_file.txt
+
+ $ cd ..
+
+strip an unrelated changeset with a lower revnum
+------------------------------------------------
+
+ $ cp -ar reference-repo strip-unrelated
+ $ cd strip-unrelated
+
+The internal change is not directly stripped, but it is affected by the strip
+and it is in the "temporary backup" zone. The zone that needs to be put in a
+temporary bundle while we affect data under it.
+
+ $ hg debugstrip 'desc(c)'
+ saved backup bundle to $TESTTMP/strip-unrelated/.hg/strip-backup/07f0cc02c068-8fd0515f-backup.hg
+
+The change should be either gone or hidden
+
+ $ hg log -G
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ | o d2ae7f538514 [draft] b
+ |/
+ o cb9a9f314b8b [draft] a
+
+The backup should not include it (as people tend to manipulate these directly)
+
+ $ ls -1 .hg/strip-backup/
+ 07f0cc02c068-8fd0515f-backup.hg
+ $ hg debugbundle .hg/strip-backup/*.hg
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
+ 07f0cc02c06869c81ebf33867edef30554020c0d
+ cache:rev-branch-cache -- {} (mandatory: False)
+ phase-heads -- {} (mandatory: True)
+ 07f0cc02c06869c81ebf33867edef30554020c0d draft
+
+Shelve should still work
+
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ $ hg status
+ A a_file.txt
+
+ $ cd ..
+
+explicitly strip the internal changeset
+---------------------------------------
+
+ $ cp -ar reference-repo strip-explicit
+ $ cd strip-explicit
+
+The internal change is directly selected for stripping.
+
+ $ hg debugstrip --hidden $shelved_node
+
+The change should be gone
+
+ $ hg log -G --hidden
+ o 636bc07920e3 [draft] e
+ |
+ o 980f7dc84c29 [draft] d
+ |
+ | o 07f0cc02c068 [draft] c
+ | |
+ | o d2ae7f538514 [draft] b
+ |/
+ o cb9a9f314b8b [draft] a
+
+
+We don't need to backup anything
+
+ $ ls -1 .hg/strip-backup/
+
+Shelve should still work
+
+ $ hg unshelve
+ unshelving change 'default'
+ rebasing shelved changes
+ $ hg status
+ A a_file.txt
+
+ $ cd ..
+
+Explicitly bundling the internal change
+=======================================
+
+ $ cd reference-repo
+
+try to bundle it alone explicitly
+---------------------------------
+
+We should not allow it
+
+ $ hg bundle --type v3 --exact --rev $shelved_node --hidden ../internal-01.hg
+ abort: cannot bundle internal changesets
+ (1 internal changesets selected)
+ [255]
+ $ hg debugbundle ../internal-01.hg
+ abort: $ENOENT$: '../internal-01.hg'
+ [255]
+
+try to bundle it with other, somewhat explicitly
+------------------------------------------------
+
+We should not allow it
+
+ $ hg bundle --type v3 --exact --rev 'desc(b)':: --hidden ../internal-02.hg
+ abort: cannot bundle internal changesets
+ (1 internal changesets selected)
+ [255]
+ $ hg debugbundle ../internal-02.hg
+ abort: $ENOENT$: '../internal-02.hg'
+ [255]
+
+bundle visible ancestors
+------------------------
+
+This should succeed as the standard filtering is skipping the internal change naturally
+
+ $ hg bundle --type v3 --exact --rev 'desc(b)':: ../internal-03.hg
+ 2 changesets found
+ $ hg debugbundle ../internal-03.hg
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 2, version: 03} (mandatory: True)
+ d2ae7f538514cd87c17547b0de4cea71fe1af9fb
+ 07f0cc02c06869c81ebf33867edef30554020c0d
+ cache:rev-branch-cache -- {} (mandatory: False)
+ phase-heads -- {} (mandatory: True)
+ 07f0cc02c06869c81ebf33867edef30554020c0d draft
+
+ $ cd ..
+
--- a/tests/test-bundle-phases.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-bundle-phases.t Tue May 09 11:35:50 2023 +0200
@@ -44,6 +44,7 @@
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
26805aba1e600a82e93661149f2313866a221a7b draft
+ 9bc730a19041f9ec7cb33c626e811aa233efb18c secret
$ hg strip --no-backup C
Phases show on incoming, and are also restored when pulling. Secret commits
@@ -374,6 +375,7 @@
phase-heads -- {} (mandatory: True)
dc0947a82db884575bb76ea10ac97b08536bfa03 public
03ca77807e919db8807c3749086dc36fb478cac0 draft
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4 secret
$ hg strip --no-backup A
$ hg unbundle -q bundle
$ rm bundle
@@ -398,6 +400,7 @@
4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4 secret
$ rm bundle
$ hg bundle --base A -r D bundle
@@ -411,6 +414,7 @@
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
dc0947a82db884575bb76ea10ac97b08536bfa03 public
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4 secret
$ rm bundle
$ hg bundle --base 'B + C' -r 'D + E' bundle
@@ -423,4 +427,5 @@
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
03ca77807e919db8807c3749086dc36fb478cac0 draft
+ 4e4f9194f9f181c57f62e823e8bdfa46ab9e4ff4 secret
$ rm bundle
--- a/tests/test-bundle-type.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-bundle-type.t Tue May 09 11:35:50 2023 +0200
@@ -4,127 +4,409 @@
$ hg init t2
$ cd t1
$ echo blablablablabla > file.txt
- $ hg ci -Ama
+ $ hg ci -A -m commit_root
adding file.txt
- $ hg log | grep summary
- summary: a
- $ hg bundle ../b1 ../t2
+ $ echo kapoue > file.txt
+ $ hg ci -m commit_1
+ $ echo scrabageul > file.txt
+ $ hg ci -m commit_2
+ $ hg up 'desc("commit_root")'
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ echo flagabalagla > file.txt
+ $ hg ci -m commit_3
+ created new head
+ $ echo aliofia > file.txt
+ $ hg ci -m commit_4
+ $ echo alklqo > file.txt
+ $ hg ci -m commit_5
+ $ echo peakfeo > file.txt
+ $ hg ci -m commit_6 --secret
+ $ hg phase --public --rev 'desc(commit_3)'
+ $ hg log -GT '[{phase}] {desc|firstline}\n'
+ @ [secret] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [public] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [public] commit_root
+
+
+XXX the bundle generation is defined by a discovery round here. So the secret
+changeset should be excluded.
+
+ $ hg bundle ../b1.hg ../t2
searching for changes
- 1 changesets found
+ 7 changesets found (known-bad-output !)
+ 6 changesets found (missing-correct-output !)
+ $ cd ..
- $ cd ../t2
- $ hg unbundle ../b1
+ $ hg -R t2 unbundle ./b1.hg
adding changesets
adding manifests
adding file changes
- added 1 changesets with 1 changes to 1 files
- new changesets c35a0f9217e6 (1 drafts)
- (run 'hg update' to get a working copy)
- $ hg up
+ added 7 changesets with 7 changes to 1 files (+1 heads) (known-bad-output !)
+ added 6 changesets with 6 changes to 1 files (+1 heads) (missing-correct-output !)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ $ hg -R t2 up
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
- $ hg log | grep summary
- summary: a
- $ cd ..
+ updated to "b9f5f740a8cd: commit_6"
+ 1 other heads for branch "default"
+ $ hg -R t2 log -GT '[{phase}] {desc|firstline}\n'
+ @ [draft] commit_6 (known-bad-output !)
+ | (known-bad-output !)
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
Unknown compression type is rejected
$ hg init t3
- $ cd t3
- $ hg -q unbundle ../b1
- $ hg bundle -a -t unknown out.hg
+ $ hg -R t3 -q unbundle ./b1.hg
+ $ hg -R t3 bundle -a -t unknown out.hg
abort: unknown is not a recognized bundle specification
(see 'hg help bundlespec' for supported values for --type)
[10]
- $ hg bundle -a -t unknown-v2 out.hg
+ $ hg -R t3 bundle -a -t unknown-v2 out.hg
abort: unknown compression is not supported
(see 'hg help bundlespec' for supported values for --type)
[10]
- $ cd ..
+test bundle types
+=================
-test bundle types
+since we use --all, it is okay to include the secret changeset here. It is
+unfortunate that the phase information for the secret one is lost.
$ testbundle() {
> echo % test bundle type $1
- > hg init t$1
- > cd t1
- > hg bundle -t $1 ../b$1 ../t$1
- > f -q -B6 -D ../b$1; echo
- > cd ../t$1
- > hg debugbundle ../b$1
- > hg debugbundle --spec ../b$1
+ > echo '==================================='
+ > hg -R t1 bundle --all --type $1 ./b-$1.hg
+ > f -q -B6 -D ./b-$1.hg; echo
+ > hg debugbundle ./b-$1.hg
+ > hg debugbundle --spec ./b-$1.hg
> echo
- > cd ..
+ > hg init repo-from-type-$1
+ > hg unbundle -R repo-from-type-$1 ./b-$1.hg
+ > hg -R repo-from-type-$1 log -GT '[{phase}] {desc|firstline}\n'
+ > echo
> }
- $ for t in "None" "bzip2" "gzip" "none-v2" "v2" "v1" "gzip-v1"; do
+ $ for t in "None" "bzip2" "gzip" "none-v2" "v2" "v1" "gzip-v1" "v3"; do
> testbundle $t
> done
% test bundle type None
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
none-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type bzip2
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
bzip2-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type gzip
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {Compression: GZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
gzip-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type none-v2
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
none-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type v2
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
bzip2-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type v1
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG10BZ
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
bzip2-v1
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type gzip-v1
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG10GZ
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
gzip-v1
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
+ % test bundle type v3
+ ===================================
+ 7 changesets found
+ HG20\x00\x00 (esc)
+ Stream params: {Compression: BZ}
+ changegroup -- {nbchanges: 7, targetphase: 2, version: 03} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
+ cache:rev-branch-cache -- {} (mandatory: False)
+ phase-heads -- {} (mandatory: True)
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d public
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55 draft
+ 2ea90778052ba7558fab36e3fd5d149512ff986b draft
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5 secret
+ bzip2-v2;cg.version=03
+
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (4 drafts, 1 secrets)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [secret] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [public] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [public] commit_root
+
+
Compression level can be adjusted for bundle2 bundles
@@ -167,36 +449,90 @@
> testbundle $t
> done
% test bundle type zstd
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {Compression: ZS}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
zstd-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
% test bundle type zstd-v2
- searching for changes
- 1 changesets found
+ ===================================
+ 7 changesets found
HG20\x00\x00 (esc)
Stream params: {Compression: ZS}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
cache:rev-branch-cache -- {} (mandatory: False)
zstd-v2
+ adding changesets
+ adding manifests
+ adding file changes
+ added 7 changesets with 7 changes to 1 files (+1 heads)
+ new changesets ac39af4a9f7d:b9f5f740a8cd (7 drafts)
+ (run 'hg heads' to see heads, 'hg merge' to merge)
+ o [draft] commit_6
+ |
+ o [draft] commit_5
+ |
+ o [draft] commit_4
+ |
+ o [draft] commit_3
+ |
+ | o [draft] commit_2
+ | |
+ | o [draft] commit_1
+ |/
+ o [draft] commit_root
+
+
Explicit request for zstd on non-generaldelta repos
$ hg --config format.usegeneraldelta=false init nogd
$ hg -q -R nogd pull t1
$ hg -R nogd bundle -a -t zstd nogd-zstd
- 1 changesets found
+ 6 changesets found
zstd-v1 always fails
- $ hg -R tzstd bundle -a -t zstd-v1 zstd-v1
+ $ hg -R t1 bundle -a -t zstd-v1 zstd-v1
abort: compression engine zstd is not supported on v1 bundles
(see 'hg help bundlespec' for supported values for --type)
[10]
@@ -243,26 +579,44 @@
Test controlling the changegroup version
$ hg -R t1 bundle --config experimental.changegroup3=yes -a -t v2 ./v2-cg-default.hg
- 1 changesets found
+ 7 changesets found
$ hg debugbundle ./v2-cg-default.hg --part-type changegroup
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
$ hg debugbundle ./v2-cg-default.hg --spec
bzip2-v2
$ hg -R t1 bundle --config experimental.changegroup3=yes -a -t 'v2;cg.version=02' ./v2-cg-02.hg
- 1 changesets found
+ 7 changesets found
$ hg debugbundle ./v2-cg-02.hg --part-type changegroup
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 02} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
$ hg debugbundle ./v2-cg-02.hg --spec
bzip2-v2
$ hg -R t1 bundle --config experimental.changegroup3=yes -a -t 'v2;cg.version=03' ./v2-cg-03.hg
- 1 changesets found
+ 7 changesets found
$ hg debugbundle ./v2-cg-03.hg --part-type changegroup
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
- c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf
+ changegroup -- {nbchanges: 7, version: 03} (mandatory: True)
+ ac39af4a9f7d2aaa7d244720e57838be9bf63b03
+ 901e97fadc587978ec52f2fa76af4aefc2d191e8
+ a8c3a1ed30eb71f03f476c5fa7ead831ef991a55
+ 66e2c4b43e0cf8f0bdff0733a0b97ce57874e35d
+ 624e609639853fe22c88d42a8fd1f53a0e9b7ebe
+ 2ea90778052ba7558fab36e3fd5d149512ff986b
+ b9f5f740a8cd76700020e3903ee55ecff78bd3e5
$ hg debugbundle ./v2-cg-03.hg --spec
bzip2-v2;cg.version=03
--- a/tests/test-check-code.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-check-code.t Tue May 09 11:35:50 2023 +0200
@@ -57,6 +57,7 @@
.arcconfig
.clang-format
.editorconfig
+ .gitattributes
.hgignore
.hgsigs
.hgtags
--- a/tests/test-clone-stream.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-clone-stream.t Tue May 09 11:35:50 2023 +0200
@@ -128,6 +128,7 @@
changegroup
01
02
+ 03
checkheads
related
digests
@@ -157,7 +158,7 @@
added 3 changesets with 1088 changes to 1088 files
new changesets 96ee1d7354c4:5223b5e3265f
- $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
200 Script output follows
content-type: application/mercurial-0.2
@@ -195,6 +196,7 @@
changegroup
01
02
+ 03
checkheads
related
digests
@@ -224,7 +226,7 @@
added 3 changesets with 1088 changes to 1088 files
new changesets 96ee1d7354c4:5223b5e3265f
- $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto 0.2 --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
200 Script output follows
content-type: application/mercurial-0.2
@@ -284,7 +286,7 @@
getbundle requests with stream=1 are uncompressed
- $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
+ $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle' content-type --bodyfile body --hgproto '0.1 0.2 comp=zlib,none' --requestheader "x-hgarg-1=bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=0000000000000000000000000000000000000000&heads=c17445101a72edac06facd130d14808dfbd5c7c2&stream=1"
200 Script output follows
content-type: application/mercurial-0.2
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-clonebundles-autogen.t Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,360 @@
+
+#require no-reposimplestore no-chg
+
+initial setup
+
+ $ hg init server
+ $ cat >> server/.hg/hgrc << EOF
+ > [extensions]
+ > clonebundles =
+ >
+ > [clone-bundles]
+ > auto-generate.on-change = yes
+ > auto-generate.formats = v2
+ > upload-command = cp "\$HGCB_BUNDLE_PATH" "$TESTTMP"/final-upload/
+ > delete-command = rm -f "$TESTTMP/final-upload/\$HGCB_BASENAME"
+ > url-template = file://$TESTTMP/final-upload/{basename}
+ >
+ > [devel]
+ > debug.clonebundles=yes
+ > EOF
+
+ $ mkdir final-upload
+ $ hg clone server client
+ updating to branch default
+ 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ cd client
+
+Test bundles are generated on push
+==================================
+
+ $ touch foo
+ $ hg -q commit -A -m 'add foo'
+ $ touch bar
+ $ hg -q commit -A -m 'add bar'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ 2 changesets found
+ added 2 changesets with 2 changes to 2 files
+ clone-bundles: starting bundle generation: v2
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-2_revs-aaff8d2ffbbf_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-2_revs-aaff8d2ffbbf_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Newer bundles are generated with more pushes
+--------------------------------------------
+
+ $ touch baz
+ $ hg -q commit -A -m 'add baz'
+ $ touch buz
+ $ hg -q commit -A -m 'add buz'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ 4 changesets found
+ added 2 changesets with 2 changes to 2 files
+ clone-bundles: starting bundle generation: v2
+
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-4_revs-6427147b985a_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-2_revs-aaff8d2ffbbf_tip-*_txn.hg (glob)
+ full-v2-4_revs-6427147b985a_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Older bundles are cleaned up with more pushes
+---------------------------------------------
+
+ $ touch faz
+ $ hg -q commit -A -m 'add faz'
+ $ touch fuz
+ $ hg -q commit -A -m 'add fuz'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ clone-bundles: deleting bundle full-v2-2_revs-aaff8d2ffbbf_tip-*_txn.hg (glob)
+ 6 changesets found
+ added 2 changesets with 2 changes to 2 files
+ clone-bundles: starting bundle generation: v2
+
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-6_revs-b1010e95ea00_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-4_revs-6427147b985a_tip-*_txn.hg (glob)
+ full-v2-6_revs-b1010e95ea00_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Test conditions to get them generated
+=====================================
+
+Check ratio
+
+ $ cat >> ../server/.hg/hgrc << EOF
+ > [clone-bundles]
+ > trigger.below-bundled-ratio = 0.5
+ > EOF
+ $ touch far
+ $ hg -q commit -A -m 'add far'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-6_revs-b1010e95ea00_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-4_revs-6427147b985a_tip-*_txn.hg (glob)
+ full-v2-6_revs-b1010e95ea00_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Check absolute number of revisions
+
+ $ cat >> ../server/.hg/hgrc << EOF
+ > [clone-bundles]
+ > trigger.revs = 2
+ > EOF
+ $ touch bur
+ $ hg -q commit -A -m 'add bur'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ clone-bundles: deleting bundle full-v2-4_revs-6427147b985a_tip-*_txn.hg (glob)
+ 8 changesets found
+ added 1 changesets with 1 changes to 1 files
+ clone-bundles: starting bundle generation: v2
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-8_revs-8353e8af1306_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-6_revs-b1010e95ea00_tip-*_txn.hg (glob)
+ full-v2-8_revs-8353e8af1306_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+(that one would not generate new bundles)
+
+ $ touch tur
+ $ hg -q commit -A -m 'add tur'
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-8_revs-8353e8af1306_tip-*_txn.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-6_revs-b1010e95ea00_tip-*_txn.hg (glob)
+ full-v2-8_revs-8353e8af1306_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Test generation through the dedicated command
+=============================================
+
+ $ cat >> ../server/.hg/hgrc << EOF
+ > [clone-bundles]
+ > auto-generate.on-change = no
+ > EOF
+
+Check the command can generate content when needed
+--------------------------------------------------
+
+Do a push that makes the condition fulfilled,
+Yet it should not automatically generate a bundle with
+"auto-generate.on-change" not set.
+
+ $ touch quoi
+ $ hg -q commit -A -m 'add quoi'
+
+ $ pre_push_manifest=`cat ../server/.hg/clonebundles.manifest|f --sha256 | sed 's/.*=//' | cat`
+ $ pre_push_upload=`ls -1 ../final-upload|f --sha256 | sed 's/.*=//' | cat`
+ $ ls -1 ../server/.hg/tmp-bundles
+
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ post_push_manifest=`cat ../server/.hg/clonebundles.manifest|f --sha256 | sed 's/.*=//' | cat`
+ $ post_push_upload=`ls -1 ../final-upload|f --sha256 | sed 's/.*=//' | cat`
+ $ ls -1 ../server/.hg/tmp-bundles
+ $ test "$pre_push_manifest" = "$post_push_manifest"
+ $ test "$pre_push_upload" = "$post_push_upload"
+
+Running the command should detect the stale bundles, and do the full automatic
+generation logic.
+
+ $ hg -R ../server/ admin::clone-bundles-refresh
+ clone-bundles: deleting bundle full-v2-6_revs-b1010e95ea00_tip-*_txn.hg (glob)
+ clone-bundles: starting bundle generation: v2
+ 10 changesets found
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg (glob)
+ full-v2-8_revs-8353e8af1306_tip-*_txn.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Check the command cleans up older bundles when possible
+-------------------------------------------------------
+
+ $ hg -R ../server/ admin::clone-bundles-refresh
+ clone-bundles: deleting bundle full-v2-8_revs-8353e8af1306_tip-*_txn.hg (glob)
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Nothing is generated when the bundles are sufficiently up to date
+-----------------------------------------------------------------
+
+ $ touch feur
+ $ hg -q commit -A -m 'add feur'
+
+ $ pre_push_manifest=`cat ../server/.hg/clonebundles.manifest|f --sha256 | sed 's/.*=//' | cat`
+ $ pre_push_upload=`ls -1 ../final-upload|f --sha256 | sed 's/.*=//' | cat`
+ $ ls -1 ../server/.hg/tmp-bundles
+
+ $ hg push
+ pushing to $TESTTMP/server
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files
+
+ $ post_push_manifest=`cat ../server/.hg/clonebundles.manifest|f --sha256 | sed 's/.*=//' | cat`
+ $ post_push_upload=`ls -1 ../final-upload|f --sha256 | sed 's/.*=//' | cat`
+ $ ls -1 ../server/.hg/tmp-bundles
+ $ test "$pre_push_manifest" = "$post_push_manifest"
+ $ test "$pre_push_upload" = "$post_push_upload"
+
+ $ hg -R ../server/ admin::clone-bundles-refresh
+
+ $ post_refresh_manifest=`cat ../server/.hg/clonebundles.manifest|f --sha256 | sed 's/.*=//' | cat`
+ $ post_refresh_upload=`ls -1 ../final-upload|f --sha256 | sed 's/.*=//' | cat`
+ $ ls -1 ../server/.hg/tmp-bundles
+ $ test "$pre_push_manifest" = "$post_refresh_manifest"
+ $ test "$pre_push_upload" = "$post_refresh_upload"
+
+Test modification of configuration
+==================================
+
+Testing that later runs adapt to configuration changes even if the repository is
+unchanged.
+
+adding more formats
+-------------------
+
+bundle for added formats should be generated
+
+change configuration
+
+ $ cat >> ../server/.hg/hgrc << EOF
+ > [clone-bundles]
+ > auto-generate.formats = v1, v2
+ > EOF
+
+refresh the bundles
+
+ $ hg -R ../server/ admin::clone-bundles-refresh
+ clone-bundles: starting bundle generation: v1
+ 11 changesets found
+
+the bundle for the "new" format should have been added
+
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg BUNDLESPEC=v1 REQUIRESNI=true (glob)
+ file:/*/$TESTTMP/final-upload/full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Changing the ratio
+------------------
+
+Changing the ratio to something that would have triggered a bundle during the last push.
+
+ $ cat >> ../server/.hg/hgrc << EOF
+ > [clone-bundles]
+ > trigger.below-bundled-ratio = 0.95
+ > EOF
+
+refresh the bundles
+
+ $ hg -R ../server/ admin::clone-bundles-refresh
+ clone-bundles: starting bundle generation: v2
+ 11 changesets found
+
+
+the "outdated" bundle should be refreshed
+
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg BUNDLESPEC=v1 REQUIRESNI=true (glob)
+ file:/*/$TESTTMP/final-upload/full-v2-11_revs-4226b1cd5fda_tip-*_acbr.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg (glob)
+ full-v2-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
+
+Test more command options
+=========================
+
+bundle clearing
+---------------
+
+ $ hg -R ../server/ admin::clone-bundles-clear
+ clone-bundles: deleting bundle full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ clone-bundles: deleting bundle full-v2-10_revs-3b6f57f17d70_tip-*_acbr.hg (glob)
+ clone-bundles: deleting bundle full-v2-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+
+Nothing should remain
+
+ $ cat ../server/.hg/clonebundles.manifest
+ $ ls -1 ../final-upload
+ $ ls -1 ../server/.hg/tmp-bundles
+
+background generation
+---------------------
+
+generate bundle using background subprocess
+(since we are in devel mode, the command will still wait for the background
+process to end)
+
+ $ hg -R ../server/ admin::clone-bundles-refresh --background
+ 11 changesets found
+ 11 changesets found
+ clone-bundles: starting bundle generation: v1
+ clone-bundles: starting bundle generation: v2
+
+bundles should have been generated
+
+ $ cat ../server/.hg/clonebundles.manifest
+ file:/*/$TESTTMP/final-upload/full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg BUNDLESPEC=v1 REQUIRESNI=true (glob)
+ file:/*/$TESTTMP/final-upload/full-v2-11_revs-4226b1cd5fda_tip-*_acbr.hg BUNDLESPEC=v2 REQUIRESNI=true (glob)
+ $ ls -1 ../final-upload
+ full-v1-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ full-v2-11_revs-4226b1cd5fda_tip-*_acbr.hg (glob)
+ $ ls -1 ../server/.hg/tmp-bundles
--- a/tests/test-clonebundles.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-clonebundles.t Tue May 09 11:35:50 2023 +0200
@@ -587,7 +587,7 @@
adding file changes
adding bar revisions
adding foo revisions
- bundle2-input-part: total payload size 920
+ bundle2-input-part: total payload size 936
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
--- a/tests/test-commit-amend.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-commit-amend.t Tue May 09 11:35:50 2023 +0200
@@ -121,15 +121,15 @@
committing changelog
1 changesets found
uncompressed size of bundle content:
- 254 (changelog)
- 163 (manifests)
- 133 a
+ 256 (changelog)
+ 165 (manifests)
+ 135 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/47343646fa3d-c2758885-amend.hg
1 changesets found
uncompressed size of bundle content:
- 250 (changelog)
- 163 (manifests)
- 133 a
+ 252 (changelog)
+ 165 (manifests)
+ 135 a
adding branch
adding changesets
adding manifests
@@ -265,15 +265,15 @@
committing changelog
1 changesets found
uncompressed size of bundle content:
- 249 (changelog)
- 163 (manifests)
- 135 a
+ 251 (changelog)
+ 165 (manifests)
+ 137 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/a9a13940fc03-7c2e8674-amend.hg
1 changesets found
uncompressed size of bundle content:
- 257 (changelog)
- 163 (manifests)
- 135 a
+ 259 (changelog)
+ 165 (manifests)
+ 137 a
adding branch
adding changesets
adding manifests
@@ -301,15 +301,15 @@
committing changelog
1 changesets found
uncompressed size of bundle content:
- 257 (changelog)
- 163 (manifests)
- 135 a
+ 259 (changelog)
+ 165 (manifests)
+ 137 a
saved backup bundle to $TESTTMP/repo/.hg/strip-backup/64a124ba1b44-10374b8f-amend.hg
1 changesets found
uncompressed size of bundle content:
- 257 (changelog)
- 163 (manifests)
- 137 a
+ 259 (changelog)
+ 165 (manifests)
+ 139 a
adding branch
adding changesets
adding manifests
--- a/tests/test-completion.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-completion.t Tue May 09 11:35:50 2023 +0200
@@ -78,6 +78,7 @@
debug-repair-issue6528
debug-revlog-index
debug-revlog-stats
+ debug::stable-tail-sort
debugancestor
debugantivirusrunning
debugapplystreamclonebundle
@@ -273,6 +274,7 @@
debug-repair-issue6528: to-report, from-report, paranoid, dry-run
debug-revlog-index: changelog, manifest, dir, template
debug-revlog-stats: changelog, manifest, filelogs, template
+ debug::stable-tail-sort: template
debugancestor:
debugantivirusrunning:
debugapplystreamclonebundle:
@@ -364,7 +366,7 @@
parents: rev, style, template
paths: template
phase: public, draft, secret, force, rev
- pull: update, force, confirm, rev, bookmark, branch, ssh, remotecmd, insecure
+ pull: update, force, confirm, rev, bookmark, branch, remote-hidden, ssh, remotecmd, insecure
purge: abort-on-err, all, ignored, dirs, files, print, print0, confirm, include, exclude
push: force, rev, bookmark, all-bookmarks, branch, new-branch, pushvars, publish, ssh, remotecmd, insecure
recover: verify
--- a/tests/test-contrib-perf.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-contrib-perf.t Tue May 09 11:35:50 2023 +0200
@@ -188,6 +188,8 @@
perf::startup
(no help text available)
perf::status benchmark the performance of a single status call
+ perf::stream-locked-section
+ benchmark the initial, repo-locked, section of a stream-clone
perf::tags (no help text available)
perf::templating
test the rendering time of a given template
--- a/tests/test-debugcommands.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-debugcommands.t Tue May 09 11:35:50 2023 +0200
@@ -636,6 +636,7 @@
changegroup
01
02
+ 03
checkheads
related
digests
@@ -673,7 +674,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 468
+ remote: 473
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -693,7 +694,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 468
+ remote: 473
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlog-compression-zstd,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
@@ -713,7 +714,7 @@
devel-peer-request: pairs: 81 bytes
sending hello command
sending between command
- remote: 444
+ remote: 449
remote: capabilities: batch branchmap $USUAL_BUNDLE2_CAPS$ changegroupsubset getbundle known lookup protocaps pushkey streamreqs=generaldelta,revlogv1,sparserevlog unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash
remote: 1
devel-peer-request: protocaps
--- a/tests/test-generaldelta.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-generaldelta.t Tue May 09 11:35:50 2023 +0200
@@ -163,7 +163,7 @@
saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg
$ hg debugbundle .hg/strip-backup/*
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
+ changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
--- a/tests/test-help.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-help.t Tue May 09 11:35:50 2023 +0200
@@ -987,6 +987,8 @@
dump index data for a revlog
debug-revlog-stats
display statistics about revlogs in the store
+ debug::stable-tail-sort
+ display the stable-tail sort of the ancestors of a given node
debugancestor
find the ancestor revision of two revisions in a given index
debugantivirusrunning
@@ -1780,7 +1782,10 @@
Extension Commands:
- qclone clone main and patch repository at same time
+ admin::clone-bundles-clear remove existing clone bundle caches
+ admin::clone-bundles-refresh generate clone bundles according to the
+ configuration
+ qclone clone main and patch repository at same time
Test unfound topic
--- a/tests/test-hgweb-json.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-hgweb-json.t Tue May 09 11:35:50 2023 +0200
@@ -777,6 +777,7 @@
{
"bookmarks": [],
"branch": "default",
+ "children": [],
"date": [
0.0,
0
@@ -809,6 +810,9 @@
{
"bookmarks": [],
"branch": "default",
+ "children": [
+ "93a8ce14f89156426b7fa981af8042da53f03aa0"
+ ],
"date": [
0.0,
0
@@ -897,6 +901,9 @@
"bookmark1"
],
"branch": "default",
+ "children": [
+ "78896eb0e102174ce9278438a95e12543e4367a7"
+ ],
"date": [
0.0,
0
@@ -957,6 +964,9 @@
{
"bookmarks": [],
"branch": "test-branch",
+ "children": [
+ "ed66c30e87eb65337c05a4229efaa5f1d5285a90"
+ ],
"date": [
0.0,
0
--- a/tests/test-hook.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-hook.t Tue May 09 11:35:50 2023 +0200
@@ -985,13 +985,11 @@
Traceback (most recent call last):
SyntaxError: * (glob)
Traceback (most recent call last):
- ImportError: No module named 'hgext_syntaxerror' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext_syntaxerror' (py36 !)
+ ModuleNotFoundError: No module named 'hgext_syntaxerror'
Traceback (most recent call last):
SyntaxError: * (glob)
Traceback (most recent call last):
- ImportError: No module named 'hgext_syntaxerror' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext_syntaxerror' (py36 !)
+ ModuleNotFoundError: No module named 'hgext_syntaxerror'
Traceback (most recent call last):
raise error.HookLoadError( (py38 !)
mercurial.error.HookLoadError: preoutgoing.syntaxerror hook is invalid: import of "syntaxerror" failed
@@ -1147,21 +1145,16 @@
$ hg --traceback commit -ma 2>&1 | egrep '^exception|ImportError|ModuleNotFoundError|Traceback|HookLoadError|abort'
exception from first failed import attempt:
Traceback (most recent call last):
- ImportError: No module named 'somebogusmodule' (no-py36 !)
- ModuleNotFoundError: No module named 'somebogusmodule' (py36 !)
+ ModuleNotFoundError: No module named 'somebogusmodule'
exception from second failed import attempt:
Traceback (most recent call last):
- ImportError: No module named 'somebogusmodule' (no-py36 !)
- ModuleNotFoundError: No module named 'somebogusmodule' (py36 !)
+ ModuleNotFoundError: No module named 'somebogusmodule'
Traceback (most recent call last):
- ImportError: No module named 'hgext_importfail' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext_importfail' (py36 !)
+ ModuleNotFoundError: No module named 'hgext_importfail'
Traceback (most recent call last):
- ImportError: No module named 'somebogusmodule' (no-py36 !)
- ModuleNotFoundError: No module named 'somebogusmodule' (py36 !)
+ ModuleNotFoundError: No module named 'somebogusmodule'
Traceback (most recent call last):
- ImportError: No module named 'hgext_importfail' (no-py36 !)
- ModuleNotFoundError: No module named 'hgext_importfail' (py36 !)
+ ModuleNotFoundError: No module named 'hgext_importfail'
Traceback (most recent call last):
raise error.HookLoadError( (py38 !)
mercurial.error.HookLoadError: precommit.importfail hook is invalid: import of "importfail" failed
--- a/tests/test-http-bad-server.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-http-bad-server.t Tue May 09 11:35:50 2023 +0200
@@ -130,10 +130,8 @@
readline(*) -> (*) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (1?) Accept-Encoding* (glob)
read limit reached; closing socket
@@ -153,7 +151,7 @@
$ hg serve \
> --config badserver.close-after-recv-patterns="GET /\?cmd=batch,user-agent: mercurial/proto-1.0,GET /\?cmd=getbundle" \
- > --config badserver.close-after-recv-bytes=110,26,274 \
+ > --config badserver.close-after-recv-bytes=110,26,281 \
> -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
$ hg clone http://localhost:$HGPORT/ clone
@@ -172,10 +170,8 @@
readline(*) -> (*) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -191,16 +187,14 @@
readline(*) -> (*) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
- sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
- write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (no-py36 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py36 !)
+ sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n
+ sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
readline(24 from ~) -> (*) GET /?cmd=getbundle HTTP* (glob)
read limit reached; closing socket
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
- readline(274 from *) -> (27) Accept-Encoding: identity\r\n (glob)
- readline(247 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
- readline(218 from *) -> (218) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag (glob)
+ readline(281 from *) -> (27) Accept-Encoding: identity\r\n (glob)
+ readline(254 from *) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
+ readline(225 from *) -> (225) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtag (glob)
read limit reached; closing socket
$ rm -f error.log
@@ -228,10 +222,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx httppostargs known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (27) POST /?cmd=batch HTTP/1.1\r\n (glob)
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (41) content-type: application/mercurial-0.1\r\n (glob)
@@ -256,7 +248,6 @@
Traceback (most recent call last):
Exception: connection closed after receiving N bytes
- write(126) -> HTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
$ rm -f error.log
@@ -282,14 +273,12 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(1 from 160) -> (0) H (py36 !)
- write(1 from 160) -> (0) H (no-py36 !)
+ sendall(1 from 160) -> (0) H
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
- write(286) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (glob) (no-py36 !)
$ rm -f error.log
@@ -315,10 +304,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(20 from *) -> (0) batch branchmap bund (glob) (py36 !)
- write(160) -> (20) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(20 from *) -> (0) batch branchmap bund (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(20 from *) -> (0) batch branchmap bund (glob)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=capabilities': (glob)
Traceback (most recent call last):
@@ -354,10 +341,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> (568) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -367,14 +352,12 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (py36 !)
- write(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat (no-py36 !)
+ sendall(118 from 159) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: applicat
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
- write(285) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
$ rm -f error.log
@@ -400,10 +383,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -413,10 +394,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
- sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (py36 !)
- write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (no-py36 !)
- write(24 from 42) -> (0) 96ee1d7354c4ad7372047672 (no-py36 !)
+ sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n
+ sendall(24 from 42) -> (0) 96ee1d7354c4ad7372047672
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=batch': (glob)
Traceback (most recent call last):
@@ -453,10 +432,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -466,27 +443,23 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
- sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
- write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (no-py36 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py36 !)
+ sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n
+ sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
- readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+ readline(*) -> (447) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (py36 !)
- write(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri (no-py36 !)
+ sendall(129 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercuri
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
- write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
$ rm -f error.log
@@ -505,7 +478,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -6
sendall(162 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunke
write limit reached; closing socket
@@ -513,19 +485,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -7
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(25 from 28) -> (0) Transfer-Encoding: chunke
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
- write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n
-
-#endif
-
$ rm -f error.log
Server sends empty HTTP body for getbundle
@@ -551,10 +510,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -564,27 +521,23 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
- sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
- write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (no-py36 !)
- write(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (no-py36 !)
+ sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n
+ sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
- readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+ readline(*) -> (447) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !)
- write(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
+ sendall(167 from 167) -> (0) HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
- write(293) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\nHTTP/1.1 500 Internal Server Error\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
$ rm -f error.log
@@ -611,10 +564,8 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (py36 !)
- sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (py36 !)
- write(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob) (no-py36 !)
- write(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob) (no-py36 !)
+ sendall(160) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: *\r\n\r\n (glob)
+ sendall(*) -> batch branchmap $USUAL_BUNDLE2_CAPS_NO_PHASES$ changegroupsubset compression=none getbundle httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx known lookup pushkey streamreqs=* unbundle=HG10GZ,HG10BZ,HG10UN unbundlehash (glob)
readline(~) -> (26) GET /?cmd=batch HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
@@ -624,23 +575,21 @@
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (py36 !)
- sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n; (py36 !)
- write(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n (no-py36 !)
+ sendall(159) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.1\r\nContent-Length: 42\r\n\r\n
+ sendall(42) -> 96ee1d7354c4ad7372047672c36a1f561e3a6a4c\n;
readline(~) -> (30) GET /?cmd=getbundle HTTP/1.1\r\n
readline(*) -> (27) Accept-Encoding: identity\r\n (glob)
readline(*) -> (29) vary: X-HgArg-1,X-HgProto-1\r\n (glob)
- readline(*) -> (440) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
+ readline(*) -> (447) x-hgarg-1: bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=96ee1d7354c4ad7372047672c36a1f561e3a6a4c&listkeys=phases%2Cbookmarks\r\n (glob)
readline(*) -> (61) x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull\r\n (glob)
readline(*) -> (35) accept: application/mercurial-0.1\r\n (glob)
readline(*) -> (2?) host: localhost:$HGPORT\r\n (glob)
readline(*) -> (49) user-agent: mercurial/proto-1.0 (Mercurial 4.2)\r\n (glob)
readline(*) -> (2) \r\n (glob)
- sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (py36 !)
- sendall(6) -> 1\\r\\n\x04\\r\\n (esc) (py36 !)
- sendall(9) -> 4\r\nnone\r\n (py36 !)
- sendall(9 from 9) -> (0) 4\r\nHG20\r\n (py36 !)
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n (no-py36 !)
+ sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
+ sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
+ sendall(9) -> 4\r\nnone\r\n
+ sendall(9 from 9) -> (0) 4\r\nHG20\r\n
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
@@ -665,7 +614,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -9
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -676,21 +624,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(6 from 9) -> (0) 4\r\nHG2
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server sends incomplete bundle2 stream params length
@@ -709,7 +642,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -721,23 +653,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(6 from 9) -> (0) 4\\r\\n\x00\x00\x00 (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Servers stops after bundle2 stream params header
@@ -756,7 +671,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -10
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -768,23 +682,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops sending after bundle2 part header length
@@ -803,7 +700,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -11
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -816,32 +712,13 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
-
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -13
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops sending after bundle2 part header
----------------------------------------------
$ hg serve \
- > --config badserver.close-after-send-patterns="version02nbchanges1\\r\\n" \
+ > --config badserver.close-after-send-patterns="version03nbchanges1\\r\\n" \
> -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
@@ -856,7 +733,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -12
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -864,38 +740,19 @@
sendall(9) -> 4\r\nHG20\r\n
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
+ sendall(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47 from 47) -> (0) 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops after bundle2 part payload chunk size
--------------------------------------------------
$ hg serve \
- > --config badserver.close-after-send-patterns='1d2\r\n.......' \
+ > --config badserver.close-after-send-patterns='1dc\r\n.......' \
> -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
@@ -910,7 +767,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -918,41 +774,21 @@
sendall(9) -> 4\r\nHG20\r\n
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- sendall(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- sendall(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc)
+ sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
+ sendall(9) -> 4\\r\\n\x00\x00\x01\xdc\\r\\n (esc)
+ sendall(12 from 483) -> (0) 1dc\\r\\n\x00\x00\x00\xb4\x96\xee\x1d (esc)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -15
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(28) -> Transfer-Encoding: chunked\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(12 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1d (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops sending in middle of bundle2 payload chunk
-------------------------------------------------------
$ hg serve \
- > --config badserver.close-after-send-patterns=':jL\0\0\x00\0\0\0\0\0\r\n' \
+ > --config badserver.close-after-send-patterns=':jL\0\0\x00\0\0\0\0\0\0\0\r\n' \
> -p $HGPORT -d --pid-file=hg.pid -E error.log
$ cat hg.pid > $DAEMON_PIDS
@@ -967,7 +803,6 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -14
sendall(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
@@ -975,35 +810,14 @@
sendall(9) -> 4\r\nHG20\r\n
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- sendall(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- sendall(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+ sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
+ sendall(9) -> 4\\r\\n\x00\x00\x01\xdc\\r\\n (esc)
+ sendall(483 from 483) -> (0) 1dc\\r\\n\x00\x00\x00\xb4\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa3j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00j\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
write limit reached; closing socket
$LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16
- readline(~) -> (2) \r\n
- write(167) -> HTTP/1.1 200 Script output follows\r\nServer: badhttpserver\r\nDate: $HTTP_DATE$\r\nContent-Type: application/mercurial-0.2\r\nTransfer-Encoding: chunked\r\n\r\n
- write(41) -> Content-Type: application/mercurial-0.2\r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(473 from 473) -> (0) 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops sending after 0 length payload chunk size
@@ -1027,16 +841,15 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -16
sendall(6) -> 1\\r\\n\x04\\r\\n (esc)
sendall(9) -> 4\r\nnone\r\n
sendall(9) -> 4\r\nHG20\r\n
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- sendall(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- sendall(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+ sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
+ sendall(9) -> 4\\r\\n\x00\x00\x01\xdc\\r\\n (esc)
+ sendall(483) -> 1dc\\r\\n\x00\x00\x00\xb4\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa3j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00j\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
sendall(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc)
@@ -1045,28 +858,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -17
- write(2) -> \r\n
- write(6) -> 1\\r\\n\x04\\r\\n (esc)
- write(9) -> 4\r\nnone\r\n
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
- write(13 from 38) -> (0) 20\\r\\n\x08LISTKEYS (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
Server stops sending after 0 part bundle part header (indicating end of bundle2 payload)
@@ -1091,13 +882,12 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -20
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- sendall(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- sendall(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+ sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
+ sendall(9) -> 4\\r\\n\x00\x00\x01\xdc\\r\\n (esc)
+ sendall(483) -> 1dc\\r\\n\x00\x00\x00\xb4\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa3j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00j\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
sendall(38) -> 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc)
@@ -1113,32 +903,6 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
- write(38) -> 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00:\\r\\n (esc)
- write(64) -> 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00#\\r\\n (esc)
- write(41) -> 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9 from 9) -> (0) 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
$ rm -rf clone
@@ -1162,13 +926,12 @@
$ killdaemons.py $DAEMON_PIDS
-#if py36
$ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -21
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- sendall(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- sendall(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
+ sendall(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version03nbchanges1\\r\\n (esc)
+ sendall(9) -> 4\\r\\n\x00\x00\x01\xdc\\r\\n (esc)
+ sendall(483) -> 1dc\\r\\n\x00\x00\x00\xb4\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa3j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00j\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
sendall(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
sendall(38) -> 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc)
@@ -1185,32 +948,5 @@
Traceback (most recent call last):
Exception: connection closed after sending N bytes
-
-#else
- $ "$PYTHON" $TESTDIR/filtertraceback.py < error.log | tail -22
- write(9) -> 4\r\nHG20\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00)\\r\\n (esc)
- write(47) -> 29\\r\\n\x0bCHANGEGROUP\x00\x00\x00\x00\x01\x01\x07\x02 \x01version02nbchanges1\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x01\xd2\\r\\n (esc)
- write(473) -> 1d2\\r\\n\x00\x00\x00\xb2\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00>6a3df4de388f3c4f8e28f4f9a814299a3cbb5f50\\ntest\\n0 0\\nfoo\\n\\ninitial\x00\x00\x00\x00\x00\x00\x00\xa1j=\xf4\xde8\x8f<O\x8e(\xf4\xf9\xa8\x14)\x9a<\xbb_P\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00-foo\x00b80de5d138758541c5f05265ad144ab9fa86d1db\\n\x00\x00\x00\x00\x00\x00\x00\x07foo\x00\x00\x00h\xb8\\r\xe5\xd18u\x85A\xc5\xf0Re\xad\x14J\xb9\xfa\x86\xd1\xdb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x96\xee\x1dsT\xc4\xadsr\x04vr\xc3j\x1fV\x1e:jL\x00\x00\x00\x00\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00 \\r\\n (esc)
- write(38) -> 20\\r\\n\x08LISTKEYS\x00\x00\x00\x01\x01\x00 \x06namespacephases\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00:\\r\\n (esc)
- write(64) -> 3a\r\n96ee1d7354c4ad7372047672c36a1f561e3a6a4c 1\npublishing True\r\n
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00#\\r\\n (esc)
- write(41) -> 23\\r\\n\x08LISTKEYS\x00\x00\x00\x02\x01\x00 namespacebookmarks\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(9) -> 4\\r\\n\x00\x00\x00\x00\\r\\n (esc)
- write(3 from 5) -> (0) 0\r\n
- write limit reached; closing socket
- $LOCALIP - - [$ERRDATE$] Exception happened during processing request '/?cmd=getbundle': (glob)
- Traceback (most recent call last):
- Exception: connection closed after sending N bytes
-
-#endif
-
$ rm -f error.log
$ rm -rf clone
--- a/tests/test-http.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-http.t Tue May 09 11:35:50 2023 +0200
@@ -341,20 +341,20 @@
list of changesets:
7f4e523d01f2cc3765ac8934da3d14db775ff872
bundle2-output-bundle: "HG20", 5 parts total
- bundle2-output-part: "replycaps" 207 bytes payload
+ bundle2-output-part: "replycaps" 210 bytes payload
bundle2-output-part: "check:phases" 24 bytes payload
bundle2-output-part: "check:updated-heads" streamed payload
bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
sending unbundle command
- sending 1023 bytes
+ sending 1036 bytes
devel-peer-request: POST http://localhost:$HGPORT2/?cmd=unbundle
- devel-peer-request: Content-length 1023
+ devel-peer-request: Content-length 1036
devel-peer-request: Content-type application/mercurial-0.1
devel-peer-request: Vary X-HgArg-1,X-HgProto-1
devel-peer-request: X-hgproto-1 0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
devel-peer-request: 16 bytes of commands arguments in headers
- devel-peer-request: 1023 bytes of data
+ devel-peer-request: 1036 bytes of data
devel-peer-request: finished in *.???? seconds (200) (glob)
bundle2-input-bundle: no-transaction
bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported
--- a/tests/test-largefiles.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-largefiles.t Tue May 09 11:35:50 2023 +0200
@@ -1114,16 +1114,16 @@
all local changesets known remotely
6 changesets found
uncompressed size of bundle content:
- 1389 (changelog)
- 1698 (manifests)
- 254 .hglf/large1
- 564 .hglf/large3
- 572 .hglf/sub/large4
- 182 .hglf/sub2/large6
- 182 .hglf/sub2/large7
- 212 normal1
- 457 normal3
- 465 sub/normal4
+ 1401 (changelog)
+ 1710 (manifests)
+ 256 .hglf/large1
+ 570 .hglf/large3
+ 578 .hglf/sub/large4
+ 184 .hglf/sub2/large6
+ 184 .hglf/sub2/large7
+ 214 normal1
+ 463 normal3
+ 471 sub/normal4
adding changesets
adding manifests
adding file changes
--- a/tests/test-lfs-serve-access.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-lfs-serve-access.t Tue May 09 11:35:50 2023 +0200
@@ -66,7 +66,7 @@
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 400 - (glob)
$ rm -f $TESTTMP/access.log $TESTTMP/errors.log
@@ -165,7 +165,7 @@
$LOCALIP - - [$LOGDATE$] "POST /missing/objects/batch HTTP/1.1" 404 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /subdir/mount/point/.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /subdir/mount/point/.hg/lfs/objects/f03217a32529a28a42d03b1244fe09b6e0f9fd06d7b966d4d50567be2abe6c0e HTTP/1.1" 200 - (glob)
@@ -311,7 +311,7 @@
$ cat $TESTTMP/access.log
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
@@ -330,7 +330,7 @@
$LOCALIP - - [$LOGDATE$] "PUT /.hg/lfs/objects/b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c HTTP/1.1" 422 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D392c05922088bacf8e68a6939b480017afbf245d x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=525251863cad618e55d483555f3d00a2ca99597e&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 500 - (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
@@ -487,7 +487,7 @@
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 401 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
- $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&bundlecaps=HG20%2Cbundle2%3DHG20%250Abookmarks%250Achangegroup%253D01%252C02%252C03%250Acheckheads%253Drelated%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Aphases%253Dheads%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps%250Astream%253Dv2&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
+ $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=506bf3d83f78c54b89e81c6411adee19fdf02156+525251863cad618e55d483555f3d00a2ca99597e&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 401 - (glob)
$LOCALIP - - [$LOGDATE$] "POST /.git/info/lfs/objects/batch HTTP/1.1" 200 - (glob)
$LOCALIP - - [$LOGDATE$] "GET /.hg/lfs/objects/276f73cfd75f9fb519810df5f5d96d6594ca2521abd86cbcd92122f7d51a1f3d HTTP/1.1" 200 - (glob)
--- a/tests/test-lfs-serve.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-lfs-serve.t Tue May 09 11:35:50 2023 +0200
@@ -308,9 +308,14 @@
$ hg -R $TESTTMP/client4_pull pull http://localhost:$HGPORT
pulling from http://localhost:$HGPORT/
requesting all changes
- remote: abort: no common changegroup version
- abort: pull failed on remote
- [100]
+ adding changesets
+ adding manifests
+ adding file changes
+ transaction abort!
+ rollback completed
+ abort: missing processor for flag '0x2000'
+ (the lfs extension must be enabled)
+ [50]
$ hg debugrequires -R $TESTTMP/client4_pull/ | grep 'lfs'
[1]
$ hg debugrequires -R $SERVER_PATH --config extensions.lfs= | grep 'lfs'
--- a/tests/test-obsolete-changeset-exchange.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-obsolete-changeset-exchange.t Tue May 09 11:35:50 2023 +0200
@@ -164,7 +164,7 @@
adding manifests
adding file changes
adding foo revisions
- bundle2-input-part: total payload size 476
+ bundle2-input-part: total payload size 486
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
--- a/tests/test-obsolete-distributed.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-obsolete-distributed.t Tue May 09 11:35:50 2023 +0200
@@ -163,7 +163,7 @@
adding manifests
adding file changes
adding c_B1 revisions
- bundle2-input-part: total payload size 485
+ bundle2-input-part: total payload size 495
bundle2-input-part: "listkeys" (params: 1 mandatory) supported
bundle2-input-part: "obsmarkers" supported
bundle2-input-part: total payload size 143
--- a/tests/test-obsolete.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-obsolete.t Tue May 09 11:35:50 2023 +0200
@@ -1600,7 +1600,7 @@
$ hg debugbundle .hg/strip-backup/e008cf283490-*-backup.hg
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
+ changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
e008cf2834908e5d6b0f792a9d4b0e2272260fb8
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)
@@ -1643,7 +1643,7 @@
$ hg debugbundle .hg/strip-backup/e016b03fd86f-*-backup.hg
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 2, version: 02} (mandatory: True)
+ changegroup -- {nbchanges: 2, version: 03} (mandatory: True)
e016b03fd86fcccc54817d120b90b751aaf367d6
b0551702f918510f01ae838ab03a463054c67b46
cache:rev-branch-cache -- {} (mandatory: False)
--- a/tests/test-phase-archived.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-phase-archived.t Tue May 09 11:35:50 2023 +0200
@@ -141,3 +141,58 @@
date: Thu Jan 01 00:00:00 1970 +0000
summary: root
+
+Test that a strip will preserve unrelated archived changesets
+------------------------------------------------------------
+
+prepare a suitable tree
+
+ $ echo foo > bar
+ $ hg add bar
+ $ hg commit -m 'some more commit'
+ $ hg log -G --hidden -T '{rev} {node|short} [{phase}] {desc|firstline}\n'
+ @ 3 f90bf4e57854 [draft] some more commit
+ |
+ o 2 d1e73e428f29 [draft] unbundletesting
+ |
+ | o 1 883aadbbf309 [draft] unbundletesting
+ |/
+ o 0 c1863a3840c6 [draft] root
+
+ $ hg strip --soft --rev '.'
+ 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/f90bf4e57854-56b37ff2-backup.hg
+ $ hg log -G --hidden -T '{rev} {node|short} [{phase}] {desc|firstline}\n'
+ o 3 f90bf4e57854 [archived] some more commit
+ |
+ @ 2 d1e73e428f29 [draft] unbundletesting
+ |
+ | o 1 883aadbbf309 [draft] unbundletesting
+ |/
+ o 0 c1863a3840c6 [draft] root
+
+
+
+Strips the other (lower rev-num) head
+
+ $ hg strip --rev 'min(head() and not .)'
+ saved backup bundle to $TESTTMP/repo/.hg/strip-backup/883aadbbf309-efc55adc-backup.hg
+
+The archived changeset should still be hidden
+
+ $ hg log -G -T '{rev} {node|short} [{phase}] {desc|firstline}\n'
+ @ 1 d1e73e428f29 [draft] unbundletesting
+ |
+ o 0 c1863a3840c6 [draft] root
+
+
+It may still be around:
+
+ $ hg log --hidden -G -T '{rev} {node|short} [{phase}] {desc|firstline}\n'
+ o 2 f90bf4e57854 [archived] some more commit
+ |
+ @ 1 d1e73e428f29 [draft] unbundletesting
+ |
+ o 0 c1863a3840c6 [draft] root
+
+
--- a/tests/test-phases-exchange.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-phases-exchange.t Tue May 09 11:35:50 2023 +0200
@@ -852,9 +852,9 @@
searching for changes
1 changesets found
uncompressed size of bundle content:
- 178 (changelog)
- 165 (manifests)
- 131 a-H
+ 180 (changelog)
+ 167 (manifests)
+ 133 a-H
adding changesets
adding manifests
adding file changes
--- a/tests/test-push-warn.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-push-warn.t Tue May 09 11:35:50 2023 +0200
@@ -151,9 +151,9 @@
searching for changes
2 changesets found
uncompressed size of bundle content:
- 352 (changelog)
- 326 (manifests)
- 25\d foo (re)
+ 356 (changelog)
+ 330 (manifests)
+ 261 foo
adding changesets
adding manifests
adding file changes
--- a/tests/test-rebase-conflicts.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-rebase-conflicts.t Tue May 09 11:35:50 2023 +0200
@@ -296,9 +296,8 @@
bundle2-output-part: "cache:rev-branch-cache" (advisory) streamed payload
bundle2-output-part: "phase-heads" 24 bytes payload
saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-rebase.hg
- 3 changesets found
+ 2 changesets found
list of changesets:
- 4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c
19c888675e133ab5dff84516926a65672eaf04d9
c1ffa3b5274e92a9388fe782854e295d2e8d0443
bundle2-output-bundle: "HG20", 3 parts total
@@ -309,15 +308,14 @@
bundle2-input-bundle: with-transaction
bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported
adding changesets
- add changeset 4c9fbe56a16f
add changeset 19c888675e13
add changeset c1ffa3b5274e
adding manifests
adding file changes
adding f1.txt revisions
- bundle2-input-part: total payload size 1739
+ bundle2-input-part: total payload size 1255
bundle2-input-part: "cache:rev-branch-cache" (advisory) supported
- bundle2-input-part: total payload size 74
+ bundle2-input-part: total payload size 54
bundle2-input-part: "phase-heads" supported
bundle2-input-part: total payload size 24
bundle2-input-bundle: 3 parts total
--- a/tests/test-rebase-mq-skip.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-rebase-mq-skip.t Tue May 09 11:35:50 2023 +0200
@@ -75,17 +75,17 @@
$TESTTMP/a/.hg/patches/p0.patch
2 changesets found
uncompressed size of bundle content:
- 348 (changelog)
- 324 (manifests)
- 129 p0
- 129 p1
+ 352 (changelog)
+ 328 (manifests)
+ 131 p0
+ 131 p1
saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-rebase.hg
2 changesets found
uncompressed size of bundle content:
- 403 (changelog)
- 324 (manifests)
- 129 p0
- 129 p1
+ 407 (changelog)
+ 328 (manifests)
+ 131 p0
+ 131 p1
adding branch
adding changesets
adding manifests
--- a/tests/test-rebase-newancestor.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-rebase-newancestor.t Tue May 09 11:35:50 2023 +0200
@@ -263,15 +263,15 @@
rebase merging completed
1 changesets found
uncompressed size of bundle content:
- 199 (changelog)
- 216 (manifests)
- 182 other
+ 201 (changelog)
+ 218 (manifests)
+ 184 other
saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-rebase.hg
1 changesets found
uncompressed size of bundle content:
- 254 (changelog)
- 167 (manifests)
- 182 other
+ 256 (changelog)
+ 169 (manifests)
+ 184 other
adding branch
adding changesets
adding manifests
--- a/tests/test-remote-hidden.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-remote-hidden.t Tue May 09 11:35:50 2023 +0200
@@ -6,6 +6,8 @@
$ . $TESTDIR/testlib/obsmarker-common.sh
$ cat >> $HGRCPATH << EOF
+ > [ui]
+ > ssh = "$PYTHON" "$RUNTESTDIR/dummyssh"
> [phases]
> # public changeset are not obsolete
> publish=false
@@ -111,3 +113,294 @@
revision: 0
$ killdaemons.py
+
+Test --remote-hidden for local peer
+-----------------------------------
+
+ $ hg clone --pull repo-with-hidden client
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
+ 2 new obsolescence markers
+ new changesets 5f354f46e585:c33affeb3f6b (1 drafts)
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client log -G --hidden -v
+ @ 1:c33affeb3f6b c_Amend_New [draft]
+ |
+ o 0:5f354f46e585 c_Public [public]
+
+
+pulling a hidden changeset should fail:
+
+ $ hg -R client pull -r be215fbb8c50
+ pulling from $TESTTMP/repo-with-hidden
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [10]
+
+pulling a hidden changeset with --remote-hidden should succeed:
+
+ $ hg -R client pull --remote-hidden --traceback -r be215fbb8c50
+ pulling from $TESTTMP/repo-with-hidden
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (1 other changesets obsolete on arrival)
+ (run 'hg heads' to see heads)
+ $ hg -R client log -G --hidden -v
+ x 2:be215fbb8c50 c_Amend_Old [draft]
+ |
+ | @ 1:c33affeb3f6b c_Amend_New [draft]
+ |/
+ o 0:5f354f46e585 c_Public [public]
+
+
+Pulling a secret changeset is still forbidden:
+
+secret visible:
+
+ $ hg -R client pull --remote-hidden -r 8d28cbe335f3
+ pulling from $TESTTMP/repo-with-hidden
+ abort: filtered revision '8d28cbe335f3' (not in 'served.hidden' subset)
+ [10]
+
+secret hidden:
+
+ $ hg -R client pull --remote-hidden -r 1c6afd79eb66
+ pulling from $TESTTMP/repo-with-hidden
+ abort: filtered revision '1c6afd79eb66' (not in 'served.hidden' subset)
+ [10]
+
+Test accessing hidden changeset through hgweb
+---------------------------------------------
+
+ $ hg -R repo-with-hidden serve -p $HGPORT -d --pid-file hg.pid --config "experimental.server.allow-hidden-access=*" -E error.log --accesslog access.log
+ $ cat hg.pid >> $DAEMON_PIDS
+
+Hidden changesets are hidden by default:
+
+ $ get-with-headers.py localhost:$HGPORT 'log?style=raw' | grep revision:
+ revision: 2
+ revision: 0
+
+Hidden changesets are visible when requested:
+
+ $ get-with-headers.py localhost:$HGPORT 'log?style=raw&access-hidden=1' | grep revision:
+ revision: 3
+ revision: 2
+ revision: 1
+ revision: 0
+
+Same check on a server that does not allow hidden access:
+```````````````````````````````````````````````````````
+
+ $ hg -R repo-with-hidden serve -p $HGPORT1 -d --pid-file hg2.pid --config "experimental.server.allow-hidden-access=" -E error.log --accesslog access.log
+ $ cat hg2.pid >> $DAEMON_PIDS
+
+Hidden changesets are hidden by default:
+
+ $ get-with-headers.py localhost:$HGPORT1 'log?style=raw' | grep revision:
+ revision: 2
+ revision: 0
+
+Hidden changesets are still hidden despite the hidden-access request:
+
+ $ get-with-headers.py localhost:$HGPORT1 'log?style=raw&access-hidden=1' | grep revision:
+ revision: 2
+ revision: 0
+
+Test --remote-hidden for http peer
+----------------------------------
+
+ $ hg clone --pull http://localhost:$HGPORT client-http
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
+ 2 new obsolescence markers
+ new changesets 5f354f46e585:c33affeb3f6b (1 drafts)
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client-http log -G --hidden -v
+ @ 1:c33affeb3f6b c_Amend_New [draft]
+ |
+ o 0:5f354f46e585 c_Public [public]
+
+
+pulling a hidden changeset should fail:
+
+ $ hg -R client-http pull -r be215fbb8c50
+ pulling from http://localhost:$HGPORT/
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+
+pulling a hidden changeset with --remote-hidden should succeed:
+
+ $ hg -R client-http pull --remote-hidden -r be215fbb8c50
+ pulling from http://localhost:$HGPORT/
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (1 other changesets obsolete on arrival)
+ (run 'hg heads' to see heads)
+ $ hg -R client-http log -G --hidden -v
+ x 2:be215fbb8c50 c_Amend_Old [draft]
+ |
+ | @ 1:c33affeb3f6b c_Amend_New [draft]
+ |/
+ o 0:5f354f46e585 c_Public [public]
+
+
+Pulling a secret changeset is still forbidden:
+
+secret visible:
+
+ $ hg -R client-http pull --remote-hidden -r 8d28cbe335f3
+ pulling from http://localhost:$HGPORT/
+ abort: filtered revision '8d28cbe335f3' (not in 'served.hidden' subset)
+ [255]
+
+secret hidden:
+
+ $ hg -R client-http pull --remote-hidden -r 1c6afd79eb66
+ pulling from http://localhost:$HGPORT/
+ abort: filtered revision '1c6afd79eb66' (not in 'served.hidden' subset)
+ [255]
+
+Same check on a server that does not allow hidden access:
+```````````````````````````````````````````````````````
+
+ $ hg clone --pull http://localhost:$HGPORT1 client-http2
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
+ 2 new obsolescence markers
+ new changesets 5f354f46e585:c33affeb3f6b (1 drafts)
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client-http2 log -G --hidden -v
+ @ 1:c33affeb3f6b c_Amend_New [draft]
+ |
+ o 0:5f354f46e585 c_Public [public]
+
+
+pulling a hidden changeset should fail:
+
+ $ hg -R client-http2 pull -r be215fbb8c50
+ pulling from http://localhost:$HGPORT1/
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+
+pulling a hidden changeset with --remote-hidden should fail too:
+
+ $ hg -R client-http2 pull --remote-hidden -r be215fbb8c50
+ pulling from http://localhost:$HGPORT1/
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+
+Test --remote-hidden for ssh peer
+----------------------------------
+
+ $ hg clone --pull ssh://user@dummy/repo-with-hidden client-ssh
+ requesting all changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 2 changesets with 2 changes to 1 files
+ 2 new obsolescence markers
+ new changesets 5f354f46e585:c33affeb3f6b (1 drafts)
+ updating to branch default
+ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
+ $ hg -R client-ssh log -G --hidden -v
+ @ 1:c33affeb3f6b c_Amend_New [draft]
+ |
+ o 0:5f354f46e585 c_Public [public]
+
+
+Check on a server that does not allow hidden access:
+``````````````````````````````````````````````````
+
+pulling a hidden changeset should fail:
+
+ $ hg -R client-ssh pull -r be215fbb8c50
+ pulling from ssh://user@dummy/repo-with-hidden
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+
+pulling a hidden changeset with --remote-hidden should fail too (the server
+rejects the unauthorized hidden-access request, as the output below shows):
+
+ $ hg -R client-ssh pull --remote-hidden -r be215fbb8c50
+ pulling from ssh://user@dummy/repo-with-hidden
+ remote: ignoring request to access hidden changeset by unauthorized user: * (glob)
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+ $ hg -R client-ssh log -G --hidden -v
+ @ 1:c33affeb3f6b c_Amend_New [draft]
+ |
+ o 0:5f354f46e585 c_Public [public]
+
+
+Check on a server that does allow hidden access:
+``````````````````````````````````````````````
+
+ $ cat << EOF >> repo-with-hidden/.hg/hgrc
+ > [experimental]
+ > server.allow-hidden-access=*
+ > EOF
+
+pulling a hidden changeset should fail:
+
+ $ hg -R client-ssh pull -r be215fbb8c50
+ pulling from ssh://user@dummy/repo-with-hidden
+ abort: filtered revision 'be215fbb8c50' (not in 'served' subset)
+ [255]
+
+pulling a hidden changeset with --remote-hidden should succeed:
+
+ $ hg -R client-ssh pull --remote-hidden -r be215fbb8c50
+ pulling from ssh://user@dummy/repo-with-hidden
+ searching for changes
+ adding changesets
+ adding manifests
+ adding file changes
+ added 1 changesets with 1 changes to 1 files (+1 heads)
+ (1 other changesets obsolete on arrival)
+ (run 'hg heads' to see heads)
+ $ hg -R client-ssh log -G --hidden -v
+ x 2:be215fbb8c50 c_Amend_Old [draft]
+ |
+ | @ 1:c33affeb3f6b c_Amend_New [draft]
+ |/
+ o 0:5f354f46e585 c_Public [public]
+
+
+Pulling a secret changeset is still forbidden:
+
+secret visible:
+
+ $ hg -R client-ssh pull --remote-hidden -r 8d28cbe335f3
+ pulling from ssh://user@dummy/repo-with-hidden
+ abort: filtered revision '8d28cbe335f3' (not in 'served.hidden' subset)
+ [255]
+
+secret hidden:
+
+ $ hg -R client-ssh pull --remote-hidden -r 1c6afd79eb66
+ pulling from ssh://user@dummy/repo-with-hidden
+ abort: filtered revision '1c6afd79eb66' (not in 'served.hidden' subset)
+ [255]
+
+=============
+Final cleanup
+=============
+
+ $ killdaemons.py
--- a/tests/test-ssh.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-ssh.t Tue May 09 11:35:50 2023 +0200
@@ -529,7 +529,7 @@
no changes found
devel-peer-request: getbundle
devel-peer-request: bookmarks: 1 bytes
- devel-peer-request: bundlecaps: 270 bytes
+ devel-peer-request: bundlecaps: 275 bytes
devel-peer-request: cg: 1 bytes
devel-peer-request: common: 122 bytes
devel-peer-request: heads: 122 bytes
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/tests/test-stabletailgraph.t Tue May 09 11:35:50 2023 +0200
@@ -0,0 +1,616 @@
+====================================
+Test for the stabletailgraph package
+====================================
+
+This test file contains a bunch of small test graphs with some minimal yet
+non-trivial structure, on which the various stable-tail graph and stable-tail
+sort functions are tested.
+
+Each case consists of the creation of the interesting graph structure, followed
+by a check, for each noteworthy node, of:
+- the stable-tail sort output (with the linear parts globbed).
+
+In the ASCII art of the diagrams, the side of the exclusive part which is
+followed in priority is denoted with "<" or ">" if it is on the left or right
+respectively.
+
+The intermediary linear parts in the example graph are there to force the
+exclusive part choice (made on a min rank condition).
+
+
+Setup
+=====
+
+Enable the rank computation to test sorting based on the rank.
+
+ $ cat << EOF >> $HGRCPATH
+ > [format]
+ > exp-use-changelog-v2=enable-unstable-format-and-corrupt-my-data
+ >
+ > [alias]
+ > test-sts = debug::stable-tail-sort -T '{tags},'
+ > test-log = log --graph -T '{tags} rank={_fast_rank}' --rev 'tagged()'
+ > EOF
+
+
+Example 1: single merge node
+============================
+
+A base case with one branchpoint "b" and one merge node "e".
+
+The exclusive part, starting with the lowest-ranking parent "c" of "e",
+appears first in stable-tail sort of "e" and "f".
+
+# f
+# |
+# e
+# |
+# --<--
+# | |
+# c d
+# | |
+# --+-- <- at this point, the sort of "e" is done consuming its
+# | exclusive part [c] and jumps back to its other parent "d"
+# b
+# |
+# a
+
+ $ hg init example-1
+ $ cd example-1
+ $ hg debugbuilddag '.:a*a:b*b:c<b+2:d*c/d:e*e:f.'
+ $ hg test-log
+ o f rank=7
+ |
+ o e rank=6
+ |\
+ | o d rank=4
+ | :
+ o : c rank=3
+ :/
+ o b rank=2
+ |
+ o a rank=1
+
+
+Check the sort of the base linear case.
+
+ $ hg test-sts c
+ c,b,a, (no-eol)
+
+Check the stable-tail sort of "e": "c" should come before "d".
+
+ $ hg test-sts e
+ e,c,d,*,b,a, (no-eol) (glob)
+
+Check that the linear descendant of the merge inherits its sort properly.
+
+ $ hg test-sts f
+ f,e,c,d,*,b,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 2: nested exclusive parts, without specific leap
+========================================================
+
+"g" is a merge node whose exclusive part contains a merge node "e".
+We check that the stable-tail sort recurses properly by delegating.
+
+Notice that parts of the sort of "e" is an infix of the sort of "g".
+This is an expected property of the sort.
+
+# g
+# |
+# ---<---
+# | |
+# e | <- while processing the sort in the exclusive part of "g"
+# | | we recursively process the exclusive part of "e"
+# --<-- f
+# | | |
+# c d |
+# | | |
+# --+-- |
+# | |
+# b |
+# | |
+# ---+--- <- done with excl(g), jump to "f"
+# |
+# a
+
+ $ hg init example-2
+ $ cd example-2
+ $ hg debugbuilddag '.:a*a:b*b:c<b+2:d*c/d:e<a+6:f*e/f:g.'
+ $ hg test-log
+ o g rank=13
+ |\
+ | o f rank=7
+ | :
+ o : e rank=6
+ |\ \
+ | o : d rank=4
+ | : :
+ o : : c rank=3
+ :/ /
+ o / b rank=2
+ :/
+ o a rank=1
+
+Display the sort of "e" for reference
+
+ $ hg test-sts e
+ e,c,d,*,b,a, (no-eol) (glob)
+
+Check the correctness of the sort of "g",
+and that a part of the sort of "e" appears as an infix.
+
+ $ hg test-sts g
+ g,e,c,d,*,b,f,*,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 3: shadowing of a final leap
+====================================
+
+We have a merge "f" whose exclusive part contains a merge "d".
+
+The inherited parent of "d" is not in the exclusive part of "f".
+At the end of the exclusive part of "d",
+the leap to "c" is shadowed by the leap to "e", i.e. the inherited part to "f".
+
+Notice that emitting "c" before "e" would break the reverse topological
+ordering.
+
+# f
+# |
+# ---<---
+# | |
+# d |
+# | e
+# --<-- |
+# | | |
+# | +----
+# b |
+# | c
+# | |
+# --+-- <- at this point, jumping to "e", not the shadowed "c"
+# |
+# a
+
+ $ hg init example-3
+ $ cd example-3
+ $ hg debugbuilddag '.:a*a:b<a+2:c*b/c:d<c+3:e*d/e:f.'
+ $ hg test-log
+ o f rank=9
+ |\
+ | o e rank=6
+ | :
+ o : d rank=5
+ |\:
+ | o c rank=3
+ | :
+ o : b rank=2
+ :/
+ o a rank=1
+
+
+Display the sort of "d" for reference:
+
+ $ hg test-sts d
+ d,b,c,*,a, (no-eol) (glob)
+
+Check that we leap from "b" directly to "e" (shadowing the leap to "c"),
+and that "c" is then emitted after "e" (its descendant).
+
+ $ hg test-sts f
+ f,d,b,e,*,c,*,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 4: skipping over nested exclusive part (entirely)
+=========================================================
+
+We have a merge "f" whose exclusive part contains a merge "d".
+
+The exclusive part of "d" is not in the exclusive part of "f".
+However, some of the inherited part of "d" is part of the exclusive part of "f"
+and needs to be iterated over before leaping to the inherited part of "f".
+
+The sort of "d" is partially reused for the ordering of the exclusive part of
+"f". However the reused part is not contiguous in the sort of "d".
+
+# f
+# |
+# ---<---
+# | |
+# d |
+# | e
+# -->-- | <- in the sort of "f", we need to skip "c" and leap to the
+# | | | inherited part of "d"
+# | +----
+# b |
+# | c
+# | |
+# --+--
+# |
+# a
+
+ $ hg init example-4
+ $ cd example-4
+ $ hg debugbuilddag '.:a*a+1:b<a+1:c*b/c:d<c+4:e*d/e:f.'
+ $ hg test-log
+ o f rank=10
+ |\
+ | o e rank=6
+ | :
+ o : d rank=5
+ |\:
+ | o c rank=2
+ | |
+ o | b rank=3
+ |/
+ o a rank=1
+
+
+Display the sort of "d" for reference:
+
+ $ hg test-sts d
+ d,c,b,*,a, (no-eol) (glob)
+
+Check that sort "f" leaps from "d" to "b":
+
+ $ hg test-sts f
+ f,d,b,*,e,*,c,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 5: skipping over nested exclusive part (partially)
+==========================================================
+
+We have a merge "f" whose exclusive part contains a merge "d".
+
+Similar to example 4, but the exclusive part of "d" is only partially
+contained in the inherited part of "f".
+So, we need to leap in the middle of the exclusive part of "d".
+
+# f
+# |
+# ---<---
+# | |
+# d |
+# | e
+# -->-- |
+# | | |
+# | g |
+# | | |
+# | +---- <- in the sort of "f", leaping from "g" to "b"
+# b |
+# | c
+# | |
+# --+--
+# |
+# a
+
+ $ hg init example-5
+ $ cd example-5
+ $ hg debugbuilddag '.:a*a+2:b<a+1:c+1:g*b/g:d<c+6:e*d/e:f.'
+ $ hg test-log
+ o f rank=14
+ |\
+ | o e rank=8
+ | :
+ o : d rank=7
+ |\ \
+ | o : g rank=3
+ | :/
+ | o c rank=2
+ | |
+ o | b rank=4
+ |/
+ o a rank=1
+
+
+Display the sort of "d" for reference:
+
+ $ hg test-sts d
+ d,g,c,b,*,a, (no-eol) (glob)
+
+Check that sort "f" leaps from "g" to "b":
+
+ $ hg test-sts f
+ f,d,g,b,*,e,*,c,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 6: merge in the inherited part
+======================================
+
+Variant of example 2, but with a merge ("f") in the inherited part of "g".
+
+"g" is a merge node whose inherited part contains a merge node "f".
+We check that the stable-tail sort delegates properly after the exclusive part.
+
+# g
+# |
+# ---<---
+# | |
+# d f
+# | |
+# | ---<---
+# | | |
+# | e c
+# | | |
+# ---+ | <- at this point, we're done (for good) with the exclusive
+# | | part of "g"
+# b |
+# | |
+# ---+---
+# |
+# a
+
+ $ hg init example-6
+ $ cd example-6
+ $ hg debugbuilddag '.:a*a:b<a+3:c*b:d*b:e*e/c:f*d/f:g.'
+ $ hg test-log
+ o g rank=9
+ |\
+ | o f rank=7
+ | |\
+ | | o e rank=3
+ | | |
+ o---+ d rank=3
+ / /
+ o | c rank=4
+ : |
+ : o b rank=2
+ :/
+ o a rank=1
+
+
+Display the sort of "f" for reference:
+
+ $ hg test-sts f
+ f,e,b,c,*,a, (no-eol) (glob)
+
+Check that the sort of "g" delegates to the sort of "f" after processing its
+exclusive part of "g":
+
+ $ hg test-sts g
+ g,d,f,e,b,c,*,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 7: postponed iteration of common exclusive ancestors
+============================================================
+
+Sibling merges "j" and "k", with partially shared exclusive parts.
+
+When considering the sort of "l", the iteration over this shared part cannot
+happen when iterating over excl(j) and has to be postponed to excl(k).
+
+# l
+# |
+# ----<----
+# | |
+# j k
+# | |
+# -->-- --<--
+# | | | |
+# g e h i
+# | | | |
+# | --+-- | <- at this point, for the sort of "l", the iteration on
+# f | | the end of excl(j) is postponed to the iteration of
+# | d | excl(k)
+# | | |
+# | c |
+# | | |
+# ---+--- |
+# | |
+# b |
+# | |
+# ----+-----
+# |
+# a
+
+ $ hg init example-7
+ $ cd example-7
+ $ hg debugbuilddag \
+ > '.:a*a:b*b:c*c:d*d:e*b:f<f+3:g<d+2:h<a+6:i*e/g:j*h/i:k*j/k:l.'
+ $ hg test-log
+ o l rank=20
+ |\
+ | o k rank=13
+ | |\
+ o \ \ j rank=10
+ |\ \ \
+ | | | o i rank=7
+ | | | :
+ | | o : h rank=6
+ | | : :
+ | o : : g rank=6
+ | : : :
+ | o : : f rank=3
+ | | : :
+ o---+ : e rank=5
+ / / /
+ | o : d rank=4
+ | | :
+ | o : c rank=3
+ |/ /
+ o / b rank=2
+ :/
+ o a rank=1
+
+
+Display the sort of "j" for reference:
+
+ $ hg test-sts j
+ j,e,d,c,g,*,f,b,a, (no-eol) (glob)
+
+Display the sort of "k" for reference:
+
+ $ hg test-sts k
+ k,h,*,d,c,b,i,*,a, (no-eol) (glob)
+
+Check that the common part of excl(j) and excl(k) is iterated over after "k":
+
+ $ hg test-sts l
+ l,j,e,g,*,f,k,h,*,d,c,b,i,*,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 8: postponed iteration of common ancestors between parts
+================================================================
+
+Sibling merges "g" and "i", with some part shared between the inherited part
+of "g" and the exclusive part of "i".
+
+When considering the sort of "j", the iteration over this shared part cannot
+happen when iterating over inherited(g) and has to be postponed to excl(i).
+
+# j
+# |
+# ----<----
+# | |
+# g i
+# | |
+# --<-- --<--
+# | | | |
+# c f | h
+# | | | |
+# | --+-- | <- at this point, for the sort of "j", the iteration
+# | | | on the end of inherited(g) is postponed to the
+#              |     e     |       iteration of excl(i)
+# | | |
+# ---+--- |
+# b |
+# | |
+# ----+-----
+# |
+# a
+
+ $ hg init example-8
+ $ cd example-8
+ $ hg debugbuilddag '.:a*a:b*b:c*b:d*d:e*e:f*c/f:g<a+5:h*e/h:i*g/i:j.'
+ $ hg test-log
+ o j rank=14
+ |\
+ | o i rank=10
+ | |\
+ | | o h rank=6
+ | | :
+ o | : g rank=7
+ |\ \ \
+ | o | : f rank=5
+ | |/ /
+ | o : e rank=4
+ | | :
+ | o : d rank=3
+ | | :
+ o | : c rank=3
+ |/ /
+ o / b rank=2
+ :/
+ o a rank=1
+
+
+Display the sort of "g" for reference:
+
+ $ hg test-sts g
+ g,c,f,e,d,b,a, (no-eol)
+
+Display the sort of "i" for reference:
+
+ $ hg test-sts i
+ i,e,d,b,h,*,a, (no-eol) (glob)
+
+Check that the common part of inherited(g) and excl(i) is iterated over after
+"i":
+
+ $ hg test-sts j
+ j,g,c,f,i,e,d,b,h,*,a, (no-eol) (glob)
+
+ $ cd ..
+
+
+Example 9: postponed iteration of common ancestors between both parts
+=====================================================================
+
+This is a combination of example 7 and 8 at the same time.
+Both excl(i) and excl(j) share a common part.
+Same with inherited(i) and inherited(j).
+
+We test that the walk on the common ancestors in both cases is properly
+postponed when considering sort(k).
+
+# k
+# |
+# ----<----
+# | |
+# i j
+# | |
+# --<-- --<--
+# | | | |
+# c f g h
+# | | | |
+# | e | |
+# | | | |
+# +--]|[--- | <- rest of excl(i) postponed to excl(j)
+# | | |
+# b ----+---- <- rest of inherited(i) postponed to inherited(j)
+# | |
+# | d
+# | |
+# ----+----
+# |
+# a
+
+ $ hg init example-9
+ $ cd example-9
+ $ hg debugbuilddag '.:a*a:b*b:c*a:d*d:e*e:f<b+2:g<d+3:h*c/f:i*g/h:j*i/j:k.'
+ $ hg test-log
+ o k rank=14
+ |\
+ | o j rank=9
+ | |\
+ o \ \ i rank=7
+ |\ \ \
+ | | | o h rank=5
+ | | | :
+ | | o : g rank=4
+ | | : :
+ | o : : f rank=4
+ | | : :
+ | o---+ e rank=3
+ | / /
+ | : o d rank=2
+ | : |
+ o : | c rank=3
+ :/ /
+ o / b rank=2
+ |/
+ o a rank=1
+
+
+Display sort(i) for reference:
+
+ $ hg test-sts i
+ i,c,b,f,e,d,a, (no-eol)
+
+Display sort(j) for reference:
+
+ $ hg test-sts j
+ j,g,*,b,h,*,d,a, (no-eol) (glob)
+
+Check that the end of excl(i) is postponed to excl(j), the end of inherited(i)
+is postponed to inherited(j) in sort(k):
+
+ $ hg test-sts k
+ k,i,c,f,e,j,g,*,b,h,*,d,a, (no-eol) (glob)
+
+ $ cd ..
--- a/tests/test-status.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-status.t Tue May 09 11:35:50 2023 +0200
@@ -246,6 +246,11 @@
! deleted
? unknown
+hg status -0:
+
+ $ hg status -0 --config rhg.on-unsupported=abort
+ A added\x00A copied\x00R removed\x00! deleted\x00? unknown\x00 (no-eol) (esc)
+
hg status -A:
$ hg status -A
--- a/tests/test-strip.t Thu May 04 14:17:28 2023 +0200
+++ b/tests/test-strip.t Tue May 09 11:35:50 2023 +0200
@@ -251,7 +251,7 @@
$ hg debugbundle .hg/strip-backup/*
Stream params: {Compression: BZ}
- changegroup -- {nbchanges: 1, version: 02} (mandatory: True)
+ changegroup -- {nbchanges: 1, version: 03} (mandatory: True)
264128213d290d868c54642d13aeaa3675551a78
cache:rev-branch-cache -- {} (mandatory: False)
phase-heads -- {} (mandatory: True)